Chan-Y committed on
Commit
3c7ba68
·
verified ·
1 Parent(s): c9f19e7

Create ChatEngine.py

Browse files
Files changed (1) hide show
  1. ChatEngine.py +44 -0
ChatEngine.py ADDED
@@ -0,0 +1,44 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from llama_index.core.base.llms.types import ChatMessage, MessageRole
2
+ import logging
3
+ logging.basicConfig(level=logging.INFO)
4
+
5
+
6
class ChatEngine:
    """Answers questions by retrieving relevant documents and prompting an LLM.

    The engine wraps a retriever: for each question it fetches the best-matching
    documents, builds a combined prompt, and forwards it to a caller-supplied
    language model.
    """

    def __init__(self, retriever, params=None):
        """
        Initializes the ChatEngine with a retriever.

        Args:
            retriever: A retriever exposing ``best_docs(question)`` that returns
                an iterable of ``(document, score)`` pairs, where ``document``
                has a ``.text`` attribute.
            params (optional): Generation parameters forwarded verbatim to
                ``llm.generate``. Defaults to None.
        """
        self.retriever = retriever
        # BUGFIX: self.params was read in ask_question but never assigned,
        # which raised AttributeError on every call. Store it here instead.
        self.params = params

    def ask_question(self, question, llm):
        """
        Asks a question to the language model, using the retriever to fetch relevant documents.

        Args:
            question (str): The question to be asked.
            llm: A language model exposing ``generate(prompt, params)``.

        Returns:
            response (str): The response from the language model in markdown format.
        """
        # Wrap the question in instruction tags expected by instruct-tuned models.
        question = "[INST]" + question + "[/INST]"

        # Retrieve the best-matching (document, score) pairs for this question.
        results = self.retriever.best_docs(question)
        document = [doc.text for doc, sc in results]
        logging.info(f"Created Document - len docs:{len(document)}")

        # Combine question and retrieved documents into a single prompt.
        chat_history = f"Question: {question}\n\nDocument: {document}"
        logging.info("Created Chat History")

        logging.info("Asking LLM")
        response = llm.generate(chat_history, self.params)

        logging.info("Got Response from LLM, Returning")
        return response