Update app.py
app.py CHANGED
@@ -129,7 +129,7 @@ def initialize_llmchain(temperature, max_tokens, top_k, vector_db, progress=gr.P
 
     progress(0.75, desc="Defining buffer memory...")
     #memory = ConversationBufferMemory(memory_key="chat_history",output_key='answer',return_messages=True)
-    memory = ConversationBufferWindowMemory(memory_key = '
+    memory = ConversationBufferWindowMemory(memory_key = 'chat_history', k=3)
     retriever=vector_db.as_retriever()
     progress(0.8, desc="Defining retrieval chain...")
     qa_chain = ConversationalRetrievalChain.from_llm(llm,retriever=retriever,chain_type="stuff",
@@ -176,20 +176,20 @@ def initialize_LLM(llm_temperature, max_tokens, top_k, vector_db, progress=gr.Pr
     return qa_chain, "Complete!"
 
 
-def format_chat_history(message,
+def format_chat_history(message, chat_history):
     formatted_chat_history = []
-    for user_message, bot_message in
+    for user_message, bot_message in chat_history:
         formatted_chat_history.append(f"User: {user_message}")
         formatted_chat_history.append(f"Assistant: {bot_message}")
     return formatted_chat_history
 
 
-def conversation(qa_chain, message,
-    formatted_chat_history = format_chat_history(message,
+def conversation(qa_chain, message, chat_history):
+    formatted_chat_history = format_chat_history(message, chat_history)
     #print("formatted_chat_history",formatted_chat_history)
 
     # Generate response using QA chain
-    response = qa_chain({"question": message, "
+    response = qa_chain({"question": message, "chat_history": formatted_chat_history})
     response_answer = response["answer"]
     if response_answer.find("Helpful Answer:") != -1:
         response_answer = response_answer.split("Helpful Answer:")[-1]
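The second hunk threads chat_history through the helpers. A hypothetical usage sketch, assuming format_chat_history and an initialized qa_chain from the patched app.py are in scope and that Gradio supplies the history as (user, bot) tuples; the sample turns below are invented for illustration:

# Hypothetical history in the (user_message, bot_message) tuple shape
# that a Gradio Chatbot passes to the callback.
chat_history = [
    ("What is this document about?", "It describes a retrieval-augmented chatbot."),
    ("Where do the answers come from?", "From chunks retrieved out of the vector DB."),
]

# Flattens each tuple into alternating "User: ..." / "Assistant: ..." strings.
formatted = format_chat_history("How is memory handled?", chat_history)

# The chain receives the new question plus the flattened history.
response = qa_chain({"question": "How is memory handled?", "chat_history": formatted})
print(response["answer"])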