Update app.py
app.py CHANGED
@@ -105,6 +105,8 @@ Question: {query}
 
     Generated_chat = LLMChain(llm=llama3, prompt=prompt)
     response = Generated_chat.invoke({'retrieved_documents': retrieved_documents, 'query': query})
+
+    # Append the query and the response to history as a tuple
     history.append((query, response['text']))
     return response['text'], history
 
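For context, the hunk above sits inside the Space's answer-generation function. Below is a minimal sketch of how that function fits together, assuming LangChain's LLMChain and PromptTemplate; the names Generated_chat, llama3, retrieved_documents, and query come from the diff, while the function name answer, the prompt wording, and the Gradio-style (user, assistant) history tuple are assumptions, not the Space's exact code.

from langchain.chains import LLMChain
from langchain.prompts import PromptTemplate

# Assumed prompt shape: the hunk header shows the template ends with "Question: {query}".
prompt = PromptTemplate(
    input_variables=["retrieved_documents", "query"],
    template=(
        "Answer the question using only the context below.\n\n"
        "Context: {retrieved_documents}\n\n"
        "Question: {query}"
    ),
)

def answer(query, history, retrieved_documents, llama3):
    # Build the chain around the pre-configured LLM and run it on the
    # retrieved context plus the user's question.
    Generated_chat = LLMChain(llm=llama3, prompt=prompt)
    response = Generated_chat.invoke(
        {"retrieved_documents": retrieved_documents, "query": query}
    )

    # Append the query and the response to history as a tuple, i.e. the
    # (user message, bot reply) pair format a Gradio Chatbot expects (assumption).
    history.append((query, response["text"]))
    return response["text"], history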