Update app.py
app.py
CHANGED
@@ -42,8 +42,9 @@ def chatresponse(message, history):
     # history_langchain_format.append(HumanMessage(content=message))

     data_vectorstore = Chroma.from_documents(documents = data, embedding = embeddings)
-    history_vectorstore = Chroma.from_documents(documents = history, embedding = embeddings)
-    vectorstore = data_vectorstore + history_vectorstore
+    # history_vectorstore = Chroma.from_documents(documents = history, embedding = embeddings)
+    # vectorstore = data_vectorstore + history_vectorstore
+    vectorstore = data_vectorstore
     retriever = vectorstore.as_retriever()

     # from langchain.prompts import PromptTemplate
@@ -58,8 +59,6 @@ def chatresponse(message, history):

     CONTEXT: {context}

-    HISTORY: {history}
-
     QUESTION: {question}""")

     from langchain_core.runnables import RunnablePassthrough
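The removed lines tried to merge two vector stores with a plain +, which the LangChain Chroma class does not support, so the commit now retrieves from data_vectorstore alone and comments out the history store. Below is a minimal sketch (not the committed code) of one alternative: fold the chat history into the same store before indexing. It assumes history arrives as Gradio-style (user, bot) pairs and that data is already a list of Document objects; the helper name build_vectorstore is hypothetical.

# Minimal sketch, not the app's actual code: keep chat history retrievable
# without "adding" two Chroma stores together.
from langchain_core.documents import Document
from langchain_community.vectorstores import Chroma

def build_vectorstore(data, history, embeddings):
    # Wrap each past exchange as a Document so it can be embedded alongside
    # the source data (assumes Gradio-style (user, bot) message pairs).
    history_docs = [
        Document(page_content=f"User: {user}\nAssistant: {bot}")
        for user, bot in history
    ]
    # One Chroma store over the combined corpus instead of two stores joined with "+".
    return Chroma.from_documents(documents=data + history_docs, embedding=embeddings)

# Example wiring, mirroring the diff:
# vectorstore = build_vectorstore(data, history, embeddings)
# retriever = vectorstore.as_retriever()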
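With HISTORY: {history} removed from the prompt template, the chain only has to supply context and question. A minimal sketch of how that pairs with the RunnablePassthrough import visible at the end of the hunk, assuming prompt, llm, and retriever are defined elsewhere in app.py (they are not shown in the diff):

# Minimal sketch under the assumptions above: wire the trimmed prompt
# (context + question only) into a retrieval chain.
from langchain_core.runnables import RunnablePassthrough
from langchain_core.output_parsers import StrOutputParser

def build_chain(retriever, prompt, llm):
    # The dict supplies exactly the two variables the simplified template expects.
    return (
        {"context": retriever, "question": RunnablePassthrough()}
        | prompt
        | llm
        | StrOutputParser()
    )

# Example use inside chatresponse:
# answer = build_chain(retriever, prompt, llm).invoke(message)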