Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -53,7 +53,7 @@ search_type="similarity_score_threshold"
|
|
53 |
retriever = vectorstore.as_retriever(k = 4, )
|
54 |
|
55 |
prompt = hub.pull("rlm/rag-prompt")
|
56 |
-
llm = HuggingFaceEndpoint(repo_id="mistralai/Mixtral-8x7B-Instruct-v0.1", stop_sequences=["Human:"])
|
57 |
rag_chain = (
|
58 |
{"context": retriever | format_docs, "question": RunnablePassthrough()}
|
59 |
| prompt
|
@@ -144,7 +144,7 @@ async def echo(websocket):
|
|
144 |
m = data["message"] + "\n\nAssistant: "
|
145 |
token = data["token"]
|
146 |
docs = retriever.get_relevant_documents(m)
|
147 |
-
rawresponse = conversational_rag_chain.invoke(
|
148 |
{"input": m},
|
149 |
config={
|
150 |
"configurable": {"session_id": token}
|
|
|
53 |
retriever = vectorstore.as_retriever(k = 4, )
|
54 |
|
55 |
prompt = hub.pull("rlm/rag-prompt")
|
56 |
+
llm = HuggingFaceEndpoint(repo_id="mistralai/Mixtral-8x7B-Instruct-v0.1", stop_sequences=["Human:"], max_new_tokens=8192)
|
57 |
rag_chain = (
|
58 |
{"context": retriever | format_docs, "question": RunnablePassthrough()}
|
59 |
| prompt
|
|
|
144 |
m = data["message"] + "\n\nAssistant: "
|
145 |
token = data["token"]
|
146 |
docs = retriever.get_relevant_documents(m)
|
147 |
+
rawresponse = conversational_rag_chain.ainvoke(
|
148 |
{"input": m},
|
149 |
config={
|
150 |
"configurable": {"session_id": token}
|