Update app.py
app.py CHANGED
@@ -77,15 +77,8 @@ def initialize_llmchain(vector_db):
 
     memory = ConversationBufferMemory(memory_key="chat_history", output_key='answer', return_messages=True)
     retriever=vector_db.as_retriever()
-
-
-        retriever=retriever,
-        chain_type="stuff",
-        memory=memory,
-        return_source_documents=True,
-        verbose=False,
-    )
-    return qa_chain
+
+    #return qa_chain
 
 
 
@@ -161,13 +154,16 @@ def conversation(qa_chain, message, history):
 
     #document = os.listdir(list_file_obj)
     vector_db, collection_name = initialize_database(list_file_obj)
-    qa_chain =
+    #qa_chain =
+    initialize_llmchain(vector_db)
+    qa_chain = ConversationalRetrievalChain.from_llm(llm,retriever=retriever,chain_type="stuff",
+        memory=memory,return_source_documents=True,verbose=False,)
     print('qa chain and vector_db done')
 
 def demo():
     with gr.Blocks(theme='base') as demo:
         vector_db = gr.State()
-        qa_chain = gr.State()
+        #qa_chain = gr.State()
         collection_name = gr.State()
 
         chatbot = gr.Chatbot(height=300)
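For reference, the net effect of the two hunks is that initialize_llmchain(vector_db) still builds the ConversationBufferMemory and the retriever but no longer constructs or returns a chain (return qa_chain is commented out), while the caller now invokes ConversationalRetrievalChain.from_llm(llm, retriever=retriever, ...) itself. That call only works if llm, retriever, and memory are visible at the call site, for example as module-level names set inside initialize_llmchain. A minimal alternative sketch, not the committed app.py code, keeps the construction inside the helper and returns the chain; the llm parameter is an assumption here, since the LLM object is created elsewhere in app.py and does not appear in this diff:

# Hypothetical sketch (not the committed app.py): build and return the chain
# from the helper instead of relying on names shared across functions.
from langchain.chains import ConversationalRetrievalChain
from langchain.memory import ConversationBufferMemory

def initialize_llmchain(llm, vector_db):
    # Same memory configuration as in the first hunk above.
    memory = ConversationBufferMemory(memory_key="chat_history", output_key='answer', return_messages=True)
    # Expose the vector store as a retriever for the chain.
    retriever = vector_db.as_retriever()
    # "stuff" chain type: retrieved chunks are concatenated into a single prompt.
    qa_chain = ConversationalRetrievalChain.from_llm(
        llm,
        retriever=retriever,
        chain_type="stuff",
        memory=memory,
        return_source_documents=True,
        verbose=False,
    )
    return qa_chain

Under that assumption the caller would do qa_chain = initialize_llmchain(llm, vector_db) right after initialize_database(list_file_obj), and could keep storing the result in the qa_chain = gr.State() component that the second hunk comments out.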