Update app.py
app.py CHANGED
@@ -160,6 +160,12 @@ def demo():
 
 gr.Markdown("<b>Chat with Your Document</b>")
 chatbot = gr.Chatbot(height=505)
+doc_source1 = gr.Textbox(label="Reference 1", lines=2, interactive=False)
+source1_page = gr.Number(label="Page", interactive=False)
+doc_source2 = gr.Textbox(label="Reference 2", lines=2, interactive=False)
+source2_page = gr.Number(label="Page", interactive=False)
+doc_source3 = gr.Textbox(label="Reference 3", lines=2, interactive=False)
+source3_page = gr.Number(label="Page", interactive=False)
 msg = gr.Textbox(placeholder="Ask a question", container=True)
 submit_btn = gr.Button("Submit")
 clear_btn = gr.ClearButton([msg, chatbot], value="Clear")
@@ -168,14 +174,14 @@ def demo():
 db_btn.click(initialize_database, inputs=[document], outputs=[vector_db, db_progress])
 qachain_btn.click(initialize_LLM, inputs=[llm_btn, slider_temperature, slider_maxtokens, slider_topk, vector_db], outputs=[qa_chain, llm_progress]).then(lambda: [None, "", 0, "", 0, "", 0],
     inputs=None,
-    outputs=[chatbot,
+    outputs=[chatbot, doc_source1, source1_page, doc_source2, source2_page, doc_source3, source3_page],
     queue=False)
 
-msg.submit(conversation, inputs=[qa_chain, msg, chatbot, persona_text], outputs=[qa_chain, msg, chatbot,
-submit_btn.click(conversation, inputs=[qa_chain, msg, chatbot, persona_text], outputs=[qa_chain, msg, chatbot,
-clear_btn.click(lambda: [None, "", 0, "", 0, "", 0], inputs=None, outputs=[chatbot])
+msg.submit(conversation, inputs=[qa_chain, msg, chatbot, persona_text], outputs=[qa_chain, msg, chatbot, doc_source1, source1_page, doc_source2, source2_page, doc_source3, source3_page], queue=False)
+submit_btn.click(conversation, inputs=[qa_chain, msg, chatbot, persona_text], outputs=[qa_chain, msg, chatbot, doc_source1, source1_page, doc_source2, source2_page, doc_source3, source3_page], queue=False)
+clear_btn.click(lambda: [None, "", 0, "", 0, "", 0], inputs=None, outputs=[chatbot, doc_source1, source1_page, doc_source2, source2_page, doc_source3, source3_page])
 
 demo.queue().launch(debug=True)
 
 if __name__ == "__main__":
-    demo()
+    demo()
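
The new wiring expects conversation to return nine values in the same order as the outputs list: the chain state, a cleared message box, the updated chat history, and a (reference text, page number) pair for each of the three source boxes. The sketch below shows a handler shape that would satisfy that contract; it is an assumption, not this Space's actual code, and it presumes a LangChain-style qa_chain that returns an answer plus source_documents carrying page metadata.

def conversation(qa_chain, message, history, persona_text):
    # Hypothetical handler matching outputs=[qa_chain, msg, chatbot,
    # doc_source1, source1_page, doc_source2, source2_page, doc_source3, source3_page].
    # Assumes a LangChain-style chain returning {"answer": ..., "source_documents": [...]};
    # how persona_text is folded into the prompt is app-specific and omitted here.
    response = qa_chain.invoke({"question": message, "chat_history": history})
    answer = response["answer"]
    sources = response.get("source_documents", [])

    # Keep the first three retrieved passages; pad so every output component gets a value.
    # The +1 assumes 0-indexed page metadata (e.g. a PyPDF-style loader).
    refs = [(doc.page_content.strip(), doc.metadata.get("page", 0) + 1) for doc in sources[:3]]
    while len(refs) < 3:
        refs.append(("", 0))

    history = history + [(message, answer)]
    return (qa_chain, "", history,
            refs[0][0], refs[0][1],
            refs[1][0], refs[1][1],
            refs[2][0], refs[2][1])

The reset lambda used by the .then() chain and by clear_btn.click() follows the same contract minus the chain and message box: it returns seven values (None for the chatbot, then an empty string and 0 for each reference/page pair), which is why both calls list the same seven output components.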