la04 committed on
Commit a135ba9 · verified · 1 Parent(s): 0a1e2b9

Update app.py

Files changed (1)
  1. app.py +26 -9
app.py CHANGED
@@ -67,11 +67,13 @@ def initialize_llm_chain(temperature, max_tokens, vector_db):
 def conversation(qa_chain, message, history):
     if qa_chain is None:
         return None, "Der QA-Chain wurde nicht initialisiert!", history
+    if not message.strip():
+        return qa_chain, "Bitte eine Frage eingeben!", history
     try:
         response = qa_chain({"question": message, "chat_history": history})
         response_text = response["answer"]
         sources = [doc.metadata["source"] for doc in response["source_documents"]]
-        sources_text = "\n".join(sources)
+        sources_text = "\n".join(sources) if sources else "Keine Quellen verfügbar"
         return qa_chain, f"{response_text}\n\n**Quellen:**\n{sources_text}", history + [(message, response_text)]
     except Exception as e:
         return qa_chain, f"Fehler: {str(e)}", history
@@ -79,8 +81,9 @@ def conversation(qa_chain, message, history):
 # **Gradio-Demo erstellen**
 def demo():
     with gr.Blocks() as demo:
-        vector_db = gr.State()
-        qa_chain = gr.State()
+        vector_db = gr.State()  # Zustand für die Vektordatenbank
+        qa_chain = gr.State()  # Zustand für den QA-Chain
+        chat_history = gr.State([])  # Chatverlauf speichern

         gr.HTML("<center><h1>RAG Chatbot mit FAISS und lokalen Modellen</h1></center>")
         with gr.Row():
@@ -93,14 +96,28 @@ def demo():
                 qachain_btn = gr.Button("Initialisiere QA-Chatbot")

             with gr.Column():
-                chatbot = gr.Chatbot(type='messages', height=400)
-                msg = gr.Textbox(placeholder="Frage eingeben...")
+                chatbot = gr.Chatbot(label="Chatbot", type='messages', height=400)
+                msg = gr.Textbox(label="Deine Frage:", placeholder="Frage eingeben...")
                 submit_btn = gr.Button("Absenden")

-        # Button-Events definieren
-        db_btn.click(initialize_database, [document], [vector_db, db_status])
-        qachain_btn.click(initialize_llm_chain_wrapper, [slider_temperature, slider_max_tokens, vector_db], [qa_chain])
-        submit_btn.click(conversation, [qa_chain, msg, []], [qa_chain, "message", "history"])
+        # **Button-Events definieren**
+        db_btn.click(
+            initialize_database,
+            inputs=[document],  # Eingabe der hochgeladenen Dokumente
+            outputs=[vector_db, db_status]  # Ausgabe: Vektor-Datenbank und Status
+        )
+
+        qachain_btn.click(
+            initialize_llm_chain_wrapper,
+            inputs=[slider_temperature, slider_max_tokens, vector_db],
+            outputs=[qa_chain, db_status]
+        )
+
+        submit_btn.click(
+            conversation,
+            inputs=[qa_chain, msg, chat_history],  # Chatkette, Nutzerfrage, Chatverlauf
+            outputs=[qa_chain, chatbot, chat_history]  # Antwort der Kette, Chatbot-Ausgabe, neuer Verlauf
+        )

     demo.launch(debug=True, enable_queue=True)

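
A note on the new wiring: submit_btn.click now receives real components (qa_chain, msg, chat_history) instead of the invalid literals [], "message" and "history" from the old call, which Gradio would have rejected. One loose end remains: conversation still returns a formatted string for the chatbot and keeps the history as (user, bot) tuples, while gr.Chatbot(type='messages') expects a list of {"role": ..., "content": ...} dicts. The snippet below is a minimal, self-contained sketch of the gr.State plus .click pattern with a messages-format callback; answer_fn is a placeholder rather than a function from app.py, and Gradio 4.44 or newer is assumed (where enable_queue has also been removed from launch()).

import gradio as gr

def answer_fn(message, history):
    # Placeholder answer; app.py would call its qa_chain here instead.
    reply = f"Echo: {message}"
    history = history + [
        {"role": "user", "content": message},
        {"role": "assistant", "content": reply},
    ]
    # One return value per output component: Chatbot display, State history, cleared textbox.
    return history, history, ""

with gr.Blocks() as demo:
    chat_history = gr.State([])  # persists between clicks, like chat_history in the commit
    chatbot = gr.Chatbot(type="messages", height=400)
    msg = gr.Textbox(placeholder="Frage eingeben...")
    submit_btn = gr.Button("Absenden")

    submit_btn.click(
        answer_fn,
        inputs=[msg, chat_history],
        outputs=[chatbot, chat_history, msg],
    )

demo.launch(debug=True)

Returning the history in this shape from conversation would let one list feed both the chatbot output and the chat_history state.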
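
One more implication of the rewiring: qachain_btn.click now lists two outputs, [qa_chain, db_status], where the old call only updated [qa_chain]; that only works if initialize_llm_chain_wrapper returns two values. That wrapper is not shown in this diff, so the following is a purely hypothetical shape it could take (the status strings are invented here).

# Hypothetical sketch: initialize_llm_chain_wrapper is not part of this commit.
# With outputs=[qa_chain, db_status], Gradio expects exactly two return values,
# the chain object for the qa_chain State and a status string for db_status.
def initialize_llm_chain_wrapper(temperature, max_tokens, vector_db):
    if vector_db is None:
        return None, "Bitte zuerst die Vektordatenbank initialisieren!"
    qa_chain = initialize_llm_chain(temperature, max_tokens, vector_db)  # existing helper in app.py
    return qa_chain, "QA-Chain erfolgreich initialisiert."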
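
The chain interface itself sits outside this hunk, but the way conversation() calls it ({"question", "chat_history"} in, "answer" and "source_documents" out) matches LangChain's ConversationalRetrievalChain with return_source_documents=True. The sketch below shows that wiring over a FAISS store; FakeListLLM and FakeEmbeddings stand in for whatever local model and embeddings app.py actually uses, so treat the concrete classes as assumptions rather than the Space's real setup.

from langchain.chains import ConversationalRetrievalChain
from langchain_community.embeddings import FakeEmbeddings
from langchain_community.llms import FakeListLLM
from langchain_community.vectorstores import FAISS

# Stand-in vector DB; app.py builds its FAISS index from the uploaded documents instead.
vector_db = FAISS.from_texts(
    ["FAISS ist eine Bibliothek für Ähnlichkeitssuche."],
    FakeEmbeddings(size=32),
    metadatas=[{"source": "beispiel.txt"}],
)

qa_chain = ConversationalRetrievalChain.from_llm(
    llm=FakeListLLM(responses=["Das ist eine Beispielantwort."]),
    retriever=vector_db.as_retriever(),
    return_source_documents=True,  # conversation() reads response["source_documents"]
)

# Same call shape as in conversation():
response = qa_chain({"question": "Was ist FAISS?", "chat_history": []})
print(response["answer"])
print([doc.metadata["source"] for doc in response["source_documents"]])

In recent LangChain releases this chain is considered legacy and calling it directly emits a deprecation warning, but the request and response keys above are exactly the ones conversation() relies on.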