treasuremars committed on
Commit ba4dea2 · verified · 1 Parent(s): 94ce4e1

Update app.py

Files changed (1)
  1. app.py +4 -25
app.py CHANGED
@@ -69,32 +69,11 @@ rag_chain = (
 
 import gradio as gr
 
-# # Function to stream responses
-# def rag_memory_stream(message, history):
-#     partial_text = ""
-#     for new_text in rag_chain.stream(message):  # Assuming rag_chain is pre-defined
-#         partial_text += new_text
-#         yield partial_text
-
-# Example rag_memory_stream function with history handling
-def rag_memory_stream(messages, history=[]):
-    """
-    A generator-based function that processes messages, maintains history,
-    and streams responses for interaction with the chatbot.
-    """
-    # Ensure messages is a list of strings
-    if isinstance(messages, list) and all(isinstance(msg, str) for msg in messages):
-        user_message = messages[-1]  # Extract the latest user message
-    else:
-        raise ValueError("Expected messages to be a list of strings.")
-
+# Function to stream responses
+def rag_memory_stream(message, history):
     partial_text = ""
-    history.append({"user": user_message, "bot": ""})  # Add to history
-
-    # Simulate response generation (replace with actual rag_chain logic)
-    for new_text in rag_chain.stream(user_message):  # Assuming rag_chain is pre-defined
+    for new_text in rag_chain.stream(message):  # Assuming rag_chain is pre-defined
         partial_text += new_text
-        history[-1]["bot"] = partial_text  # Update bot response in history
         yield partial_text
 
 examples = [
@@ -133,7 +112,7 @@ Disclaimer: This chatbot is for informational purposes only and not a substitute
 # )
 
 demo = gr.ChatInterface(fn=rag_memory_stream,
-                        type="list",
+                        type="messages",
                         title=title,
                         description=description,
                         fill_height=True,
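
Taken together, the commit drops the hand-rolled history bookkeeping and leans on gr.ChatInterface to manage the conversation itself, passing the prior turns into the callback on every call, and it switches the interface to type="messages". Below is a minimal, self-contained sketch of the contract the simplified callback follows; rag_chain is assumed to be the streaming runnable built earlier in app.py, and FakeChain here is only a stand-in so the snippet runs on its own (title is a placeholder, not the app's actual title).

# Sketch of the callback contract under gr.ChatInterface(type="messages").
import gradio as gr

class FakeChain:
    def stream(self, message):
        # Stand-in for rag_chain.stream(): yields the answer in small chunks.
        for word in f"You asked: {message}".split():
            yield word + " "

rag_chain = FakeChain()  # in app.py this is the real RAG chain

def rag_memory_stream(message, history):
    # With type="messages", history arrives as a list of
    # {"role": "user"/"assistant", "content": ...} dicts maintained by Gradio,
    # so the callback only needs to stream the new answer.
    partial_text = ""
    for new_text in rag_chain.stream(message):
        partial_text += new_text
        yield partial_text

demo = gr.ChatInterface(fn=rag_memory_stream, type="messages", title="Demo")

if __name__ == "__main__":
    demo.launch()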