Phoenix21 committed on
Commit
5a5fa80
·
verified ·
1 Parent(s): c450e62

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +8 -29
app.py CHANGED
@@ -1,45 +1,24 @@
1
  import gradio as gr
2
  from my_memory_logic import run_with_session_memory
3
 
4
def chat_interface_fn(message, history, session_id):
    """
    Multi-turn chat handler for Gradio's ChatInterface.

    Args:
        message: The user's latest message.
        history: Prior turns, either as a list of (user, assistant) tuples
            (Gradio 3.x format) or as a list of {"role", "content"} dicts.
            May be None on the first turn.
        session_id: Key under which run_with_session_memory stores the
            conversation across turns.

    Returns:
        A (chat_history, new_history) tuple: chat_history is a list of
        (user, assistant) tuples for display, new_history is the full
        role/content dict list carried as internal state.
    """
    if history is None:
        history = []

    # Normalize Gradio 3.x tuple pairs into role/content dicts so the rest
    # of the function only deals with one representation.
    if isinstance(history, list) and history and isinstance(history[0], tuple):
        history = [
            {"role": "user" if i % 2 == 0 else "assistant", "content": msg}
            for pair in history
            for i, msg in enumerate([pair[0], pair[1]])
        ]

    # Get the answer from the session-based memory pipeline; fall back to a
    # canned apology so the UI never shows a raw traceback.
    try:
        answer = run_with_session_memory(message, session_id)
    except Exception as e:
        print(f"Error in run_with_session_memory: {str(e)}")
        answer = "I apologize, but I encountered an error processing your request."

    # Append the new exchange in role/content form.
    new_history = history + [
        {"role": "user", "content": message},
        {"role": "assistant", "content": answer},
    ]

    # Re-pair consecutive entries into (user, assistant) display tuples.
    # NOTE(review): assumes strict user/assistant alternation in the
    # history — confirm callers never pass an odd-length or misordered list.
    chat_history = [
        (user_msg["content"], bot_msg["content"])
        for user_msg, bot_msg in zip(new_history[::2], new_history[1::2])
    ]

    return chat_history, new_history
43
 
44
  # Custom CSS for chat interface
45
  my_chat_css = """
@@ -60,7 +39,7 @@ with gr.Blocks(css=my_chat_css) as demo:
60
  session_id_box = gr.Textbox(label="Session ID", value="abc123", interactive=True)
61
 
62
  chat_interface = gr.ChatInterface(
63
- fn=lambda msg, hist: chat_interface_fn(msg, hist, session_id_box.value),
64
  title="DailyWellnessAI (Session-based Memory)",
65
  description="Ask your questions. The session_id determines your stored memory."
66
  )
 
1
  import gradio as gr
2
  from my_memory_logic import run_with_session_memory
3
 
4
def chat_interface_fn(message, history):
    """
    Multi-turn chat handler for Gradio's ChatInterface.

    Args:
        message: The user's latest message.
        history: Prior conversation turns supplied by Gradio. Unused here:
            conversation memory is kept server-side by
            run_with_session_memory, keyed by session id.

    Returns:
        The assistant's reply as a string — the return type
        gr.ChatInterface expects from its fn (Gradio manages and renders
        the chat history itself; returning the history list here would
        break rendering).
    """
    try:
        # NOTE(review): session_id_box.value is the textbox's *initial*
        # value, not the live UI contents — TODO confirm; to honor per-user
        # edits, pass the textbox as an additional input component instead.
        answer = run_with_session_memory(message, session_id_box.value)
    except Exception as e:
        print(f"Error in run_with_session_memory: {str(e)}")
        answer = "I apologize, but I encountered an error processing your request."
    return answer
 
 
 
 
 
 
 
 
 
22
 
23
  # Custom CSS for chat interface
24
  my_chat_css = """
 
39
  session_id_box = gr.Textbox(label="Session ID", value="abc123", interactive=True)
40
 
41
  chat_interface = gr.ChatInterface(
42
+ fn=chat_interface_fn,
43
  title="DailyWellnessAI (Session-based Memory)",
44
  description="Ask your questions. The session_id determines your stored memory."
45
  )