Phoenix21 committed (verified)
Commit c450e62 · 1 Parent(s): 3d23e89

Update app.py

Files changed (1):
  1. app.py +30 -23
app.py CHANGED
@@ -7,32 +7,39 @@ def chat_interface_fn(message, history, session_id):
     'session_id' is used to store conversation across turns.
     Deduplicates consecutive repeated Q&A pairs to avoid repetition.
     """
-    # Ensure history is a list of dictionaries
-    if history and isinstance(history[0], tuple):
-        print("DEBUG: Converting history from tuple format to dictionary format.")
-        history = [
-            msg for h in history
-            for msg in [
-                {"role": "user", "content": h[0]},
-                {"role": "assistant", "content": h[1]}
-            ]
-        ]
-
-    # 1) Get answer from the session-based memory pipeline
-    answer = run_with_session_memory(message, session_id)
-
-    # 2) Deduplicate consecutive identical exchanges
-    if not history or history[-1]["content"] != answer:
-        history.append({"role": "user", "content": message})
-        history.append({"role": "assistant", "content": answer})
-
-    # 3) Convert history to message dictionaries for display
-    message_dicts = []
-    for msg in history:
-        message_dicts.append(msg)
-
-    # Return the message dicts and updated history
-    return message_dicts, history
+    # Initialize history if None
+    if history is None:
+        history = []
+
+    # Ensure we're working with the correct history format
+    # Gradio 3.x sends history as a list of tuples (user, assistant)
+    if isinstance(history, list):
+        if len(history) > 0 and isinstance(history[0], tuple):
+            history = [
+                {"role": "user" if i % 2 == 0 else "assistant", "content": msg}
+                for tup in history
+                for i, msg in enumerate([tup[0], tup[1]])
+            ]
+
+    # Get answer from the session-based memory pipeline
+    try:
+        answer = run_with_session_memory(message, session_id)
+    except Exception as e:
+        print(f"Error in run_with_session_memory: {str(e)}")
+        answer = "I apologize, but I encountered an error processing your request."
+
+    # Format for Gradio ChatInterface
+    # Gradio expects a tuple of (new_chat_history, internal_history)
+    new_history = history + [
+        {"role": "user", "content": message},
+        {"role": "assistant", "content": answer}
+    ]
+
+    # Convert history to format expected by Gradio
+    chat_history = [(msg["content"], hist["content"])
+                    for msg, hist in zip(new_history[::2], new_history[1::2])]
+
+    return chat_history, new_history
 
 # Custom CSS for chat interface
 my_chat_css = """
@@ -58,5 +65,5 @@ with gr.Blocks(css=my_chat_css) as demo:
         description="Ask your questions. The session_id determines your stored memory."
     )
 
-# Launch the Gradio interface
-demo.launch()
+# Launch the Gradio interface with sharing enabled
+demo.launch(share=True)
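
For context, here is a minimal sketch (not part of the commit) of how the updated chat_interface_fn might be wired into the Blocks demo referenced in the second hunk. The widget names (chatbot, msg_box, session_box, state, send_btn) and the stub run_with_session_memory are assumptions for illustration only; the real app.py defines its own session-memory pipeline, CSS, and layout.

import gradio as gr

def run_with_session_memory(message, session_id):
    # Stand-in for the real session-memory pipeline defined elsewhere in app.py.
    return f"[session {session_id}] You said: {message}"

def chat_interface_fn(message, history, session_id):
    # Condensed version of the committed function: keep the internal history
    # as role/content dicts, hand the Chatbot (user, assistant) tuples.
    history = history or []
    try:
        answer = run_with_session_memory(message, session_id)
    except Exception as e:
        print(f"Error in run_with_session_memory: {e}")
        answer = "I apologize, but I encountered an error processing your request."
    new_history = history + [
        {"role": "user", "content": message},
        {"role": "assistant", "content": answer},
    ]
    chat_history = [(u["content"], a["content"])
                    for u, a in zip(new_history[::2], new_history[1::2])]
    return chat_history, new_history

with gr.Blocks() as demo:
    chatbot = gr.Chatbot(label="Chat")
    msg_box = gr.Textbox(label="Your question")
    session_box = gr.Textbox(label="session_id", value="default")
    state = gr.State([])
    send_btn = gr.Button("Send")
    send_btn.click(
        fn=chat_interface_fn,
        inputs=[msg_box, state, session_box],
        outputs=[chatbot, state],
    )

demo.launch(share=True)

With this wiring, the tuple list returned as the first value updates the Chatbot display, while the dict-format history travels through gr.State, matching the (chat_history, new_history) return contract of the committed function.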