Phoenix21 committed on
Commit
a8d22a4
·
verified ·
1 Parent(s): c3235ac

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +34 -23
app.py CHANGED
@@ -1,47 +1,58 @@
 
1
  import gradio as gr
 
 
2
  from pipeline import run_with_chain
 
 
3
  from my_memory_logic import memory, restatement_chain
4
 
5
  def chat_history_fn(user_input, history):
6
  """
7
- Gradio calls this function each time the user submits a message.
8
- 'history' is a list of (user_msg, ai_msg) pairs from previous turns.
9
- We'll add them to memory, restate the user_input if needed,
10
- run the pipeline, store the new turn, then return a list of
11
- message dicts that Gradio's ChatInterface accepts.
12
  """
13
- # 1) Convert existing history into memory
14
- for user_msg, ai_msg in history:
15
- memory.chat_memory.add_user_message(user_msg)
16
- memory.chat_memory.add_ai_message(ai_msg)
17
-
18
- # 2) Restate the new user question with chat history
19
  reformulated_q = restatement_chain.run({
20
  "chat_history": memory.chat_memory.messages,
21
  "input": user_input
22
  })
23
-
24
- # 3) Pass the reformulated question to your pipeline
25
  answer = run_with_chain(reformulated_q)
26
-
27
- # 4) Update memory with the new turn
 
 
 
28
  memory.chat_memory.add_user_message(user_input)
29
  memory.chat_memory.add_ai_message(answer)
30
-
31
- # 5) Update the 'history' list with (user, ai)
32
  history.append((user_input, answer))
33
-
34
- # 6) Gradio's ChatInterface expects a list of message dicts:
35
- # [{"role": "user"|"assistant", "content": "..."} ...]
36
- # We'll build that from our (user_msg, ai_msg) pairs in 'history'.
37
  message_dicts = []
38
  for usr_msg, ai_msg in history:
 
 
 
 
 
39
  message_dicts.append({"role": "user", "content": usr_msg})
40
  message_dicts.append({"role": "assistant", "content": ai_msg})
41
-
42
- # 7) Return the message dictionary list
43
  return message_dicts
44
 
 
45
  demo = gr.ChatInterface(
46
  fn=chat_history_fn,
47
  title="DailyWellnessAI with Memory",
 
1
+ import os
2
  import gradio as gr
3
+
4
+ # Suppose 'run_with_chain' is your pipeline function from pipeline.py
5
  from pipeline import run_with_chain
6
+
7
+ # Suppose 'memory' and 'restatement_chain' come from my_memory_logic.py
8
  from my_memory_logic import memory, restatement_chain
9
 
10
  def chat_history_fn(user_input, history):
11
  """
12
+ Rely on LangChain memory to store the entire conversation across calls.
13
+ DO NOT re-add old messages from 'history' each time.
14
+ Also, handle potential None or invalid strings for user_input/answer
15
+ to avoid Pydantic validation errors.
 
16
  """
17
+ # -- 0) Sanitize user_input to ensure it's a valid string
18
+ if not user_input or not isinstance(user_input, str):
19
+ user_input = "" if user_input is None else str(user_input)
20
+
21
+ # -- 1) Restate the new user question using existing LangChain memory
 
22
  reformulated_q = restatement_chain.run({
23
  "chat_history": memory.chat_memory.messages,
24
  "input": user_input
25
  })
26
+
27
+ # -- 2) Pass the reformulated question into your pipeline
28
  answer = run_with_chain(reformulated_q)
29
+ # also sanitize if needed
30
+ if answer is None or not isinstance(answer, str):
31
+ answer = "" if answer is None else str(answer)
32
+
33
+ # -- 3) Add this new user->assistant turn to memory
34
  memory.chat_memory.add_user_message(user_input)
35
  memory.chat_memory.add_ai_message(answer)
36
+
37
+ # -- 4) Update Gradio’s 'history' so the UI shows the new turn
38
  history.append((user_input, answer))
39
+
40
+ # -- 5) Convert the entire 'history' to message dictionaries:
41
+ # [{"role":"user","content":...},{"role":"assistant","content":...},...]
 
42
  message_dicts = []
43
  for usr_msg, ai_msg in history:
44
+ if not isinstance(usr_msg, str):
45
+ usr_msg = str(usr_msg) if usr_msg else ""
46
+ if not isinstance(ai_msg, str):
47
+ ai_msg = str(ai_msg) if ai_msg else ""
48
+
49
  message_dicts.append({"role": "user", "content": usr_msg})
50
  message_dicts.append({"role": "assistant", "content": ai_msg})
51
+
52
+ # -- 6) Return the message dictionary list
53
  return message_dicts
54
 
55
+ # Build your ChatInterface with the function
56
  demo = gr.ChatInterface(
57
  fn=chat_history_fn,
58
  title="DailyWellnessAI with Memory",