import os
import gradio as gr
# Suppose 'run_with_chain' is your pipeline function from pipeline.py
from pipeline import run_with_chain
# Suppose 'memory' and 'restatement_chain' come from my_memory_logic.py
from my_memory_logic import memory, restatement_chain
def chat_history_fn(user_input, history):
    """
    Handle one chat turn, relying on LangChain memory for conversation state.

    Old messages are NOT re-added from ``history`` on each call; the shared
    ``memory`` object persists them across invocations.

    Args:
        user_input: The raw user message (may be None or a non-string).
        history: Gradio's running list of (user_msg, assistant_msg) tuples.

    Returns:
        The full conversation as a list of message dicts:
        [{"role": "user", "content": ...}, {"role": "assistant", "content": ...}, ...]
    """
    def _as_str(value):
        # Coerce arbitrary values to str so downstream Pydantic models
        # (LangChain message schemas) never receive None / non-strings.
        if isinstance(value, str):
            return value
        return "" if value is None else str(value)

    # -- 0) Sanitize user_input to ensure it's a valid string
    user_input = _as_str(user_input)

    # -- 1) Restate the new user question using existing LangChain memory,
    #       so follow-ups like "what about that?" become standalone questions.
    reformulated_q = restatement_chain.run({
        "chat_history": memory.chat_memory.messages,
        "input": user_input,
    })

    # -- 2) Pass the reformulated question into the pipeline; sanitize the
    #       answer with the same rules as the input (the original code mapped
    #       falsy non-None values to "" here but to str(value) above).
    answer = _as_str(run_with_chain(reformulated_q))

    # -- 3) Add this new user->assistant turn to memory
    memory.chat_memory.add_user_message(user_input)
    memory.chat_memory.add_ai_message(answer)

    # -- 4) Update Gradio's 'history' so the UI shows the new turn
    history.append((user_input, answer))

    # -- 5) Convert the entire 'history' to message dictionaries:
    #       [{"role":"user","content":...},{"role":"assistant","content":...},...]
    message_dicts = []
    for usr_msg, ai_msg in history:
        message_dicts.append({"role": "user", "content": _as_str(usr_msg)})
        message_dicts.append({"role": "assistant", "content": _as_str(ai_msg)})

    # -- 6) Return the message dictionary list
    return message_dicts
# Build your ChatInterface with the function
# NOTE(review): chat_history_fn returns a list of role/content message dicts;
# recent Gradio versions expect type="messages" on ChatInterface for that
# return format (the default history format is tuples) — confirm against the
# installed gradio version.
demo = gr.ChatInterface(
    fn=chat_history_fn,
    title="DailyWellnessAI with Memory",
    description="A chat bot that remembers context using memory + question restatement."
)
# Launch the web UI only when run as a script, not on import.
if __name__ == "__main__":
    demo.launch()