|
import os |
|
import gradio as gr |
|
|
|
|
|
from pipeline import run_with_chain |
|
|
|
|
|
from my_memory_logic import memory, restatement_chain |
|
|
|
def chat_history_fn(user_input, history):
    """
    Handle one chat turn, relying on LangChain memory for cross-call state.

    The LangChain ``memory`` object stores the entire conversation across
    calls, so old messages from ``history`` are NOT re-added to memory —
    only the new user/assistant turn is appended. All text values are
    coerced to ``str`` (via ``_coerce_str``) to avoid Pydantic validation
    errors inside LangChain's message models.

    Args:
        user_input: The latest user message (may be None or a non-str value).
        history: Gradio-managed list of (user, assistant) message pairs;
            mutated in place by appending the new turn.

    Returns:
        The full conversation as a list of ``{"role", "content"}`` dicts.
    """
    user_input = _coerce_str(user_input)

    # Rewrite the user's question into a standalone question using the
    # stored chat history, so follow-ups like "what about X?" resolve.
    reformulated_q = restatement_chain.run({
        "chat_history": memory.chat_memory.messages,
        "input": user_input
    })

    answer = _coerce_str(run_with_chain(reformulated_q))

    # Persist only the NEW turn; memory already holds all earlier turns.
    memory.chat_memory.add_user_message(user_input)
    memory.chat_memory.add_ai_message(answer)

    history.append((user_input, answer))

    # Re-render the whole conversation in role/content ("messages") format.
    message_dicts = []
    for usr_msg, ai_msg in history:
        message_dicts.append({"role": "user", "content": _coerce_str(usr_msg)})
        message_dicts.append({"role": "assistant", "content": _coerce_str(ai_msg)})

    return message_dicts


def _coerce_str(value):
    """Return *value* as a str; None becomes "".

    Unlike a bare truthiness test, falsy non-None values such as 0 or
    False are stringified ("0", "False") rather than dropped, matching
    the None-guard convention used for user_input and answer.
    """
    if isinstance(value, str):
        return value
    return "" if value is None else str(value)
|
|
|
|
|
# Build the Gradio chat UI around the memory-aware chat handler.
demo = gr.ChatInterface(
    chat_history_fn,
    title="DailyWellnessAI with Memory",
    description="A chat bot that remembers context using memory + question restatement.",
)

if __name__ == "__main__":
    # Launch the app only when run as a script, not when imported.
    demo.launch()
|
|