|
import gradio as gr |
|
from my_memory_logic import run_with_session_memory |
|
|
|
def chat_interface_fn(message, history, session_id):
    """Multi-turn chat handler for Gradio's ChatInterface.

    Args:
        message: The user's newest message (str).
        history: Prior conversation, either a list of role/content dicts or
            the legacy list of (user, assistant) tuples; None is treated as empty.
        session_id: Key under which run_with_session_memory stores the
            conversation across turns.

    Returns:
        A (chat_history, new_history) pair:
            chat_history -- list of (user, assistant) tuples for display;
            new_history  -- flat list of role/content dicts for state.

    Consecutive duplicate Q&A pairs are collapsed: if the incoming
    message/answer pair is identical to the last pair already in history,
    it is not appended again.
    """
    if history is None:
        history = []

    # Normalize legacy tuple-pair history into role/content dicts so the
    # rest of the function can assume one shape.
    if isinstance(history, list) and history and isinstance(history[0], tuple):
        converted = []
        for user_msg, assistant_msg in history:
            converted.append({"role": "user", "content": user_msg})
            converted.append({"role": "assistant", "content": assistant_msg})
        history = converted

    try:
        answer = run_with_session_memory(message, session_id)
    except Exception as e:
        # Best-effort boundary: surface a generic apology instead of crashing the UI.
        print(f"Error in run_with_session_memory: {str(e)}")
        answer = "I apologize, but I encountered an error processing your request."

    new_pair = [
        {"role": "user", "content": message},
        {"role": "assistant", "content": answer},
    ]

    # Deduplicate consecutive repeats: only append when the new Q&A pair
    # differs from the last pair already stored. (The original code
    # documented this behavior but never implemented it.)
    if history[-2:] == new_pair:
        new_history = list(history)
    else:
        new_history = history + new_pair

    # Re-pair the flat role/content list into (user, assistant) tuples for
    # the chatbot display; even indices are user turns, odd are assistant.
    chat_history = [
        (user_turn["content"], assistant_turn["content"])
        for user_turn, assistant_turn in zip(new_history[::2], new_history[1::2])
    ]

    return chat_history, new_history
|
|
|
|
|
# Custom CSS injected into gr.Blocks: centers the app container and aligns
# chat bubbles (user messages on the right, assistant messages on the left).
my_chat_css = """
.gradio-container {
    margin: auto;
}
.user .wrap {
    text-align: right !important;
}
.assistant .wrap {
    text-align: left !important;
}
"""
|
|
|
|
|
# Build and launch the UI: a session-id textbox plus a session-aware chat.
with gr.Blocks(css=my_chat_css) as demo:
    gr.Markdown("### DailyWellnessAI (User on right, Assistant on left)")
    session_id_box = gr.Textbox(label="Session ID", value="abc123", interactive=True)

    # Pass the textbox via additional_inputs so the *live* value the user
    # typed reaches chat_interface_fn as its third argument. The previous
    # lambda read `session_id_box.value`, which is only the component's
    # initial value ("abc123") fixed at build time — user edits to the
    # Session ID were silently ignored.
    chat_interface = gr.ChatInterface(
        fn=chat_interface_fn,
        additional_inputs=[session_id_box],
        title="DailyWellnessAI (Session-based Memory)",
        description="Ask your questions. The session_id determines your stored memory."
    )

# share=True exposes a public Gradio link in addition to the local server.
demo.launch(share=True)