|
import gradio as gr |
|
from my_memory_logic import run_with_session_memory |
|
|
|
def chat_interface_fn(message, history, session_id):
    """
    Multi-turn chat handler for Gradio's ChatInterface.

    Args:
        message: The user's latest message.
        history: Prior conversation, either as legacy (user, assistant)
            pairs (tuples or lists) or as "messages"-style dicts with
            'role'/'content' keys.
        session_id: Key under which run_with_session_memory stores and
            retrieves this conversation's memory across turns.

    Returns:
        A (messages, history) pair — both the same list of role/content
        dicts, including the new turn unless the assistant's answer would
        exactly repeat the previous entry.

    NOTE(review): gr.ChatInterface usually expects the fn to return just
    the reply; this returns a 2-tuple — confirm against the Gradio version
    in use before changing the contract.
    """
    # Normalize legacy pair-format history into messages-format dicts.
    # Accept lists as well as tuples: Gradio serializes pairs both ways.
    if history and isinstance(history[0], (tuple, list)):
        print("DEBUG: Converting history from tuple format to dictionary format.")
        history = [
            msg
            for h in history
            for msg in (
                {"role": "user", "content": h[0]},
                {"role": "assistant", "content": h[1]},
            )
        ]

    answer = run_with_session_memory(message, session_id)

    # Deduplicate: skip appending when the assistant would repeat its
    # previous message verbatim.
    if not history or history[-1]["content"] != answer:
        history.append({"role": "user", "content": message})
        history.append({"role": "assistant", "content": answer})

    # Shallow copy for display (replaces a redundant element-by-element
    # copy loop); both returned values share the same dict objects.
    return list(history), history
|
|
|
|
|
# Custom CSS for the chat UI: center the app container and mirror the
# chat bubbles — user messages flush right, assistant messages flush left.
my_chat_css = """
.gradio-container {
    margin: auto;
}
.user .wrap {
    text-align: right !important;
}
.assistant .wrap {
    text-align: left !important;
}
"""
|
|
|
|
|
with gr.Blocks(css=my_chat_css) as demo:
    gr.Markdown("### DailyWellnessAI (User on right, Assistant on left)")
    session_id_box = gr.Textbox(label="Session ID", value="abc123", interactive=True)

    # Wire the session box through additional_inputs so each turn receives
    # the user's *current* text. The previous lambda read session_id_box.value,
    # which is only the static default ("abc123") and never reflects edits
    # made in the browser.
    chat_interface = gr.ChatInterface(
        fn=chat_interface_fn,
        additional_inputs=[session_id_box],
        title="DailyWellnessAI (Session-based Memory)",
        description="Ask your questions. The session_id determines your stored memory."
    )

# Guard the server launch so importing this module (e.g. for deployment
# tooling that only needs `demo`) does not start the app.
if __name__ == "__main__":
    demo.launch()