import gradio as gr
from my_memory_logic import run_with_session_memory
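# Assumed contract for run_with_session_memory, inferred from how it is called
# below: it takes the user's message plus a session_id string, stores/retrieves
# the conversation memory kept under that session_id, and returns the
# assistant's answer as plain text.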
def chat_interface_fn(message, history, session_id):
"""
Multi-turn chat function for Gradio's ChatInterface.
'session_id' is used to store conversation across turns.
Deduplicates consecutive repeated Q&A pairs to avoid repetition.
"""
    # Ensure history is a list of dictionaries
    if history and isinstance(history[0], tuple):
        print("DEBUG: Converting history from tuple format to dictionary format.")
        history = [
            msg for h in history
            for msg in [
                {"role": "user", "content": h[0]},
                {"role": "assistant", "content": h[1]}
            ]
        ]
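    # Illustration (not executed): a tuple-style history such as
    #     [("Hi", "Hello! How can I help?")]
    # becomes
    #     [{"role": "user", "content": "Hi"},
    #      {"role": "assistant", "content": "Hello! How can I help?"}]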
    # 1) Get answer from the session-based memory pipeline
    answer = run_with_session_memory(message, session_id)

    # 2) Deduplicate consecutive identical exchanges
    if not history or history[-1]["content"] != answer:
        history.append({"role": "user", "content": message})
        history.append({"role": "assistant", "content": answer})
    # 3) Convert history to message dictionaries for display
    message_dicts = []
    for msg in history:
        message_dicts.append(msg)

    # Return the message dicts and updated history
    return message_dicts, history
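# Example of calling the function directly (outside Gradio), assuming the
# memory pipeline is importable; both returned lists end with the new
# user/assistant pair:
#
#     msgs, hist = chat_interface_fn("Any tips for better sleep?", [], "abc123")
#     msgs[-1]  # -> {"role": "assistant", "content": <answer>}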
# Custom CSS for chat interface
my_chat_css = """
.gradio-container {
    margin: auto;
}
.user .wrap {
    text-align: right !important;
}
.assistant .wrap {
    text-align: left !important;
}
"""
# Set up Gradio interface
with gr.Blocks(css=my_chat_css) as demo:
gr.Markdown("### DailyWellnessAI (User on right, Assistant on left)")
session_id_box = gr.Textbox(label="Session ID", value="abc123", interactive=True)
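    # The textbox is passed to the chat function via additional_inputs below,
    # so each message uses whatever session ID is currently in the box.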
    chat_interface = gr.ChatInterface(
        fn=chat_interface_fn,
        additional_inputs=[session_id_box],
        title="DailyWellnessAI (Session-based Memory)",
        description="Ask your questions. The session_id determines your stored memory."
    )
# Launch the Gradio interface
demo.launch()
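# Optional (standard Gradio options): demo.launch(share=True) creates a
# temporary public link, and demo.launch(server_port=7860) pins the local port.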