import gradio as gr
from my_memory_logic import run_with_session_memory
def chat_interface_fn(message, history, session_id):
    """
    Multi-turn chat handler for Gradio's ChatInterface.

    Args:
        message: The user's new message.
        history: Prior conversation — None, a list of (user, assistant)
            pairs (Gradio 3.x tuple format; lists are accepted too), or a
            list of {"role", "content"} dicts.
        session_id: Key under which run_with_session_memory keeps the
            conversation state across turns.

    Returns:
        A (chat_history, new_history) tuple: chat_history is a list of
        (user, assistant) content pairs for display; new_history is the
        full role/content dict list including this turn.
    """
    if history is None:
        history = []

    # Normalize pair-format history (Gradio 3.x) into role/content dicts.
    # Accept lists as well as tuples — some Gradio versions deliver the
    # pairs as two-element lists rather than tuples.
    if isinstance(history, list) and history and isinstance(history[0], (list, tuple)):
        normalized = []
        for user_msg, assistant_msg in history:
            normalized.append({"role": "user", "content": user_msg})
            normalized.append({"role": "assistant", "content": assistant_msg})
        history = normalized

    # Run the session-based memory pipeline. Catch broadly at this UI
    # boundary so a backend failure surfaces as a chat message instead of
    # an unhandled stack trace in the interface.
    try:
        answer = run_with_session_memory(message, session_id)
    except Exception as e:
        print(f"Error in run_with_session_memory: {str(e)}")
        answer = "I apologize, but I encountered an error processing your request."

    # Append this turn in role/content dict form.
    new_history = history + [
        {"role": "user", "content": message},
        {"role": "assistant", "content": answer},
    ]

    # Pair consecutive (user, assistant) dicts back into display tuples,
    # as expected by Gradio's tuple-format chatbot rendering.
    chat_history = [(user_turn["content"], assistant_turn["content"])
                    for user_turn, assistant_turn
                    in zip(new_history[::2], new_history[1::2])]
    return chat_history, new_history
# Custom CSS for chat interface: right-align user messages, left-align
# assistant messages, and center the overall container.
# NOTE(review): the `.user .wrap` / `.assistant .wrap` selectors target
# Gradio's internal chatbot markup — confirm they still match the
# installed Gradio version's DOM.
my_chat_css = """
.gradio-container {
margin: auto;
}
.user .wrap {
text-align: right !important;
}
.assistant .wrap {
text-align: left !important;
}
"""
# Set up Gradio interface
with gr.Blocks(css=my_chat_css) as demo:
    gr.Markdown("### DailyWellnessAI (User on right, Assistant on left)")

    # Session ID textbox, wired into the chat fn via additional_inputs below.
    session_id_box = gr.Textbox(label="Session ID", value="abc123", interactive=True)

    # BUG FIX: the previous lambda read `session_id_box.value`, which is the
    # static initial value ("abc123") captured at startup — edits the user
    # made to the textbox were silently ignored. `additional_inputs` makes
    # Gradio pass the component's *current* value as the third argument of
    # chat_interface_fn on every turn.
    chat_interface = gr.ChatInterface(
        fn=chat_interface_fn,
        additional_inputs=[session_id_box],
        title="DailyWellnessAI (Session-based Memory)",
        description="Ask your questions. The session_id determines your stored memory."
    )

# Launch the Gradio interface with a public share link enabled
demo.launch(share=True)