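"""Gradio chat app for RekaAI/reka-flash-3.

Renders the model's <reasoning> sections as collapsible HTML blocks and keeps
a lightweight conversation history in the browser's localStorage.
"""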
import gradio as gr
import requests
import json
import os
import re
from datetime import datetime
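
# API configuration comes from environment variables. The endpoint is assumed
# to be an OpenAI-compatible chat-completions URL; "none" is only a placeholder
# default, so requests will fail until a real endpoint is configured.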
API_ENDPOINT = os.getenv("API_ENDPOINT", "none")
API_TOKEN = os.getenv("API_TOKEN")

def get_ai_response(message, history):
    """Fetch AI response from the API using the modern messages format."""
    messages = [
        {"role": "system", "content": "You are a helpful assistant."}
    ] + history + [{"role": "user", "content": message}]
    payload = {
        "model": "RekaAI/reka-flash-3",
        "messages": messages,
        "stream": False,
        "max_tokens": 1024,
        "temperature": 0.7
    }
    headers = {
        "Authorization": f"Bearer {API_TOKEN}",
        "Content-Type": "application/json"
    }
    try:
        # timeout keeps the request from hanging indefinitely if the API stalls
        response = requests.post(API_ENDPOINT, headers=headers, json=payload, timeout=60)
        response.raise_for_status()
        raw_response = response.json()["choices"][0]["message"]["content"]
        # Convert reasoning tags to collapsible HTML
        html_response = convert_reasoning_to_collapsible(raw_response)
        return html_response
    except Exception as e:
        return f"Error: {str(e)}"
def convert_reasoning_to_collapsible(text):
    """Convert reasoning tags to collapsible HTML sections."""
    # Find all reasoning sections
    reasoning_pattern = re.compile(r'<reasoning>(.*?)</reasoning>', re.DOTALL)

    # Function to replace each reasoning section with collapsible HTML
    def replace_with_collapsible(match):
        reasoning_content = match.group(1).strip()
        return f'<details><summary><strong>See reasoning</strong></summary><div class="reasoning-content">{reasoning_content}</div></details>'

    # Replace reasoning tags with collapsible sections
    html_response = reasoning_pattern.sub(replace_with_collapsible, text)
    # Remove <sep> tags
    html_response = re.sub(r'<sep>.*?</sep>', '', html_response, flags=re.DOTALL)
    html_response = html_response.replace('<sep>', '').replace('</sep>', '')
    return html_response

def chat_interface(message, history, stored_history):
    """Handle chat interactions and update history."""
    if not history:
        history = []
    # Convert history to the format expected by the API
    api_history = []
    for user_msg, ai_msg in history:
        # Remove HTML tags for API history
        clean_ai_msg = re.sub(r'<details>.*?</details>', '', ai_msg, flags=re.DOTALL)
        clean_ai_msg = re.sub(r'<[^>]*>', '', clean_ai_msg)
        api_history.append({"role": "user", "content": user_msg})
        api_history.append({"role": "assistant", "content": clean_ai_msg})
    ai_response = get_ai_response(message, api_history)
    # Update history in the format expected by the Gradio chatbot
    history.append((message, ai_response))
    timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    if stored_history is None:
        stored_history = []
    # Store a plain text version for the history display
    plain_response = re.sub(r'<details>.*?</details>', '[Reasoning available]', ai_response, flags=re.DOTALL)
    plain_response = re.sub(r'<[^>]*>', '', plain_response)
    stored_history.insert(0, {
        "timestamp": timestamp,
        "user": message,
        "ai": plain_response
    })
    return history, stored_history
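
# History persistence: update_history_display embeds a <script> tag that writes
# the stored history to localStorage, and load_history_js (defined further down)
# reads it back on page load. Whether the injected <script> actually executes
# depends on how the Gradio HTML component treats script tags, so this is
# best-effort rather than guaranteed.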
def update_history_display(stored_history):
    """Generate HTML for history display and save to local storage."""
    if not stored_history:
        html = "<p>No history yet</p>"
    else:
        html = "<ul id='history_list'>"
        for item in stored_history[:10]:  # Limit to last 10 conversations
            html += f"""
            <li class='history-item'>
                <small>{item['timestamp']}</small><br>
                <strong>You:</strong> {item['user'][:50]}{'...' if len(item['user']) > 50 else ''}<br>
                <strong>AI:</strong> {item['ai'][:50]}{'...' if len(item['ai']) > 50 else ''}
            </li>
            """
        html += "</ul>"
    # Embed script to save history to local storage
    html += f"<script>localStorage.setItem('chat_history', JSON.stringify({json.dumps(stored_history)}))</script>"
    return html

def load_history_from_storage():
    """Load history from the browser via JavaScript."""
    # This is a placeholder that will be replaced by the JavaScript function
    return []

# Modern CSS for a clean UI
custom_css = """
body { background-color: #1a1a1a; color: #ffffff; font-family: 'Arial', sans-serif; }
#chatbot { height: 60vh; background-color: #2d2d2d; border: 1px solid #404040; border-radius: 8px; }
#sidebar { background-color: #242424; padding: 10px; border-right: 1px solid #404040; height: 80vh; overflow-y: auto; }
#history_list { list-style: none; padding: 0; }
.history-item { background-color: #333333; margin: 5px 0; padding: 10px; border-radius: 5px; cursor: pointer; }
.history-item:hover { background-color: #404040; }
input, button { background-color: #333333; color: #ffffff; border: 1px solid #404040; border-radius: 5px; }
button:hover { background-color: #404040; }
details { background-color: #333333; padding: 10px; margin: 5px 0; border-radius: 5px; }
summary { cursor: pointer; color: #70a9e6; }
.reasoning-content { padding: 10px; margin-top: 5px; background-color: #404040; border-radius: 5px; }
"""
# HTML head styles for rendering collapsible sections (defined for reference; not attached to the app below)
html_head = """
<head>
<style>
details { background-color: #333333; padding: 10px; margin: 5px 0; border-radius: 5px; }
summary { cursor: pointer; color: #70a9e6; }
.reasoning-content { padding: 10px; margin-top: 5px; background-color: #404040; border-radius: 5px; }
</style>
</head>
"""

# Build the Gradio app
with gr.Blocks(css=custom_css, title="AI Assistant with Collapsible Reasoning") as demo:
    with gr.Row():
        # Sidebar for history
        with gr.Column(scale=1, min_width=300, elem_id="sidebar"):
            gr.Markdown("## Chat History")
            history_display = gr.HTML(label="Previous Conversations")
            clear_history_btn = gr.Button("Clear History")
        # Main chat area
        with gr.Column(scale=3):
            gr.Markdown("## AI Assistant")
            gr.Markdown("This assistant shows reasoning in collapsible sections.")
            chatbot = gr.Chatbot(elem_id="chatbot", render_markdown=False, bubble_full_width=True)
            with gr.Row():
                message = gr.Textbox(placeholder="Type your message...", show_label=False, container=False)
                submit_btn = gr.Button("Send", size="sm")
            clear_chat_btn = gr.Button("Clear Chat")

    # State management
    chat_state = gr.State([])  # Current chat history
    history_state = gr.State([])  # Stored history across sessions

    # JavaScript for loading history from local storage
    load_history_js = """
    function() {
        const history = localStorage.getItem('chat_history');
        return history ? JSON.parse(history) : [];
    }
    """

    # JavaScript for enabling HTML in the chatbot
    js = """
    function() {
        // Add event listener for when new messages are added
        const observer = new MutationObserver(function(mutations) {
            mutations.forEach(function(mutation) {
                if (mutation.addedNodes.length) {
                    document.querySelectorAll('#chatbot .message:not(.processed)').forEach(msg => {
                        msg.classList.add('processed');
                        // Replace content with innerHTML to render HTML
                        const content = msg.querySelector('.content');
                        if (content) {
                            content.innerHTML = content.textContent;
                        }
                    });
                }
            });
        });
        // Start observing chatbot for changes
        const chatbot = document.getElementById('chatbot');
        if (chatbot) {
            observer.observe(chatbot, { childList: true, subtree: true });
        }
        return [];
    }
    """

    # Event handlers
    submit_btn.click(
        chat_interface,
        [message, chat_state, history_state],
        [chat_state, history_state]
    ).then(
        lambda history: history,
        chat_state,
        chatbot
    ).then(
        update_history_display,
        history_state,
        history_display
    ).then(
        lambda: "",  # Clear the input box
        None,
        message
    )

    # Message submit via Enter key
    message.submit(
        chat_interface,
        [message, chat_state, history_state],
        [chat_state, history_state]
    ).then(
        lambda history: history,
        chat_state,
        chatbot
    ).then(
        update_history_display,
        history_state,
        history_display
    ).then(
        lambda: "",  # Clear the input box
        None,
        message
    )

    clear_chat_btn.click(
        lambda: ([], None),
        None,
        [chat_state, chatbot]
    )

    clear_history_btn.click(
        lambda: [],
        None,
        history_state
    ).then(
        update_history_display,
        history_state,
        history_display
    )

    # Load initial history from local storage using JavaScript
    demo.load(
        fn=load_history_from_storage,
        inputs=None,
        outputs=history_state,
        js=load_history_js
    ).then(
        update_history_display,
        history_state,
        history_display
    ).then(
        fn=load_history_from_storage,
        inputs=None,
        outputs=None,
        js=js
    )

demo.launch()