File size: 9,608 Bytes
0588a72
 
 
 
3ad05c5
0588a72
 
995a0e5
0588a72
 
 
 
079c63d
 
 
 
0588a72
079c63d
0588a72
 
 
 
 
 
 
 
 
 
 
 
3ad05c5
 
 
 
 
0588a72
 
 
3ad05c5
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
0588a72
079c63d
aa05415
0588a72
aa05415
 
 
 
3ad05c5
 
 
 
aa05415
3ad05c5
aa05415
 
 
 
 
 
0588a72
 
 
3ad05c5
 
 
 
 
079c63d
0588a72
 
3ad05c5
0588a72
aa05415
079c63d
 
 
 
 
 
 
 
 
 
 
3ad05c5
 
079c63d
 
 
 
 
 
0588a72
aa05415
 
 
 
995a0e5
079c63d
0588a72
079c63d
 
 
 
 
 
 
 
3ad05c5
 
 
 
 
 
 
 
 
 
 
 
 
 
0588a72
 
079c63d
3ad05c5
0588a72
 
 
 
 
 
 
 
 
3ad05c5
 
 
0588a72
079c63d
0588a72
 
 
079c63d
 
 
0588a72
aa05415
 
 
 
 
 
 
3ad05c5
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
aa05415
0588a72
 
 
 
aa05415
 
 
 
 
0588a72
 
 
 
 
079c63d
0588a72
 
 
 
3ad05c5
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
0588a72
 
 
aa05415
0588a72
 
 
aa05415
0588a72
aa05415
0588a72
 
 
 
 
 
aa05415
0588a72
aa05415
995a0e5
aa05415
 
0588a72
 
aa05415
 
3ad05c5
 
 
 
 
0588a72
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
import gradio as gr
import requests
import json
import os
import re
from datetime import datetime


# Inference API configuration, injected via environment variables.
API_ENDPOINT = os.getenv("API_ENDPOINT", "none")  # "none" placeholder makes a misconfiguration fail visibly at request time
API_TOKEN = os.getenv("API_TOKEN")  # bearer token; None when unset

def get_ai_response(message, history):
    """Fetch an AI response from the chat-completions API.

    Args:
        message: Latest user message (plain text).
        history: Prior turns as a list of {"role", "content"} dicts,
            already in the OpenAI-style messages format.

    Returns:
        The assistant reply with <reasoning> sections converted to
        collapsible HTML, or an "Error: ..." string on any failure.
    """
    if not API_TOKEN:
        # Fail fast with a readable message instead of a confusing 401 later.
        return "Error: API_TOKEN environment variable is not set."
    messages = [
        {"role": "system", "content": "You are a helpful assistant."}
    ] + history + [{"role": "user", "content": message}]
    payload = {
        "model": "RekaAI/reka-flash-3",
        "messages": messages,
        "stream": False,
        "max_tokens": 1024,
        "temperature": 0.7
    }
    headers = {
        "Authorization": f"Bearer {API_TOKEN}",
        "Content-Type": "application/json"
    }
    try:
        # Explicit timeout so a dead endpoint cannot hang the UI forever.
        response = requests.post(API_ENDPOINT, headers=headers, json=payload, timeout=60)
        response.raise_for_status()
        raw_response = response.json()["choices"][0]["message"]["content"]

        # Convert reasoning tags to collapsible HTML before display.
        return convert_reasoning_to_collapsible(raw_response)
    except Exception as e:
        return f"Error: {str(e)}"

def convert_reasoning_to_collapsible(text):
    """Rewrite <reasoning>...</reasoning> spans as collapsible <details> HTML
    and strip any <sep> markers from the model output."""

    def _as_details(match):
        # Trim the captured reasoning text and wrap it in a <details> element.
        inner = match.group(1).strip()
        return (
            '<details><summary><strong>See reasoning</strong></summary>'
            f'<div class="reasoning-content">{inner}</div></details>'
        )

    # Wrap every reasoning span in a collapsible section.
    result = re.sub(r'<reasoning>(.*?)</reasoning>', _as_details, text, flags=re.DOTALL)

    # Drop paired <sep>...</sep> spans, then any stray unmatched <sep> tokens.
    result = re.sub(r'<sep>.*?</sep>', '', result, flags=re.DOTALL)
    for token in ('<sep>', '</sep>'):
        result = result.replace(token, '')

    return result

def chat_interface(message, history, stored_history):
    """Run one chat turn: call the model, then refresh both history stores.

    Returns the updated (history, stored_history) pair; `history` is the
    Gradio tuple-format transcript, `stored_history` the sidebar archive.
    """
    history = history if history else []

    # Rebuild the role/content message list the API expects, stripping the
    # HTML we injected into earlier assistant replies.
    api_history = []
    for user_turn, bot_turn in history:
        sanitized = re.sub(r'<details>.*?</details>', '', bot_turn, flags=re.DOTALL)
        sanitized = re.sub(r'<[^>]*>', '', sanitized)
        api_history += [
            {"role": "user", "content": user_turn},
            {"role": "assistant", "content": sanitized},
        ]

    ai_response = get_ai_response(message, api_history)

    # Append the new turn in Gradio's (user, assistant) tuple format.
    history.append((message, ai_response))

    if stored_history is None:
        stored_history = []

    # Keep a plain-text rendition for the sidebar history list.
    plain = re.sub(r'<details>.*?</details>', '[Reasoning available]', ai_response, flags=re.DOTALL)
    plain = re.sub(r'<[^>]*>', '', plain)

    stored_history.insert(0, {
        "timestamp": datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
        "user": message,
        "ai": plain,
    })
    return history, stored_history

def update_history_display(stored_history):
    """Render stored history as sidebar HTML and persist it to localStorage.

    Args:
        stored_history: List of {"timestamp", "user", "ai"} dicts, newest first.

    Returns:
        An HTML fragment: the history list plus an embedded <script> that
        mirrors the history into the browser's localStorage.
    """
    import html as _html  # function-scope import: escaping user-supplied text

    if not stored_history:
        body = "<p>No history yet</p>"
    else:
        items = []
        for item in stored_history[:10]:  # Limit to last 10 conversations
            # Truncate long snippets, then escape so user text cannot inject
            # markup (or scripts) into the sidebar.
            user_snip = item['user'][:50] + ('...' if len(item['user']) > 50 else '')
            ai_snip = item['ai'][:50] + ('...' if len(item['ai']) > 50 else '')
            items.append(f"""
                <li class='history-item'>
                    <small>{_html.escape(item['timestamp'])}</small><br>
                    <strong>You:</strong> {_html.escape(user_snip)}<br>
                    <strong>AI:</strong> {_html.escape(ai_snip)}
                </li>
            """)
        body = "<ul id='history_list'>" + "".join(items) + "</ul>"
    # Escape "</" so user text containing "</script>" cannot terminate the
    # script element and break (or hijack) the page.
    payload = json.dumps(stored_history).replace("</", "<\\/")
    return body + f"<script>localStorage.setItem('chat_history', JSON.stringify({payload}))</script>"

def load_history_from_storage():
    """Server-side stub for the localStorage loader.

    The actual loading happens in the browser: the `js=` function passed to
    `demo.load` returns the stored history, which Gradio uses in place of
    this value.
    """
    return []

# Modern CSS for a clean UI: dark theme for page, chatbot, sidebar, and the
# collapsible <details> reasoning sections produced by
# convert_reasoning_to_collapsible().
custom_css = """
body { background-color: #1a1a1a; color: #ffffff; font-family: 'Arial', sans-serif; }
#chatbot { height: 60vh; background-color: #2d2d2d; border: 1px solid #404040; border-radius: 8px; }
#sidebar { background-color: #242424; padding: 10px; border-right: 1px solid #404040; height: 80vh; overflow-y: auto; }
#history_list { list-style: none; padding: 0; }
.history-item { background-color: #333333; margin: 5px 0; padding: 10px; border-radius: 5px; cursor: pointer; }
.history-item:hover { background-color: #404040; }
input, button { background-color: #333333; color: #ffffff; border: 1px solid #404040; border-radius: 5px; }
button:hover { background-color: #404040; }
details { background-color: #333333; padding: 10px; margin: 5px 0; border-radius: 5px; }
summary { cursor: pointer; color: #70a9e6; }
.reasoning-content { padding: 10px; margin-top: 5px; background-color: #404040; border-radius: 5px; }
"""

# HTML head for rendering HTML in chatbot
# NOTE(review): html_head is never referenced elsewhere in this file —
# confirm it is used by another module before removing.
html_head = """
<head>
<style>
details { background-color: #333333; padding: 10px; margin: 5px 0; border-radius: 5px; }
summary { cursor: pointer; color: #70a9e6; }
.reasoning-content { padding: 10px; margin-top: 5px; background-color: #404040; border-radius: 5px; }
</style>
</head>
"""

# Build the Gradio app: sidebar (persistent history) + main chat column,
# with localStorage-backed history and a MutationObserver that re-renders
# assistant messages as HTML (for the collapsible reasoning sections).
with gr.Blocks(css=custom_css, title="AI Assistant with Collapsible Reasoning") as demo:
    with gr.Row():
        # Sidebar for history
        with gr.Column(scale=1, min_width=300, elem_id="sidebar"):
            gr.Markdown("## Chat History")
            history_display = gr.HTML(label="Previous Conversations")
            clear_history_btn = gr.Button("Clear History")

        # Main chat area
        with gr.Column(scale=3):
            gr.Markdown("## AI Assistant")
            gr.Markdown("This assistant shows reasoning in collapsible sections.")
            # render_markdown=False because messages carry raw HTML that the
            # MutationObserver below injects via innerHTML.
            chatbot = gr.Chatbot(elem_id="chatbot", render_markdown=False, bubble_full_width=True)
            with gr.Row():
                message = gr.Textbox(placeholder="Type your message...", show_label=False, container=False)
                submit_btn = gr.Button("Send", size="sm")
            clear_chat_btn = gr.Button("Clear Chat")

    # State management
    chat_state = gr.State([])    # Current chat history
    history_state = gr.State([]) # Stored history across sessions

    # JavaScript for loading history from local storage
    load_history_js = """
    function() {
        const history = localStorage.getItem('chat_history');
        return history ? JSON.parse(history) : [];
    }
    """

    # JavaScript for enabling HTML in chatbot
    # NOTE(review): content.innerHTML = content.textContent renders whatever
    # HTML the message text contains — relies on the server side having
    # sanitized user-visible text; confirm.
    js = """
    function() {
        // Add event listener for when new messages are added
        const observer = new MutationObserver(function(mutations) {
            mutations.forEach(function(mutation) {
                if (mutation.addedNodes.length) {
                    document.querySelectorAll('#chatbot .message:not(.processed)').forEach(msg => {
                        msg.classList.add('processed');
                        // Replace content with innerHTML to render HTML
                        const content = msg.querySelector('.content');
                        if (content) {
                            content.innerHTML = content.textContent;
                        }
                    });
                }
            });
        });

        // Start observing chatbot for changes
        const chatbot = document.getElementById('chatbot');
        if (chatbot) {
            observer.observe(chatbot, { childList: true, subtree: true });
        }

        return [];
    }
    """

    # Event handlers
    # Chained .then() steps: run the model, mirror state into the chatbot
    # widget, refresh the sidebar, then clear the input box.
    submit_btn.click(
        chat_interface,
        [message, chat_state, history_state],
        [chat_state, history_state]
    ).then(
        lambda history: history,
        chat_state,
        chatbot
    ).then(
        update_history_display,
        history_state,
        history_display
    ).then(
        lambda: "",  # Clear the input box
        None,
        message
    )

    # Message submit via Enter key (same pipeline as the Send button)
    message.submit(
        chat_interface,
        [message, chat_state, history_state],
        [chat_state, history_state]
    ).then(
        lambda history: history,
        chat_state,
        chatbot
    ).then(
        update_history_display,
        history_state,
        history_display
    ).then(
        lambda: "",  # Clear the input box
        None,
        message
    )

    # Reset the visible conversation only; sidebar history is kept.
    clear_chat_btn.click(
        lambda: ([], None),
        None,
        [chat_state, chatbot]
    )

    # Wipe the stored history, then re-render the (now empty) sidebar —
    # the embedded <script> also clears localStorage as a side effect.
    clear_history_btn.click(
        lambda: [],
        None,
        history_state
    ).then(
        update_history_display,
        history_state,
        history_display
    )

    # Load initial history from local storage using JavaScript
    # (js= runs in the browser; its return value feeds history_state).
    demo.load(
        fn=load_history_from_storage,
        inputs=None,
        outputs=history_state,
        js=load_history_js
    ).then(
        update_history_display,
        history_state,
        history_display
    ).then(
        # This step only installs the MutationObserver; fn is a no-op.
        fn=load_history_from_storage,
        inputs=None,
        outputs=None,
        js=js
    )

demo.launch()