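# Gradio front end for the P-MSQ (Messaging Service Query) chat API.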
import gradio as gr
import requests
import json
import os

API_URL = "https://host.palple.polrambora.com/pmsq"
API_TOKEN = os.getenv("POLLY")

headers = {
    "Authorization": f"{API_TOKEN}",
    "Content-Type": "application/json",
}

ASSISTANT_PIC_PATH = "https://huggingface.co/spaces/PLRMB/P-MSQ-API-PREVIEW/resolve/main/API.png"
USER_PIC_PATH = "https://huggingface.co/spaces/PLRMB/P-MSQ-API-PREVIEW/resolve/main/usr.png"
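# NOTE: API_TOKEN is read from the POLLY environment variable (e.g. a Space secret).
# If it is unset, the Authorization header is sent as the literal string "None" and
# the API can be expected to reject the request.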
def respond(message, history, system_message, max_tokens, top_p, temperature):
    """Send the new message plus the accumulated history to the P-MSQ API and
    append the assistant's reply to the history."""
    messages = []
    for user_message, assistant_message, user_profile, assistant_profile, user_pic, assistant_pic in history:
        if user_message:
            messages.append({
                "role": "user",
                "content": user_message,
                "profile": user_profile,
                "picture": user_pic
            })
        if assistant_message:
            messages.append({
                "role": "assistant",
                "content": assistant_message,
                "profile": assistant_profile,
                "picture": assistant_pic
            })

    data = {
        "preferences": {
            "max_char": max_tokens,  # the UI slider is labelled "Max new tokens"; the API field is max_char
            "temperature": temperature,
            "top_p": top_p,
            "system_message": system_message
        },
        "conversation_history": messages,
        "input": message
    }

    response = requests.post(API_URL, headers=headers, data=json.dumps(data))
    if response.status_code == 200:
        response_json = response.json()
        assistant_reply = response_json["msq"]["message"][0]
        history.append((message, assistant_reply, "You", "P-ALPLE", USER_PIC_PATH, ASSISTANT_PIC_PATH))
        return history, assistant_reply
    else:
        # Surface whatever error detail the API returned, falling back to the raw body
        # if the error response is not JSON.
        try:
            error_detail = response.json().get("error", "Unknown error occurred.")
        except ValueError:
            error_detail = response.text or "Unknown error occurred."
        return history, "Error: " + error_detail
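# For reference, the request body assembled above looks roughly like the sketch below,
# and the success branch assumes the API answers with a JSON object of the form
# {"msq": {"message": ["<reply text>", ...]}}. This shape is inferred from how the
# response is parsed here, not from published API documentation.
#
# {
#     "preferences": {"max_char": 1024, "temperature": 0.7, "top_p": 0.8,
#                     "system_message": "..."},
#     "conversation_history": [{"role": "user", "content": "...",
#                               "profile": "You", "picture": "<url>"}, ...],
#     "input": "Hello!"
# }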
def render_message(history):
    messages_html = ""
    for user_message, assistant_message, user_profile, assistant_profile, user_pic, assistant_pic in history:
        if user_message:
            messages_html += "<div style='display: flex; align-items: center; margin-bottom: 10px;'>"
            if user_pic:
                messages_html += f"<img src='{user_pic}' style='width: 40px; height: 40px; border-radius: 50%; margin-right: 10px;'>"
            messages_html += f"{user_message}</div><br>"
        if assistant_message:
            messages_html += "<div style='display: flex; align-items: center; margin-bottom: 10px;'>"
            if assistant_pic:
                messages_html += f"<img src='{assistant_pic}' style='width: 40px; height: 40px; border-radius: 50%; margin-right: 10px;'>"
            messages_html += f"{assistant_message}</div><br>"
    return messages_html
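# Note: user and assistant text is interpolated into the HTML unescaped; if the chat
# may contain markup characters, escaping it first (e.g. with html.escape) would be
# the safer choice.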
with gr.Blocks(css="#chatbox {height: 400px; overflow-y: auto; border: 1px solid #ccc; padding: 10px; background-color: #f9f9f9;}") as demo:
    gr.Markdown("## P-MSQ Chat Interface")

    chatbot_output = gr.HTML(elem_id="chatbox")

    msg_input = gr.Textbox(
        show_label=False,
        placeholder="Type your message and press Enter...",
        lines=2,
        elem_id="input-text"
    )

    # Place the two action buttons side by side.
    with gr.Row():
        send_btn = gr.Button("Send")
        regen_btn = gr.Button("Clear")

    system_message = gr.Textbox(value="You are P-MSQ (Messaging Service Query), a friendly AI Chatbot that can help in any situations.", label="System message")

    gr.Markdown("### Settings")
    max_tokens = gr.Slider(minimum=1, maximum=2048, value=1024, step=1, label="Max new tokens")
    top_p = gr.Slider(minimum=0, maximum=2, value=0.8, step=0.1, label="Top P")
    temperature = gr.Slider(minimum=0.1, maximum=1, value=0.7, step=0.1, label="Temperature")

    history_state = gr.State([])
    last_message_state = gr.State("")
    def user_interaction(message, history, system_message, max_tokens, top_p, temperature):
        history, assistant_reply = respond(message, history, system_message, max_tokens, top_p, temperature)
        # Re-render the chat, persist the history, clear the input box, and remember the last message.
        return render_message(history), history, "", message

    def regenerate_response(history, last_message, system_message, max_tokens, top_p, temperature):
        # Wired to the "Clear" button: wipes the rendered chat and resets the history.
        return "", []

    # Pressing Enter in the textbox and clicking "Send" both route through user_interaction.
    msg_input.submit(user_interaction,
                     inputs=[msg_input, history_state, system_message, max_tokens, top_p, temperature],
                     outputs=[chatbot_output, history_state, msg_input, last_message_state])

    send_btn.click(user_interaction,
                   inputs=[msg_input, history_state, system_message, max_tokens, top_p, temperature],
                   outputs=[chatbot_output, history_state, msg_input, last_message_state])

    regen_btn.click(regenerate_response,
                    inputs=[history_state, last_message_state, system_message, max_tokens, top_p, temperature],
                    outputs=[chatbot_output, history_state])
    gr.HTML("""
    <style>
        #chatbox {
            max-height: 400px;
            overflow-y: auto;
            border: 1px solid #ccc;
            background-color: #242424;
            padding: 10px;
        }
        #input-text {
            width: 100%;
            box-sizing: border-box;
        }
        .gr-button {
            margin: 5px;
            padding: 8px 16px;
            font-size: 14px;
        }
        .gr-row {
            justify-content: flex-end;
        }
    </style>
    <script>
        const chatbox = document.getElementById('chatbox');

        function scrollToBottom() {
            chatbox.scrollTop = chatbox.scrollHeight;
        }

        function handleNewMessage() {
            setTimeout(scrollToBottom, 50);
        }

        window.addEventListener('message', handleNewMessage);
    </script>
    """)
if __name__ == "__main__":
    demo.launch()