# NOTE: the lines below were scraped Hugging Face Spaces UI chrome
# ("Spaces: / Sleeping / Sleeping"), not source code — kept as a comment.
import html
import json
import os

import gradio as gr
import requests
API_URL = "https://host.palple.polrambora.com/pmsq" | |
API_TOKEN = os.getenv("POLLY") | |
headers = { | |
"Authorization": f"{API_TOKEN}", | |
"Content-Type": "application/json", | |
} | |
ASSISTANT_PIC_PATH = "https://huggingface.co/spaces/PLRMB/P-MSQ-API-PREVIEW/resolve/main/API.png" | |
USER_PIC_PATH = "https://huggingface.co/spaces/PLRMB/P-MSQ-API-PREVIEW/resolve/main/usr.png" | |
def respond(message, history, system_message, max_tokens, top_p, temperature): | |
messages = [] | |
for user_message, assistant_message, user_profile, assistant_profile, user_pic, assistant_pic in history: | |
if user_message: | |
messages.append({ | |
"role": "user", | |
"content": user_message, | |
"profile": user_profile, | |
"picture": user_pic | |
}) | |
if assistant_message: | |
messages.append({ | |
"role": "assistant", | |
"content": assistant_message, | |
"profile": assistant_profile, | |
"picture": assistant_pic | |
}) | |
data = { | |
"preferences": { | |
"max_char": max_tokens, | |
"temperature": temperature, | |
"top_p": top_p, | |
"system_message": system_message | |
}, | |
"conversation_history": messages, | |
"input": message | |
} | |
response = requests.post(API_URL, headers=headers, data=json.dumps(data)) | |
if response.status_code == 200: | |
response_json = response.json() | |
assistant_reply = response_json["msq"]["message"][0] | |
history.append((message, assistant_reply, "You", "P-ALPLE", USER_PIC_PATH, ASSISTANT_PIC_PATH)) | |
return history, assistant_reply | |
else: | |
return history, "Error: " + response.json().get("error", "Unknown error occurred.") | |
def render_message(history): | |
messages_html = "" | |
for user_message, assistant_message, user_profile, assistant_profile, user_pic, assistant_pic in history: | |
if user_message: | |
messages_html += f"<div style='display: flex; align-items: center; margin-bottom: 10px;'>" | |
if user_pic: | |
messages_html += f"<img src='{user_pic}' style='width: 40px; height: 40px; border-radius: 50%; margin-right: 10px;'>" | |
messages_html += f"<b>{user_profile}:</b> {user_message}</div><br>" | |
if assistant_message: | |
messages_html += f"<div style='display: flex; align-items: center; margin-bottom: 10px;'>" | |
if assistant_pic: | |
messages_html += f"<img src='{assistant_pic}' style='width: 40px; height: 40px; border-radius: 50%; margin-right: 10px;'>" | |
messages_html += f"<b>{assistant_profile}:</b> {assistant_message}</div><br>" | |
return messages_html | |
with gr.Blocks() as demo: | |
chatbot_output = gr.HTML() | |
msg_input = gr.Textbox(show_label=False, placeholder="Type your message here...") | |
system_message = gr.Textbox(value="You are P-MSQ (Messaging Service Query), a friendly AI Chatbot that can help in any situations.", label="System message") | |
max_tokens = gr.Slider(minimum=1, maximum=2048, value=1024, step=1, label="Max new tokens") | |
top_p = gr.Slider(minimum=0, maximum=2, value=0.8, step=0.1, label="Top P") | |
temperature = gr.Slider(minimum=0.1, maximum=1, value=0.7, step=0.1, label="Temperature") | |
history_state = gr.State([]) | |
def user_interaction(message, history, system_message, max_tokens, top_p, temperature): | |
history, assistant_reply = respond(message, history, system_message, max_tokens, top_p, temperature) | |
return render_message(history), history | |
msg_input.submit(user_interaction, | |
inputs=[msg_input, history_state, system_message, max_tokens, top_p, temperature], | |
outputs=[chatbot_output, history_state]) | |
gr.Markdown("## Chat Interface with Profile Pictures") | |
chatbot_output | |
msg_input | |
if __name__ == "__main__": | |
demo.launch() | |