import gradio as gr
import requests
import json
import os

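# P-MSQ API endpoint; the API token is read from the POLLY environment variable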
API_URL = "https://host.palple.polrambora.com/pmsq"
API_TOKEN = os.getenv("POLLY")

headers = {
    "Authorization": f"{API_TOKEN}",
    "Content-Type": "application/json",
}

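# Avatar images shown next to each message in the rendered chat transcript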
ASSISTANT_PIC_PATH = "https://huggingface.co/spaces/PLRMB/P-MSQ-API-PREVIEW/resolve/main/API.png"  
USER_PIC_PATH = "https://huggingface.co/spaces/PLRMB/P-MSQ-API-PREVIEW/resolve/main/usr.png"

def respond(message, history, system_message, max_tokens, top_p, temperature):
    """Send the conversation to the P-MSQ API and return the updated history and reply."""
    # Rebuild the API-side message list from the 6-tuples stored in history
    messages = []
    for user_message, assistant_message, user_profile, assistant_profile, user_pic, assistant_pic in history:
        if user_message:
            messages.append({
                "role": "user",
                "content": user_message,
                "profile": user_profile,
                "picture": user_pic
            })
        if assistant_message:
            messages.append({
                "role": "assistant",
                "content": assistant_message,
                "profile": assistant_profile,
                "picture": assistant_pic
            })

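    # Request payload: generation preferences, the prior conversation turns, and the new user input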
    data = {
        "preferences": {
            "max_char": max_tokens,
            "temperature": temperature,
            "top_p": top_p,
            "system_message": system_message
        },
        "conversation_history": messages,
        "input": message
    }

    # POST the payload to the P-MSQ endpoint as JSON
    response = requests.post(API_URL, headers=headers, data=json.dumps(data))

    if response.status_code == 200:
        response_json = response.json()
        assistant_reply = response_json["msq"]["message"][0]
        history.append((message, assistant_reply, "You", "P-ALPLE", USER_PIC_PATH, ASSISTANT_PIC_PATH))
        return history, assistant_reply
    else:
        # Report the API error, falling back to the status code if the body is not JSON
        try:
            error_message = response.json().get("error", "Unknown error occurred.")
        except ValueError:
            error_message = f"Request failed with status code {response.status_code}."
        return history, "Error: " + error_message

def render_message(history):
    """Render the chat history as simple HTML with avatars and bolded speaker names."""
    messages_html = ""
    for user_message, assistant_message, user_profile, assistant_profile, user_pic, assistant_pic in history:
        if user_message:
            messages_html += f"<div style='display: flex; align-items: center; margin-bottom: 10px;'>"
            if user_pic:
                messages_html += f"<img src='{user_pic}' style='width: 40px; height: 40px; border-radius: 50%; margin-right: 10px;'>"
            messages_html += f"<b>{user_profile}:</b> {user_message}</div><br>"

        if assistant_message:
            messages_html += f"<div style='display: flex; align-items: center; margin-bottom: 10px;'>"
            if assistant_pic:
                messages_html += f"<img src='{assistant_pic}' style='width: 40px; height: 40px; border-radius: 50%; margin-right: 10px;'>"
            messages_html += f"<b>{assistant_profile}:</b> {assistant_message}</div><br>"
    
    return messages_html

with gr.Blocks() as demo:
    gr.Markdown("## P-MSQ Chat Interface with Profile Pictures")  
    gr.Markdown("""
    Welcome to the **P-MSQ** (Messaging Service Query) chat interface! 
    You are interacting with a friendly AI chatbot that can assist you in various situations.
    Use the text input box below to start chatting.
    """)

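    # The chat transcript is rendered as raw HTML (see render_message) instead of a gr.Chatbot component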
    with gr.Column():  
        chatbot_output = gr.HTML()  

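    # Message input box plus the Send and Regenerate buttons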
    with gr.Row():
        msg_input = gr.Textbox(show_label=False, placeholder="Type your message here...", lines=2)
        send_btn = gr.Button("Send")
        regen_btn = gr.Button("Regenerate")

    system_message = gr.Textbox(value="You are P-MSQ (Messaging Service Query), a friendly AI chatbot that can help in any situation.", label="System message")
    
    gr.Markdown("### Settings")  
    max_tokens = gr.Slider(minimum=1, maximum=2048, value=1024, step=1, label="Max new tokens")
    top_p = gr.Slider(minimum=0, maximum=2, value=0.8, step=0.1, label="Top P")
    temperature = gr.Slider(minimum=0.1, maximum=1, value=0.7, step=0.1, label="Temperature")

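    # Session state: the full chat history and the last user message (used by Regenerate)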
    history_state = gr.State([])  
    last_message_state = gr.State("")  

    def user_interaction(message, history, system_message, max_tokens, top_p, temperature):
        history, assistant_reply = respond(message, history, system_message, max_tokens, top_p, temperature)
        # Return the rendered transcript, the updated history, an empty string to clear the
        # textbox, and the original message so the Regenerate button can replay it
        return render_message(history), history, "", message

    def regenerate_response(history, last_message, system_message, max_tokens, top_p, temperature):
        # Re-send the last user message (if there is one) and append the new reply to the history
        if last_message:
            history, assistant_reply = respond(last_message, history, system_message, max_tokens, top_p, temperature)
            return render_message(history), history
        return render_message(history), history

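    # Wire up the buttons: Send posts a new message, Regenerate replays the last one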
    send_btn.click(user_interaction, 
                   inputs=[msg_input, history_state, system_message, max_tokens, top_p, temperature], 
                   outputs=[chatbot_output, history_state, msg_input, last_message_state])

    regen_btn.click(regenerate_response, 
                    inputs=[history_state, last_message_state, system_message, max_tokens, top_p, temperature], 
                    outputs=[chatbot_output, history_state])

if __name__ == "__main__":
    demo.launch()