# NOTE(review): the lines below replaced Hugging Face Spaces page-scrape
# residue (Space status, file size, commit hashes, and a line-number gutter)
# that was not part of the original source file and broke Python parsing.
# Gradio demo UI for the Learning Bot chat endpoint.
import gradio as gr
import json
from uuid import uuid4
import requests
from pydantic import BaseModel
from typing import List
import ast
import os
# Gradio request-queue sizing; overridable via environment variables.
QUEUE_MAX_SIZE = int(os.getenv("QUEUE_MAX_SIZE", 20))
# NOTE(review): "CONCURENCY" is a spelling mistake, but the name doubles as the
# env-var key — renaming would break existing deployments, so it is kept as-is.
QUEUE_CONCURENCY_COUNT = int(os.getenv("QUEUE_CONCURENCY_COUNT", 10))
# Basic-auth credentials passed to demo.launch(); None when unset.
USERNAME = os.getenv("USERNAME")
PASSWORD = os.getenv("PASSWORD")
# Default backend chatbot URL; shown (and editable) in the UI textbox.
CHATBOT_ENDPOINT = os.getenv("CHATBOT_ENDPOINT", "http://localhost:5000")
class LearningBotRequest(BaseModel):
    """Request payload sent to the chatbot endpoint (serialized via .json())."""

    # Conversation turns, alternating {"human": ...} and {"AI": ...} dicts
    # as produced by construct_message().
    message: List[dict]
    persona: str
    session_id: str
    # Free-form dict parsed from the "context" textbox in the UI.
    context: dict
    user_serial: str
def generate_uuid():
    """Return a fresh random UUID as a string (used as a chat session id)."""
    return f"{uuid4()}"
def construct_message(list_message):
    """Flatten gradio chat-history pairs into role-tagged message dicts.

    Each complete [user_text, ai_text] pair becomes two entries:
    {"human": user_text} followed by {"AI": ai_text}. Pairs with fewer
    than two elements are skipped.
    """
    flattened = []
    for pair in list_message:
        if len(pair) >= 2:
            flattened.append({"human": pair[0]})
            flattened.append({"AI": pair[1]})
    return flattened
def send_message(url, request):
    """POST the serialized request to the chatbot endpoint and return its reply.

    Parameters:
        url: chatbot endpoint URL (from the UI textbox).
        request: a LearningBotRequest; serialized with pydantic's .json().

    Returns:
        The reply string at response["data"]["reply"].

    Raises:
        gr.Error: with the server's response text on any non-200 status.
    """
    # Fix: `data=` sends a raw string body with no Content-Type header, which
    # JSON APIs commonly reject — declare the body as JSON explicitly.
    response = requests.post(
        url,
        data=request.json(),
        headers={"Content-Type": "application/json"},
    )
    if response.status_code != 200:
        # Surface the backend error directly in the gradio UI.
        raise gr.Error(response.text)
    return response.json()["data"]["reply"]
def respond(chat_history, message, session_id, user_serial, persona, context, endpoint):
    """Handle one chat turn: build the request, call the backend, update the UI.

    Parameters:
        chat_history: gradio Chatbot history as [user_text, ai_text] pairs.
        message: the new user message.
        session_id: current session id (gr.State); regenerated when None,
            e.g. after the "Clear" button nulls it.
        user_serial, persona: free-text parameters forwarded to the backend.
        context: dict literal typed into the context textbox, as a string.
        endpoint: chatbot endpoint URL.

    Returns:
        (new chat history, status text, session id, session id) — the id is
        emitted twice to refresh both the visible textbox and the hidden state.
    """
    if session_id is None:
        session_id = generate_uuid()
    # literal_eval safely parses the dict literal without executing code.
    # Fix: guard the empty string (user cleared the box), which would
    # otherwise raise SyntaxError.
    context = ast.literal_eval(context) if context else {}
    messages = construct_message(chat_history)
    messages.append({"human": message})
    request = LearningBotRequest(
        message=messages,
        persona=persona,
        session_id=session_id,
        context=context,
        user_serial=user_serial,
    )
    reply = send_message(endpoint, request)
    return chat_history + [[message, reply]], "Success", session_id, session_id
def reset_textbox():
    """Return a gradio update that blanks the chat input textbox."""
    cleared = gr.update(value='')
    return cleared
with gr.Blocks() as demo:
    # Hidden per-browser-session chat id; nulled by the "Clear" button so the
    # next respond() call generates a fresh one.
    session_id = gr.State(value=generate_uuid())
    with gr.Row():
        with gr.Column(scale=5):
            # NOTE(review): "converstation" is a typo in the visible label;
            # left unchanged here since labels are runtime strings.
            clear = gr.Button("Clear all converstation")
        with gr.Column(scale=5):
            endpoint = gr.Textbox(label="Endpoint API", value=CHATBOT_ENDPOINT)
    with gr.Accordion("Parameters", open=False):
        user_serial = gr.Textbox(label="User serial")
        # Dict literal typed as text; parsed with ast.literal_eval in respond().
        context = gr.Textbox(label="context", value={})
        persona = gr.Textbox(label="persona")
    chatbot = gr.Chatbot()
    message = gr.Textbox(placeholder="Halo kak, aku mau bertanya", label="Chat Here")
    with gr.Row():
        with gr.Column(scale=5):
            send = gr.Button("Send")
        with gr.Column(scale=5):
            status_box = gr.Textbox(label="Status code from OpenAI server")
            session = gr.Textbox(label="session_id")
    # Enter key and the Send button both trigger respond(); outputs refresh the
    # chat display, the status box, and the session id (visible box + state).
    message.submit(respond, [chatbot, message, session_id, user_serial, persona, context, endpoint], [chatbot, status_box, session, session_id])
    # Clearing wipes the chat display and the session state (queue bypassed).
    clear.click(lambda: None, None, chatbot, queue=False)
    clear.click(lambda: None, None, session_id, queue=False)
    send.click(respond, [chatbot, message, session_id, user_serial, persona, context, endpoint], [chatbot, status_box, session, session_id])
    # Blank the input textbox after a message is dispatched by either trigger.
    send.click(reset_textbox, [], [message])
    message.submit(reset_textbox, [], [message])
# Queue incoming requests to protect the backend, then serve behind basic auth.
# Fix: removed a stray trailing "|" (page-scrape artifact) that made this
# statement a syntax error.
(
    demo
    .queue(max_size=QUEUE_MAX_SIZE, concurrency_count=QUEUE_CONCURENCY_COUNT)
    .launch(auth=(USERNAME, PASSWORD), debug=True)
)