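# Hugging Face Space entry point: a Gradio + modelscope_studio chat UI for the PrepAI "LA"
# models, streaming replies from a chat-completions endpoint.
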
import json
import aiohttp
import gradio as gr
import modelscope_studio as mgr
from http import HTTPStatus
from dashscope.api_entities.dashscope_response import Role
from typing import List, Optional, Tuple, Dict, Any, AsyncGenerator

# System prompt (Vietnamese). English translation: "You are an AI English-language tutoring
# assistant named Teacher Bee AI. You were built by Prep Education to guide students through
# exercises on the Prepedu.com platform."
default_system = 'Bạn là Trợ lý gia sư AI dạy ngôn ngữ Tiếng Anh, tên là Teacher Bee AI. Bạn được xây dựng bởi Prep Education để hướng dẫn học viên làm bài tập trên nền tảng Prepedu.com.'

History = List[Tuple[str, str]]
Messages = List[Dict[str, str]]

latex_delimiters = [
    {"left": "\\(", "right": "\\)", "display": True},
    {"left": "\\begin{equation}", "right": "\\end{equation}", "display": True},
    {"left": "\\begin{align}", "right": "\\end{align}", "display": True},
    {"left": "\\begin{alignat}", "right": "\\end{alignat}", "display": True},
    {"left": "\\begin{gather}", "right": "\\end{gather}", "display": True},
    {"left": "\\begin{CD}", "right": "\\end{CD}", "display": True},
    {"left": "\\[", "right": "\\]", "display": True},
]

def clear_session() -> tuple[str, list[Any]]:
    return '', []

def modify_system_session(system: str) -> tuple[str, str, list[Any]]:
    if system is None or len(system) == 0:
        system = default_system
    return system, system, []

def history_to_messages(history: History, system: str) -> Messages:
    messages = [{'role': Role.SYSTEM, 'content': system}]
    for h in history:
        # mgr.Chatbot history entries are message objects, so the raw text lives on `.text`.
        messages.append({'role': Role.USER, 'content': h[0].text})
        messages.append({'role': Role.ASSISTANT, 'content': h[1].text})
    return messages

def messages_to_history(messages: Messages) -> tuple[str, list[list[str]]]:
    assert messages[0]['role'] == Role.SYSTEM
    system = messages[0]['content']
    history = []
    for q, r in zip(messages[1::2], messages[2::2]):
        history.append([q['content'], r['content']])
    return system, history

async def model_chat(query: Optional[str], history: Optional[History], system: str,
                     radio: str) -> AsyncGenerator[Tuple[str, History, str], None]:
    if query is None:
        query = ''
    if history is None:
        history = []
    messages = history_to_messages(history, system)
    messages.append({'role': Role.USER, 'content': query})
    label_model = radio
    async with aiohttp.ClientSession() as session:
        async with session.post(
            url="http://bore.testsprep.online:8082/v1/chat/completions",
            json={
                "model": label_model,
                "messages": messages,
                "result_format": "message",
                "stream": True
            }
        ) as response:
            if response.status != HTTPStatus.OK:
                raise ValueError(f"Request failed with status {response.status}")
            # The endpoint is assumed to be OpenAI-compatible, so the stream arrives as
            # server-sent events of the form "data: {json chunk}".
            response_text = ''
            async for line in response.content:
                decoded_line = line.decode('utf-8').strip()
                if not decoded_line.startswith('data:') or decoded_line.endswith('[DONE]'):
                    continue
                chunk = json.loads(decoded_line[len('data:'):])
                delta = chunk.get('choices', [{}])[0].get('delta', {})
                response_text += delta.get('content') or ''
                # Stream the partial answer back to the UI: (textbox, chatbot history, system).
                yield '', history + [[query, response_text]], system

def choose_radio(radio, system):
    chatbot = mgr.Chatbot(label=radio.lower())
    if system is None or len(system) == 0:
        system = default_system
    return chatbot, system, system, ""

def update_other_radios(value, other_radio1, other_radio2):
    if value == "":
        if other_radio1 != "":
            selected = other_radio1
        else:
            selected = other_radio2
        return selected, other_radio1, other_radio2
    return value, "", ""

def main():
    with gr.Blocks() as demo:
        gr.Markdown("""<center><font size=8>LA: A Party of Foundation Models!</center>""")
        with gr.Row():
            model_options = [
                "PrepAI/LA-llama3.1-8b-16k-instruct_4bit_r32_alpha_16_lr1e-4_3ep-sft-awq",
                "PrepAI/LA-llama-3.1-Storm-8B-16k-instruct_4bit_r32_alpha_16_lr1e-4_3ep-sft-awq"
            ]
        with gr.Row():
            radio = gr.Radio(choices=model_options, label="LA-In-house Models:", value=model_options[0])
        with gr.Row():
            with gr.Accordion():
                with gr.Row():
                    with gr.Column(scale=3):
                        system_input = gr.Textbox(value=default_system, lines=1, label='System')
                    with gr.Column(scale=1):
                        modify_system = gr.Button("🛠️ Set system prompt and clear history", scale=2)
                system_state = gr.Textbox(value=default_system, visible=False)

        chatbot = mgr.Chatbot(label=model_options[0].lower(), latex_delimiters=latex_delimiters)
        textbox = gr.Textbox(lines=1, label='Input')
        with gr.Row():
            clear_history = gr.Button("🧹 Clear history")
            submit = gr.Button("🚀 Send")

        textbox.submit(model_chat,
                       inputs=[textbox, chatbot, system_state, radio],
                       outputs=[textbox, chatbot, system_input])
        submit.click(model_chat,
                     inputs=[textbox, chatbot, system_state, radio],
                     outputs=[textbox, chatbot, system_input],
                     concurrency_limit=5)
        clear_history.click(fn=clear_session,
                            inputs=[],
                            outputs=[textbox, chatbot])
        modify_system.click(fn=modify_system_session,
                            inputs=[system_input],
                            outputs=[system_state, system_input, chatbot])
        radio.change(choose_radio,
                     inputs=[radio, system_input],
                     outputs=[chatbot, system_state, system_input, textbox])

    demo.queue(api_open=False, default_concurrency_limit=40)
    demo.launch(max_threads=40, share=True)


if __name__ == "__main__":
    main()
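
# To run the Space locally (a minimal sketch, assuming the packages imported above are the
# only dependencies and are available on PyPI under these names):
#   pip install gradio modelscope_studio dashscope aiohttp
#   python app.py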