phucpx committed
Commit 7acf6af · 1 Parent(s): 608914f

add passage templates

Files changed (1): app.py +173 -0
app.py ADDED
@@ -0,0 +1,173 @@
import json

import aiohttp
import gradio as gr
import modelscope_studio as mgr
from http import HTTPStatus
from dashscope.api_entities.dashscope_response import Role
from typing import List, Optional, Tuple, Dict, Any, AsyncGenerator

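# Default system prompt (in Vietnamese): "You are an AI English-language tutoring assistant named
# Teacher Bee AI, built by Prep Education to guide learners through exercises on Prepedu.com."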
default_system = 'Bạn là Trợ lý gia sư AI dạy ngôn ngữ Tiếng Anh, tên là Teacher Bee AI. Bạn được xây dựng bởi Prep Education để hướng dẫn học viên làm bài tập trên nền tảng Prepedu.com.'

History = List[Tuple[str, str]]
Messages = List[Dict[str, str]]

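# Delimiter pairs that the chatbot should render as display-mode LaTeX.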
latex_delimiters = [
    {"left": "\\(", "right": "\\)", "display": True},
    {"left": "\\begin{equation}", "right": "\\end{equation}", "display": True},
    {"left": "\\begin{align}", "right": "\\end{align}", "display": True},
    {"left": "\\begin{alignat}", "right": "\\end{alignat}", "display": True},
    {"left": "\\begin{gather}", "right": "\\end{gather}", "display": True},
    {"left": "\\begin{CD}", "right": "\\end{CD}", "display": True},
    {"left": "\\[", "right": "\\]", "display": True},
]

def clear_session() -> tuple[str, list[Any]]:
    return '', []


def modify_system_session(system: str) -> tuple[str, str, list[Any]]:
    if system is None or len(system) == 0:
        system = default_system
    return system, system, []


def history_to_messages(history: History, system: str) -> Messages:
    # History entries coming back from mgr.Chatbot expose their raw strings via a `.text` attribute.
    messages = [{'role': Role.SYSTEM, 'content': system}]
    for h in history:
        messages.append({'role': Role.USER, 'content': h[0].text})
        messages.append({'role': Role.ASSISTANT, 'content': h[1].text})
    return messages


def messages_to_history(messages: Messages) -> tuple[str, list[list[str]]]:
    assert messages[0]['role'] == Role.SYSTEM
    system = messages[0]['content']
    history = []
    for q, r in zip(messages[1::2], messages[2::2]):
        history.append([q['content'], r['content']])
    return system, history

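# Streams a chat completion for the latest user message from the in-house inference endpoint,
# yielding (cleared textbox, updated history, system prompt) tuples so the UI refreshes incrementally.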
async def model_chat(query: Optional[str], history: Optional[History], system: str, radio: str) -> AsyncGenerator[Tuple[str, History, str], None]:
    if query is None:
        query = ''
    if history is None:
        history = []
    messages = history_to_messages(history, system)
    messages.append({'role': Role.USER, 'content': query})

    label_model = radio

    async with aiohttp.ClientSession() as session:
        async with session.post(
            url="http://bore.testsprep.online:8082/v1/chat/completions",
            json={
                "model": label_model,
                "messages": messages,
                "result_format": "message",
                "stream": True
            }
        ) as response:
            if response.status != HTTPStatus.OK:
                raise ValueError(f"Request failed with status {response.status}")
            # The server is assumed to emit an OpenAI-compatible SSE stream ("data: {...}" lines);
            # accumulate the content deltas and re-render the conversation as the reply grows.
            response_text = ''
            async for raw_line in response.content:
                line = raw_line.decode('utf-8').strip()
                if not line.startswith('data:'):
                    continue
                data = line[len('data:'):].strip()
                if data == '[DONE]':
                    break
                chunk = json.loads(data)
                choices = chunk.get('choices') or []
                if not choices:
                    continue
                delta = choices[0].get('delta', {}).get('content') or ''
                response_text += delta
                yield '', history + [[query, response_text]], system

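# When the model selection changes, rebuild the chatbot with the new model name as its label,
# keep (or restore) the system prompt, and clear the input box.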
def choose_radio(radio, system):
    chatbot = mgr.Chatbot(label=f'{radio.lower()}')

    if system is None or len(system) == 0:
        system = default_system

    return chatbot, system, system, ""


def update_other_radios(value, other_radio1, other_radio2):
    if value == "":
        if other_radio1 != "":
            selected = other_radio1
        else:
            selected = other_radio2
        return selected, other_radio1, other_radio2
    return value, "", ""

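# Assemble the Gradio UI (model selector, system-prompt editor, chatbot, input box) and wire up the event handlers.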
def main():
    with gr.Blocks() as demo:
        gr.Markdown("""<center><font size=8>LA: A Party of Foundation Models!</center>""")
        with gr.Row():
            model_options = [
                "PrepAI/LA-llama3.1-8b-16k-instruct_4bit_r32_alpha_16_lr1e-4_3ep-sft-awq",
                "PrepAI/LA-llama-3.1-Storm-8B-16k-instruct_4bit_r32_alpha_16_lr1e-4_3ep-sft-awq"
            ]
        with gr.Row():
            radio = gr.Radio(choices=model_options, label="LA-In-house Models:", value=model_options[0])

        with gr.Row():
            with gr.Accordion():
                with gr.Row():
                    with gr.Column(scale=3):
                        system_input = gr.Textbox(value=default_system, lines=1, label='System')
                    with gr.Column(scale=1):
                        modify_system = gr.Button("🛠️ Set system prompt and clear history", scale=2)
                system_state = gr.Textbox(value=default_system, visible=False)
        chatbot = mgr.Chatbot(label=model_options[0].lower(), latex_delimiters=latex_delimiters)
        textbox = gr.Textbox(lines=1, label='Input')

        with gr.Row():
            clear_history = gr.Button("🧹 Clear history")
            submit = gr.Button("🚀 Send")

        textbox.submit(model_chat,
                       inputs=[textbox, chatbot, system_state, radio],
                       outputs=[textbox, chatbot, system_input])

        submit.click(model_chat,
                     inputs=[textbox, chatbot, system_state, radio],
                     outputs=[textbox, chatbot, system_input],
                     concurrency_limit=5)
        clear_history.click(fn=clear_session,
                            inputs=[],
                            outputs=[textbox, chatbot])
        modify_system.click(fn=modify_system_session,
                            inputs=[system_input],
                            outputs=[system_state, system_input, chatbot])

        radio.change(choose_radio,
                     inputs=[radio, system_input],
                     outputs=[chatbot, system_state, system_input, textbox])

    demo.queue(api_open=False, default_concurrency_limit=40)
    demo.launch(max_threads=40, share=True)


if __name__ == "__main__":
    main()