Create app.py
app.py
ADDED
@@ -0,0 +1,23 @@
+import gradio as gr
+from huggingface_hub import InferenceClient
+
+client = InferenceClient(model="https://zgg3nzdpswxy4a-80.proxy.runpod.net/")
+
+def inference(message):
+    partial_message = ""
+    for token in client.text_generation(message, max_new_tokens=256, stream=True):
+        partial_message += token
+        yield partial_message
+
+gr.ChatInterface(
+    inference,
+    chatbot=gr.Chatbot(height=300, scale=7),
+    textbox=gr.Textbox(placeholder="你可以问我任何关于SequoiaDB的问题!", container=False, scale=7),
+    description="这是SequoiaDB旗下的AI智能大语言模型,训练超过上万条真实数据和7亿参数。",
+    title="ChatSDB",
+    examples=["SequoiaDB巨杉数据库是什么?", "SequoiaDB巨杉数据库支持哪些类型的数据库实例?"],
+    retry_btn="重试",
+    undo_btn="撤销",
+    clear_btn="清除",
+    submit_btn="提问",
+).queue().launch()
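As a side note, the same endpoint can be queried outside the Gradio UI for a quick sanity check. The snippet below is a minimal sketch, not part of the commit: it assumes the RunPod proxy URL serves a text-generation-inference compatible API (the same assumption app.py makes by passing it to InferenceClient), and it reuses a prompt from the examples list above.

from huggingface_hub import InferenceClient

# Standalone check of the endpoint; URL and prompt are copied from app.py (illustrative only).
client = InferenceClient(model="https://zgg3nzdpswxy4a-80.proxy.runpod.net/")

# One-shot call: returns the full completion as a single string.
print(client.text_generation("SequoiaDB巨杉数据库是什么?", max_new_tokens=256))

# Streaming call, mirroring the inference() generator above: tokens arrive one by one.
partial_message = ""
for token in client.text_generation("SequoiaDB巨杉数据库是什么?", max_new_tokens=256, stream=True):
    partial_message += token
print(partial_message)

Streaming and yielding the accumulated partial_message is what lets gr.ChatInterface update the chat bubble token by token instead of waiting for the full reply.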