Spaces:
Build error
Build error
doctorsafe
committed on
Commit
·
44ede5e
1
Parent(s):
349107e
Rename app.py to main.py
Browse files
app.py
DELETED
@@ -1,17 +0,0 @@
|
|
1 |
-
import gradio as gr
|
2 |
-
|
3 |
-
def generate_mutimodal(title, context, img):
    """Echo the three demo inputs back as one plain-text summary.

    Produces "Title:<title>\nContext:<context>\n...<img>"; each value is
    converted to text exactly as str.format would.
    (The "mutimodal" spelling is kept as-is: callers reference this name.)
    """
    template = "Title:{}\nContext:{}\n...{}"
    return template.format(title, context, img)
|
5 |
-
|
6 |
-
# Simple multimodal demo UI: a title box, a body-text box and an optional
# image upload, all echoed back as plain text by generate_mutimodal.
title_box = gr.Textbox(lines=1, placeholder="请输入标题")
context_box = gr.Textbox(lines=2, placeholder="请输入正文")
image_box = gr.Image(shape=(200, 200), label="请上传图片(可选)")

server = gr.Interface(
    fn=generate_mutimodal,
    inputs=[title_box, context_box, image_box],
    outputs="text",
)

server.launch()
|
17 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
main.py
ADDED
@@ -0,0 +1,70 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Gradio front-end for the chat application: builds the chat UI, the
# functional-button panels, the file-upload hook, and wires everything to
# the predictor callbacks.
import gradio as gr
from predict import predict
from toolbox import format_io, find_free_port

# Prefer a private (untracked) config when present; fall back to the default.
try:
    from config_private import proxies, WEB_PORT, LLM_MODEL
except ImportError:  # was a bare `except:` — narrowed so unrelated errors surface
    from config import proxies, WEB_PORT, LLM_MODEL

# Experimental ("crazy") functional plugin modules.
from functional_crazy import get_crazy_functionals, on_file_uploaded, on_report_generated
crazy_functional = get_crazy_functionals()

# Route chatbot output through the markdown formatter.
gr.Chatbot.postprocess = format_io

# Apply the custom color theme.
from theme import adjust_theme
set_theme = adjust_theme()

# --- Definitions for names referenced below but never defined in the original
# --- file, which raised NameError at import time (the Space shows "Build error").
# NOTE(review): placeholder page heading — confirm the intended HTML.
title_html = '<h1 align="center">ChatGPT Academic</h1>'
# NOTE(review): default system prompt — confirm the intended wording.
initial_prompt = "Serve me as a writing and programming assistant."
# NOTE(review): `functional` looks like a button registry, presumably loaded
# from a `functional` module mirroring get_crazy_functionals(); default to an
# empty dict so the UI still builds — confirm against the project.
functional = {}
# Port handed to the crazy-functional callbacks. `find_free_port` was imported
# but never used in the original — presumably intended for exactly this.
PORT = find_free_port() if WEB_PORT <= 0 else WEB_PORT

with gr.Blocks(theme=set_theme, analytics_enabled=False) as demo:
    gr.HTML(title_html)
    with gr.Row():
        with gr.Column(scale=2):
            chatbot = gr.Chatbot()
            chatbot.style(height=1000)  # dropped a redundant no-arg chatbot.style() call
            history = gr.State([])
            TRUE = gr.State(True)
            FALSE = gr.State(False)
        with gr.Column(scale=1):
            with gr.Row():
                with gr.Column(scale=12):
                    txt = gr.Textbox(show_label=False, placeholder="Input question here.").style(container=False)
                with gr.Column(scale=1):
                    submitBtn = gr.Button("提交", variant="primary")
            with gr.Row():
                from check_proxy import check_proxy
                statusDisplay = gr.Markdown(f"Tip: 按Enter提交, 按Shift+Enter换行. \nNetwork: {check_proxy(proxies)}\nModel: {LLM_MODEL}")
            with gr.Row():
                # One button per registered functional entry; "Color" is optional.
                for k in functional:
                    variant = functional[k]["Color"] if "Color" in functional[k] else "secondary"
                    functional[k]["Button"] = gr.Button(k, variant=variant)
            with gr.Row():
                gr.Markdown("以下部分实验性功能需从input框读取路径.")
            with gr.Row():
                # Same button scheme for the experimental plugins.
                for k in crazy_functional:
                    variant = crazy_functional[k]["Color"] if "Color" in crazy_functional[k] else "secondary"
                    crazy_functional[k]["Button"] = gr.Button(k, variant=variant)
            with gr.Row():
                gr.Markdown("上传本地文件供上面的实验性功能调用.")
            with gr.Row():
                file_upload = gr.Files(label='任何文件,但推荐上传压缩文件(zip, tar)', file_count="multiple")

            systemPromptTxt = gr.Textbox(show_label=True, placeholder=f"System Prompt", label="System prompt", value=initial_prompt).style(container=True)
            # inputs, top_p, temperature, top_k, repetition_penalty
            with gr.Accordion("arguments", open=False):
                top_p = gr.Slider(minimum=-0, maximum=1.0, value=1.0, step=0.01, interactive=True, label="Top-p (nucleus sampling)",)
                temperature = gr.Slider(minimum=-0, maximum=5.0, value=1.0, step=0.01, interactive=True, label="Temperature",)

    # Wire both submit paths (Enter key and the button) to the predictor.
    txt.submit(predict, [txt, top_p, temperature, chatbot, history, systemPromptTxt], [chatbot, history, statusDisplay])
    submitBtn.click(predict, [txt, top_p, temperature, chatbot, history, systemPromptTxt], [chatbot, history, statusDisplay], show_progress=True)
    for k in functional:
        functional[k]["Button"].click(predict,
            [txt, top_p, temperature, chatbot, history, systemPromptTxt, TRUE, gr.State(k)], [chatbot, history, statusDisplay], show_progress=True)
    file_upload.upload(on_file_uploaded, [file_upload, chatbot, txt], [chatbot, txt])
    for k in crazy_functional:
        click_handle = crazy_functional[k]["Button"].click(crazy_functional[k]["Function"],
            [txt, top_p, temperature, chatbot, history, systemPromptTxt, gr.State(PORT)], [chatbot, history, statusDisplay]
        )
        # Best-effort chaining: .then() may be missing on older gradio versions.
        try:
            click_handle.then(on_report_generated, [file_upload, chatbot], [file_upload, chatbot])
        except Exception:
            pass

# NOTE(review): the original built `demo` but never launched it, so the app
# exited immediately. Launch when run as a script.
if __name__ == "__main__":
    demo.launch(server_port=PORT)
|