Spaces: Running on Zero
Update app.py
app.py CHANGED
@@ -88,25 +88,26 @@ def model_inference(input_dict, history):
         buffer += new_text
         yield buffer
 
-examples = [
-    [{"text": "Solve this question.", "files": ["example_images/document.png"]}]
-]
-
-gr.HTML(html_header)
-
-demo = gr.ChatInterface(
-    fn=model_inference,
-    description="# **VL-Rethinker-7B**",
-    examples=examples,
-    fill_height=True,
-    textbox=gr.MultimodalTextbox(label="Query Input", file_types=["image"], file_count="multiple"),
-    stop_btn="Stop Generation",
-    multimodal=True,
-    cache_examples=False,
-)
+with gr.Blocks() as demo:
+    examples = [
+        [{"text": "Solve this question.", "files": ["example_images/document.png"]}]
+    ]
 
-gr.
-
-gr.
+    gr.HTML(html_header)
+
+    gr.ChatInterface(
+        fn=model_inference,
+        description="# **VL-Rethinker-7B**",
+        examples=examples,
+        fill_height=True,
+        textbox=gr.MultimodalTextbox(label="Query Input", file_types=["image"], file_count="multiple"),
+        stop_btn="Stop Generation",
+        multimodal=True,
+        cache_examples=False,
+    )
+
+    gr.Markdown(tos_markdown)
+    gr.Markdown(learn_more_markdown)
+    gr.Markdown(bibtext)
 
 demo.launch(debug=True)
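Taken together, the change wraps the chat UI in a gr.Blocks() layout so the ChatInterface can be followed by extra Markdown components (terms of service, learn-more links, citation) on the same page. Below is a minimal, self-contained sketch of the resulting app.py structure for a recent Gradio release; in the actual file, html_header, tos_markdown, learn_more_markdown, bibtext, and the real streaming model_inference are defined earlier, so the stubs here are placeholders only.

import gradio as gr

# Placeholder stubs: the real values are defined earlier in app.py.
html_header = "<h1>VL-Rethinker-7B</h1>"
tos_markdown = "Terms of service go here."
learn_more_markdown = "Learn-more links go here."
bibtext = "BibTeX citation goes here."

def model_inference(input_dict, history):
    # Stand-in for the real streaming generator; mirrors the
    # `buffer += new_text; yield buffer` loop shown in the diff.
    buffer = ""
    for new_text in ["This ", "is ", "a ", "stub ", "response."]:
        buffer += new_text
        yield buffer

with gr.Blocks() as demo:
    examples = [
        # Requires example_images/document.png to exist in the Space.
        [{"text": "Solve this question.", "files": ["example_images/document.png"]}]
    ]

    gr.HTML(html_header)

    gr.ChatInterface(
        fn=model_inference,
        description="# **VL-Rethinker-7B**",
        examples=examples,
        fill_height=True,
        textbox=gr.MultimodalTextbox(label="Query Input", file_types=["image"], file_count="multiple"),
        stop_btn="Stop Generation",
        multimodal=True,
        cache_examples=False,
    )

    gr.Markdown(tos_markdown)
    gr.Markdown(learn_more_markdown)
    gr.Markdown(bibtext)

demo.launch(debug=True)

The example row still points at example_images/document.png, so that file must be present in the Space for the example to load.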