Update app.py
app.py CHANGED

@@ -85,18 +85,12 @@ def bot_streaming(message, history, max_new_tokens=4500):
 demo = gr.ChatInterface(
     fn=bot_streaming,
     title="Multimodal Llama",
-    examples=[
-        [{"text": "Which era does this piece belong to? Give details about the era.", "files":["./examples/rococo.jpg"]}, 200],
-        [{"text": "Where do the droughts happen according to this diagram?", "files":["./examples/weather_events.png"]}, 250],
-        [{"text": "What happens when you take out white cat from this chain?", "files":["./examples/ai2d_test.jpg"]}, 250],
-        [{"text": "How long does it take from invoice date to due date? Be short and concise.", "files":["./examples/invoice.png"]}, 250],
-        [{"text": "Where to find this monument? Can you give me other recommendations around the area?", "files":["./examples/wat_arun.jpg"]}, 250],
-    ],
+    examples=[],
     textbox=gr.MultimodalTextbox(),
     additional_inputs=[
         gr.Slider(
             minimum=10,
-            maximum=
+            maximum=5000,
             value=250,
             step=10,
             label="Maximum number of new tokens to generate",
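For context, a minimal sketch of how the gr.ChatInterface call reads after this change. This is a hedged reconstruction rather than the full app.py: the bot_streaming stub below only echoes the submitted text in place of the real streaming handler, and multimodal=True is an assumption (it is not visible in the hunk, but a gr.MultimodalTextbox input normally requires it); everything else mirrors the diff above.

import gradio as gr

def bot_streaming(message, history, max_new_tokens=4500):
    # Stand-in for the real handler in app.py, which streams tokens from the
    # multimodal Llama model; this stub just echoes the submitted text.
    yield message["text"]

demo = gr.ChatInterface(
    fn=bot_streaming,
    title="Multimodal Llama",
    examples=[],                 # examples cleared by this commit
    textbox=gr.MultimodalTextbox(),
    multimodal=True,             # assumed: needed for a multimodal textbox, not shown in the hunk
    additional_inputs=[
        gr.Slider(
            minimum=10,
            maximum=5000,        # upper bound set by this commit
            value=250,
            step=10,
            label="Maximum number of new tokens to generate",
        )
    ],
)

if __name__ == "__main__":
    demo.launch()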