app.py (CHANGED)
@@ -74,7 +74,7 @@ def model_inference(
     return generated_texts[0]
 
 
-with gr.Blocks(fill_height=True) as demo:
+with gr.Blocks(fill_height=False) as demo:
     gr.Markdown("## SmolVLM: Small yet Mighty 💫")
     gr.Markdown("Play with [HuggingFaceTB/SmolVLM-Instruct](https://huggingface.co/HuggingFaceTB/SmolVLM-Instruct) in this demo. To get started, upload an image and text or try one of the examples.")
     with gr.Column():
@@ -87,7 +87,7 @@ with gr.Blocks(fill_height=True) as demo:
 
 
 
-        with gr.Accordion(label="Examples and Advanced Generation Parameters"):
+        with gr.Accordion(label="Examples and Advanced Generation Parameters", open=False):
             examples=[
                 ["example_images/rococo.jpg", "What art era is this?", "", "Greedy", 0.4, 512, 1.2, 0.8],
                 ["example_images/examples_wat_arun.jpg", "Give me travel tips for the area around this monument.", "", "Greedy", 0.4, 512, 1.2, 0.8],
@@ -165,13 +165,13 @@ with gr.Blocks(fill_height=True) as demo:
                 inputs=decoding_strategy,
                 outputs=top_p,
             )
-
+            gr.Examples(
                 examples = examples,
                 inputs=[image_input, query_input, assistant_prefix, decoding_strategy, temperature,
                         max_new_tokens, repetition_penalty, top_p],
                 outputs=output,
-                fn=model_inference
-            )
+                fn=model_inference
+            )
 
 
         submit_btn.click(model_inference, inputs = [image_input, query_input, assistant_prefix, decoding_strategy, temperature,
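
For orientation, here is a minimal, self-contained sketch of the UI skeleton this change produces: `gr.Blocks(fill_height=False)`, the examples-and-parameters accordion collapsed via `open=False`, and a `gr.Examples` table wired to the same handler as the Submit button. Component names and the example row follow the diff; the stub `model_inference` body, the slider ranges, the Radio choices, and the component labels are assumptions for illustration, not the Space's actual code.

# Minimal sketch, assuming Gradio 4.x; model_inference below is a hypothetical
# stand-in whose signature mirrors the inputs wired up in the diff.
import gradio as gr

def model_inference(image, query, assistant_prefix, decoding_strategy,
                    temperature, max_new_tokens, repetition_penalty, top_p):
    # Stub: the real Space runs SmolVLM-Instruct here.
    return f"(stub) query was: {query}"

with gr.Blocks(fill_height=False) as demo:  # was fill_height=True before this change
    gr.Markdown("## SmolVLM: Small yet Mighty 💫")
    with gr.Column():
        image_input = gr.Image(type="pil", label="Image")
        query_input = gr.Textbox(label="Question")
        assistant_prefix = gr.Textbox(label="Assistant Prefix")
        submit_btn = gr.Button("Submit")
        output = gr.Textbox(label="Output")

        # Advanced knobs and the examples table sit in an accordion that is
        # now collapsed by default (open=False).
        with gr.Accordion(label="Examples and Advanced Generation Parameters", open=False):
            decoding_strategy = gr.Radio(["Greedy", "Top P Sampling"],
                                         value="Greedy", label="Decoding strategy")
            temperature = gr.Slider(0.0, 1.0, value=0.4, label="Temperature")
            max_new_tokens = gr.Slider(8, 1024, value=512, label="Max new tokens")
            repetition_penalty = gr.Slider(1.0, 2.0, value=1.2, label="Repetition penalty")
            top_p = gr.Slider(0.01, 0.99, value=0.8, label="Top P")

            # Example path comes from the Space's repo; replace with a file
            # that exists locally if you run this sketch yourself.
            examples = [
                ["example_images/rococo.jpg", "What art era is this?", "",
                 "Greedy", 0.4, 512, 1.2, 0.8],
            ]
            gr.Examples(
                examples=examples,
                inputs=[image_input, query_input, assistant_prefix, decoding_strategy,
                        temperature, max_new_tokens, repetition_penalty, top_p],
                outputs=output,
                fn=model_inference,
            )

        # The Submit button calls the same handler with the same inputs.
        submit_btn.click(
            model_inference,
            inputs=[image_input, query_input, assistant_prefix, decoding_strategy,
                    temperature, max_new_tokens, repetition_penalty, top_p],
            outputs=output,
        )

demo.launch()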