Update app.py
app.py
CHANGED
@@ -74,7 +74,7 @@ def model_inference(
     return generated_texts[0]
 
 
-with gr.Blocks() as demo:
+with gr.Blocks(fill_height=True) as demo:
     gr.Markdown("## SmolVLM: Small yet Mighty 💫")
     gr.Markdown("Play with [HuggingFaceTB/SmolVLM-Instruct](https://huggingface.co/HuggingFaceTB/SmolVLM-Instruct) in this demo. To get started, upload an image and text or try one of the examples.")
     with gr.Column():
@@ -87,8 +87,21 @@ with gr.Blocks() as demo:
 
 
 
-        with gr.Accordion(label="Advanced Generation Parameters", open=False):
-
+        with gr.Accordion(label="Examples and Advanced Generation Parameters", open=False):
+            examples=[
+                ["example_images/rococo.jpg", "What art era is this?", None, "Greedy", 0.4, 512, 1.2, 0.8],
+                ["example_images/examples_wat_arun.jpg", "Give me travel tips for the area around this monument.", None, "Greedy", 0.4, 512, 1.2, 0.8],
+                ["example_images/examples_invoice.png", "What is the due date and the invoice date?", None, "Greedy", 0.4, 512, 1.2, 0.8],
+                ["example_images/s2w_example.png", "What is this UI about?", None, "Greedy", 0.4, 512, 1.2, 0.8],
+                ["example_images/examples_weather_events.png", "Where do the severe droughts happen according to this diagram?", None, "Greedy", 0.4, 512, 1.2, 0.8],
+            ]
+            gr.Examples(
+                examples = examples,
+                inputs=[image_input, query_input, assistant_prefix, decoding_strategy, temperature,
+                        max_new_tokens, repetition_penalty, top_p],
+                outputs=output,
+                fn=model_inference, cache_examples=False
+            )
             # Hyper-parameters for generation
             max_new_tokens = gr.Slider(
                 minimum=8,
@@ -159,20 +172,7 @@ with gr.Blocks() as demo:
             inputs=decoding_strategy,
             outputs=top_p,
         )
-        examples=[
-            ["example_images/rococo.jpg", "What art era is this?", None, "Greedy", 0.4, 512, 1.2, 0.8],
-            ["example_images/examples_wat_arun.jpg", "Give me travel tips for the area around this monument.", None, "Greedy", 0.4, 512, 1.2, 0.8],
-            ["example_images/examples_invoice.png", "What is the due date and the invoice date?", None, "Greedy", 0.4, 512, 1.2, 0.8],
-            ["example_images/s2w_example.png", "What is this UI about?", None, "Greedy", 0.4, 512, 1.2, 0.8],
-            ["example_images/examples_weather_events.png", "Where do the severe droughts happen according to this diagram?", None, "Greedy", 0.4, 512, 1.2, 0.8],
-        ]
-        gr.Examples(
-            examples = examples,
-            inputs=[image_input, query_input, assistant_prefix, decoding_strategy, temperature,
-                    max_new_tokens, repetition_penalty, top_p],
-            outputs=output,
-            fn=model_inference, cache_examples=False
-        )
+
 
 
         submit_btn.click(model_inference, inputs = [image_input, query_input, assistant_prefix, decoding_strategy, temperature,
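For readers skimming the diff, here is a minimal, self-contained sketch of the layout the commit ends up with: gr.Blocks(fill_height=True) at the top, and the gr.Examples table nested inside the same collapsed accordion as the generation sliders. The describe_image stub, the reduced input list, and the slider label are illustrative placeholders rather than the Space's actual code; the real app wires model_inference and the full parameter list shown in the diff above.

# Minimal sketch of the post-change layout; a stub replaces the demo's real
# model_inference() and full parameter set so the snippet runs on its own.
import gradio as gr

def describe_image(image, prompt, max_new_tokens):
    # Hypothetical stand-in for model_inference(); just echoes its inputs.
    return f"(stub) prompt={prompt!r}, max_new_tokens={int(max_new_tokens)}"

with gr.Blocks(fill_height=True) as demo:  # fill_height=True, as added in this commit
    gr.Markdown("## SmolVLM: Small yet Mighty 💫")
    with gr.Column():
        image_input = gr.Image(label="Upload your Image", type="pil")
        query_input = gr.Textbox(label="Prompt")
        submit_btn = gr.Button("Submit")
        output = gr.Textbox(label="Output")

        # Examples now sit in the same collapsed accordion as the generation knobs.
        with gr.Accordion(label="Examples and Advanced Generation Parameters", open=False):
            max_new_tokens = gr.Slider(
                minimum=8, maximum=1024, value=512, step=1,
                label="Maximum number of new tokens to generate",
            )
            gr.Examples(
                # The image path is an asset of the Space's repo; swap in any local image elsewhere.
                examples=[["example_images/rococo.jpg", "What art era is this?", 512]],
                inputs=[image_input, query_input, max_new_tokens],
                outputs=output,
                fn=describe_image,
                cache_examples=False,
            )

    submit_btn.click(
        describe_image,
        inputs=[image_input, query_input, max_new_tokens],
        outputs=output,
    )

if __name__ == "__main__":
    demo.launch()

Nesting gr.Examples inside the accordion keeps the landing view compact, and with cache_examples=False clicking an example only fills the inputs without running the model. In this sketch the slider is created before gr.Examples so it can be referenced in inputs=; any component passed there has to exist at that point.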