Update Gradio interface with dynamic accordion behavior and predict button enhancements
- Modify tile 5 to display actual model prediction status
- Add JavaScript to close project accordion on prediction
- Refactor predict button click event to support multiple actions
- Comment out the original inline predict button call
app.py
CHANGED
@@ -447,19 +447,19 @@ def generate_results_html(results):
                 <a class="mt-2 text-xs tracking-wide">@cmckinle / more info</a>
             </div>
         </div>
-        <!-- Tile 5:
+        <!-- Tile 5: Newcomer -->
         <div
             class="flex flex-col bg-gray-800 rounded-sm p-4 m-1 border border-gray-800 shadow-xs transition hover:shadow-lg dark:shadow-gray-700/25">
             <div
                 class="-m-4 h-24 {get_header_color(results[4][-1])[0]} rounded-sm rounded-b-none transition border group-hover:border-gray-100 group-hover:shadow-lg group-hover:{get_header_color(results[4][-1])[4]}">
-                <span class="text-gray-300 font-mono tracking-widest p-4 pb-3 block text-xs text-center">MODEL 5:
+                <span class="text-gray-300 font-mono tracking-widest p-4 pb-3 block text-xs text-center">MODEL 5: </span>
                 <span
                     class="flex w-24 mx-auto tracking-wide items-center justify-center rounded-full {get_header_color(results[4][-1])[2]} px-1 py-0.5 {get_header_color(results[4][-1])[3]}"
                 >
                     <svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24" stroke-width="3" stroke="currentColor" class="w-4 h-4 mr-2 -ml-3 group-hover:animate group-hover:animate-pulse">
                         {'<path stroke-linecap="round" stroke-linejoin="round" d="M9 12.75 11.25 15 15 9.75M21 12a9 9 0 1 1-18 0 9 9 0 0 1 18 0Z" />' if results[4][-1] == 'REAL' else '<path stroke-linecap="round" stroke-linejoin="round" d="m9.75 9.75 4.5 4.5m0-4.5-4.5 4.5M21 12a9 9 0 1 1-18 0 9 9 0 0 1 18 0Z" />'}
                     </svg>
-                    <p class="whitespace-nowrap text-lg leading-normal font-bold text-center self-center align-middle py-px">
+                    <p class="whitespace-nowrap text-lg leading-normal font-bold text-center self-center align-middle py-px">{results[4][-1]}</p>
                 </span>
             </div>
             <div>
@@ -508,18 +508,31 @@ with gr.Blocks() as iface:
     with gr.Row():
         with gr.Column(scale=1):
             image_input = gr.Image(label="Upload Image to Analyze", sources=['upload'], type='pil')
-            with gr.Accordion("Settings", open=False):
+            with gr.Accordion("Settings", open=False, element_id="settings_accordion"):
                 confidence_slider = gr.Slider(0.0, 1.0, value=0.5, step=0.01, label="Confidence Threshold")
             inputs = [image_input, confidence_slider]
         with gr.Column(scale=2):
-            with gr.Accordion("Project OpenSight - Model Evaluations & Playground", open=True):
+            with gr.Accordion("Project OpenSight - Model Evaluations & Playground", open=True, element_id="project_accordion"):
                 gr.Markdown("## OpenSight is a SOTA gen. image detection model, in pre-release prep.\n\nThis HF Space is a temporary home for us and the public to evaluate the shortcomings of current open source models.\n\n<-- Feel free to play around by starting with an image as we prepare our formal announcement.")
             image_output = gr.Image(label="Processed Image", visible=False)
             # Custom HTML component to display results in 5 columns
             results_html = gr.HTML(label="Model Predictions")
             outputs = [image_output, results_html]
 
-    gr.Button("Predict").click(fn=predict_image_with_html, inputs=inputs, outputs=outputs)
+    # gr.Button("Predict").click(fn=predict_image_with_html, inputs=inputs, outputs=outputs)
+
+    predict_button = gr.Button("Predict")
+    predict_button.click(
+        fn=predict_image_with_html,
+        inputs=inputs,
+        outputs=outputs
+    )
+    predict_button.click(
+        fn=None,
+        _js="() => {document.getElementById('project_accordion').open = false;}", # Close the project accordion
+        inputs=[],
+        outputs=[]
+    )
 
 # Launch the interface
 iface.launch()
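Note on the first hunk: the tile template indexes get_header_color(results[4][-1]) at positions [0], [2], [3] and [4] for the header background, badge background, badge text and hover-shadow classes. The following is a purely hypothetical sketch of that contract for readers following the f-string; the real function and its class names are not shown in this diff.

def get_header_color(label):
    # Hypothetical stand-in: a 5-tuple of Tailwind classes that the tile template
    # indexes as [0] header bg, [2] badge bg, [3] badge text, [4] hover shadow.
    # Only the indices and the 'REAL' branch come from the diff; class names are illustrative.
    if label == 'REAL':
        return ('bg-green-500', 'text-green-100', 'bg-green-700', 'text-green-100', 'shadow-green-500/50')
    return ('bg-red-500', 'text-red-100', 'bg-red-700', 'text-red-100', 'shadow-red-500/50')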
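Note on the second hunk: Gradio components expose elem_id rather than element_id, and the JavaScript hook on click events is version-dependent (_js in Gradio 3.x, js in 4.x). Below is a minimal, self-contained sketch of the same double-click wiring that collapses the project accordion with a server-side component update instead of the inline DOM call. It assumes a recent Gradio 4.x release and uses a placeholder prediction function, not the Space's real predict_image_with_html.

import gradio as gr

def predict_image_with_html(image, confidence):
    # Placeholder for the Space's real prediction function.
    return image, "<p>model predictions go here</p>"

with gr.Blocks() as iface:
    with gr.Row():
        with gr.Column(scale=1):
            image_input = gr.Image(label="Upload Image to Analyze", sources=['upload'], type='pil')
            with gr.Accordion("Settings", open=False, elem_id="settings_accordion"):
                confidence_slider = gr.Slider(0.0, 1.0, value=0.5, step=0.01, label="Confidence Threshold")
            inputs = [image_input, confidence_slider]
        with gr.Column(scale=2):
            with gr.Accordion("Project OpenSight - Model Evaluations & Playground",
                              open=True, elem_id="project_accordion") as project_accordion:
                gr.Markdown("Intro text (abbreviated).")
            image_output = gr.Image(label="Processed Image", visible=False)
            results_html = gr.HTML(label="Model Predictions")
            outputs = [image_output, results_html]

    predict_button = gr.Button("Predict")
    # First listener runs the model; the second collapses the project accordion by
    # returning a component update, avoiding assumptions about the accordion's DOM.
    predict_button.click(fn=predict_image_with_html, inputs=inputs, outputs=outputs)
    predict_button.click(fn=lambda: gr.Accordion(open=False), inputs=[], outputs=[project_accordion])

iface.launch()

The server-side update keeps the collapse behavior working even if Gradio changes how accordions are rendered, whereas the inline JS in the diff depends on the element with id project_accordion exposing an open property.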