LPX committed on
Commit
cbd537f
·
1 Parent(s): d789973

✨ feat(gui): update result tiles and ui elements

Browse files

- update contributor names to stay consistent with upstream contributor changes【feat】
- split the user inputs into a more intuitive and functional grid layout, in line with observed user-interaction studies; predictable interaction is now a main design pillar of the UI concept【feat】
- display results in the new grouped model layout【refactor】
- removed the markdown styling table entry for project model openings from the new preferences screens, since user activity was found lacking in the graphics and image categories【fix】【docs】

Files changed (1) hide show
  1. app.py +4 -3
app.py CHANGED
@@ -193,8 +193,8 @@ def generate_results_html(results):
193
  {generate_tile_html(1, results[1], "ViT Based", "Heem2")}
194
  {generate_tile_html(2, results[2], "SDXL Dataset", "Organika")}
195
  {generate_tile_html(3, results[3], "SDXL + FLUX", "cmckinle")}
196
- {generate_tile_html(4, results[4], "Newcomer", "YourContributorName")}
197
- {generate_tile_html(5, results[5], "Newcomer 2", "YourContributorName")}
198
  </div>
199
  </div>
200
  """
@@ -215,6 +215,7 @@ with gr.Blocks() as iface:
215
  with gr.Accordion("Settings", open=False, elem_id="settings_accordion"):
216
  confidence_slider = gr.Slider(0.0, 1.0, value=0.5, step=0.01, label="Confidence Threshold")
217
  inputs = [image_input, confidence_slider]
 
218
  with gr.Column(scale=2):
219
  with gr.Accordion("Project OpenSight - Model Evaluations & Playground", open=True, elem_id="project_accordion"):
220
  gr.Markdown("## OpenSight is a SOTA gen. image detection model, in pre-release prep.\n\nThis HF Space is a temporary home for us and the public to evaluate the shortcomings of current open source models.\n\n<-- Feel free to play around by starting with an image as we prepare our formal announcement.")
@@ -225,7 +226,7 @@ with gr.Blocks() as iface:
225
 
226
  # gr.Button("Predict").click(fn=predict_image_with_html, inputs=inputs, outputs=outputs)
227
 
228
- predict_button = gr.Button("Predict")
229
  predict_button.click(
230
  fn=predict_image_with_html,
231
  inputs=inputs,
 
193
  {generate_tile_html(1, results[1], "ViT Based", "Heem2")}
194
  {generate_tile_html(2, results[2], "SDXL Dataset", "Organika")}
195
  {generate_tile_html(3, results[3], "SDXL + FLUX", "cmckinle")}
196
+ {generate_tile_html(4, results[4], "Vit Based", "prithivMLmods")}
197
+ {generate_tile_html(5, results[5], "Vit Based, Newer Dataset", "prithivMLmods")}
198
  </div>
199
  </div>
200
  """
 
215
  with gr.Accordion("Settings", open=False, elem_id="settings_accordion"):
216
  confidence_slider = gr.Slider(0.0, 1.0, value=0.5, step=0.01, label="Confidence Threshold")
217
  inputs = [image_input, confidence_slider]
218
+ predict_button = gr.Button("Predict")
219
  with gr.Column(scale=2):
220
  with gr.Accordion("Project OpenSight - Model Evaluations & Playground", open=True, elem_id="project_accordion"):
221
  gr.Markdown("## OpenSight is a SOTA gen. image detection model, in pre-release prep.\n\nThis HF Space is a temporary home for us and the public to evaluate the shortcomings of current open source models.\n\n<-- Feel free to play around by starting with an image as we prepare our formal announcement.")
 
226
 
227
  # gr.Button("Predict").click(fn=predict_image_with_html, inputs=inputs, outputs=outputs)
228
 
229
+
230
  predict_button.click(
231
  fn=predict_image_with_html,
232
  inputs=inputs,