SondosMB committed (verified) Β· commit 88e0040 Β· parent: a2154df

Update app.py

Files changed (1): app.py (+37, βˆ’17)
app.py CHANGED
@@ -963,7 +963,7 @@ with gr.Blocks(css=css_tech_theme) as demo:
         """,elem_id="overview")
 
 
-    with gr.TabItem("πŸ“€ Submission-Pro"):
+    with gr.TabItem("πŸ“€ Submission"):
         gr.Markdown("""
         <div class="submission-section" style="border: 3px solid #6a1b9a; padding: 20px; border-radius: 12px; box-shadow: 0 4px 10px rgba(106, 27, 154, 0.2);">
         <h2 style="color: #6a1b9a; text-align: center;">Submit Your Predictions</h2>
@@ -971,7 +971,7 @@ with gr.Blocks(css=css_tech_theme) as demo:
         </div>
         """)
         with gr.Row(elem_id="submission-fields"):
-            file_input = gr.File(label="πŸ“‚ Upload Prediction CSV for Mobile-MMLU-Pro", file_types=[".csv"], interactive=True,scale=1, min_width=12000)
+            file_input = gr.File(label="πŸ“‚ Upload Prediction CSV", file_types=[".csv"], interactive=True,scale=1, min_width=12000)
             model_name_input = gr.Textbox(label="🏷️ Model Name", placeholder="Enter your model name",scale=1, min_width=800)
             Team_name_input = gr.Textbox(label="🏷️ Team Name", placeholder="Enter your Team name",scale=1, min_width=800)
 
@@ -979,11 +979,11 @@ with gr.Blocks(css=css_tech_theme) as demo:
             overall_accuracy_display = gr.Number(label="πŸ“Š Overall Accuracy (%)", interactive=False,scale=1,min_width=1200)
 
         with gr.Row(elem_id="submission-buttons"):
-            eval_button_pro = gr.Button("πŸ“ˆ Evaluate",scale=1,min_width=1200)
-            submit_button_pro = gr.Button("πŸ“€ Prove and Submit to Leaderboard", elem_id="evaluation-status", visible=False,scale=1,min_width=1200)
+            eval_button = gr.Button("πŸ“ˆ Evaluate",scale=1,min_width=1200)
+            submit_button = gr.Button("πŸ“€ Prove and Submit to Leaderboard", elem_id="evaluation-status", visible=False,scale=1,min_width=1200)
             eval_status = gr.Textbox(label="πŸ› οΈ Evaluation Status", interactive=False,scale=1,min_width=1200)
-
-    with gr.TabItem("πŸ“€ Submission"):
+
+    with gr.TabItem("πŸ“€ Submission-Pro"):
         gr.Markdown("""
         <div class="submission-section" style="border: 3px solid #6a1b9a; padding: 20px; border-radius: 12px; box-shadow: 0 4px 10px rgba(106, 27, 154, 0.2);">
         <h2 style="color: #6a1b9a; text-align: center;">Submit Your Predictions</h2>
@@ -991,17 +991,37 @@ with gr.Blocks(css=css_tech_theme) as demo:
         </div>
         """)
         with gr.Row(elem_id="submission-fields"):
-            file_input = gr.File(label="πŸ“‚ Upload Prediction CSV", file_types=[".csv"], interactive=True,scale=1, min_width=12000)
-            model_name_input = gr.Textbox(label="🏷️ Model Name", placeholder="Enter your model name",scale=1, min_width=800)
-            Team_name_input = gr.Textbox(label="🏷️ Team Name", placeholder="Enter your Team name",scale=1, min_width=800)
+            file_input_1 = gr.File(label="πŸ“‚ Upload Prediction CSV for Mobile-MMLU-Pro", file_types=[".csv"], interactive=True,scale=1, min_width=12000)
+            model_name_input_1 = gr.Textbox(label="🏷️ Model Name", placeholder="Enter your model name",scale=1, min_width=800)
+            Team_name_input_1 = gr.Textbox(label="🏷️ Team Name", placeholder="Enter your Team name",scale=1, min_width=800)
 
         with gr.Row(elem_id="submission-results"):
-            overall_accuracy_display = gr.Number(label="πŸ“Š Overall Accuracy (%)", interactive=False,scale=1,min_width=1200)
+            overall_accuracy_display_1 = gr.Number(label="πŸ“Š Overall Accuracy (%)", interactive=False,scale=1,min_width=1200)
 
         with gr.Row(elem_id="submission-buttons"):
-            eval_button = gr.Button("πŸ“ˆ Evaluate",scale=1,min_width=1200)
-            submit_button = gr.Button("πŸ“€ Prove and Submit to Leaderboard", elem_id="evaluation-status", visible=False,scale=1,min_width=1200)
-            eval_status = gr.Textbox(label="πŸ› οΈ Evaluation Status", interactive=False,scale=1,min_width=1200)
+            eval_button_pro = gr.Button("πŸ“ˆ Evaluate",scale=1,min_width=1200)
+            submit_button_pro = gr.Button("πŸ“€ Prove and Submit to Leaderboard", elem_id="evaluation-status", visible=False,scale=1,min_width=1200)
+            eval_status_pro = gr.Textbox(label="πŸ› οΈ Evaluation Status", interactive=False,scale=1,min_width=1200)
+
+    # with gr.TabItem("πŸ“€ Submission"):
+    #     gr.Markdown("""
+    #     <div class="submission-section" style="border: 3px solid #6a1b9a; padding: 20px; border-radius: 12px; box-shadow: 0 4px 10px rgba(106, 27, 154, 0.2);">
+    #     <h2 style="color: #6a1b9a; text-align: center;">Submit Your Predictions</h2>
+    #     <p style="font-size: 1.2em; color: #333; text-align: center;">Upload your prediction file and provide your model name to evaluate and optionally submit your results to the leaderboard.</p>
+    #     </div>
+    #     """)
+    #     with gr.Row(elem_id="submission-fields"):
+    #         file_input = gr.File(label="πŸ“‚ Upload Prediction CSV", file_types=[".csv"], interactive=True,scale=1, min_width=12000)
+    #         model_name_input = gr.Textbox(label="🏷️ Model Name", placeholder="Enter your model name",scale=1, min_width=800)
+    #         Team_name_input = gr.Textbox(label="🏷️ Team Name", placeholder="Enter your Team name",scale=1, min_width=800)
+
+    #     with gr.Row(elem_id="submission-results"):
+    #         overall_accuracy_display = gr.Number(label="πŸ“Š Overall Accuracy (%)", interactive=False,scale=1,min_width=1200)
+
+    #     with gr.Row(elem_id="submission-buttons"):
+    #         eval_button = gr.Button("πŸ“ˆ Evaluate",scale=1,min_width=1200)
+    #         submit_button = gr.Button("πŸ“€ Prove and Submit to Leaderboard", elem_id="evaluation-status", visible=False,scale=1,min_width=1200)
+    #         eval_status = gr.Textbox(label="πŸ› οΈ Evaluation Status", interactive=False,scale=1,min_width=1200)
 
 
     # with gr.TabItem("πŸ“€ Submission-Pro"):
@@ -1188,8 +1208,8 @@ with gr.Blocks(css=css_tech_theme) as demo:
 
     eval_button_pro.click(
         handle_evaluation_pro,
-        inputs=[file_input, model_name_input,Team_name_input],
-        outputs=[eval_status, overall_accuracy_display, submit_button_pro],
+        inputs=[file_input_1, model_name_input_1,Team_name_input_1],
+        outputs=[eval_status_pro, overall_accuracy_display_1, submit_button_pro],
     )
     eval_button.click(
         handle_evaluation,
@@ -1205,8 +1225,8 @@ with gr.Blocks(css=css_tech_theme) as demo:
 
     submit_button_pro.click(
        handle_submission_pro,
-        inputs=[file_input, model_name_input,Team_name_input],
-        outputs=[eval_status],
+        inputs=[file_input_1, model_name_input_1,Team_name_input_1],
+        outputs=[eval_status_pro],
     )
 
 
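
The net effect of this commit is that the "Submission" and "Submission-Pro" tabs no longer share component variables: the Pro tab gets its own `file_input_1`, `model_name_input_1`, `Team_name_input_1`, `overall_accuracy_display_1`, and `eval_status_pro`, and the Pro `click` handlers are rewired to those components. Below is a minimal, self-contained sketch of that pattern, assuming a recent Gradio release; the `handle_evaluation` / `handle_evaluation_pro` stubs and their return values are hypothetical stand-ins, since the real handlers in app.py are not shown in this diff.

```python
import gradio as gr

# Hypothetical stand-ins for the real handlers in app.py; their actual
# signatures and logic are not part of this diff.
def handle_evaluation(file, model_name, team_name):
    return "Evaluated (standard)", 0.0, gr.update(visible=True)

def handle_evaluation_pro(file, model_name, team_name):
    return "Evaluated (pro)", 0.0, gr.update(visible=True)

with gr.Blocks() as demo:
    with gr.TabItem("πŸ“€ Submission"):
        # Components for the standard submission tab.
        file_input = gr.File(label="πŸ“‚ Upload Prediction CSV", file_types=[".csv"])
        model_name_input = gr.Textbox(label="🏷️ Model Name")
        Team_name_input = gr.Textbox(label="🏷️ Team Name")
        overall_accuracy_display = gr.Number(label="πŸ“Š Overall Accuracy (%)", interactive=False)
        eval_button = gr.Button("πŸ“ˆ Evaluate")
        submit_button = gr.Button("πŸ“€ Prove and Submit to Leaderboard", visible=False)
        eval_status = gr.Textbox(label="πŸ› οΈ Evaluation Status", interactive=False)

    with gr.TabItem("πŸ“€ Submission-Pro"):
        # Distinct names (_1 / _pro suffixes) keep these components separate
        # from the standard tab's components above.
        file_input_1 = gr.File(label="πŸ“‚ Upload Prediction CSV for Mobile-MMLU-Pro", file_types=[".csv"])
        model_name_input_1 = gr.Textbox(label="🏷️ Model Name")
        Team_name_input_1 = gr.Textbox(label="🏷️ Team Name")
        overall_accuracy_display_1 = gr.Number(label="πŸ“Š Overall Accuracy (%)", interactive=False)
        eval_button_pro = gr.Button("πŸ“ˆ Evaluate")
        submit_button_pro = gr.Button("πŸ“€ Prove and Submit to Leaderboard", visible=False)
        eval_status_pro = gr.Textbox(label="πŸ› οΈ Evaluation Status", interactive=False)

    # Each button is wired to its own tab's components, so the Pro handlers
    # never read from or write to the standard tab.
    eval_button.click(
        handle_evaluation,
        inputs=[file_input, model_name_input, Team_name_input],
        outputs=[eval_status, overall_accuracy_display, submit_button],
    )
    eval_button_pro.click(
        handle_evaluation_pro,
        inputs=[file_input_1, model_name_input_1, Team_name_input_1],
        outputs=[eval_status_pro, overall_accuracy_display_1, submit_button_pro],
    )

demo.launch()
```

Before this change, both tabs bound the same Python names (`file_input`, `eval_status`, ...), so the later definitions shadowed the earlier ones and the Pro callbacks ended up pointing at whichever components were created last; giving each tab its own variables removes that ambiguity.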