SondosMB committed
Commit 6335fa2 · verified · Parent: ae9dc78

Update app.py

Files changed (1): app.py (+3 −3)
app.py CHANGED
@@ -168,7 +168,7 @@ if not HF_TOKEN:
 #     "Correct Predictions", "Total Questions", "Timestamp"
 #     ]).to_csv(LEADERBOARD_FILE, index=False)
 
-def initialize_leaderboard_file():
+def initialize_leaderboard_file(LEADERBOARD_FILE):
     """
     Ensure the leaderboard file exists and has the correct headers.
     """
@@ -457,7 +457,7 @@ def evaluate_predictions(prediction_file, model_name,Team_name ,add_to_leaderboa
 
     except Exception as e:
         return f"Error during evaluation: {str(e)}", load_leaderboard()
-initialize_leaderboard_file()
+initialize_leaderboard_file(LEADERBOARD_FILE)
 
 
 
@@ -516,7 +516,7 @@ def evaluate_predictions_pro(prediction_file, model_name,Team_name ,add_to_leade
 
     except Exception as e:
         return f"Error during evaluation: {str(e)}", load_leaderboard_pro()
-initialize_leaderboard_file()
+initialize_leaderboard_file(LEADERBOARD_FILE_pro)
 
 
 # Function to set default mode
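
For reference, a minimal sketch of what the parameterized helper could look like after this change. It assumes the leaderboard is a pandas CSV whose header row includes the columns hinted at in the commented-out code above; the extra column names ("Model Name", "Team Name", "Overall Accuracy") are assumptions for illustration, not taken from app.py.

import os
import pandas as pd

def initialize_leaderboard_file(LEADERBOARD_FILE):
    """
    Ensure the leaderboard file exists and has the correct headers.
    """
    # Create the CSV with a header row only when the file is missing or empty;
    # the exact column list is an assumption based on the commented-out code above.
    if not os.path.exists(LEADERBOARD_FILE) or os.path.getsize(LEADERBOARD_FILE) == 0:
        pd.DataFrame(columns=[
            "Model Name", "Team Name", "Overall Accuracy",
            "Correct Predictions", "Total Questions", "Timestamp",
        ]).to_csv(LEADERBOARD_FILE, index=False)

Passing the path as an argument lets the same helper initialize both leaderboards, which is what the two updated call sites do: one passes LEADERBOARD_FILE, the other LEADERBOARD_FILE_pro.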