Commit 79c51bc by mtasic85 · 1 Parent(s): 701ae11

eval pretrain-core-3;

Files changed (1): README.md +56 -0
README.md CHANGED
@@ -220,3 +220,59 @@ litgpt convert_pretrained_checkpoint ../out/pretrain-core-2/final ../out/pretrai
  ```bash
  CUDA_VISIBLE_DEVICES=0 CUDA_LAUNCH_BLOCKING=0 PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True litgpt pretrain --config pretrain_core_model_3.yaml
  ```
+
+ ```bash
+ CUDA_VISIBLE_DEVICES=0 CUDA_LAUNCH_BLOCKING=0 PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True time litgpt evaluate --tasks 'leaderboard' --out_dir '../evaluate/pretrain-core-3/leaderboard/' --batch_size '4' --dtype 'bfloat16' '../out/pretrain-core-3/final'
+ ```
+
+ ```
+ | Tasks |Version|Filter|n-shot| Metric | |Value | |Stderr|
+ |-----------------------------------------------------------|-------|------|-----:|-----------------------|---|-----:|---|------|
+ |leaderboard | N/A| | | | | | | |
+ | - leaderboard_bbh | N/A| | | | | | | |
+ | - leaderboard_bbh_boolean_expressions | 1|none | 3|acc_norm |↑ |0.5040|± |0.0317|
+ | - leaderboard_bbh_causal_judgement | 1|none | 3|acc_norm |↑ |0.5187|± |0.0366|
+ | - leaderboard_bbh_date_understanding | 1|none | 3|acc_norm |↑ |0.2000|± |0.0253|
+ | - leaderboard_bbh_disambiguation_qa | 1|none | 3|acc_norm |↑ |0.3560|± |0.0303|
+ | - leaderboard_bbh_formal_fallacies | 1|none | 3|acc_norm |↑ |0.5320|± |0.0316|
+ | - leaderboard_bbh_geometric_shapes | 1|none | 3|acc_norm |↑ |0.0880|± |0.0180|
+ | - leaderboard_bbh_hyperbaton | 1|none | 3|acc_norm |↑ |0.5160|± |0.0317|
+ | - leaderboard_bbh_logical_deduction_five_objects | 1|none | 3|acc_norm |↑ |0.2000|± |0.0253|
+ | - leaderboard_bbh_logical_deduction_seven_objects | 1|none | 3|acc_norm |↑ |0.1160|± |0.0203|
+ | - leaderboard_bbh_logical_deduction_three_objects | 1|none | 3|acc_norm |↑ |0.3400|± |0.0300|
+ | - leaderboard_bbh_movie_recommendation | 1|none | 3|acc_norm |↑ |0.2760|± |0.0283|
+ | - leaderboard_bbh_navigate | 1|none | 3|acc_norm |↑ |0.4200|± |0.0313|
+ | - leaderboard_bbh_object_counting | 1|none | 3|acc_norm |↑ |0.0600|± |0.0151|
+ | - leaderboard_bbh_penguins_in_a_table | 1|none | 3|acc_norm |↑ |0.2055|± |0.0336|
+ | - leaderboard_bbh_reasoning_about_colored_objects | 1|none | 3|acc_norm |↑ |0.1560|± |0.0230|
+ | - leaderboard_bbh_ruin_names | 1|none | 3|acc_norm |↑ |0.2280|± |0.0266|
+ | - leaderboard_bbh_salient_translation_error_detection | 1|none | 3|acc_norm |↑ |0.1120|± |0.0200|
+ | - leaderboard_bbh_snarks | 1|none | 3|acc_norm |↑ |0.5449|± |0.0374|
+ | - leaderboard_bbh_sports_understanding | 1|none | 3|acc_norm |↑ |0.4600|± |0.0316|
+ | - leaderboard_bbh_temporal_sequences | 1|none | 3|acc_norm |↑ |0.2840|± |0.0286|
+ | - leaderboard_bbh_tracking_shuffled_objects_five_objects | 1|none | 3|acc_norm |↑ |0.1720|± |0.0239|
+ | - leaderboard_bbh_tracking_shuffled_objects_seven_objects| 1|none | 3|acc_norm |↑ |0.1400|± |0.0220|
+ | - leaderboard_bbh_tracking_shuffled_objects_three_objects| 1|none | 3|acc_norm |↑ |0.3320|± |0.0298|
+ | - leaderboard_bbh_web_of_lies | 1|none | 3|acc_norm |↑ |0.4880|± |0.0317|
+ | - leaderboard_gpqa | N/A| | | | | | | |
+ | - leaderboard_gpqa_diamond | 1|none | 0|acc_norm |↑ |0.2071|± |0.0289|
+ | - leaderboard_gpqa_extended | 1|none | 0|acc_norm |↑ |0.2637|± |0.0189|
+ | - leaderboard_gpqa_main | 1|none | 0|acc_norm |↑ |0.2612|± |0.0208|
+ | - leaderboard_ifeval | 3|none | 0|inst_level_loose_acc |↑ |0.2302|± | N/A|
+ | | |none | 0|inst_level_strict_acc |↑ |0.2230|± | N/A|
+ | | |none | 0|prompt_level_loose_acc |↑ |0.1165|± |0.0138|
+ | | |none | 0|prompt_level_strict_acc|↑ |0.1109|± |0.0135|
+ | - leaderboard_math_hard | N/A| | | | | | | |
+ | - leaderboard_math_algebra_hard | 2|none | 4|exact_match |↑ |0.0000|± | 0|
+ | - leaderboard_math_counting_and_prob_hard | 2|none | 4|exact_match |↑ |0.0000|± | 0|
+ | - leaderboard_math_geometry_hard | 2|none | 4|exact_match |↑ |0.0000|± | 0|
+ | - leaderboard_math_intermediate_algebra_hard | 2|none | 4|exact_match |↑ |0.0000|± | 0|
+ | - leaderboard_math_num_theory_hard | 2|none | 4|exact_match |↑ |0.0000|± | 0|
+ | - leaderboard_math_prealgebra_hard | 2|none | 4|exact_match |↑ |0.0000|± | 0|
+ | - leaderboard_math_precalculus_hard | 2|none | 4|exact_match |↑ |0.0000|± | 0|
+ | - leaderboard_mmlu_pro | 0.1|none | 5|acc |↑ |0.1096|± |0.0028|
+ | - leaderboard_musr | N/A| | | | | | | |
+ | - leaderboard_musr_murder_mysteries | 1|none | 0|acc_norm |↑ |0.4920|± |0.0317|
+ | - leaderboard_musr_object_placements | 1|none | 0|acc_norm |↑ |0.2227|± |0.0261|
+ | - leaderboard_musr_team_allocation | 1|none | 0|acc_norm |↑ |0.3960|± |0.0310|
+ ```
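
When comparing pretrain-core checkpoints, the per-task scores can also be read out of the harness output programmatically rather than from the table above. A minimal sketch, assuming `litgpt evaluate` (which wraps lm-eval) writes a `results.json` into the `--out_dir` and nests per-task metrics under a `results` key with filter-suffixed names such as `acc_norm,none` — both the file name and the key layout are assumptions, not confirmed by this commit:

```python
import json
from pathlib import Path

# Assumed location: the directory passed via --out_dir above.
results_path = Path('../evaluate/pretrain-core-3/leaderboard/results.json')

with results_path.open() as f:
    report = json.load(f)

# Assumed layout: per-task metrics under "results", metric names suffixed
# with the filter, e.g. "acc_norm,none".
for task, metrics in sorted(report.get('results', {}).items()):
    acc_norm = metrics.get('acc_norm,none')
    if acc_norm is not None:
        print(f'{task:<60} acc_norm={acc_norm:.4f}')
```

Sorting by task name keeps sub-tasks of the same benchmark grouped together, which makes run-to-run diffs easier to scan.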