Oskar Douwe van der Wal committed
Commit 1a5d1fb
Parent: 7bc099d

New results

This view is limited to 50 files because the commit contains too many changes.
Files changed (50):
  1. pythia-14m-seed1/step44000/EleutherAI__pythia-14m-seed1/results_2024-08-19T05-50-38.556900.json +2591 -0
  2. pythia-14m-seed1/step45000/EleutherAI__pythia-14m-seed1/results_2024-08-19T05-56-14.763008.json +2591 -0
  3. pythia-14m-seed1/step46000/EleutherAI__pythia-14m-seed1/results_2024-08-19T06-01-48.537526.json +2591 -0
  4. pythia-14m-seed1/step47000/EleutherAI__pythia-14m-seed1/results_2024-08-19T06-07-21.455136.json +2591 -0
  5. pythia-14m-seed1/step48000/EleutherAI__pythia-14m-seed1/results_2024-08-19T06-12-56.244535.json +2591 -0
  6. pythia-410m-seed1/step0/EleutherAI__pythia-410m-seed1/results_2024-08-19T09-28-17.109635.json +96 -0
  7. pythia-410m-seed1/step0/EleutherAI__pythia-410m-seed1/results_2024-08-19T15-14-05.109765.json +96 -0
  8. pythia-410m-seed1/step1/EleutherAI__pythia-410m-seed1/results_2024-08-19T09-30-29.878303.json +96 -0
  9. pythia-410m-seed1/step1/EleutherAI__pythia-410m-seed1/results_2024-08-19T15-16-13.141132.json +96 -0
  10. pythia-410m-seed1/step1000/EleutherAI__pythia-410m-seed1/results_2024-08-19T09-52-40.338990.json +96 -0
  11. pythia-410m-seed1/step1000/EleutherAI__pythia-410m-seed1/results_2024-08-19T15-35-06.794346.json +96 -0
  12. pythia-410m-seed1/step10000/EleutherAI__pythia-410m-seed1/results_2024-08-19T10-12-27.949043.json +96 -0
  13. pythia-410m-seed1/step10000/EleutherAI__pythia-410m-seed1/results_2024-08-19T15-53-57.877190.json +96 -0
  14. pythia-410m-seed1/step100000/EleutherAI__pythia-410m-seed1/results_2024-08-19T13-36-43.663669.json +96 -0
  15. pythia-410m-seed1/step100000/EleutherAI__pythia-410m-seed1/results_2024-08-19T16-12-51.708415.json +96 -0
  16. pythia-410m-seed1/step101000/EleutherAI__pythia-410m-seed1/results_2024-08-19T13-39-02.636733.json +96 -0
  17. pythia-410m-seed1/step102000/EleutherAI__pythia-410m-seed1/results_2024-08-19T13-41-16.692404.json +96 -0
  18. pythia-410m-seed1/step103000/EleutherAI__pythia-410m-seed1/results_2024-08-19T13-43-31.736206.json +96 -0
  19. pythia-410m-seed1/step104000/EleutherAI__pythia-410m-seed1/results_2024-08-19T13-45-53.228039.json +96 -0
  20. pythia-410m-seed1/step105000/EleutherAI__pythia-410m-seed1/results_2024-08-19T13-48-12.742336.json +96 -0
  21. pythia-410m-seed1/step106000/EleutherAI__pythia-410m-seed1/results_2024-08-19T13-50-27.254318.json +96 -0
  22. pythia-410m-seed1/step107000/EleutherAI__pythia-410m-seed1/results_2024-08-19T13-52-46.612009.json +96 -0
  23. pythia-410m-seed1/step108000/EleutherAI__pythia-410m-seed1/results_2024-08-19T13-54-56.733656.json +96 -0
  24. pythia-410m-seed1/step109000/EleutherAI__pythia-410m-seed1/results_2024-08-19T13-57-07.517021.json +96 -0
  25. pythia-410m-seed1/step11000/EleutherAI__pythia-410m-seed1/results_2024-08-19T10-14-35.252974.json +96 -0
  26. pythia-410m-seed1/step110000/EleutherAI__pythia-410m-seed1/results_2024-08-19T13-59-16.856195.json +96 -0
  27. pythia-410m-seed1/step110000/EleutherAI__pythia-410m-seed1/results_2024-08-19T16-14-56.300208.json +96 -0
  28. pythia-410m-seed1/step111000/EleutherAI__pythia-410m-seed1/results_2024-08-19T14-01-28.609515.json +96 -0
  29. pythia-410m-seed1/step112000/EleutherAI__pythia-410m-seed1/results_2024-08-19T14-03-42.866090.json +96 -0
  30. pythia-410m-seed1/step113000/EleutherAI__pythia-410m-seed1/results_2024-08-19T14-05-53.516831.json +96 -0
  31. pythia-410m-seed1/step114000/EleutherAI__pythia-410m-seed1/results_2024-08-19T14-08-07.802761.json +96 -0
  32. pythia-410m-seed1/step115000/EleutherAI__pythia-410m-seed1/results_2024-08-19T14-10-18.845461.json +96 -0
  33. pythia-410m-seed1/step116000/EleutherAI__pythia-410m-seed1/results_2024-08-19T14-12-39.685880.json +96 -0
  34. pythia-410m-seed1/step117000/EleutherAI__pythia-410m-seed1/results_2024-08-19T14-14-54.387325.json +96 -0
  35. pythia-410m-seed1/step118000/EleutherAI__pythia-410m-seed1/results_2024-08-19T14-17-09.761598.json +96 -0
  36. pythia-410m-seed1/step119000/EleutherAI__pythia-410m-seed1/results_2024-08-19T14-19-24.999819.json +96 -0
  37. pythia-410m-seed1/step12000/EleutherAI__pythia-410m-seed1/results_2024-08-19T10-16-47.118030.json +96 -0
  38. pythia-410m-seed1/step120000/EleutherAI__pythia-410m-seed1/results_2024-08-19T14-21-35.912998.json +96 -0
  39. pythia-410m-seed1/step120000/EleutherAI__pythia-410m-seed1/results_2024-08-19T16-17-01.420402.json +96 -0
  40. pythia-410m-seed1/step121000/EleutherAI__pythia-410m-seed1/results_2024-08-19T14-23-50.851329.json +96 -0
  41. pythia-410m-seed1/step122000/EleutherAI__pythia-410m-seed1/results_2024-08-19T14-26-01.715237.json +96 -0
  42. pythia-410m-seed1/step123000/EleutherAI__pythia-410m-seed1/results_2024-08-19T14-28-16.468536.json +96 -0
  43. pythia-410m-seed1/step124000/EleutherAI__pythia-410m-seed1/results_2024-08-19T14-30-31.303548.json +96 -0
  44. pythia-410m-seed1/step125000/EleutherAI__pythia-410m-seed1/results_2024-08-19T14-32-41.102456.json +96 -0
  45. pythia-410m-seed1/step126000/EleutherAI__pythia-410m-seed1/results_2024-08-19T14-34-47.156887.json +96 -0
  46. pythia-410m-seed1/step127000/EleutherAI__pythia-410m-seed1/results_2024-08-19T14-37-02.387203.json +96 -0
  47. pythia-410m-seed1/step128/EleutherAI__pythia-410m-seed1/results_2024-08-19T09-46-09.637000.json +96 -0
  48. pythia-410m-seed1/step128/EleutherAI__pythia-410m-seed1/results_2024-08-19T15-30-54.888518.json +96 -0
  49. pythia-410m-seed1/step128000/EleutherAI__pythia-410m-seed1/results_2024-08-19T14-39-13.635461.json +96 -0
  50. pythia-410m-seed1/step129000/EleutherAI__pythia-410m-seed1/results_2024-08-19T14-41-17.419545.json +96 -0
pythia-14m-seed1/step44000/EleutherAI__pythia-14m-seed1/results_2024-08-19T05-50-38.556900.json ADDED
@@ -0,0 +1,2591 @@
{
  "results": {
    "blimp_wh_vs_that_with_gap_long_distance": {"acc,none": 0.133, "acc_stderr,none": 0.010743669132397295, "alias": "blimp_wh_vs_that_with_gap_long_distance"},
    "blimp_wh_vs_that_with_gap": {"acc,none": 0.236, "acc_stderr,none": 0.013434451402438602, "alias": "blimp_wh_vs_that_with_gap"},
    "blimp_wh_vs_that_no_gap_long_distance": {"acc,none": 0.947, "acc_stderr,none": 0.00708810561724649, "alias": "blimp_wh_vs_that_no_gap_long_distance"},
    "blimp_wh_vs_that_no_gap": {"acc,none": 0.921, "acc_stderr,none": 0.00853415677333337, "alias": "blimp_wh_vs_that_no_gap"},
    "blimp_wh_questions_subject_gap_long_distance": {"acc,none": 0.935, "acc_stderr,none": 0.007799733061832043, "alias": "blimp_wh_questions_subject_gap_long_distance"},
    "blimp_wh_questions_subject_gap": {"acc,none": 0.873, "acc_stderr,none": 0.010534798620855644, "alias": "blimp_wh_questions_subject_gap"},
    "blimp_wh_questions_object_gap": {"acc,none": 0.435, "acc_stderr,none": 0.015685057252717346, "alias": "blimp_wh_questions_object_gap"},
    "blimp_wh_island": {"acc,none": 0.673, "acc_stderr,none": 0.014842213153411162, "alias": "blimp_wh_island"},
    "blimp_transitive": {"acc,none": 0.836, "acc_stderr,none": 0.011715000693181425, "alias": "blimp_transitive"},
    "blimp_tough_vs_raising_2": {"acc,none": 0.74, "acc_stderr,none": 0.013877773329774218, "alias": "blimp_tough_vs_raising_2"},
    "blimp_tough_vs_raising_1": {"acc,none": 0.391, "acc_stderr,none": 0.015438826294681775, "alias": "blimp_tough_vs_raising_1"},
    "blimp_superlative_quantifiers_2": {"acc,none": 0.23, "acc_stderr,none": 0.01331455133593608, "alias": "blimp_superlative_quantifiers_2"},
    "blimp_superlative_quantifiers_1": {"acc,none": 0.068, "acc_stderr,none": 0.007964887911291622, "alias": "blimp_superlative_quantifiers_1"},
    "blimp_sentential_subject_island": {"acc,none": 0.377, "acc_stderr,none": 0.0153331701257798, "alias": "blimp_sentential_subject_island"},
    "blimp_sentential_negation_npi_scope": {"acc,none": 0.518, "acc_stderr,none": 0.01580904569940659, "alias": "blimp_sentential_negation_npi_scope"},
    "blimp_sentential_negation_npi_licensor_present": {"acc,none": 0.99, "acc_stderr,none": 0.003148000938676784, "alias": "blimp_sentential_negation_npi_licensor_present"},
    "blimp_regular_plural_subject_verb_agreement_2": {"acc,none": 0.814, "acc_stderr,none": 0.012310790208412926, "alias": "blimp_regular_plural_subject_verb_agreement_2"},
    "blimp_regular_plural_subject_verb_agreement_1": {"acc,none": 0.87, "acc_stderr,none": 0.010640169792499236, "alias": "blimp_regular_plural_subject_verb_agreement_1"},
    "blimp_principle_A_reconstruction": {"acc,none": 0.417, "acc_stderr,none": 0.015599819048769583, "alias": "blimp_principle_A_reconstruction"},
    "blimp_principle_A_domain_3": {"acc,none": 0.62, "acc_stderr,none": 0.015356947477797658, "alias": "blimp_principle_A_domain_3"},
    "blimp_principle_A_domain_2": {"acc,none": 0.642, "acc_stderr,none": 0.015167928865407633, "alias": "blimp_principle_A_domain_2"},
    "blimp_principle_A_domain_1": {"acc,none": 0.91, "acc_stderr,none": 0.009054390204866477, "alias": "blimp_principle_A_domain_1"},
    "blimp_principle_A_case_2": {"acc,none": 0.814, "acc_stderr,none": 0.012310790208412926, "alias": "blimp_principle_A_case_2"},
    "blimp_principle_A_case_1": {"acc,none": 1.0, "acc_stderr,none": 0.0, "alias": "blimp_principle_A_case_1"},
    "blimp_principle_A_c_command": {"acc,none": 0.585, "acc_stderr,none": 0.015589035185604594, "alias": "blimp_principle_A_c_command"},
    "blimp_passive_2": {"acc,none": 0.879, "acc_stderr,none": 0.010318210380946179, "alias": "blimp_passive_2"},
    "blimp_passive_1": {"acc,none": 0.883, "acc_stderr,none": 0.010169287802713345, "alias": "blimp_passive_1"},
    "blimp_only_npi_scope": {"acc,none": 0.702, "acc_stderr,none": 0.014470846741134585, "alias": "blimp_only_npi_scope"},
    "blimp_only_npi_licensor_present": {"acc,none": 0.919, "acc_stderr,none": 0.008632121032139986, "alias": "blimp_only_npi_licensor_present"},
    "blimp_npi_present_2": {"acc,none": 0.37, "acc_stderr,none": 0.015275252316519466, "alias": "blimp_npi_present_2"},
    "blimp_npi_present_1": {"acc,none": 0.303, "acc_stderr,none": 0.014539683710535194, "alias": "blimp_npi_present_1"},
    "blimp_matrix_question_npi_licensor_present": {"acc,none": 0.182, "acc_stderr,none": 0.0122075806376622, "alias": "blimp_matrix_question_npi_licensor_present"},
    "blimp_left_branch_island_simple_question": {"acc,none": 0.324, "acc_stderr,none": 0.014806864733738984, "alias": "blimp_left_branch_island_simple_question"},
    "blimp_left_branch_island_echo_question": {"acc,none": 0.391, "acc_stderr,none": 0.015438826294681775, "alias": "blimp_left_branch_island_echo_question"},
    "blimp_irregular_plural_subject_verb_agreement_2": {"acc,none": 0.845, "acc_stderr,none": 0.011450157470799522, "alias": "blimp_irregular_plural_subject_verb_agreement_2"},
    "blimp_irregular_plural_subject_verb_agreement_1": {"acc,none": 0.794, "acc_stderr,none": 0.012795613612786522, "alias": "blimp_irregular_plural_subject_verb_agreement_1"},
    "blimp_irregular_past_participle_verbs": {"acc,none": 0.885, "acc_stderr,none": 0.010093407594904551, "alias": "blimp_irregular_past_participle_verbs"},
    "blimp_irregular_past_participle_adjectives": {"acc,none": 0.986, "acc_stderr,none": 0.003717232548256541, "alias": "blimp_irregular_past_participle_adjectives"},
    "blimp_intransitive": {"acc,none": 0.644, "acc_stderr,none": 0.015149042659306678, "alias": "blimp_intransitive"},
    "blimp_inchoative": {"acc,none": 0.531, "acc_stderr,none": 0.015788865959538965, "alias": "blimp_inchoative"},
    "blimp_expletive_it_object_raising": {"acc,none": 0.739, "acc_stderr,none": 0.013895037677965183, "alias": "blimp_expletive_it_object_raising"},
    "blimp_existential_there_subject_raising": {"acc,none": 0.779, "acc_stderr,none": 0.01312750285969618, "alias": "blimp_existential_there_subject_raising"},
    "blimp_existential_there_quantifiers_2": {"acc,none": 0.296, "acc_stderr,none": 0.01444273494157504, "alias": "blimp_existential_there_quantifiers_2"},
    "blimp_existential_there_quantifiers_1": {"acc,none": 0.95, "acc_stderr,none": 0.006895472974897918, "alias": "blimp_existential_there_quantifiers_1"},
    "blimp_existential_there_object_raising": {"acc,none": 0.804, "acc_stderr,none": 0.012559527926707347, "alias": "blimp_existential_there_object_raising"},
    "blimp_ellipsis_n_bar_2": {"acc,none": 0.778, "acc_stderr,none": 0.013148721948877349, "alias": "blimp_ellipsis_n_bar_2"},
    "blimp_ellipsis_n_bar_1": {"acc,none": 0.512, "acc_stderr,none": 0.015814743314581686, "alias": "blimp_ellipsis_n_bar_1"},
    "blimp_drop_argument": {"acc,none": 0.747, "acc_stderr,none": 0.013754278613587126, "alias": "blimp_drop_argument"},
    "blimp_distractor_agreement_relative_clause": {"acc,none": 0.229, "acc_stderr,none": 0.013294199326613684, "alias": "blimp_distractor_agreement_relative_clause"},
    "blimp_distractor_agreement_relational_noun": {"acc,none": 0.321, "acc_stderr,none": 0.01477082181793475, "alias": "blimp_distractor_agreement_relational_noun"},
    "blimp_determiner_noun_agreement_with_adjective_1": {"acc,none": 0.867, "acc_stderr,none": 0.010743669132397295, "alias": "blimp_determiner_noun_agreement_with_adjective_1"},
    "blimp_determiner_noun_agreement_with_adj_irregular_2": {"acc,none": 0.825, "acc_stderr,none": 0.012021627157731998, "alias": "blimp_determiner_noun_agreement_with_adj_irregular_2"},
    "blimp_determiner_noun_agreement_with_adj_irregular_1": {"acc,none": 0.756, "acc_stderr,none": 0.013588548437881386, "alias": "blimp_determiner_noun_agreement_with_adj_irregular_1"},
    "blimp_determiner_noun_agreement_with_adj_2": {"acc,none": 0.862, "acc_stderr,none": 0.010912152632504508, "alias": "blimp_determiner_noun_agreement_with_adj_2"},
    "blimp_determiner_noun_agreement_irregular_2": {"acc,none": 0.844, "acc_stderr,none": 0.011480235006122295, "alias": "blimp_determiner_noun_agreement_irregular_2"},
    "blimp_determiner_noun_agreement_irregular_1": {"acc,none": 0.802, "acc_stderr,none": 0.012607733934175283, "alias": "blimp_determiner_noun_agreement_irregular_1"},
    "blimp_determiner_noun_agreement_2": {"acc,none": 0.933, "acc_stderr,none": 0.007910345983177575, "alias": "blimp_determiner_noun_agreement_2"},
    "blimp_determiner_noun_agreement_1": {"acc,none": 0.916, "acc_stderr,none": 0.008776162089491113, "alias": "blimp_determiner_noun_agreement_1"},
    "blimp_coordinate_structure_constraint_object_extraction": {"acc,none": 0.536, "acc_stderr,none": 0.015778243024904673, "alias": "blimp_coordinate_structure_constraint_object_extraction"},
    "blimp_coordinate_structure_constraint_complex_left_branch": {"acc,none": 0.366, "acc_stderr,none": 0.0152406127264056, "alias": "blimp_coordinate_structure_constraint_complex_left_branch"},
    "blimp_complex_NP_island": {"acc,none": 0.384, "acc_stderr,none": 0.015387682761896958, "alias": "blimp_complex_NP_island"},
    "blimp_causative": {"acc,none": 0.619, "acc_stderr,none": 0.015364734787007391, "alias": "blimp_causative"},
    "blimp_animate_subject_trans": {"acc,none": 0.864, "acc_stderr,none": 0.01084535023047304, "alias": "blimp_animate_subject_trans"},
    "blimp_animate_subject_passive": {"acc,none": 0.728, "acc_stderr,none": 0.014078856992462642, "alias": "blimp_animate_subject_passive"},
    "blimp_anaphor_number_agreement": {"acc,none": 0.957, "acc_stderr,none": 0.006418114379799739, "alias": "blimp_anaphor_number_agreement"},
    "blimp_anaphor_gender_agreement": {"acc,none": 0.821, "acc_stderr,none": 0.012128730605719056, "alias": "blimp_anaphor_gender_agreement"},
    "blimp_adjunct_island": {"acc,none": 0.785, "acc_stderr,none": 0.01299784381903183, "alias": "blimp_adjunct_island"}
  },
  "group_subtasks": {
    "blimp_adjunct_island": [],
    "blimp_anaphor_gender_agreement": [],
    "blimp_anaphor_number_agreement": [],
    "blimp_animate_subject_passive": [],
    "blimp_animate_subject_trans": [],
    "blimp_causative": [],
    "blimp_complex_NP_island": [],
    "blimp_coordinate_structure_constraint_complex_left_branch": [],
    "blimp_coordinate_structure_constraint_object_extraction": [],
    "blimp_determiner_noun_agreement_1": [],
    "blimp_determiner_noun_agreement_2": [],
    "blimp_determiner_noun_agreement_irregular_1": [],
    "blimp_determiner_noun_agreement_irregular_2": [],
    "blimp_determiner_noun_agreement_with_adj_2": [],
    "blimp_determiner_noun_agreement_with_adj_irregular_1": [],
    "blimp_determiner_noun_agreement_with_adj_irregular_2": [],
    "blimp_determiner_noun_agreement_with_adjective_1": [],
    "blimp_distractor_agreement_relational_noun": [],
    "blimp_distractor_agreement_relative_clause": [],
    "blimp_drop_argument": [],
    "blimp_ellipsis_n_bar_1": [],
    "blimp_ellipsis_n_bar_2": [],
    "blimp_existential_there_object_raising": [],
    "blimp_existential_there_quantifiers_1": [],
    "blimp_existential_there_quantifiers_2": [],
    "blimp_existential_there_subject_raising": [],
    "blimp_expletive_it_object_raising": [],
    "blimp_inchoative": [],
    "blimp_intransitive": [],
    "blimp_irregular_past_participle_adjectives": [],
    "blimp_irregular_past_participle_verbs": [],
    "blimp_irregular_plural_subject_verb_agreement_1": [],
    "blimp_irregular_plural_subject_verb_agreement_2": [],
    "blimp_left_branch_island_echo_question": [],
    "blimp_left_branch_island_simple_question": [],
    "blimp_matrix_question_npi_licensor_present": [],
    "blimp_npi_present_1": [],
    "blimp_npi_present_2": [],
    "blimp_only_npi_licensor_present": [],
    "blimp_only_npi_scope": [],
    "blimp_passive_1": [],
    "blimp_passive_2": [],
    "blimp_principle_A_c_command": [],
    "blimp_principle_A_case_1": [],
    "blimp_principle_A_case_2": [],
    "blimp_principle_A_domain_1": [],
    "blimp_principle_A_domain_2": [],
    "blimp_principle_A_domain_3": [],
    "blimp_principle_A_reconstruction": [],
    "blimp_regular_plural_subject_verb_agreement_1": [],
    "blimp_regular_plural_subject_verb_agreement_2": [],
    "blimp_sentential_negation_npi_licensor_present": [],
    "blimp_sentential_negation_npi_scope": [],
    "blimp_sentential_subject_island": [],
    "blimp_superlative_quantifiers_1": [],
    "blimp_superlative_quantifiers_2": [],
    "blimp_tough_vs_raising_1": [],
    "blimp_tough_vs_raising_2": [],
    "blimp_transitive": [],
    "blimp_wh_island": [],
    "blimp_wh_questions_object_gap": [],
    "blimp_wh_questions_subject_gap": [],
    "blimp_wh_questions_subject_gap_long_distance": [],
    "blimp_wh_vs_that_no_gap": [],
    "blimp_wh_vs_that_no_gap_long_distance": [],
    "blimp_wh_vs_that_with_gap": [],
    "blimp_wh_vs_that_with_gap_long_distance": []
  },
  "configs": {
    "blimp_adjunct_island": {"task": "blimp_adjunct_island", "group": "blimp", "dataset_path": "blimp", "dataset_name": "adjunct_island", "validation_split": "train", "doc_to_text": "", "doc_to_target": 0, "doc_to_choice": "{{[sentence_good, sentence_bad]}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [{"metric": "acc"}], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", "metadata": {"version": 1.0}},
    "blimp_anaphor_gender_agreement": {"task": "blimp_anaphor_gender_agreement", "group": "blimp", "dataset_path": "blimp", "dataset_name": "anaphor_gender_agreement", "validation_split": "train", "doc_to_text": "", "doc_to_target": 0, "doc_to_choice": "{{[sentence_good, sentence_bad]}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [{"metric": "acc"}], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", "metadata": {"version": 1.0}},
    "blimp_anaphor_number_agreement": {"task": "blimp_anaphor_number_agreement", "group": "blimp", "dataset_path": "blimp", "dataset_name": "anaphor_number_agreement", "validation_split": "train", "doc_to_text": "", "doc_to_target": 0, "doc_to_choice": "{{[sentence_good, sentence_bad]}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [{"metric": "acc"}], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", "metadata": {"version": 1.0}},
    "blimp_animate_subject_passive": {"task": "blimp_animate_subject_passive", "group": "blimp", "dataset_path": "blimp", "dataset_name": "animate_subject_passive", "validation_split": "train", "doc_to_text": "", "doc_to_target": 0, "doc_to_choice": "{{[sentence_good, sentence_bad]}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [{"metric": "acc"}], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", "metadata": {"version": 1.0}},
    "blimp_animate_subject_trans": {"task": "blimp_animate_subject_trans", "group": "blimp", "dataset_path": "blimp", "dataset_name": "animate_subject_trans", "validation_split": "train", "doc_to_text": "", "doc_to_target": 0, "doc_to_choice": "{{[sentence_good, sentence_bad]}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [{"metric": "acc"}], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", "metadata": {"version": 1.0}},
    "blimp_causative": {"task": "blimp_causative", "group": "blimp", "dataset_path": "blimp", "dataset_name": "causative", "validation_split": "train", "doc_to_text": "", "doc_to_target": 0, "doc_to_choice": "{{[sentence_good, sentence_bad]}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [{"metric": "acc"}], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", "metadata": {"version": 1.0}},
    "blimp_complex_NP_island": {"task": "blimp_complex_NP_island", "group": "blimp", "dataset_path": "blimp", "dataset_name": "complex_NP_island", "validation_split": "train", "doc_to_text": "", "doc_to_target": 0, "doc_to_choice": "{{[sentence_good, sentence_bad]}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [{"metric": "acc"}], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", "metadata": {"version": 1.0}},
    "blimp_coordinate_structure_constraint_complex_left_branch": {"task": "blimp_coordinate_structure_constraint_complex_left_branch", "group": "blimp", "dataset_path": "blimp", "dataset_name": "coordinate_structure_constraint_complex_left_branch", "validation_split": "train", "doc_to_text": "", "doc_to_target": 0, "doc_to_choice": "{{[sentence_good, sentence_bad]}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [{"metric": "acc"}], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", "metadata": {"version": 1.0}},
    "blimp_coordinate_structure_constraint_object_extraction": {"task": "blimp_coordinate_structure_constraint_object_extraction", "group": "blimp", "dataset_path": "blimp", "dataset_name": "coordinate_structure_constraint_object_extraction", "validation_split": "train", "doc_to_text": "", "doc_to_target": 0, "doc_to_choice": "{{[sentence_good, sentence_bad]}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [{"metric": "acc"}], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", "metadata": {"version": 1.0}},
    "blimp_determiner_noun_agreement_1": {"task": "blimp_determiner_noun_agreement_1", "group": "blimp", "dataset_path": "blimp", "dataset_name": "determiner_noun_agreement_1", "validation_split": "train", "doc_to_text": "", "doc_to_target": 0, "doc_to_choice": "{{[sentence_good, sentence_bad]}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [{"metric": "acc"}], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", "metadata": {"version": 1.0}},
    "blimp_determiner_noun_agreement_2": {"task": "blimp_determiner_noun_agreement_2", "group": "blimp", "dataset_path": "blimp", "dataset_name": "determiner_noun_agreement_2", "validation_split": "train", "doc_to_text": "", "doc_to_target": 0, "doc_to_choice": "{{[sentence_good, sentence_bad]}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [{"metric": "acc"}], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", "metadata": {"version": 1.0}},
    "blimp_determiner_noun_agreement_irregular_1": {"task": "blimp_determiner_noun_agreement_irregular_1", "group": "blimp", "dataset_path": "blimp", "dataset_name": "determiner_noun_agreement_irregular_1", "validation_split": "train", "doc_to_text": "", "doc_to_target": 0, "doc_to_choice": "{{[sentence_good, sentence_bad]}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [{"metric": "acc"}], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", "metadata": {"version": 1.0}},
    "blimp_determiner_noun_agreement_irregular_2": {"task": "blimp_determiner_noun_agreement_irregular_2", "group": "blimp", "dataset_path": "blimp", "dataset_name": "determiner_noun_agreement_irregular_2", "validation_split": "train", "doc_to_text": "", "doc_to_target": 0, "doc_to_choice": "{{[sentence_good, sentence_bad]}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [{"metric": "acc"}], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", "metadata": {"version": 1.0}},
    "blimp_determiner_noun_agreement_with_adj_2": {"task": "blimp_determiner_noun_agreement_with_adj_2", "group": "blimp", "dataset_path": "blimp", "dataset_name": "determiner_noun_agreement_with_adj_2", "validation_split": "train", "doc_to_text": "", "doc_to_target": 0, "doc_to_choice": "{{[sentence_good, sentence_bad]}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [{"metric": "acc"}], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", "metadata": {"version": 1.0}},
    "blimp_determiner_noun_agreement_with_adj_irregular_1": {"task": "blimp_determiner_noun_agreement_with_adj_irregular_1", "group": "blimp", "dataset_path": "blimp", "dataset_name": "determiner_noun_agreement_with_adj_irregular_1", "validation_split": "train", "doc_to_text": "", "doc_to_target": 0, "doc_to_choice": "{{[sentence_good, sentence_bad]}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [{"metric": "acc"}], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", "metadata": {"version": 1.0}},
    "blimp_determiner_noun_agreement_with_adj_irregular_2": {"task": "blimp_determiner_noun_agreement_with_adj_irregular_2", "group": "blimp", "dataset_path": "blimp", "dataset_name": "determiner_noun_agreement_with_adj_irregular_2", "validation_split": "train", "doc_to_text": "", "doc_to_target": 0, "doc_to_choice": "{{[sentence_good, sentence_bad]}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [{"metric": "acc"}], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", "metadata": {"version": 1.0}},
    "blimp_determiner_noun_agreement_with_adjective_1": {"task": "blimp_determiner_noun_agreement_with_adjective_1", "group": "blimp", "dataset_path": "blimp", "dataset_name": "determiner_noun_agreement_with_adjective_1", "validation_split": "train", "doc_to_text": "", "doc_to_target": 0, "doc_to_choice": "{{[sentence_good, sentence_bad]}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [{"metric": "acc"}], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", "metadata": {"version": 1.0}},
    "blimp_distractor_agreement_relational_noun": {"task": "blimp_distractor_agreement_relational_noun", "group": "blimp", "dataset_path": "blimp", "dataset_name": "distractor_agreement_relational_noun", "validation_split": "train", "doc_to_text": "", "doc_to_target": 0, "doc_to_choice": "{{[sentence_good, sentence_bad]}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [{"metric": "acc"}], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", "metadata": {"version": 1.0}},
    "blimp_distractor_agreement_relative_clause": {"task": "blimp_distractor_agreement_relative_clause", "group": "blimp", "dataset_path": "blimp", "dataset_name": "distractor_agreement_relative_clause", "validation_split": "train", "doc_to_text": "", "doc_to_target": 0, "doc_to_choice": "{{[sentence_good, sentence_bad]}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [{"metric": "acc"}], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", "metadata": {"version": 1.0}},
    "blimp_drop_argument": {"task": "blimp_drop_argument", "group": "blimp", "dataset_path": "blimp", "dataset_name": "drop_argument", "validation_split": "train", "doc_to_text": "", "doc_to_target": 0, "doc_to_choice": "{{[sentence_good, sentence_bad]}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [{"metric": "acc"}], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", "metadata": {"version": 1.0}},
    "blimp_ellipsis_n_bar_1": {"task": "blimp_ellipsis_n_bar_1", "group": "blimp", "dataset_path": "blimp", "dataset_name": "ellipsis_n_bar_1", "validation_split": "train", "doc_to_text": "", "doc_to_target": 0, "doc_to_choice": "{{[sentence_good, sentence_bad]}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [{"metric": "acc"}], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", "metadata": {"version": 1.0}},
    "blimp_ellipsis_n_bar_2": {"task": "blimp_ellipsis_n_bar_2", "group": "blimp", "dataset_path": "blimp", "dataset_name": "ellipsis_n_bar_2", "validation_split": "train", "doc_to_text": "", "doc_to_target": 0, "doc_to_choice": "{{[sentence_good, sentence_bad]}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [{"metric": "acc"}], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", "metadata": {"version": 1.0}},
    "blimp_existential_there_object_raising": {"task": "blimp_existential_there_object_raising", "group": "blimp", "dataset_path": "blimp", "dataset_name": "existential_there_object_raising", "validation_split": "train", "doc_to_text": "", "doc_to_target": 0, "doc_to_choice": "{{[sentence_good, sentence_bad]}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [{"metric": "acc"}], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", "metadata": {"version": 1.0}},
    "blimp_existential_there_quantifiers_1": {"task": "blimp_existential_there_quantifiers_1", "group": "blimp", "dataset_path": "blimp", "dataset_name": "existential_there_quantifiers_1", "validation_split": "train", "doc_to_text": "", "doc_to_target": 0, "doc_to_choice": "{{[sentence_good, sentence_bad]}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [{"metric": "acc"}], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", "metadata": {"version": 1.0}},
    "blimp_existential_there_quantifiers_2": {"task": "blimp_existential_there_quantifiers_2", "group": "blimp", "dataset_path": "blimp", "dataset_name": "existential_there_quantifiers_2", "validation_split": "train", "doc_to_text": "", "doc_to_target": 0, "doc_to_choice": "{{[sentence_good, sentence_bad]}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [{"metric": "acc"}], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", "metadata": {"version": 1.0}},
    "blimp_existential_there_subject_raising": {"task": "blimp_existential_there_subject_raising", "group": "blimp", "dataset_path": "blimp", "dataset_name": "existential_there_subject_raising", "validation_split": "train", "doc_to_text": "", "doc_to_target": 0, "doc_to_choice": "{{[sentence_good, sentence_bad]}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [{"metric": "acc"}], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", "metadata": {"version": 1.0}},
    "blimp_expletive_it_object_raising": {"task": "blimp_expletive_it_object_raising", "group": "blimp", "dataset_path": "blimp", "dataset_name": "expletive_it_object_raising", "validation_split": "train", "doc_to_text": "", "doc_to_target": 0, "doc_to_choice": "{{[sentence_good, sentence_bad]}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [{"metric": "acc"}], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", "metadata": {"version": 1.0}},
    "blimp_inchoative": {"task": "blimp_inchoative", "group": "blimp", "dataset_path": "blimp", "dataset_name": "inchoative", "validation_split": "train", "doc_to_text": "", "doc_to_target": 0, "doc_to_choice": "{{[sentence_good, sentence_bad]}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [{"metric": "acc"}], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", "metadata": {"version": 1.0}},
    "blimp_intransitive": {"task": "blimp_intransitive", "group": "blimp", "dataset_path": "blimp", "dataset_name": "intransitive", "validation_split": "train", "doc_to_text": "", "doc_to_target": 0, "doc_to_choice": "{{[sentence_good, sentence_bad]}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [{"metric": "acc"}], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", "metadata": {"version": 1.0}},
    "blimp_irregular_past_participle_adjectives": {"task": "blimp_irregular_past_participle_adjectives", "group": "blimp", "dataset_path": "blimp", "dataset_name": "irregular_past_participle_adjectives", "validation_split": "train", "doc_to_text": "", "doc_to_target": 0, "doc_to_choice": "{{[sentence_good, sentence_bad]}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [{"metric": "acc"}], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", "metadata": {"version": 1.0}},
    "blimp_irregular_past_participle_verbs": {"task": "blimp_irregular_past_participle_verbs", "group": "blimp", "dataset_path": "blimp", "dataset_name": "irregular_past_participle_verbs", "validation_split": "train", "doc_to_text": "", "doc_to_target": 0, "doc_to_choice": "{{[sentence_good, sentence_bad]}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [{"metric": "acc"}], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", "metadata": {"version": 1.0}},
    "blimp_irregular_plural_subject_verb_agreement_1": {"task": "blimp_irregular_plural_subject_verb_agreement_1", "group": "blimp", "dataset_path": "blimp", "dataset_name": "irregular_plural_subject_verb_agreement_1", "validation_split": "train", "doc_to_text": "", "doc_to_target": 0, "doc_to_choice": "{{[sentence_good, sentence_bad]}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [{"metric": "acc"}], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", "metadata": {"version": 1.0}},
    "blimp_irregular_plural_subject_verb_agreement_2": {"task": "blimp_irregular_plural_subject_verb_agreement_2", "group": "blimp", "dataset_path": "blimp", "dataset_name": "irregular_plural_subject_verb_agreement_2", "validation_split": "train", "doc_to_text": "", "doc_to_target": 0, "doc_to_choice": "{{[sentence_good, sentence_bad]}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [{"metric": "acc"}], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", "metadata": {"version": 1.0}},
    "blimp_left_branch_island_echo_question": {"task": "blimp_left_branch_island_echo_question", "group": "blimp", "dataset_path": "blimp", "dataset_name": "left_branch_island_echo_question", "validation_split": "train", "doc_to_text": "", "doc_to_target": 0, "doc_to_choice": "{{[sentence_good, sentence_bad]}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [{"metric": "acc"}], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", "metadata": {"version": 1.0}},
    "blimp_left_branch_island_simple_question": {"task": "blimp_left_branch_island_simple_question", "group": "blimp", "dataset_path": "blimp", "dataset_name": "left_branch_island_simple_question", "validation_split": "train", "doc_to_text": "", "doc_to_target": 0, "doc_to_choice": "{{[sentence_good, sentence_bad]}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [{"metric": "acc"}], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", "metadata": {"version": 1.0}},
    "blimp_matrix_question_npi_licensor_present": {"task": "blimp_matrix_question_npi_licensor_present", "group": "blimp", "dataset_path": "blimp", "dataset_name": "matrix_question_npi_licensor_present", "validation_split": "train", "doc_to_text": "", "doc_to_target": 0, "doc_to_choice": "{{[sentence_good, sentence_bad]}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [{"metric": "acc"}], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", "metadata": {"version": 1.0}},
    "blimp_npi_present_1": {"task": "blimp_npi_present_1", "group": "blimp", "dataset_path": "blimp", "dataset_name": "npi_present_1", "validation_split": "train", "doc_to_text": "", "doc_to_target": 0, "doc_to_choice": "{{[sentence_good, sentence_bad]}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [{"metric": "acc"}], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", "metadata": {"version": 1.0}},
    "blimp_npi_present_2": {"task": "blimp_npi_present_2", "group": "blimp", "dataset_path": "blimp", "dataset_name": "npi_present_2", "validation_split": "train", "doc_to_text": "", "doc_to_target": 0, "doc_to_choice": "{{[sentence_good, sentence_bad]}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [{"metric": "acc"}], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", "metadata": {"version": 1.0}},
    "blimp_only_npi_licensor_present": {"task": "blimp_only_npi_licensor_present", "group": "blimp", "dataset_path": "blimp", "dataset_name": "only_npi_licensor_present", "validation_split": "train", "doc_to_text": "", "doc_to_target": 0, "doc_to_choice": "{{[sentence_good, sentence_bad]}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [{"metric": "acc"}], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", "metadata": {"version": 1.0}},
    "blimp_only_npi_scope": {"task": "blimp_only_npi_scope", "group": "blimp", "dataset_path": "blimp", "dataset_name": "only_npi_scope", "validation_split": "train", "doc_to_text": "", "doc_to_target": 0, "doc_to_choice": "{{[sentence_good, sentence_bad]}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [{"metric": "acc"}], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", "metadata": {"version": 1.0}},
    "blimp_passive_1": {"task": "blimp_passive_1", "group": "blimp", "dataset_path": "blimp", "dataset_name": "passive_1", "validation_split": "train", "doc_to_text": "", "doc_to_target": 0, "doc_to_choice": "{{[sentence_good, sentence_bad]}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [{"metric": "acc"}], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", "metadata": {"version": 1.0}},
    "blimp_passive_2": {"task": "blimp_passive_2", "group": "blimp", "dataset_path": "blimp", "dataset_name": "passive_2", "validation_split": "train", "doc_to_text": "", "doc_to_target": 0, "doc_to_choice": "{{[sentence_good, sentence_bad]}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [{"metric": "acc"}], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", "metadata": {"version": 1.0}},
    "blimp_principle_A_c_command": {"task": "blimp_principle_A_c_command", "group": "blimp", "dataset_path": "blimp", "dataset_name": "principle_A_c_command", "validation_split": "train", "doc_to_text": "", "doc_to_target": 0, "doc_to_choice": "{{[sentence_good, sentence_bad]}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [{"metric": "acc"}], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", "metadata": {"version": 1.0}},
    "blimp_principle_A_case_1": {"task": "blimp_principle_A_case_1", "group": "blimp", "dataset_path": "blimp", "dataset_name": "principle_A_case_1", "validation_split": "train", "doc_to_text": "", "doc_to_target": 0, "doc_to_choice": "{{[sentence_good, sentence_bad]}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [{"metric": "acc"}], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}", "metadata": {"version": 1.0}},
    "blimp_principle_A_case_2": {"task": "blimp_principle_A_case_2", "group": "blimp", "dataset_path": "blimp", "dataset_name": "principle_A_case_2", "validation_split": "train", "doc_to_text": "", "doc_to_target": 0, "doc_to_choice": "{{[sentence_good, sentence_bad]}}", "description": "",
1563
+ "target_delimiter": " ",
1564
+ "fewshot_delimiter": "\n\n",
1565
+ "num_fewshot": 0,
1566
+ "metric_list": [
1567
+ {
1568
+ "metric": "acc"
1569
+ }
1570
+ ],
1571
+ "output_type": "multiple_choice",
1572
+ "repeats": 1,
1573
+ "should_decontaminate": true,
1574
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1575
+ "metadata": {
1576
+ "version": 1.0
1577
+ }
1578
+ },
1579
+ "blimp_principle_A_domain_1": {
1580
+ "task": "blimp_principle_A_domain_1",
1581
+ "group": "blimp",
1582
+ "dataset_path": "blimp",
1583
+ "dataset_name": "principle_A_domain_1",
1584
+ "validation_split": "train",
1585
+ "doc_to_text": "",
1586
+ "doc_to_target": 0,
1587
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1588
+ "description": "",
1589
+ "target_delimiter": " ",
1590
+ "fewshot_delimiter": "\n\n",
1591
+ "num_fewshot": 0,
1592
+ "metric_list": [
1593
+ {
1594
+ "metric": "acc"
1595
+ }
1596
+ ],
1597
+ "output_type": "multiple_choice",
1598
+ "repeats": 1,
1599
+ "should_decontaminate": true,
1600
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1601
+ "metadata": {
1602
+ "version": 1.0
1603
+ }
1604
+ },
1605
+ "blimp_principle_A_domain_2": {
1606
+ "task": "blimp_principle_A_domain_2",
1607
+ "group": "blimp",
1608
+ "dataset_path": "blimp",
1609
+ "dataset_name": "principle_A_domain_2",
1610
+ "validation_split": "train",
1611
+ "doc_to_text": "",
1612
+ "doc_to_target": 0,
1613
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1614
+ "description": "",
1615
+ "target_delimiter": " ",
1616
+ "fewshot_delimiter": "\n\n",
1617
+ "num_fewshot": 0,
1618
+ "metric_list": [
1619
+ {
1620
+ "metric": "acc"
1621
+ }
1622
+ ],
1623
+ "output_type": "multiple_choice",
1624
+ "repeats": 1,
1625
+ "should_decontaminate": true,
1626
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1627
+ "metadata": {
1628
+ "version": 1.0
1629
+ }
1630
+ },
1631
+ "blimp_principle_A_domain_3": {
1632
+ "task": "blimp_principle_A_domain_3",
1633
+ "group": "blimp",
1634
+ "dataset_path": "blimp",
1635
+ "dataset_name": "principle_A_domain_3",
1636
+ "validation_split": "train",
1637
+ "doc_to_text": "",
1638
+ "doc_to_target": 0,
1639
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1640
+ "description": "",
1641
+ "target_delimiter": " ",
1642
+ "fewshot_delimiter": "\n\n",
1643
+ "num_fewshot": 0,
1644
+ "metric_list": [
1645
+ {
1646
+ "metric": "acc"
1647
+ }
1648
+ ],
1649
+ "output_type": "multiple_choice",
1650
+ "repeats": 1,
1651
+ "should_decontaminate": true,
1652
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1653
+ "metadata": {
1654
+ "version": 1.0
1655
+ }
1656
+ },
1657
+ "blimp_principle_A_reconstruction": {
1658
+ "task": "blimp_principle_A_reconstruction",
1659
+ "group": "blimp",
1660
+ "dataset_path": "blimp",
1661
+ "dataset_name": "principle_A_reconstruction",
1662
+ "validation_split": "train",
1663
+ "doc_to_text": "",
1664
+ "doc_to_target": 0,
1665
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1666
+ "description": "",
1667
+ "target_delimiter": " ",
1668
+ "fewshot_delimiter": "\n\n",
1669
+ "num_fewshot": 0,
1670
+ "metric_list": [
1671
+ {
1672
+ "metric": "acc"
1673
+ }
1674
+ ],
1675
+ "output_type": "multiple_choice",
1676
+ "repeats": 1,
1677
+ "should_decontaminate": true,
1678
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1679
+ "metadata": {
1680
+ "version": 1.0
1681
+ }
1682
+ },
1683
+ "blimp_regular_plural_subject_verb_agreement_1": {
1684
+ "task": "blimp_regular_plural_subject_verb_agreement_1",
1685
+ "group": "blimp",
1686
+ "dataset_path": "blimp",
1687
+ "dataset_name": "regular_plural_subject_verb_agreement_1",
1688
+ "validation_split": "train",
1689
+ "doc_to_text": "",
1690
+ "doc_to_target": 0,
1691
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1692
+ "description": "",
1693
+ "target_delimiter": " ",
1694
+ "fewshot_delimiter": "\n\n",
1695
+ "num_fewshot": 0,
1696
+ "metric_list": [
1697
+ {
1698
+ "metric": "acc"
1699
+ }
1700
+ ],
1701
+ "output_type": "multiple_choice",
1702
+ "repeats": 1,
1703
+ "should_decontaminate": true,
1704
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1705
+ "metadata": {
1706
+ "version": 1.0
1707
+ }
1708
+ },
1709
+ "blimp_regular_plural_subject_verb_agreement_2": {
1710
+ "task": "blimp_regular_plural_subject_verb_agreement_2",
1711
+ "group": "blimp",
1712
+ "dataset_path": "blimp",
1713
+ "dataset_name": "regular_plural_subject_verb_agreement_2",
1714
+ "validation_split": "train",
1715
+ "doc_to_text": "",
1716
+ "doc_to_target": 0,
1717
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1718
+ "description": "",
1719
+ "target_delimiter": " ",
1720
+ "fewshot_delimiter": "\n\n",
1721
+ "num_fewshot": 0,
1722
+ "metric_list": [
1723
+ {
1724
+ "metric": "acc"
1725
+ }
1726
+ ],
1727
+ "output_type": "multiple_choice",
1728
+ "repeats": 1,
1729
+ "should_decontaminate": true,
1730
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1731
+ "metadata": {
1732
+ "version": 1.0
1733
+ }
1734
+ },
1735
+ "blimp_sentential_negation_npi_licensor_present": {
1736
+ "task": "blimp_sentential_negation_npi_licensor_present",
1737
+ "group": "blimp",
1738
+ "dataset_path": "blimp",
1739
+ "dataset_name": "sentential_negation_npi_licensor_present",
1740
+ "validation_split": "train",
1741
+ "doc_to_text": "",
1742
+ "doc_to_target": 0,
1743
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1744
+ "description": "",
1745
+ "target_delimiter": " ",
1746
+ "fewshot_delimiter": "\n\n",
1747
+ "num_fewshot": 0,
1748
+ "metric_list": [
1749
+ {
1750
+ "metric": "acc"
1751
+ }
1752
+ ],
1753
+ "output_type": "multiple_choice",
1754
+ "repeats": 1,
1755
+ "should_decontaminate": true,
1756
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1757
+ "metadata": {
1758
+ "version": 1.0
1759
+ }
1760
+ },
1761
+ "blimp_sentential_negation_npi_scope": {
1762
+ "task": "blimp_sentential_negation_npi_scope",
1763
+ "group": "blimp",
1764
+ "dataset_path": "blimp",
1765
+ "dataset_name": "sentential_negation_npi_scope",
1766
+ "validation_split": "train",
1767
+ "doc_to_text": "",
1768
+ "doc_to_target": 0,
1769
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1770
+ "description": "",
1771
+ "target_delimiter": " ",
1772
+ "fewshot_delimiter": "\n\n",
1773
+ "num_fewshot": 0,
1774
+ "metric_list": [
1775
+ {
1776
+ "metric": "acc"
1777
+ }
1778
+ ],
1779
+ "output_type": "multiple_choice",
1780
+ "repeats": 1,
1781
+ "should_decontaminate": true,
1782
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1783
+ "metadata": {
1784
+ "version": 1.0
1785
+ }
1786
+ },
1787
+ "blimp_sentential_subject_island": {
1788
+ "task": "blimp_sentential_subject_island",
1789
+ "group": "blimp",
1790
+ "dataset_path": "blimp",
1791
+ "dataset_name": "sentential_subject_island",
1792
+ "validation_split": "train",
1793
+ "doc_to_text": "",
1794
+ "doc_to_target": 0,
1795
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1796
+ "description": "",
1797
+ "target_delimiter": " ",
1798
+ "fewshot_delimiter": "\n\n",
1799
+ "num_fewshot": 0,
1800
+ "metric_list": [
1801
+ {
1802
+ "metric": "acc"
1803
+ }
1804
+ ],
1805
+ "output_type": "multiple_choice",
1806
+ "repeats": 1,
1807
+ "should_decontaminate": true,
1808
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1809
+ "metadata": {
1810
+ "version": 1.0
1811
+ }
1812
+ },
1813
+ "blimp_superlative_quantifiers_1": {
1814
+ "task": "blimp_superlative_quantifiers_1",
1815
+ "group": "blimp",
1816
+ "dataset_path": "blimp",
1817
+ "dataset_name": "superlative_quantifiers_1",
1818
+ "validation_split": "train",
1819
+ "doc_to_text": "",
1820
+ "doc_to_target": 0,
1821
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1822
+ "description": "",
1823
+ "target_delimiter": " ",
1824
+ "fewshot_delimiter": "\n\n",
1825
+ "num_fewshot": 0,
1826
+ "metric_list": [
1827
+ {
1828
+ "metric": "acc"
1829
+ }
1830
+ ],
1831
+ "output_type": "multiple_choice",
1832
+ "repeats": 1,
1833
+ "should_decontaminate": true,
1834
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1835
+ "metadata": {
1836
+ "version": 1.0
1837
+ }
1838
+ },
1839
+ "blimp_superlative_quantifiers_2": {
1840
+ "task": "blimp_superlative_quantifiers_2",
1841
+ "group": "blimp",
1842
+ "dataset_path": "blimp",
1843
+ "dataset_name": "superlative_quantifiers_2",
1844
+ "validation_split": "train",
1845
+ "doc_to_text": "",
1846
+ "doc_to_target": 0,
1847
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1848
+ "description": "",
1849
+ "target_delimiter": " ",
1850
+ "fewshot_delimiter": "\n\n",
1851
+ "num_fewshot": 0,
1852
+ "metric_list": [
1853
+ {
1854
+ "metric": "acc"
1855
+ }
1856
+ ],
1857
+ "output_type": "multiple_choice",
1858
+ "repeats": 1,
1859
+ "should_decontaminate": true,
1860
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1861
+ "metadata": {
1862
+ "version": 1.0
1863
+ }
1864
+ },
1865
+ "blimp_tough_vs_raising_1": {
1866
+ "task": "blimp_tough_vs_raising_1",
1867
+ "group": "blimp",
1868
+ "dataset_path": "blimp",
1869
+ "dataset_name": "tough_vs_raising_1",
1870
+ "validation_split": "train",
1871
+ "doc_to_text": "",
1872
+ "doc_to_target": 0,
1873
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1874
+ "description": "",
1875
+ "target_delimiter": " ",
1876
+ "fewshot_delimiter": "\n\n",
1877
+ "num_fewshot": 0,
1878
+ "metric_list": [
1879
+ {
1880
+ "metric": "acc"
1881
+ }
1882
+ ],
1883
+ "output_type": "multiple_choice",
1884
+ "repeats": 1,
1885
+ "should_decontaminate": true,
1886
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1887
+ "metadata": {
1888
+ "version": 1.0
1889
+ }
1890
+ },
1891
+ "blimp_tough_vs_raising_2": {
1892
+ "task": "blimp_tough_vs_raising_2",
1893
+ "group": "blimp",
1894
+ "dataset_path": "blimp",
1895
+ "dataset_name": "tough_vs_raising_2",
1896
+ "validation_split": "train",
1897
+ "doc_to_text": "",
1898
+ "doc_to_target": 0,
1899
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1900
+ "description": "",
1901
+ "target_delimiter": " ",
1902
+ "fewshot_delimiter": "\n\n",
1903
+ "num_fewshot": 0,
1904
+ "metric_list": [
1905
+ {
1906
+ "metric": "acc"
1907
+ }
1908
+ ],
1909
+ "output_type": "multiple_choice",
1910
+ "repeats": 1,
1911
+ "should_decontaminate": true,
1912
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1913
+ "metadata": {
1914
+ "version": 1.0
1915
+ }
1916
+ },
1917
+ "blimp_transitive": {
1918
+ "task": "blimp_transitive",
1919
+ "group": "blimp",
1920
+ "dataset_path": "blimp",
1921
+ "dataset_name": "transitive",
1922
+ "validation_split": "train",
1923
+ "doc_to_text": "",
1924
+ "doc_to_target": 0,
1925
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1926
+ "description": "",
1927
+ "target_delimiter": " ",
1928
+ "fewshot_delimiter": "\n\n",
1929
+ "num_fewshot": 0,
1930
+ "metric_list": [
1931
+ {
1932
+ "metric": "acc"
1933
+ }
1934
+ ],
1935
+ "output_type": "multiple_choice",
1936
+ "repeats": 1,
1937
+ "should_decontaminate": true,
1938
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1939
+ "metadata": {
1940
+ "version": 1.0
1941
+ }
1942
+ },
1943
+ "blimp_wh_island": {
1944
+ "task": "blimp_wh_island",
1945
+ "group": "blimp",
1946
+ "dataset_path": "blimp",
1947
+ "dataset_name": "wh_island",
1948
+ "validation_split": "train",
1949
+ "doc_to_text": "",
1950
+ "doc_to_target": 0,
1951
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1952
+ "description": "",
1953
+ "target_delimiter": " ",
1954
+ "fewshot_delimiter": "\n\n",
1955
+ "num_fewshot": 0,
1956
+ "metric_list": [
1957
+ {
1958
+ "metric": "acc"
1959
+ }
1960
+ ],
1961
+ "output_type": "multiple_choice",
1962
+ "repeats": 1,
1963
+ "should_decontaminate": true,
1964
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1965
+ "metadata": {
1966
+ "version": 1.0
1967
+ }
1968
+ },
1969
+ "blimp_wh_questions_object_gap": {
1970
+ "task": "blimp_wh_questions_object_gap",
1971
+ "group": "blimp",
1972
+ "dataset_path": "blimp",
1973
+ "dataset_name": "wh_questions_object_gap",
1974
+ "validation_split": "train",
1975
+ "doc_to_text": "",
1976
+ "doc_to_target": 0,
1977
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1978
+ "description": "",
1979
+ "target_delimiter": " ",
1980
+ "fewshot_delimiter": "\n\n",
1981
+ "num_fewshot": 0,
1982
+ "metric_list": [
1983
+ {
1984
+ "metric": "acc"
1985
+ }
1986
+ ],
1987
+ "output_type": "multiple_choice",
1988
+ "repeats": 1,
1989
+ "should_decontaminate": true,
1990
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1991
+ "metadata": {
1992
+ "version": 1.0
1993
+ }
1994
+ },
1995
+ "blimp_wh_questions_subject_gap": {
1996
+ "task": "blimp_wh_questions_subject_gap",
1997
+ "group": "blimp",
1998
+ "dataset_path": "blimp",
1999
+ "dataset_name": "wh_questions_subject_gap",
2000
+ "validation_split": "train",
2001
+ "doc_to_text": "",
2002
+ "doc_to_target": 0,
2003
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
2004
+ "description": "",
2005
+ "target_delimiter": " ",
2006
+ "fewshot_delimiter": "\n\n",
2007
+ "num_fewshot": 0,
2008
+ "metric_list": [
2009
+ {
2010
+ "metric": "acc"
2011
+ }
2012
+ ],
2013
+ "output_type": "multiple_choice",
2014
+ "repeats": 1,
2015
+ "should_decontaminate": true,
2016
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
2017
+ "metadata": {
2018
+ "version": 1.0
2019
+ }
2020
+ },
2021
+ "blimp_wh_questions_subject_gap_long_distance": {
2022
+ "task": "blimp_wh_questions_subject_gap_long_distance",
2023
+ "group": "blimp",
2024
+ "dataset_path": "blimp",
2025
+ "dataset_name": "wh_questions_subject_gap_long_distance",
2026
+ "validation_split": "train",
2027
+ "doc_to_text": "",
2028
+ "doc_to_target": 0,
2029
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
2030
+ "description": "",
2031
+ "target_delimiter": " ",
2032
+ "fewshot_delimiter": "\n\n",
2033
+ "num_fewshot": 0,
2034
+ "metric_list": [
2035
+ {
2036
+ "metric": "acc"
2037
+ }
2038
+ ],
2039
+ "output_type": "multiple_choice",
2040
+ "repeats": 1,
2041
+ "should_decontaminate": true,
2042
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
2043
+ "metadata": {
2044
+ "version": 1.0
2045
+ }
2046
+ },
2047
+ "blimp_wh_vs_that_no_gap": {
2048
+ "task": "blimp_wh_vs_that_no_gap",
2049
+ "group": "blimp",
2050
+ "dataset_path": "blimp",
2051
+ "dataset_name": "wh_vs_that_no_gap",
2052
+ "validation_split": "train",
2053
+ "doc_to_text": "",
2054
+ "doc_to_target": 0,
2055
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
2056
+ "description": "",
2057
+ "target_delimiter": " ",
2058
+ "fewshot_delimiter": "\n\n",
2059
+ "num_fewshot": 0,
2060
+ "metric_list": [
2061
+ {
2062
+ "metric": "acc"
2063
+ }
2064
+ ],
2065
+ "output_type": "multiple_choice",
2066
+ "repeats": 1,
2067
+ "should_decontaminate": true,
2068
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
2069
+ "metadata": {
2070
+ "version": 1.0
2071
+ }
2072
+ },
2073
+ "blimp_wh_vs_that_no_gap_long_distance": {
2074
+ "task": "blimp_wh_vs_that_no_gap_long_distance",
2075
+ "group": "blimp",
2076
+ "dataset_path": "blimp",
2077
+ "dataset_name": "wh_vs_that_no_gap_long_distance",
2078
+ "validation_split": "train",
2079
+ "doc_to_text": "",
2080
+ "doc_to_target": 0,
2081
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
2082
+ "description": "",
2083
+ "target_delimiter": " ",
2084
+ "fewshot_delimiter": "\n\n",
2085
+ "num_fewshot": 0,
2086
+ "metric_list": [
2087
+ {
2088
+ "metric": "acc"
2089
+ }
2090
+ ],
2091
+ "output_type": "multiple_choice",
2092
+ "repeats": 1,
2093
+ "should_decontaminate": true,
2094
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
2095
+ "metadata": {
2096
+ "version": 1.0
2097
+ }
2098
+ },
2099
+ "blimp_wh_vs_that_with_gap": {
2100
+ "task": "blimp_wh_vs_that_with_gap",
2101
+ "group": "blimp",
2102
+ "dataset_path": "blimp",
2103
+ "dataset_name": "wh_vs_that_with_gap",
2104
+ "validation_split": "train",
2105
+ "doc_to_text": "",
2106
+ "doc_to_target": 0,
2107
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
2108
+ "description": "",
2109
+ "target_delimiter": " ",
2110
+ "fewshot_delimiter": "\n\n",
2111
+ "num_fewshot": 0,
2112
+ "metric_list": [
2113
+ {
2114
+ "metric": "acc"
2115
+ }
2116
+ ],
2117
+ "output_type": "multiple_choice",
2118
+ "repeats": 1,
2119
+ "should_decontaminate": true,
2120
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
2121
+ "metadata": {
2122
+ "version": 1.0
2123
+ }
2124
+ },
2125
+ "blimp_wh_vs_that_with_gap_long_distance": {
2126
+ "task": "blimp_wh_vs_that_with_gap_long_distance",
2127
+ "group": "blimp",
2128
+ "dataset_path": "blimp",
2129
+ "dataset_name": "wh_vs_that_with_gap_long_distance",
2130
+ "validation_split": "train",
2131
+ "doc_to_text": "",
2132
+ "doc_to_target": 0,
2133
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
2134
+ "description": "",
2135
+ "target_delimiter": " ",
2136
+ "fewshot_delimiter": "\n\n",
2137
+ "num_fewshot": 0,
2138
+ "metric_list": [
2139
+ {
2140
+ "metric": "acc"
2141
+ }
2142
+ ],
2143
+ "output_type": "multiple_choice",
2144
+ "repeats": 1,
2145
+ "should_decontaminate": true,
2146
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
2147
+ "metadata": {
2148
+ "version": 1.0
2149
+ }
2150
+ }
2151
+ },
2152
+ "versions": {
2153
+ "blimp_adjunct_island": 1.0,
2154
+ "blimp_anaphor_gender_agreement": 1.0,
2155
+ "blimp_anaphor_number_agreement": 1.0,
2156
+ "blimp_animate_subject_passive": 1.0,
2157
+ "blimp_animate_subject_trans": 1.0,
2158
+ "blimp_causative": 1.0,
2159
+ "blimp_complex_NP_island": 1.0,
2160
+ "blimp_coordinate_structure_constraint_complex_left_branch": 1.0,
2161
+ "blimp_coordinate_structure_constraint_object_extraction": 1.0,
2162
+ "blimp_determiner_noun_agreement_1": 1.0,
2163
+ "blimp_determiner_noun_agreement_2": 1.0,
2164
+ "blimp_determiner_noun_agreement_irregular_1": 1.0,
2165
+ "blimp_determiner_noun_agreement_irregular_2": 1.0,
2166
+ "blimp_determiner_noun_agreement_with_adj_2": 1.0,
2167
+ "blimp_determiner_noun_agreement_with_adj_irregular_1": 1.0,
2168
+ "blimp_determiner_noun_agreement_with_adj_irregular_2": 1.0,
2169
+ "blimp_determiner_noun_agreement_with_adjective_1": 1.0,
2170
+ "blimp_distractor_agreement_relational_noun": 1.0,
2171
+ "blimp_distractor_agreement_relative_clause": 1.0,
2172
+ "blimp_drop_argument": 1.0,
2173
+ "blimp_ellipsis_n_bar_1": 1.0,
2174
+ "blimp_ellipsis_n_bar_2": 1.0,
2175
+ "blimp_existential_there_object_raising": 1.0,
2176
+ "blimp_existential_there_quantifiers_1": 1.0,
2177
+ "blimp_existential_there_quantifiers_2": 1.0,
2178
+ "blimp_existential_there_subject_raising": 1.0,
2179
+ "blimp_expletive_it_object_raising": 1.0,
2180
+ "blimp_inchoative": 1.0,
2181
+ "blimp_intransitive": 1.0,
2182
+ "blimp_irregular_past_participle_adjectives": 1.0,
2183
+ "blimp_irregular_past_participle_verbs": 1.0,
2184
+ "blimp_irregular_plural_subject_verb_agreement_1": 1.0,
2185
+ "blimp_irregular_plural_subject_verb_agreement_2": 1.0,
2186
+ "blimp_left_branch_island_echo_question": 1.0,
2187
+ "blimp_left_branch_island_simple_question": 1.0,
2188
+ "blimp_matrix_question_npi_licensor_present": 1.0,
2189
+ "blimp_npi_present_1": 1.0,
2190
+ "blimp_npi_present_2": 1.0,
2191
+ "blimp_only_npi_licensor_present": 1.0,
2192
+ "blimp_only_npi_scope": 1.0,
2193
+ "blimp_passive_1": 1.0,
2194
+ "blimp_passive_2": 1.0,
2195
+ "blimp_principle_A_c_command": 1.0,
2196
+ "blimp_principle_A_case_1": 1.0,
2197
+ "blimp_principle_A_case_2": 1.0,
2198
+ "blimp_principle_A_domain_1": 1.0,
2199
+ "blimp_principle_A_domain_2": 1.0,
2200
+ "blimp_principle_A_domain_3": 1.0,
2201
+ "blimp_principle_A_reconstruction": 1.0,
2202
+ "blimp_regular_plural_subject_verb_agreement_1": 1.0,
2203
+ "blimp_regular_plural_subject_verb_agreement_2": 1.0,
2204
+ "blimp_sentential_negation_npi_licensor_present": 1.0,
2205
+ "blimp_sentential_negation_npi_scope": 1.0,
2206
+ "blimp_sentential_subject_island": 1.0,
2207
+ "blimp_superlative_quantifiers_1": 1.0,
2208
+ "blimp_superlative_quantifiers_2": 1.0,
2209
+ "blimp_tough_vs_raising_1": 1.0,
2210
+ "blimp_tough_vs_raising_2": 1.0,
2211
+ "blimp_transitive": 1.0,
2212
+ "blimp_wh_island": 1.0,
2213
+ "blimp_wh_questions_object_gap": 1.0,
2214
+ "blimp_wh_questions_subject_gap": 1.0,
2215
+ "blimp_wh_questions_subject_gap_long_distance": 1.0,
2216
+ "blimp_wh_vs_that_no_gap": 1.0,
2217
+ "blimp_wh_vs_that_no_gap_long_distance": 1.0,
2218
+ "blimp_wh_vs_that_with_gap": 1.0,
2219
+ "blimp_wh_vs_that_with_gap_long_distance": 1.0
2220
+ },
2221
+ "n-shot": {
2222
+ "blimp_adjunct_island": 0,
2223
+ "blimp_anaphor_gender_agreement": 0,
2224
+ "blimp_anaphor_number_agreement": 0,
2225
+ "blimp_animate_subject_passive": 0,
2226
+ "blimp_animate_subject_trans": 0,
2227
+ "blimp_causative": 0,
2228
+ "blimp_complex_NP_island": 0,
2229
+ "blimp_coordinate_structure_constraint_complex_left_branch": 0,
2230
+ "blimp_coordinate_structure_constraint_object_extraction": 0,
2231
+ "blimp_determiner_noun_agreement_1": 0,
2232
+ "blimp_determiner_noun_agreement_2": 0,
2233
+ "blimp_determiner_noun_agreement_irregular_1": 0,
2234
+ "blimp_determiner_noun_agreement_irregular_2": 0,
2235
+ "blimp_determiner_noun_agreement_with_adj_2": 0,
2236
+ "blimp_determiner_noun_agreement_with_adj_irregular_1": 0,
2237
+ "blimp_determiner_noun_agreement_with_adj_irregular_2": 0,
2238
+ "blimp_determiner_noun_agreement_with_adjective_1": 0,
2239
+ "blimp_distractor_agreement_relational_noun": 0,
2240
+ "blimp_distractor_agreement_relative_clause": 0,
2241
+ "blimp_drop_argument": 0,
2242
+ "blimp_ellipsis_n_bar_1": 0,
2243
+ "blimp_ellipsis_n_bar_2": 0,
2244
+ "blimp_existential_there_object_raising": 0,
2245
+ "blimp_existential_there_quantifiers_1": 0,
2246
+ "blimp_existential_there_quantifiers_2": 0,
2247
+ "blimp_existential_there_subject_raising": 0,
2248
+ "blimp_expletive_it_object_raising": 0,
2249
+ "blimp_inchoative": 0,
2250
+ "blimp_intransitive": 0,
2251
+ "blimp_irregular_past_participle_adjectives": 0,
2252
+ "blimp_irregular_past_participle_verbs": 0,
2253
+ "blimp_irregular_plural_subject_verb_agreement_1": 0,
2254
+ "blimp_irregular_plural_subject_verb_agreement_2": 0,
2255
+ "blimp_left_branch_island_echo_question": 0,
2256
+ "blimp_left_branch_island_simple_question": 0,
2257
+ "blimp_matrix_question_npi_licensor_present": 0,
2258
+ "blimp_npi_present_1": 0,
2259
+ "blimp_npi_present_2": 0,
2260
+ "blimp_only_npi_licensor_present": 0,
2261
+ "blimp_only_npi_scope": 0,
2262
+ "blimp_passive_1": 0,
2263
+ "blimp_passive_2": 0,
2264
+ "blimp_principle_A_c_command": 0,
2265
+ "blimp_principle_A_case_1": 0,
2266
+ "blimp_principle_A_case_2": 0,
2267
+ "blimp_principle_A_domain_1": 0,
2268
+ "blimp_principle_A_domain_2": 0,
2269
+ "blimp_principle_A_domain_3": 0,
2270
+ "blimp_principle_A_reconstruction": 0,
2271
+ "blimp_regular_plural_subject_verb_agreement_1": 0,
2272
+ "blimp_regular_plural_subject_verb_agreement_2": 0,
2273
+ "blimp_sentential_negation_npi_licensor_present": 0,
2274
+ "blimp_sentential_negation_npi_scope": 0,
2275
+ "blimp_sentential_subject_island": 0,
2276
+ "blimp_superlative_quantifiers_1": 0,
2277
+ "blimp_superlative_quantifiers_2": 0,
2278
+ "blimp_tough_vs_raising_1": 0,
2279
+ "blimp_tough_vs_raising_2": 0,
2280
+ "blimp_transitive": 0,
2281
+ "blimp_wh_island": 0,
2282
+ "blimp_wh_questions_object_gap": 0,
2283
+ "blimp_wh_questions_subject_gap": 0,
2284
+ "blimp_wh_questions_subject_gap_long_distance": 0,
2285
+ "blimp_wh_vs_that_no_gap": 0,
2286
+ "blimp_wh_vs_that_no_gap_long_distance": 0,
2287
+ "blimp_wh_vs_that_with_gap": 0,
2288
+ "blimp_wh_vs_that_with_gap_long_distance": 0
2289
+ },
2290
+ "n-samples": {
2291
+ "blimp_wh_vs_that_with_gap_long_distance": {
2292
+ "original": 1000,
2293
+ "effective": 1000
2294
+ },
2295
+ "blimp_wh_vs_that_with_gap": {
2296
+ "original": 1000,
2297
+ "effective": 1000
2298
+ },
2299
+ "blimp_wh_vs_that_no_gap_long_distance": {
2300
+ "original": 1000,
2301
+ "effective": 1000
2302
+ },
2303
+ "blimp_wh_vs_that_no_gap": {
2304
+ "original": 1000,
2305
+ "effective": 1000
2306
+ },
2307
+ "blimp_wh_questions_subject_gap_long_distance": {
2308
+ "original": 1000,
2309
+ "effective": 1000
2310
+ },
2311
+ "blimp_wh_questions_subject_gap": {
2312
+ "original": 1000,
2313
+ "effective": 1000
2314
+ },
2315
+ "blimp_wh_questions_object_gap": {
2316
+ "original": 1000,
2317
+ "effective": 1000
2318
+ },
2319
+ "blimp_wh_island": {
2320
+ "original": 1000,
2321
+ "effective": 1000
2322
+ },
2323
+ "blimp_transitive": {
2324
+ "original": 1000,
2325
+ "effective": 1000
2326
+ },
2327
+ "blimp_tough_vs_raising_2": {
2328
+ "original": 1000,
2329
+ "effective": 1000
2330
+ },
2331
+ "blimp_tough_vs_raising_1": {
2332
+ "original": 1000,
2333
+ "effective": 1000
2334
+ },
2335
+ "blimp_superlative_quantifiers_2": {
2336
+ "original": 1000,
2337
+ "effective": 1000
2338
+ },
2339
+ "blimp_superlative_quantifiers_1": {
2340
+ "original": 1000,
2341
+ "effective": 1000
2342
+ },
2343
+ "blimp_sentential_subject_island": {
2344
+ "original": 1000,
2345
+ "effective": 1000
2346
+ },
2347
+ "blimp_sentential_negation_npi_scope": {
2348
+ "original": 1000,
2349
+ "effective": 1000
2350
+ },
2351
+ "blimp_sentential_negation_npi_licensor_present": {
2352
+ "original": 1000,
2353
+ "effective": 1000
2354
+ },
2355
+ "blimp_regular_plural_subject_verb_agreement_2": {
2356
+ "original": 1000,
2357
+ "effective": 1000
2358
+ },
2359
+ "blimp_regular_plural_subject_verb_agreement_1": {
2360
+ "original": 1000,
2361
+ "effective": 1000
2362
+ },
2363
+ "blimp_principle_A_reconstruction": {
2364
+ "original": 1000,
2365
+ "effective": 1000
2366
+ },
2367
+ "blimp_principle_A_domain_3": {
2368
+ "original": 1000,
2369
+ "effective": 1000
2370
+ },
2371
+ "blimp_principle_A_domain_2": {
2372
+ "original": 1000,
2373
+ "effective": 1000
2374
+ },
2375
+ "blimp_principle_A_domain_1": {
2376
+ "original": 1000,
2377
+ "effective": 1000
2378
+ },
2379
+ "blimp_principle_A_case_2": {
2380
+ "original": 1000,
2381
+ "effective": 1000
2382
+ },
2383
+ "blimp_principle_A_case_1": {
2384
+ "original": 1000,
2385
+ "effective": 1000
2386
+ },
2387
+ "blimp_principle_A_c_command": {
2388
+ "original": 1000,
2389
+ "effective": 1000
2390
+ },
2391
+ "blimp_passive_2": {
2392
+ "original": 1000,
2393
+ "effective": 1000
2394
+ },
2395
+ "blimp_passive_1": {
2396
+ "original": 1000,
2397
+ "effective": 1000
2398
+ },
2399
+ "blimp_only_npi_scope": {
2400
+ "original": 1000,
2401
+ "effective": 1000
2402
+ },
2403
+ "blimp_only_npi_licensor_present": {
2404
+ "original": 1000,
2405
+ "effective": 1000
2406
+ },
2407
+ "blimp_npi_present_2": {
2408
+ "original": 1000,
2409
+ "effective": 1000
2410
+ },
2411
+ "blimp_npi_present_1": {
2412
+ "original": 1000,
2413
+ "effective": 1000
2414
+ },
2415
+ "blimp_matrix_question_npi_licensor_present": {
2416
+ "original": 1000,
2417
+ "effective": 1000
2418
+ },
2419
+ "blimp_left_branch_island_simple_question": {
2420
+ "original": 1000,
2421
+ "effective": 1000
2422
+ },
2423
+ "blimp_left_branch_island_echo_question": {
2424
+ "original": 1000,
2425
+ "effective": 1000
2426
+ },
2427
+ "blimp_irregular_plural_subject_verb_agreement_2": {
2428
+ "original": 1000,
2429
+ "effective": 1000
2430
+ },
2431
+ "blimp_irregular_plural_subject_verb_agreement_1": {
2432
+ "original": 1000,
2433
+ "effective": 1000
2434
+ },
2435
+ "blimp_irregular_past_participle_verbs": {
2436
+ "original": 1000,
2437
+ "effective": 1000
2438
+ },
2439
+ "blimp_irregular_past_participle_adjectives": {
2440
+ "original": 1000,
2441
+ "effective": 1000
2442
+ },
2443
+ "blimp_intransitive": {
2444
+ "original": 1000,
2445
+ "effective": 1000
2446
+ },
2447
+ "blimp_inchoative": {
2448
+ "original": 1000,
2449
+ "effective": 1000
2450
+ },
2451
+ "blimp_expletive_it_object_raising": {
2452
+ "original": 1000,
2453
+ "effective": 1000
2454
+ },
2455
+ "blimp_existential_there_subject_raising": {
2456
+ "original": 1000,
2457
+ "effective": 1000
2458
+ },
2459
+ "blimp_existential_there_quantifiers_2": {
2460
+ "original": 1000,
2461
+ "effective": 1000
2462
+ },
2463
+ "blimp_existential_there_quantifiers_1": {
2464
+ "original": 1000,
2465
+ "effective": 1000
2466
+ },
2467
+ "blimp_existential_there_object_raising": {
2468
+ "original": 1000,
2469
+ "effective": 1000
2470
+ },
2471
+ "blimp_ellipsis_n_bar_2": {
2472
+ "original": 1000,
2473
+ "effective": 1000
2474
+ },
2475
+ "blimp_ellipsis_n_bar_1": {
2476
+ "original": 1000,
2477
+ "effective": 1000
2478
+ },
2479
+ "blimp_drop_argument": {
2480
+ "original": 1000,
2481
+ "effective": 1000
2482
+ },
2483
+ "blimp_distractor_agreement_relative_clause": {
2484
+ "original": 1000,
2485
+ "effective": 1000
2486
+ },
2487
+ "blimp_distractor_agreement_relational_noun": {
2488
+ "original": 1000,
2489
+ "effective": 1000
2490
+ },
2491
+ "blimp_determiner_noun_agreement_with_adjective_1": {
2492
+ "original": 1000,
2493
+ "effective": 1000
2494
+ },
2495
+ "blimp_determiner_noun_agreement_with_adj_irregular_2": {
2496
+ "original": 1000,
2497
+ "effective": 1000
2498
+ },
2499
+ "blimp_determiner_noun_agreement_with_adj_irregular_1": {
2500
+ "original": 1000,
2501
+ "effective": 1000
2502
+ },
2503
+ "blimp_determiner_noun_agreement_with_adj_2": {
2504
+ "original": 1000,
2505
+ "effective": 1000
2506
+ },
2507
+ "blimp_determiner_noun_agreement_irregular_2": {
2508
+ "original": 1000,
2509
+ "effective": 1000
2510
+ },
2511
+ "blimp_determiner_noun_agreement_irregular_1": {
2512
+ "original": 1000,
2513
+ "effective": 1000
2514
+ },
2515
+ "blimp_determiner_noun_agreement_2": {
2516
+ "original": 1000,
2517
+ "effective": 1000
2518
+ },
2519
+ "blimp_determiner_noun_agreement_1": {
2520
+ "original": 1000,
2521
+ "effective": 1000
2522
+ },
2523
+ "blimp_coordinate_structure_constraint_object_extraction": {
2524
+ "original": 1000,
2525
+ "effective": 1000
2526
+ },
2527
+ "blimp_coordinate_structure_constraint_complex_left_branch": {
2528
+ "original": 1000,
2529
+ "effective": 1000
2530
+ },
2531
+ "blimp_complex_NP_island": {
2532
+ "original": 1000,
2533
+ "effective": 1000
2534
+ },
2535
+ "blimp_causative": {
2536
+ "original": 1000,
2537
+ "effective": 1000
2538
+ },
2539
+ "blimp_animate_subject_trans": {
2540
+ "original": 1000,
2541
+ "effective": 1000
2542
+ },
2543
+ "blimp_animate_subject_passive": {
2544
+ "original": 1000,
2545
+ "effective": 1000
2546
+ },
2547
+ "blimp_anaphor_number_agreement": {
2548
+ "original": 1000,
2549
+ "effective": 1000
2550
+ },
2551
+ "blimp_anaphor_gender_agreement": {
2552
+ "original": 1000,
2553
+ "effective": 1000
2554
+ },
2555
+ "blimp_adjunct_island": {
2556
+ "original": 1000,
2557
+ "effective": 1000
2558
+ }
2559
+ },
2560
+ "config": {
2561
+ "model": "hf",
2562
+ "model_args": "pretrained=EleutherAI/pythia-14m-seed1,revision=step44000",
2563
+ "model_num_parameters": 14067712,
2564
+ "model_dtype": "torch.float16",
2565
+ "model_revision": "step44000",
2566
+ "model_sha": "84f55295500644f84cf599c6c755a88927fe6371",
2567
+ "batch_size": "1024",
2568
+ "batch_sizes": [],
2569
+ "device": "cuda",
2570
+ "use_cache": null,
2571
+ "limit": null,
2572
+ "bootstrap_iters": 100000,
2573
+ "gen_kwargs": null,
2574
+ "random_seed": 0,
2575
+ "numpy_seed": 1234,
2576
+ "torch_seed": 1234,
2577
+ "fewshot_seed": 1234
2578
+ },
2579
+ "git_hash": "51a7ca9",
2580
+ "date": 1724071513.811942,
2581
+ "pretty_env_info": "PyTorch version: 2.3.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: CentOS Linux release 7.9.2009 (Core) (x86_64)\nGCC version: (GCC) 12.1.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.17\n\nPython version: 3.12.1 (main, Jan 12 2024, 16:49:08) [GCC 12.1.0] (64-bit runtime)\nPython platform: Linux-3.10.0-1160.119.1.el7.tuxcare.els2.x86_64-x86_64-with-glibc2.17\nIs CUDA available: True\nCUDA runtime version: 12.4.99\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: GPU 0: NVIDIA GeForce RTX 2080 Ti\nNvidia driver version: 550.90.07\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nByte Order: Little Endian\nCPU(s): 32\nOn-line CPU(s) list: 0-31\nThread(s) per core: 1\nCore(s) per socket: 32\nSocket(s): 1\nNUMA node(s): 2\nVendor ID: AuthenticAMD\nCPU family: 23\nModel: 49\nModel name: AMD EPYC 7502P 32-Core Processor\nStepping: 0\nCPU MHz: 2500.000\nCPU max MHz: 2500.0000\nCPU min MHz: 1500.0000\nBogoMIPS: 5000.08\nVirtualization: AMD-V\nL1d cache: 32K\nL1i cache: 32K\nL2 cache: 512K\nL3 cache: 16384K\nNUMA node0 CPU(s): 0-15\nNUMA node1 CPU(s): 16-31\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc art rep_good nopl nonstop_tsc extd_apicid aperfmperf eagerfpu pni pclmulqdq monitor ssse3 fma cx16 sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw ibs skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_l2 cpb cat_l3 cdp_l3 hw_pstate sme ssbd ibrs ibpb stibp vmmcall fsgsbase bmi1 avx2 smep bmi2 cqm rdt_a rdseed adx smap clflushopt clwb sha_ni xsaveopt xsavec xgetbv1 cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local clzero irperf xsaveerptr arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif umip overflow_recov succor smca\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.3.0\n[conda] Could not collect",
2582
+ "transformers_version": "4.40.2",
2583
+ "upper_git_hash": null,
2584
+ "task_hashes": {},
2585
+ "model_source": "hf",
2586
+ "model_name": "EleutherAI/pythia-14m-seed1",
2587
+ "model_name_sanitized": "EleutherAI__pythia-14m-seed1",
2588
+ "start_time": 1526222.405809366,
2589
+ "end_time": 1526562.000683229,
2590
+ "total_evaluation_time_seconds": "339.59487386303954"
2591
+ }
pythia-14m-seed1/step45000/EleutherAI__pythia-14m-seed1/results_2024-08-19T05-56-14.763008.json ADDED
@@ -0,0 +1,2591 @@
1
+ {
2
+ "results": {
3
+ "blimp_wh_vs_that_with_gap_long_distance": {
4
+ "acc,none": 0.117,
5
+ "acc_stderr,none": 0.010169287802713345,
6
+ "alias": "blimp_wh_vs_that_with_gap_long_distance"
7
+ },
8
+ "blimp_wh_vs_that_with_gap": {
9
+ "acc,none": 0.239,
10
+ "acc_stderr,none": 0.013493000446937704,
11
+ "alias": "blimp_wh_vs_that_with_gap"
12
+ },
13
+ "blimp_wh_vs_that_no_gap_long_distance": {
14
+ "acc,none": 0.958,
15
+ "acc_stderr,none": 0.006346359293033784,
16
+ "alias": "blimp_wh_vs_that_no_gap_long_distance"
17
+ },
18
+ "blimp_wh_vs_that_no_gap": {
19
+ "acc,none": 0.917,
20
+ "acc_stderr,none": 0.008728527206074756,
21
+ "alias": "blimp_wh_vs_that_no_gap"
22
+ },
23
+ "blimp_wh_questions_subject_gap_long_distance": {
24
+ "acc,none": 0.92,
25
+ "acc_stderr,none": 0.00858333697775368,
26
+ "alias": "blimp_wh_questions_subject_gap_long_distance"
27
+ },
28
+ "blimp_wh_questions_subject_gap": {
29
+ "acc,none": 0.87,
30
+ "acc_stderr,none": 0.010640169792499236,
31
+ "alias": "blimp_wh_questions_subject_gap"
32
+ },
33
+ "blimp_wh_questions_object_gap": {
34
+ "acc,none": 0.418,
35
+ "acc_stderr,none": 0.015605111967541904,
36
+ "alias": "blimp_wh_questions_object_gap"
37
+ },
38
+ "blimp_wh_island": {
39
+ "acc,none": 0.638,
40
+ "acc_stderr,none": 0.015204840912919383,
41
+ "alias": "blimp_wh_island"
42
+ },
43
+ "blimp_transitive": {
44
+ "acc,none": 0.825,
45
+ "acc_stderr,none": 0.012021627157731998,
46
+ "alias": "blimp_transitive"
47
+ },
48
+ "blimp_tough_vs_raising_2": {
49
+ "acc,none": 0.734,
50
+ "acc_stderr,none": 0.013979965645145179,
51
+ "alias": "blimp_tough_vs_raising_2"
52
+ },
53
+ "blimp_tough_vs_raising_1": {
54
+ "acc,none": 0.406,
55
+ "acc_stderr,none": 0.01553722643863458,
56
+ "alias": "blimp_tough_vs_raising_1"
57
+ },
58
+ "blimp_superlative_quantifiers_2": {
59
+ "acc,none": 0.231,
60
+ "acc_stderr,none": 0.013334797216936478,
61
+ "alias": "blimp_superlative_quantifiers_2"
62
+ },
63
+ "blimp_superlative_quantifiers_1": {
64
+ "acc,none": 0.151,
65
+ "acc_stderr,none": 0.011328165223341657,
66
+ "alias": "blimp_superlative_quantifiers_1"
67
+ },
68
+ "blimp_sentential_subject_island": {
69
+ "acc,none": 0.379,
70
+ "acc_stderr,none": 0.015349091002225332,
71
+ "alias": "blimp_sentential_subject_island"
72
+ },
73
+ "blimp_sentential_negation_npi_scope": {
74
+ "acc,none": 0.519,
75
+ "acc_stderr,none": 0.015807874268505835,
76
+ "alias": "blimp_sentential_negation_npi_scope"
77
+ },
78
+ "blimp_sentential_negation_npi_licensor_present": {
79
+ "acc,none": 0.984,
80
+ "acc_stderr,none": 0.003969856390319398,
81
+ "alias": "blimp_sentential_negation_npi_licensor_present"
82
+ },
83
+ "blimp_regular_plural_subject_verb_agreement_2": {
84
+ "acc,none": 0.819,
85
+ "acc_stderr,none": 0.012181436179178027,
86
+ "alias": "blimp_regular_plural_subject_verb_agreement_2"
87
+ },
88
+ "blimp_regular_plural_subject_verb_agreement_1": {
89
+ "acc,none": 0.857,
90
+ "acc_stderr,none": 0.011075814808567074,
91
+ "alias": "blimp_regular_plural_subject_verb_agreement_1"
92
+ },
93
+ "blimp_principle_A_reconstruction": {
94
+ "acc,none": 0.442,
95
+ "acc_stderr,none": 0.015712507211864152,
96
+ "alias": "blimp_principle_A_reconstruction"
97
+ },
98
+ "blimp_principle_A_domain_3": {
99
+ "acc,none": 0.617,
100
+ "acc_stderr,none": 0.0153801023256527,
101
+ "alias": "blimp_principle_A_domain_3"
102
+ },
103
+ "blimp_principle_A_domain_2": {
104
+ "acc,none": 0.622,
105
+ "acc_stderr,none": 0.015341165254026547,
106
+ "alias": "blimp_principle_A_domain_2"
107
+ },
108
+ "blimp_principle_A_domain_1": {
109
+ "acc,none": 0.889,
110
+ "acc_stderr,none": 0.009938701010583716,
111
+ "alias": "blimp_principle_A_domain_1"
112
+ },
113
+ "blimp_principle_A_case_2": {
114
+ "acc,none": 0.84,
115
+ "acc_stderr,none": 0.011598902298689068,
116
+ "alias": "blimp_principle_A_case_2"
117
+ },
118
+ "blimp_principle_A_case_1": {
119
+ "acc,none": 1.0,
120
+ "acc_stderr,none": 0.0,
121
+ "alias": "blimp_principle_A_case_1"
122
+ },
123
+ "blimp_principle_A_c_command": {
124
+ "acc,none": 0.554,
125
+ "acc_stderr,none": 0.01572677116675039,
126
+ "alias": "blimp_principle_A_c_command"
127
+ },
128
+ "blimp_passive_2": {
129
+ "acc,none": 0.882,
130
+ "acc_stderr,none": 0.010206869264381718,
131
+ "alias": "blimp_passive_2"
132
+ },
133
+ "blimp_passive_1": {
134
+ "acc,none": 0.9,
135
+ "acc_stderr,none": 0.00949157995752499,
136
+ "alias": "blimp_passive_1"
137
+ },
138
+ "blimp_only_npi_scope": {
139
+ "acc,none": 0.84,
140
+ "acc_stderr,none": 0.011598902298689068,
141
+ "alias": "blimp_only_npi_scope"
142
+ },
143
+ "blimp_only_npi_licensor_present": {
144
+ "acc,none": 0.915,
145
+ "acc_stderr,none": 0.008823426366942316,
146
+ "alias": "blimp_only_npi_licensor_present"
147
+ },
148
+ "blimp_npi_present_2": {
149
+ "acc,none": 0.395,
150
+ "acc_stderr,none": 0.015466551464829328,
151
+ "alias": "blimp_npi_present_2"
152
+ },
153
+ "blimp_npi_present_1": {
154
+ "acc,none": 0.322,
155
+ "acc_stderr,none": 0.014782913600996744,
156
+ "alias": "blimp_npi_present_1"
157
+ },
158
+ "blimp_matrix_question_npi_licensor_present": {
159
+ "acc,none": 0.163,
160
+ "acc_stderr,none": 0.011686212712746913,
161
+ "alias": "blimp_matrix_question_npi_licensor_present"
162
+ },
163
+ "blimp_left_branch_island_simple_question": {
164
+ "acc,none": 0.352,
165
+ "acc_stderr,none": 0.015110404505648562,
166
+ "alias": "blimp_left_branch_island_simple_question"
167
+ },
168
+ "blimp_left_branch_island_echo_question": {
169
+ "acc,none": 0.401,
170
+ "acc_stderr,none": 0.015506109745498368,
171
+ "alias": "blimp_left_branch_island_echo_question"
172
+ },
173
+ "blimp_irregular_plural_subject_verb_agreement_2": {
174
+ "acc,none": 0.849,
175
+ "acc_stderr,none": 0.011328165223341657,
176
+ "alias": "blimp_irregular_plural_subject_verb_agreement_2"
177
+ },
178
+ "blimp_irregular_plural_subject_verb_agreement_1": {
179
+ "acc,none": 0.768,
180
+ "acc_stderr,none": 0.013354937452281623,
181
+ "alias": "blimp_irregular_plural_subject_verb_agreement_1"
182
+ },
183
+ "blimp_irregular_past_participle_verbs": {
184
+ "acc,none": 0.886,
185
+ "acc_stderr,none": 0.010055103435823278,
186
+ "alias": "blimp_irregular_past_participle_verbs"
187
+ },
188
+ "blimp_irregular_past_participle_adjectives": {
189
+ "acc,none": 0.992,
190
+ "acc_stderr,none": 0.002818500300504498,
191
+ "alias": "blimp_irregular_past_participle_adjectives"
192
+ },
193
+ "blimp_intransitive": {
194
+ "acc,none": 0.649,
195
+ "acc_stderr,none": 0.015100563798316508,
196
+ "alias": "blimp_intransitive"
197
+ },
198
+ "blimp_inchoative": {
199
+ "acc,none": 0.534,
200
+ "acc_stderr,none": 0.01578268332993769,
201
+ "alias": "blimp_inchoative"
202
+ },
203
+ "blimp_expletive_it_object_raising": {
204
+ "acc,none": 0.732,
205
+ "acc_stderr,none": 0.014013292702729519,
206
+ "alias": "blimp_expletive_it_object_raising"
207
+ },
208
+ "blimp_existential_there_subject_raising": {
209
+ "acc,none": 0.751,
210
+ "acc_stderr,none": 0.013681600278702275,
211
+ "alias": "blimp_existential_there_subject_raising"
212
+ },
213
+ "blimp_existential_there_quantifiers_2": {
214
+ "acc,none": 0.313,
215
+ "acc_stderr,none": 0.014671272822977775,
216
+ "alias": "blimp_existential_there_quantifiers_2"
217
+ },
218
+ "blimp_existential_there_quantifiers_1": {
219
+ "acc,none": 0.955,
220
+ "acc_stderr,none": 0.006558812241406063,
221
+ "alias": "blimp_existential_there_quantifiers_1"
222
+ },
223
+ "blimp_existential_there_object_raising": {
224
+ "acc,none": 0.804,
225
+ "acc_stderr,none": 0.012559527926707347,
226
+ "alias": "blimp_existential_there_object_raising"
227
+ },
228
+ "blimp_ellipsis_n_bar_2": {
229
+ "acc,none": 0.765,
230
+ "acc_stderr,none": 0.013414729030247124,
231
+ "alias": "blimp_ellipsis_n_bar_2"
232
+ },
233
+ "blimp_ellipsis_n_bar_1": {
234
+ "acc,none": 0.543,
235
+ "acc_stderr,none": 0.0157606915901365,
236
+ "alias": "blimp_ellipsis_n_bar_1"
237
+ },
238
+ "blimp_drop_argument": {
239
+ "acc,none": 0.763,
240
+ "acc_stderr,none": 0.013454070462577935,
241
+ "alias": "blimp_drop_argument"
242
+ },
243
+ "blimp_distractor_agreement_relative_clause": {
244
+ "acc,none": 0.26,
245
+ "acc_stderr,none": 0.013877773329774218,
246
+ "alias": "blimp_distractor_agreement_relative_clause"
247
+ },
248
+ "blimp_distractor_agreement_relational_noun": {
249
+ "acc,none": 0.343,
250
+ "acc_stderr,none": 0.015019206922357005,
251
+ "alias": "blimp_distractor_agreement_relational_noun"
252
+ },
253
+ "blimp_determiner_noun_agreement_with_adjective_1": {
254
+ "acc,none": 0.865,
255
+ "acc_stderr,none": 0.010811655372416006,
256
+ "alias": "blimp_determiner_noun_agreement_with_adjective_1"
257
+ },
258
+ "blimp_determiner_noun_agreement_with_adj_irregular_2": {
259
+ "acc,none": 0.818,
260
+ "acc_stderr,none": 0.0122075806376622,
261
+ "alias": "blimp_determiner_noun_agreement_with_adj_irregular_2"
262
+ },
263
+ "blimp_determiner_noun_agreement_with_adj_irregular_1": {
264
+ "acc,none": 0.73,
265
+ "acc_stderr,none": 0.01404625563263382,
266
+ "alias": "blimp_determiner_noun_agreement_with_adj_irregular_1"
267
+ },
268
+ "blimp_determiner_noun_agreement_with_adj_2": {
269
+ "acc,none": 0.838,
270
+ "acc_stderr,none": 0.011657267771304384,
271
+ "alias": "blimp_determiner_noun_agreement_with_adj_2"
272
+ },
273
+ "blimp_determiner_noun_agreement_irregular_2": {
274
+ "acc,none": 0.852,
275
+ "acc_stderr,none": 0.011234866364235145,
276
+ "alias": "blimp_determiner_noun_agreement_irregular_2"
277
+ },
278
+ "blimp_determiner_noun_agreement_irregular_1": {
279
+ "acc,none": 0.783,
280
+ "acc_stderr,none": 0.013041513757270706,
281
+ "alias": "blimp_determiner_noun_agreement_irregular_1"
282
+ },
283
+ "blimp_determiner_noun_agreement_2": {
284
+ "acc,none": 0.932,
285
+ "acc_stderr,none": 0.007964887911291624,
286
+ "alias": "blimp_determiner_noun_agreement_2"
287
+ },
288
+ "blimp_determiner_noun_agreement_1": {
289
+ "acc,none": 0.913,
290
+ "acc_stderr,none": 0.008916866630745944,
291
+ "alias": "blimp_determiner_noun_agreement_1"
292
+ },
293
+ "blimp_coordinate_structure_constraint_object_extraction": {
294
+ "acc,none": 0.503,
295
+ "acc_stderr,none": 0.01581901517924682,
296
+ "alias": "blimp_coordinate_structure_constraint_object_extraction"
297
+ },
298
+ "blimp_coordinate_structure_constraint_complex_left_branch": {
299
+ "acc,none": 0.385,
300
+ "acc_stderr,none": 0.015395194445410945,
301
+ "alias": "blimp_coordinate_structure_constraint_complex_left_branch"
302
+ },
303
+ "blimp_complex_NP_island": {
304
+ "acc,none": 0.392,
305
+ "acc_stderr,none": 0.015445859463771338,
306
+ "alias": "blimp_complex_NP_island"
307
+ },
308
+ "blimp_causative": {
309
+ "acc,none": 0.629,
310
+ "acc_stderr,none": 0.015283736211823096,
311
+ "alias": "blimp_causative"
312
+ },
313
+ "blimp_animate_subject_trans": {
314
+ "acc,none": 0.856,
315
+ "acc_stderr,none": 0.01110798754893916,
316
+ "alias": "blimp_animate_subject_trans"
317
+ },
318
+ "blimp_animate_subject_passive": {
319
+ "acc,none": 0.727,
320
+ "acc_stderr,none": 0.014095022868717513,
321
+ "alias": "blimp_animate_subject_passive"
322
+ },
323
+ "blimp_anaphor_number_agreement": {
324
+ "acc,none": 0.941,
325
+ "acc_stderr,none": 0.007454835650406693,
326
+ "alias": "blimp_anaphor_number_agreement"
327
+ },
328
+ "blimp_anaphor_gender_agreement": {
329
+ "acc,none": 0.783,
330
+ "acc_stderr,none": 0.013041513757270706,
331
+ "alias": "blimp_anaphor_gender_agreement"
332
+ },
333
+ "blimp_adjunct_island": {
334
+ "acc,none": 0.735,
335
+ "acc_stderr,none": 0.013963164754810064,
336
+ "alias": "blimp_adjunct_island"
337
+ }
338
+ },
339
+ "group_subtasks": {
340
+ "blimp_adjunct_island": [],
341
+ "blimp_anaphor_gender_agreement": [],
342
+ "blimp_anaphor_number_agreement": [],
343
+ "blimp_animate_subject_passive": [],
344
+ "blimp_animate_subject_trans": [],
345
+ "blimp_causative": [],
346
+ "blimp_complex_NP_island": [],
347
+ "blimp_coordinate_structure_constraint_complex_left_branch": [],
348
+ "blimp_coordinate_structure_constraint_object_extraction": [],
349
+ "blimp_determiner_noun_agreement_1": [],
350
+ "blimp_determiner_noun_agreement_2": [],
351
+ "blimp_determiner_noun_agreement_irregular_1": [],
352
+ "blimp_determiner_noun_agreement_irregular_2": [],
353
+ "blimp_determiner_noun_agreement_with_adj_2": [],
354
+ "blimp_determiner_noun_agreement_with_adj_irregular_1": [],
355
+ "blimp_determiner_noun_agreement_with_adj_irregular_2": [],
356
+ "blimp_determiner_noun_agreement_with_adjective_1": [],
357
+ "blimp_distractor_agreement_relational_noun": [],
358
+ "blimp_distractor_agreement_relative_clause": [],
359
+ "blimp_drop_argument": [],
360
+ "blimp_ellipsis_n_bar_1": [],
361
+ "blimp_ellipsis_n_bar_2": [],
362
+ "blimp_existential_there_object_raising": [],
363
+ "blimp_existential_there_quantifiers_1": [],
364
+ "blimp_existential_there_quantifiers_2": [],
365
+ "blimp_existential_there_subject_raising": [],
366
+ "blimp_expletive_it_object_raising": [],
367
+ "blimp_inchoative": [],
368
+ "blimp_intransitive": [],
369
+ "blimp_irregular_past_participle_adjectives": [],
370
+ "blimp_irregular_past_participle_verbs": [],
371
+ "blimp_irregular_plural_subject_verb_agreement_1": [],
372
+ "blimp_irregular_plural_subject_verb_agreement_2": [],
373
+ "blimp_left_branch_island_echo_question": [],
374
+ "blimp_left_branch_island_simple_question": [],
375
+ "blimp_matrix_question_npi_licensor_present": [],
376
+ "blimp_npi_present_1": [],
377
+ "blimp_npi_present_2": [],
378
+ "blimp_only_npi_licensor_present": [],
379
+ "blimp_only_npi_scope": [],
380
+ "blimp_passive_1": [],
381
+ "blimp_passive_2": [],
382
+ "blimp_principle_A_c_command": [],
383
+ "blimp_principle_A_case_1": [],
384
+ "blimp_principle_A_case_2": [],
385
+ "blimp_principle_A_domain_1": [],
386
+ "blimp_principle_A_domain_2": [],
387
+ "blimp_principle_A_domain_3": [],
388
+ "blimp_principle_A_reconstruction": [],
389
+ "blimp_regular_plural_subject_verb_agreement_1": [],
390
+ "blimp_regular_plural_subject_verb_agreement_2": [],
391
+ "blimp_sentential_negation_npi_licensor_present": [],
392
+ "blimp_sentential_negation_npi_scope": [],
393
+ "blimp_sentential_subject_island": [],
394
+ "blimp_superlative_quantifiers_1": [],
395
+ "blimp_superlative_quantifiers_2": [],
396
+ "blimp_tough_vs_raising_1": [],
397
+ "blimp_tough_vs_raising_2": [],
398
+ "blimp_transitive": [],
399
+ "blimp_wh_island": [],
400
+ "blimp_wh_questions_object_gap": [],
401
+ "blimp_wh_questions_subject_gap": [],
402
+ "blimp_wh_questions_subject_gap_long_distance": [],
403
+ "blimp_wh_vs_that_no_gap": [],
404
+ "blimp_wh_vs_that_no_gap_long_distance": [],
405
+ "blimp_wh_vs_that_with_gap": [],
406
+ "blimp_wh_vs_that_with_gap_long_distance": []
407
+ },
408
+ "configs": {
409
+ "blimp_adjunct_island": {
410
+ "task": "blimp_adjunct_island",
411
+ "group": "blimp",
412
+ "dataset_path": "blimp",
413
+ "dataset_name": "adjunct_island",
414
+ "validation_split": "train",
415
+ "doc_to_text": "",
416
+ "doc_to_target": 0,
417
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
418
+ "description": "",
419
+ "target_delimiter": " ",
420
+ "fewshot_delimiter": "\n\n",
421
+ "num_fewshot": 0,
422
+ "metric_list": [
423
+ {
424
+ "metric": "acc"
425
+ }
426
+ ],
427
+ "output_type": "multiple_choice",
428
+ "repeats": 1,
429
+ "should_decontaminate": true,
430
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
431
+ "metadata": {
432
+ "version": 1.0
433
+ }
434
+ },
435
+ "blimp_anaphor_gender_agreement": {
436
+ "task": "blimp_anaphor_gender_agreement",
437
+ "group": "blimp",
438
+ "dataset_path": "blimp",
439
+ "dataset_name": "anaphor_gender_agreement",
440
+ "validation_split": "train",
441
+ "doc_to_text": "",
442
+ "doc_to_target": 0,
443
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
444
+ "description": "",
445
+ "target_delimiter": " ",
446
+ "fewshot_delimiter": "\n\n",
447
+ "num_fewshot": 0,
448
+ "metric_list": [
449
+ {
450
+ "metric": "acc"
451
+ }
452
+ ],
453
+ "output_type": "multiple_choice",
454
+ "repeats": 1,
455
+ "should_decontaminate": true,
456
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
457
+ "metadata": {
458
+ "version": 1.0
459
+ }
460
+ },
461
+ "blimp_anaphor_number_agreement": {
462
+ "task": "blimp_anaphor_number_agreement",
463
+ "group": "blimp",
464
+ "dataset_path": "blimp",
465
+ "dataset_name": "anaphor_number_agreement",
466
+ "validation_split": "train",
467
+ "doc_to_text": "",
468
+ "doc_to_target": 0,
469
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
470
+ "description": "",
471
+ "target_delimiter": " ",
472
+ "fewshot_delimiter": "\n\n",
473
+ "num_fewshot": 0,
474
+ "metric_list": [
475
+ {
476
+ "metric": "acc"
477
+ }
478
+ ],
479
+ "output_type": "multiple_choice",
480
+ "repeats": 1,
481
+ "should_decontaminate": true,
482
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
483
+ "metadata": {
484
+ "version": 1.0
485
+ }
486
+ },
487
+ "blimp_animate_subject_passive": {
488
+ "task": "blimp_animate_subject_passive",
489
+ "group": "blimp",
490
+ "dataset_path": "blimp",
491
+ "dataset_name": "animate_subject_passive",
492
+ "validation_split": "train",
493
+ "doc_to_text": "",
494
+ "doc_to_target": 0,
495
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
496
+ "description": "",
497
+ "target_delimiter": " ",
498
+ "fewshot_delimiter": "\n\n",
499
+ "num_fewshot": 0,
500
+ "metric_list": [
501
+ {
502
+ "metric": "acc"
503
+ }
504
+ ],
505
+ "output_type": "multiple_choice",
506
+ "repeats": 1,
507
+ "should_decontaminate": true,
508
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
509
+ "metadata": {
510
+ "version": 1.0
511
+ }
512
+ },
513
+ "blimp_animate_subject_trans": {
514
+ "task": "blimp_animate_subject_trans",
515
+ "group": "blimp",
516
+ "dataset_path": "blimp",
517
+ "dataset_name": "animate_subject_trans",
518
+ "validation_split": "train",
519
+ "doc_to_text": "",
520
+ "doc_to_target": 0,
521
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
522
+ "description": "",
523
+ "target_delimiter": " ",
524
+ "fewshot_delimiter": "\n\n",
525
+ "num_fewshot": 0,
526
+ "metric_list": [
527
+ {
528
+ "metric": "acc"
529
+ }
530
+ ],
531
+ "output_type": "multiple_choice",
532
+ "repeats": 1,
533
+ "should_decontaminate": true,
534
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
535
+ "metadata": {
536
+ "version": 1.0
537
+ }
538
+ },
539
+ "blimp_causative": {
540
+ "task": "blimp_causative",
541
+ "group": "blimp",
542
+ "dataset_path": "blimp",
543
+ "dataset_name": "causative",
544
+ "validation_split": "train",
545
+ "doc_to_text": "",
546
+ "doc_to_target": 0,
547
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
548
+ "description": "",
549
+ "target_delimiter": " ",
550
+ "fewshot_delimiter": "\n\n",
551
+ "num_fewshot": 0,
552
+ "metric_list": [
553
+ {
554
+ "metric": "acc"
555
+ }
556
+ ],
557
+ "output_type": "multiple_choice",
558
+ "repeats": 1,
559
+ "should_decontaminate": true,
560
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
561
+ "metadata": {
562
+ "version": 1.0
563
+ }
564
+ },
565
+ "blimp_complex_NP_island": {
566
+ "task": "blimp_complex_NP_island",
567
+ "group": "blimp",
568
+ "dataset_path": "blimp",
569
+ "dataset_name": "complex_NP_island",
570
+ "validation_split": "train",
571
+ "doc_to_text": "",
572
+ "doc_to_target": 0,
573
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
574
+ "description": "",
575
+ "target_delimiter": " ",
576
+ "fewshot_delimiter": "\n\n",
577
+ "num_fewshot": 0,
578
+ "metric_list": [
579
+ {
580
+ "metric": "acc"
581
+ }
582
+ ],
583
+ "output_type": "multiple_choice",
584
+ "repeats": 1,
585
+ "should_decontaminate": true,
586
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
587
+ "metadata": {
588
+ "version": 1.0
589
+ }
590
+ },
591
+ "blimp_coordinate_structure_constraint_complex_left_branch": {
592
+ "task": "blimp_coordinate_structure_constraint_complex_left_branch",
593
+ "group": "blimp",
594
+ "dataset_path": "blimp",
595
+ "dataset_name": "coordinate_structure_constraint_complex_left_branch",
596
+ "validation_split": "train",
597
+ "doc_to_text": "",
598
+ "doc_to_target": 0,
599
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
600
+ "description": "",
601
+ "target_delimiter": " ",
602
+ "fewshot_delimiter": "\n\n",
603
+ "num_fewshot": 0,
604
+ "metric_list": [
605
+ {
606
+ "metric": "acc"
607
+ }
608
+ ],
609
+ "output_type": "multiple_choice",
610
+ "repeats": 1,
611
+ "should_decontaminate": true,
612
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
613
+ "metadata": {
614
+ "version": 1.0
615
+ }
616
+ },
617
+ "blimp_coordinate_structure_constraint_object_extraction": {
618
+ "task": "blimp_coordinate_structure_constraint_object_extraction",
619
+ "group": "blimp",
620
+ "dataset_path": "blimp",
621
+ "dataset_name": "coordinate_structure_constraint_object_extraction",
622
+ "validation_split": "train",
623
+ "doc_to_text": "",
624
+ "doc_to_target": 0,
625
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
626
+ "description": "",
627
+ "target_delimiter": " ",
628
+ "fewshot_delimiter": "\n\n",
629
+ "num_fewshot": 0,
630
+ "metric_list": [
631
+ {
632
+ "metric": "acc"
633
+ }
634
+ ],
635
+ "output_type": "multiple_choice",
636
+ "repeats": 1,
637
+ "should_decontaminate": true,
638
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
639
+ "metadata": {
640
+ "version": 1.0
641
+ }
642
+ },
643
+ "blimp_determiner_noun_agreement_1": {
644
+ "task": "blimp_determiner_noun_agreement_1",
645
+ "group": "blimp",
646
+ "dataset_path": "blimp",
647
+ "dataset_name": "determiner_noun_agreement_1",
648
+ "validation_split": "train",
649
+ "doc_to_text": "",
650
+ "doc_to_target": 0,
651
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
652
+ "description": "",
653
+ "target_delimiter": " ",
654
+ "fewshot_delimiter": "\n\n",
655
+ "num_fewshot": 0,
656
+ "metric_list": [
657
+ {
658
+ "metric": "acc"
659
+ }
660
+ ],
661
+ "output_type": "multiple_choice",
662
+ "repeats": 1,
663
+ "should_decontaminate": true,
664
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
665
+ "metadata": {
666
+ "version": 1.0
667
+ }
668
+ },
669
+ "blimp_determiner_noun_agreement_2": {
670
+ "task": "blimp_determiner_noun_agreement_2",
671
+ "group": "blimp",
672
+ "dataset_path": "blimp",
673
+ "dataset_name": "determiner_noun_agreement_2",
674
+ "validation_split": "train",
675
+ "doc_to_text": "",
676
+ "doc_to_target": 0,
677
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
678
+ "description": "",
679
+ "target_delimiter": " ",
680
+ "fewshot_delimiter": "\n\n",
681
+ "num_fewshot": 0,
682
+ "metric_list": [
683
+ {
684
+ "metric": "acc"
685
+ }
686
+ ],
687
+ "output_type": "multiple_choice",
688
+ "repeats": 1,
689
+ "should_decontaminate": true,
690
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
691
+ "metadata": {
692
+ "version": 1.0
693
+ }
694
+ },
695
+ "blimp_determiner_noun_agreement_irregular_1": {
696
+ "task": "blimp_determiner_noun_agreement_irregular_1",
697
+ "group": "blimp",
698
+ "dataset_path": "blimp",
699
+ "dataset_name": "determiner_noun_agreement_irregular_1",
700
+ "validation_split": "train",
701
+ "doc_to_text": "",
702
+ "doc_to_target": 0,
703
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
704
+ "description": "",
705
+ "target_delimiter": " ",
706
+ "fewshot_delimiter": "\n\n",
707
+ "num_fewshot": 0,
708
+ "metric_list": [
709
+ {
710
+ "metric": "acc"
711
+ }
712
+ ],
713
+ "output_type": "multiple_choice",
714
+ "repeats": 1,
715
+ "should_decontaminate": true,
716
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
717
+ "metadata": {
718
+ "version": 1.0
719
+ }
720
+ },
721
+ "blimp_determiner_noun_agreement_irregular_2": {
722
+ "task": "blimp_determiner_noun_agreement_irregular_2",
723
+ "group": "blimp",
724
+ "dataset_path": "blimp",
725
+ "dataset_name": "determiner_noun_agreement_irregular_2",
726
+ "validation_split": "train",
727
+ "doc_to_text": "",
728
+ "doc_to_target": 0,
729
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
730
+ "description": "",
731
+ "target_delimiter": " ",
732
+ "fewshot_delimiter": "\n\n",
733
+ "num_fewshot": 0,
734
+ "metric_list": [
735
+ {
736
+ "metric": "acc"
737
+ }
738
+ ],
739
+ "output_type": "multiple_choice",
740
+ "repeats": 1,
741
+ "should_decontaminate": true,
742
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
743
+ "metadata": {
744
+ "version": 1.0
745
+ }
746
+ },
747
+ "blimp_determiner_noun_agreement_with_adj_2": {
748
+ "task": "blimp_determiner_noun_agreement_with_adj_2",
749
+ "group": "blimp",
750
+ "dataset_path": "blimp",
751
+ "dataset_name": "determiner_noun_agreement_with_adj_2",
752
+ "validation_split": "train",
753
+ "doc_to_text": "",
754
+ "doc_to_target": 0,
755
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
756
+ "description": "",
757
+ "target_delimiter": " ",
758
+ "fewshot_delimiter": "\n\n",
759
+ "num_fewshot": 0,
760
+ "metric_list": [
761
+ {
762
+ "metric": "acc"
763
+ }
764
+ ],
765
+ "output_type": "multiple_choice",
766
+ "repeats": 1,
767
+ "should_decontaminate": true,
768
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
769
+ "metadata": {
770
+ "version": 1.0
771
+ }
772
+ },
773
+ "blimp_determiner_noun_agreement_with_adj_irregular_1": {
774
+ "task": "blimp_determiner_noun_agreement_with_adj_irregular_1",
775
+ "group": "blimp",
776
+ "dataset_path": "blimp",
777
+ "dataset_name": "determiner_noun_agreement_with_adj_irregular_1",
778
+ "validation_split": "train",
779
+ "doc_to_text": "",
780
+ "doc_to_target": 0,
781
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
782
+ "description": "",
783
+ "target_delimiter": " ",
784
+ "fewshot_delimiter": "\n\n",
785
+ "num_fewshot": 0,
786
+ "metric_list": [
787
+ {
788
+ "metric": "acc"
789
+ }
790
+ ],
791
+ "output_type": "multiple_choice",
792
+ "repeats": 1,
793
+ "should_decontaminate": true,
794
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
795
+ "metadata": {
796
+ "version": 1.0
797
+ }
798
+ },
799
+ "blimp_determiner_noun_agreement_with_adj_irregular_2": {
800
+ "task": "blimp_determiner_noun_agreement_with_adj_irregular_2",
801
+ "group": "blimp",
802
+ "dataset_path": "blimp",
803
+ "dataset_name": "determiner_noun_agreement_with_adj_irregular_2",
804
+ "validation_split": "train",
805
+ "doc_to_text": "",
806
+ "doc_to_target": 0,
807
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
808
+ "description": "",
809
+ "target_delimiter": " ",
810
+ "fewshot_delimiter": "\n\n",
811
+ "num_fewshot": 0,
812
+ "metric_list": [
813
+ {
814
+ "metric": "acc"
815
+ }
816
+ ],
817
+ "output_type": "multiple_choice",
818
+ "repeats": 1,
819
+ "should_decontaminate": true,
820
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
821
+ "metadata": {
822
+ "version": 1.0
823
+ }
824
+ },
825
+ "blimp_determiner_noun_agreement_with_adjective_1": {
826
+ "task": "blimp_determiner_noun_agreement_with_adjective_1",
827
+ "group": "blimp",
828
+ "dataset_path": "blimp",
829
+ "dataset_name": "determiner_noun_agreement_with_adjective_1",
830
+ "validation_split": "train",
831
+ "doc_to_text": "",
832
+ "doc_to_target": 0,
833
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
834
+ "description": "",
835
+ "target_delimiter": " ",
836
+ "fewshot_delimiter": "\n\n",
837
+ "num_fewshot": 0,
838
+ "metric_list": [
839
+ {
840
+ "metric": "acc"
841
+ }
842
+ ],
843
+ "output_type": "multiple_choice",
844
+ "repeats": 1,
845
+ "should_decontaminate": true,
846
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
847
+ "metadata": {
848
+ "version": 1.0
849
+ }
850
+ },
851
+ "blimp_distractor_agreement_relational_noun": {
852
+ "task": "blimp_distractor_agreement_relational_noun",
853
+ "group": "blimp",
854
+ "dataset_path": "blimp",
855
+ "dataset_name": "distractor_agreement_relational_noun",
856
+ "validation_split": "train",
857
+ "doc_to_text": "",
858
+ "doc_to_target": 0,
859
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
860
+ "description": "",
861
+ "target_delimiter": " ",
862
+ "fewshot_delimiter": "\n\n",
863
+ "num_fewshot": 0,
864
+ "metric_list": [
865
+ {
866
+ "metric": "acc"
867
+ }
868
+ ],
869
+ "output_type": "multiple_choice",
870
+ "repeats": 1,
871
+ "should_decontaminate": true,
872
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
873
+ "metadata": {
874
+ "version": 1.0
875
+ }
876
+ },
877
+ "blimp_distractor_agreement_relative_clause": {
878
+ "task": "blimp_distractor_agreement_relative_clause",
879
+ "group": "blimp",
880
+ "dataset_path": "blimp",
881
+ "dataset_name": "distractor_agreement_relative_clause",
882
+ "validation_split": "train",
883
+ "doc_to_text": "",
884
+ "doc_to_target": 0,
885
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
886
+ "description": "",
887
+ "target_delimiter": " ",
888
+ "fewshot_delimiter": "\n\n",
889
+ "num_fewshot": 0,
890
+ "metric_list": [
891
+ {
892
+ "metric": "acc"
893
+ }
894
+ ],
895
+ "output_type": "multiple_choice",
896
+ "repeats": 1,
897
+ "should_decontaminate": true,
898
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
899
+ "metadata": {
900
+ "version": 1.0
901
+ }
902
+ },
903
+ "blimp_drop_argument": {
904
+ "task": "blimp_drop_argument",
905
+ "group": "blimp",
906
+ "dataset_path": "blimp",
907
+ "dataset_name": "drop_argument",
908
+ "validation_split": "train",
909
+ "doc_to_text": "",
910
+ "doc_to_target": 0,
911
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
912
+ "description": "",
913
+ "target_delimiter": " ",
914
+ "fewshot_delimiter": "\n\n",
915
+ "num_fewshot": 0,
916
+ "metric_list": [
917
+ {
918
+ "metric": "acc"
919
+ }
920
+ ],
921
+ "output_type": "multiple_choice",
922
+ "repeats": 1,
923
+ "should_decontaminate": true,
924
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
925
+ "metadata": {
926
+ "version": 1.0
927
+ }
928
+ },
929
+ "blimp_ellipsis_n_bar_1": {
930
+ "task": "blimp_ellipsis_n_bar_1",
931
+ "group": "blimp",
932
+ "dataset_path": "blimp",
933
+ "dataset_name": "ellipsis_n_bar_1",
934
+ "validation_split": "train",
935
+ "doc_to_text": "",
936
+ "doc_to_target": 0,
937
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
938
+ "description": "",
939
+ "target_delimiter": " ",
940
+ "fewshot_delimiter": "\n\n",
941
+ "num_fewshot": 0,
942
+ "metric_list": [
943
+ {
944
+ "metric": "acc"
945
+ }
946
+ ],
947
+ "output_type": "multiple_choice",
948
+ "repeats": 1,
949
+ "should_decontaminate": true,
950
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
951
+ "metadata": {
952
+ "version": 1.0
953
+ }
954
+ },
955
+ "blimp_ellipsis_n_bar_2": {
956
+ "task": "blimp_ellipsis_n_bar_2",
957
+ "group": "blimp",
958
+ "dataset_path": "blimp",
959
+ "dataset_name": "ellipsis_n_bar_2",
960
+ "validation_split": "train",
961
+ "doc_to_text": "",
962
+ "doc_to_target": 0,
963
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
964
+ "description": "",
965
+ "target_delimiter": " ",
966
+ "fewshot_delimiter": "\n\n",
967
+ "num_fewshot": 0,
968
+ "metric_list": [
969
+ {
970
+ "metric": "acc"
971
+ }
972
+ ],
973
+ "output_type": "multiple_choice",
974
+ "repeats": 1,
975
+ "should_decontaminate": true,
976
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
977
+ "metadata": {
978
+ "version": 1.0
979
+ }
980
+ },
981
+ "blimp_existential_there_object_raising": {
982
+ "task": "blimp_existential_there_object_raising",
983
+ "group": "blimp",
984
+ "dataset_path": "blimp",
985
+ "dataset_name": "existential_there_object_raising",
986
+ "validation_split": "train",
987
+ "doc_to_text": "",
988
+ "doc_to_target": 0,
989
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
990
+ "description": "",
991
+ "target_delimiter": " ",
992
+ "fewshot_delimiter": "\n\n",
993
+ "num_fewshot": 0,
994
+ "metric_list": [
995
+ {
996
+ "metric": "acc"
997
+ }
998
+ ],
999
+ "output_type": "multiple_choice",
1000
+ "repeats": 1,
1001
+ "should_decontaminate": true,
1002
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1003
+ "metadata": {
1004
+ "version": 1.0
1005
+ }
1006
+ },
1007
+ "blimp_existential_there_quantifiers_1": {
1008
+ "task": "blimp_existential_there_quantifiers_1",
1009
+ "group": "blimp",
1010
+ "dataset_path": "blimp",
1011
+ "dataset_name": "existential_there_quantifiers_1",
1012
+ "validation_split": "train",
1013
+ "doc_to_text": "",
1014
+ "doc_to_target": 0,
1015
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1016
+ "description": "",
1017
+ "target_delimiter": " ",
1018
+ "fewshot_delimiter": "\n\n",
1019
+ "num_fewshot": 0,
1020
+ "metric_list": [
1021
+ {
1022
+ "metric": "acc"
1023
+ }
1024
+ ],
1025
+ "output_type": "multiple_choice",
1026
+ "repeats": 1,
1027
+ "should_decontaminate": true,
1028
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1029
+ "metadata": {
1030
+ "version": 1.0
1031
+ }
1032
+ },
1033
+ "blimp_existential_there_quantifiers_2": {
1034
+ "task": "blimp_existential_there_quantifiers_2",
1035
+ "group": "blimp",
1036
+ "dataset_path": "blimp",
1037
+ "dataset_name": "existential_there_quantifiers_2",
1038
+ "validation_split": "train",
1039
+ "doc_to_text": "",
1040
+ "doc_to_target": 0,
1041
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1042
+ "description": "",
1043
+ "target_delimiter": " ",
1044
+ "fewshot_delimiter": "\n\n",
1045
+ "num_fewshot": 0,
1046
+ "metric_list": [
1047
+ {
1048
+ "metric": "acc"
1049
+ }
1050
+ ],
1051
+ "output_type": "multiple_choice",
1052
+ "repeats": 1,
1053
+ "should_decontaminate": true,
1054
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1055
+ "metadata": {
1056
+ "version": 1.0
1057
+ }
1058
+ },
1059
+ "blimp_existential_there_subject_raising": {
1060
+ "task": "blimp_existential_there_subject_raising",
1061
+ "group": "blimp",
1062
+ "dataset_path": "blimp",
1063
+ "dataset_name": "existential_there_subject_raising",
1064
+ "validation_split": "train",
1065
+ "doc_to_text": "",
1066
+ "doc_to_target": 0,
1067
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1068
+ "description": "",
1069
+ "target_delimiter": " ",
1070
+ "fewshot_delimiter": "\n\n",
1071
+ "num_fewshot": 0,
1072
+ "metric_list": [
1073
+ {
1074
+ "metric": "acc"
1075
+ }
1076
+ ],
1077
+ "output_type": "multiple_choice",
1078
+ "repeats": 1,
1079
+ "should_decontaminate": true,
1080
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1081
+ "metadata": {
1082
+ "version": 1.0
1083
+ }
1084
+ },
1085
+ "blimp_expletive_it_object_raising": {
1086
+ "task": "blimp_expletive_it_object_raising",
1087
+ "group": "blimp",
1088
+ "dataset_path": "blimp",
1089
+ "dataset_name": "expletive_it_object_raising",
1090
+ "validation_split": "train",
1091
+ "doc_to_text": "",
1092
+ "doc_to_target": 0,
1093
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1094
+ "description": "",
1095
+ "target_delimiter": " ",
1096
+ "fewshot_delimiter": "\n\n",
1097
+ "num_fewshot": 0,
1098
+ "metric_list": [
1099
+ {
1100
+ "metric": "acc"
1101
+ }
1102
+ ],
1103
+ "output_type": "multiple_choice",
1104
+ "repeats": 1,
1105
+ "should_decontaminate": true,
1106
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1107
+ "metadata": {
1108
+ "version": 1.0
1109
+ }
1110
+ },
1111
+ "blimp_inchoative": {
1112
+ "task": "blimp_inchoative",
1113
+ "group": "blimp",
1114
+ "dataset_path": "blimp",
1115
+ "dataset_name": "inchoative",
1116
+ "validation_split": "train",
1117
+ "doc_to_text": "",
1118
+ "doc_to_target": 0,
1119
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1120
+ "description": "",
1121
+ "target_delimiter": " ",
1122
+ "fewshot_delimiter": "\n\n",
1123
+ "num_fewshot": 0,
1124
+ "metric_list": [
1125
+ {
1126
+ "metric": "acc"
1127
+ }
1128
+ ],
1129
+ "output_type": "multiple_choice",
1130
+ "repeats": 1,
1131
+ "should_decontaminate": true,
1132
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1133
+ "metadata": {
1134
+ "version": 1.0
1135
+ }
1136
+ },
1137
+ "blimp_intransitive": {
1138
+ "task": "blimp_intransitive",
1139
+ "group": "blimp",
1140
+ "dataset_path": "blimp",
1141
+ "dataset_name": "intransitive",
1142
+ "validation_split": "train",
1143
+ "doc_to_text": "",
1144
+ "doc_to_target": 0,
1145
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1146
+ "description": "",
1147
+ "target_delimiter": " ",
1148
+ "fewshot_delimiter": "\n\n",
1149
+ "num_fewshot": 0,
1150
+ "metric_list": [
1151
+ {
1152
+ "metric": "acc"
1153
+ }
1154
+ ],
1155
+ "output_type": "multiple_choice",
1156
+ "repeats": 1,
1157
+ "should_decontaminate": true,
1158
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1159
+ "metadata": {
1160
+ "version": 1.0
1161
+ }
1162
+ },
1163
+ "blimp_irregular_past_participle_adjectives": {
1164
+ "task": "blimp_irregular_past_participle_adjectives",
1165
+ "group": "blimp",
1166
+ "dataset_path": "blimp",
1167
+ "dataset_name": "irregular_past_participle_adjectives",
1168
+ "validation_split": "train",
1169
+ "doc_to_text": "",
1170
+ "doc_to_target": 0,
1171
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1172
+ "description": "",
1173
+ "target_delimiter": " ",
1174
+ "fewshot_delimiter": "\n\n",
1175
+ "num_fewshot": 0,
1176
+ "metric_list": [
1177
+ {
1178
+ "metric": "acc"
1179
+ }
1180
+ ],
1181
+ "output_type": "multiple_choice",
1182
+ "repeats": 1,
1183
+ "should_decontaminate": true,
1184
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1185
+ "metadata": {
1186
+ "version": 1.0
1187
+ }
1188
+ },
1189
+ "blimp_irregular_past_participle_verbs": {
1190
+ "task": "blimp_irregular_past_participle_verbs",
1191
+ "group": "blimp",
1192
+ "dataset_path": "blimp",
1193
+ "dataset_name": "irregular_past_participle_verbs",
1194
+ "validation_split": "train",
1195
+ "doc_to_text": "",
1196
+ "doc_to_target": 0,
1197
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1198
+ "description": "",
1199
+ "target_delimiter": " ",
1200
+ "fewshot_delimiter": "\n\n",
1201
+ "num_fewshot": 0,
1202
+ "metric_list": [
1203
+ {
1204
+ "metric": "acc"
1205
+ }
1206
+ ],
1207
+ "output_type": "multiple_choice",
1208
+ "repeats": 1,
1209
+ "should_decontaminate": true,
1210
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1211
+ "metadata": {
1212
+ "version": 1.0
1213
+ }
1214
+ },
1215
+ "blimp_irregular_plural_subject_verb_agreement_1": {
1216
+ "task": "blimp_irregular_plural_subject_verb_agreement_1",
1217
+ "group": "blimp",
1218
+ "dataset_path": "blimp",
1219
+ "dataset_name": "irregular_plural_subject_verb_agreement_1",
1220
+ "validation_split": "train",
1221
+ "doc_to_text": "",
1222
+ "doc_to_target": 0,
1223
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1224
+ "description": "",
1225
+ "target_delimiter": " ",
1226
+ "fewshot_delimiter": "\n\n",
1227
+ "num_fewshot": 0,
1228
+ "metric_list": [
1229
+ {
1230
+ "metric": "acc"
1231
+ }
1232
+ ],
1233
+ "output_type": "multiple_choice",
1234
+ "repeats": 1,
1235
+ "should_decontaminate": true,
1236
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1237
+ "metadata": {
1238
+ "version": 1.0
1239
+ }
1240
+ },
1241
+ "blimp_irregular_plural_subject_verb_agreement_2": {
1242
+ "task": "blimp_irregular_plural_subject_verb_agreement_2",
1243
+ "group": "blimp",
1244
+ "dataset_path": "blimp",
1245
+ "dataset_name": "irregular_plural_subject_verb_agreement_2",
1246
+ "validation_split": "train",
1247
+ "doc_to_text": "",
1248
+ "doc_to_target": 0,
1249
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1250
+ "description": "",
1251
+ "target_delimiter": " ",
1252
+ "fewshot_delimiter": "\n\n",
1253
+ "num_fewshot": 0,
1254
+ "metric_list": [
1255
+ {
1256
+ "metric": "acc"
1257
+ }
1258
+ ],
1259
+ "output_type": "multiple_choice",
1260
+ "repeats": 1,
1261
+ "should_decontaminate": true,
1262
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1263
+ "metadata": {
1264
+ "version": 1.0
1265
+ }
1266
+ },
1267
+ "blimp_left_branch_island_echo_question": {
1268
+ "task": "blimp_left_branch_island_echo_question",
1269
+ "group": "blimp",
1270
+ "dataset_path": "blimp",
1271
+ "dataset_name": "left_branch_island_echo_question",
1272
+ "validation_split": "train",
1273
+ "doc_to_text": "",
1274
+ "doc_to_target": 0,
1275
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1276
+ "description": "",
1277
+ "target_delimiter": " ",
1278
+ "fewshot_delimiter": "\n\n",
1279
+ "num_fewshot": 0,
1280
+ "metric_list": [
1281
+ {
1282
+ "metric": "acc"
1283
+ }
1284
+ ],
1285
+ "output_type": "multiple_choice",
1286
+ "repeats": 1,
1287
+ "should_decontaminate": true,
1288
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1289
+ "metadata": {
1290
+ "version": 1.0
1291
+ }
1292
+ },
1293
+ "blimp_left_branch_island_simple_question": {
1294
+ "task": "blimp_left_branch_island_simple_question",
1295
+ "group": "blimp",
1296
+ "dataset_path": "blimp",
1297
+ "dataset_name": "left_branch_island_simple_question",
1298
+ "validation_split": "train",
1299
+ "doc_to_text": "",
1300
+ "doc_to_target": 0,
1301
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1302
+ "description": "",
1303
+ "target_delimiter": " ",
1304
+ "fewshot_delimiter": "\n\n",
1305
+ "num_fewshot": 0,
1306
+ "metric_list": [
1307
+ {
1308
+ "metric": "acc"
1309
+ }
1310
+ ],
1311
+ "output_type": "multiple_choice",
1312
+ "repeats": 1,
1313
+ "should_decontaminate": true,
1314
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1315
+ "metadata": {
1316
+ "version": 1.0
1317
+ }
1318
+ },
1319
+ "blimp_matrix_question_npi_licensor_present": {
1320
+ "task": "blimp_matrix_question_npi_licensor_present",
1321
+ "group": "blimp",
1322
+ "dataset_path": "blimp",
1323
+ "dataset_name": "matrix_question_npi_licensor_present",
1324
+ "validation_split": "train",
1325
+ "doc_to_text": "",
1326
+ "doc_to_target": 0,
1327
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1328
+ "description": "",
1329
+ "target_delimiter": " ",
1330
+ "fewshot_delimiter": "\n\n",
1331
+ "num_fewshot": 0,
1332
+ "metric_list": [
1333
+ {
1334
+ "metric": "acc"
1335
+ }
1336
+ ],
1337
+ "output_type": "multiple_choice",
1338
+ "repeats": 1,
1339
+ "should_decontaminate": true,
1340
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1341
+ "metadata": {
1342
+ "version": 1.0
1343
+ }
1344
+ },
1345
+ "blimp_npi_present_1": {
1346
+ "task": "blimp_npi_present_1",
1347
+ "group": "blimp",
1348
+ "dataset_path": "blimp",
1349
+ "dataset_name": "npi_present_1",
1350
+ "validation_split": "train",
1351
+ "doc_to_text": "",
1352
+ "doc_to_target": 0,
1353
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1354
+ "description": "",
1355
+ "target_delimiter": " ",
1356
+ "fewshot_delimiter": "\n\n",
1357
+ "num_fewshot": 0,
1358
+ "metric_list": [
1359
+ {
1360
+ "metric": "acc"
1361
+ }
1362
+ ],
1363
+ "output_type": "multiple_choice",
1364
+ "repeats": 1,
1365
+ "should_decontaminate": true,
1366
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1367
+ "metadata": {
1368
+ "version": 1.0
1369
+ }
1370
+ },
1371
+ "blimp_npi_present_2": {
1372
+ "task": "blimp_npi_present_2",
1373
+ "group": "blimp",
1374
+ "dataset_path": "blimp",
1375
+ "dataset_name": "npi_present_2",
1376
+ "validation_split": "train",
1377
+ "doc_to_text": "",
1378
+ "doc_to_target": 0,
1379
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1380
+ "description": "",
1381
+ "target_delimiter": " ",
1382
+ "fewshot_delimiter": "\n\n",
1383
+ "num_fewshot": 0,
1384
+ "metric_list": [
1385
+ {
1386
+ "metric": "acc"
1387
+ }
1388
+ ],
1389
+ "output_type": "multiple_choice",
1390
+ "repeats": 1,
1391
+ "should_decontaminate": true,
1392
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1393
+ "metadata": {
1394
+ "version": 1.0
1395
+ }
1396
+ },
1397
+ "blimp_only_npi_licensor_present": {
1398
+ "task": "blimp_only_npi_licensor_present",
1399
+ "group": "blimp",
1400
+ "dataset_path": "blimp",
1401
+ "dataset_name": "only_npi_licensor_present",
1402
+ "validation_split": "train",
1403
+ "doc_to_text": "",
1404
+ "doc_to_target": 0,
1405
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1406
+ "description": "",
1407
+ "target_delimiter": " ",
1408
+ "fewshot_delimiter": "\n\n",
1409
+ "num_fewshot": 0,
1410
+ "metric_list": [
1411
+ {
1412
+ "metric": "acc"
1413
+ }
1414
+ ],
1415
+ "output_type": "multiple_choice",
1416
+ "repeats": 1,
1417
+ "should_decontaminate": true,
1418
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1419
+ "metadata": {
1420
+ "version": 1.0
1421
+ }
1422
+ },
1423
+ "blimp_only_npi_scope": {
1424
+ "task": "blimp_only_npi_scope",
1425
+ "group": "blimp",
1426
+ "dataset_path": "blimp",
1427
+ "dataset_name": "only_npi_scope",
1428
+ "validation_split": "train",
1429
+ "doc_to_text": "",
1430
+ "doc_to_target": 0,
1431
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1432
+ "description": "",
1433
+ "target_delimiter": " ",
1434
+ "fewshot_delimiter": "\n\n",
1435
+ "num_fewshot": 0,
1436
+ "metric_list": [
1437
+ {
1438
+ "metric": "acc"
1439
+ }
1440
+ ],
1441
+ "output_type": "multiple_choice",
1442
+ "repeats": 1,
1443
+ "should_decontaminate": true,
1444
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1445
+ "metadata": {
1446
+ "version": 1.0
1447
+ }
1448
+ },
1449
+ "blimp_passive_1": {
1450
+ "task": "blimp_passive_1",
1451
+ "group": "blimp",
1452
+ "dataset_path": "blimp",
1453
+ "dataset_name": "passive_1",
1454
+ "validation_split": "train",
1455
+ "doc_to_text": "",
1456
+ "doc_to_target": 0,
1457
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1458
+ "description": "",
1459
+ "target_delimiter": " ",
1460
+ "fewshot_delimiter": "\n\n",
1461
+ "num_fewshot": 0,
1462
+ "metric_list": [
1463
+ {
1464
+ "metric": "acc"
1465
+ }
1466
+ ],
1467
+ "output_type": "multiple_choice",
1468
+ "repeats": 1,
1469
+ "should_decontaminate": true,
1470
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1471
+ "metadata": {
1472
+ "version": 1.0
1473
+ }
1474
+ },
1475
+ "blimp_passive_2": {
1476
+ "task": "blimp_passive_2",
1477
+ "group": "blimp",
1478
+ "dataset_path": "blimp",
1479
+ "dataset_name": "passive_2",
1480
+ "validation_split": "train",
1481
+ "doc_to_text": "",
1482
+ "doc_to_target": 0,
1483
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1484
+ "description": "",
1485
+ "target_delimiter": " ",
1486
+ "fewshot_delimiter": "\n\n",
1487
+ "num_fewshot": 0,
1488
+ "metric_list": [
1489
+ {
1490
+ "metric": "acc"
1491
+ }
1492
+ ],
1493
+ "output_type": "multiple_choice",
1494
+ "repeats": 1,
1495
+ "should_decontaminate": true,
1496
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1497
+ "metadata": {
1498
+ "version": 1.0
1499
+ }
1500
+ },
1501
+ "blimp_principle_A_c_command": {
1502
+ "task": "blimp_principle_A_c_command",
1503
+ "group": "blimp",
1504
+ "dataset_path": "blimp",
1505
+ "dataset_name": "principle_A_c_command",
1506
+ "validation_split": "train",
1507
+ "doc_to_text": "",
1508
+ "doc_to_target": 0,
1509
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1510
+ "description": "",
1511
+ "target_delimiter": " ",
1512
+ "fewshot_delimiter": "\n\n",
1513
+ "num_fewshot": 0,
1514
+ "metric_list": [
1515
+ {
1516
+ "metric": "acc"
1517
+ }
1518
+ ],
1519
+ "output_type": "multiple_choice",
1520
+ "repeats": 1,
1521
+ "should_decontaminate": true,
1522
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1523
+ "metadata": {
1524
+ "version": 1.0
1525
+ }
1526
+ },
1527
+ "blimp_principle_A_case_1": {
1528
+ "task": "blimp_principle_A_case_1",
1529
+ "group": "blimp",
1530
+ "dataset_path": "blimp",
1531
+ "dataset_name": "principle_A_case_1",
1532
+ "validation_split": "train",
1533
+ "doc_to_text": "",
1534
+ "doc_to_target": 0,
1535
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1536
+ "description": "",
1537
+ "target_delimiter": " ",
1538
+ "fewshot_delimiter": "\n\n",
1539
+ "num_fewshot": 0,
1540
+ "metric_list": [
1541
+ {
1542
+ "metric": "acc"
1543
+ }
1544
+ ],
1545
+ "output_type": "multiple_choice",
1546
+ "repeats": 1,
1547
+ "should_decontaminate": true,
1548
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1549
+ "metadata": {
1550
+ "version": 1.0
1551
+ }
1552
+ },
1553
+ "blimp_principle_A_case_2": {
1554
+ "task": "blimp_principle_A_case_2",
1555
+ "group": "blimp",
1556
+ "dataset_path": "blimp",
1557
+ "dataset_name": "principle_A_case_2",
1558
+ "validation_split": "train",
1559
+ "doc_to_text": "",
1560
+ "doc_to_target": 0,
1561
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1562
+ "description": "",
1563
+ "target_delimiter": " ",
1564
+ "fewshot_delimiter": "\n\n",
1565
+ "num_fewshot": 0,
1566
+ "metric_list": [
1567
+ {
1568
+ "metric": "acc"
1569
+ }
1570
+ ],
1571
+ "output_type": "multiple_choice",
1572
+ "repeats": 1,
1573
+ "should_decontaminate": true,
1574
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1575
+ "metadata": {
1576
+ "version": 1.0
1577
+ }
1578
+ },
1579
+ "blimp_principle_A_domain_1": {
1580
+ "task": "blimp_principle_A_domain_1",
1581
+ "group": "blimp",
1582
+ "dataset_path": "blimp",
1583
+ "dataset_name": "principle_A_domain_1",
1584
+ "validation_split": "train",
1585
+ "doc_to_text": "",
1586
+ "doc_to_target": 0,
1587
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1588
+ "description": "",
1589
+ "target_delimiter": " ",
1590
+ "fewshot_delimiter": "\n\n",
1591
+ "num_fewshot": 0,
1592
+ "metric_list": [
1593
+ {
1594
+ "metric": "acc"
1595
+ }
1596
+ ],
1597
+ "output_type": "multiple_choice",
1598
+ "repeats": 1,
1599
+ "should_decontaminate": true,
1600
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1601
+ "metadata": {
1602
+ "version": 1.0
1603
+ }
1604
+ },
1605
+ "blimp_principle_A_domain_2": {
1606
+ "task": "blimp_principle_A_domain_2",
1607
+ "group": "blimp",
1608
+ "dataset_path": "blimp",
1609
+ "dataset_name": "principle_A_domain_2",
1610
+ "validation_split": "train",
1611
+ "doc_to_text": "",
1612
+ "doc_to_target": 0,
1613
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1614
+ "description": "",
1615
+ "target_delimiter": " ",
1616
+ "fewshot_delimiter": "\n\n",
1617
+ "num_fewshot": 0,
1618
+ "metric_list": [
1619
+ {
1620
+ "metric": "acc"
1621
+ }
1622
+ ],
1623
+ "output_type": "multiple_choice",
1624
+ "repeats": 1,
1625
+ "should_decontaminate": true,
1626
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1627
+ "metadata": {
1628
+ "version": 1.0
1629
+ }
1630
+ },
1631
+ "blimp_principle_A_domain_3": {
1632
+ "task": "blimp_principle_A_domain_3",
1633
+ "group": "blimp",
1634
+ "dataset_path": "blimp",
1635
+ "dataset_name": "principle_A_domain_3",
1636
+ "validation_split": "train",
1637
+ "doc_to_text": "",
1638
+ "doc_to_target": 0,
1639
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1640
+ "description": "",
1641
+ "target_delimiter": " ",
1642
+ "fewshot_delimiter": "\n\n",
1643
+ "num_fewshot": 0,
1644
+ "metric_list": [
1645
+ {
1646
+ "metric": "acc"
1647
+ }
1648
+ ],
1649
+ "output_type": "multiple_choice",
1650
+ "repeats": 1,
1651
+ "should_decontaminate": true,
1652
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1653
+ "metadata": {
1654
+ "version": 1.0
1655
+ }
1656
+ },
1657
+ "blimp_principle_A_reconstruction": {
1658
+ "task": "blimp_principle_A_reconstruction",
1659
+ "group": "blimp",
1660
+ "dataset_path": "blimp",
1661
+ "dataset_name": "principle_A_reconstruction",
1662
+ "validation_split": "train",
1663
+ "doc_to_text": "",
1664
+ "doc_to_target": 0,
1665
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1666
+ "description": "",
1667
+ "target_delimiter": " ",
1668
+ "fewshot_delimiter": "\n\n",
1669
+ "num_fewshot": 0,
1670
+ "metric_list": [
1671
+ {
1672
+ "metric": "acc"
1673
+ }
1674
+ ],
1675
+ "output_type": "multiple_choice",
1676
+ "repeats": 1,
1677
+ "should_decontaminate": true,
1678
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1679
+ "metadata": {
1680
+ "version": 1.0
1681
+ }
1682
+ },
1683
+ "blimp_regular_plural_subject_verb_agreement_1": {
1684
+ "task": "blimp_regular_plural_subject_verb_agreement_1",
1685
+ "group": "blimp",
1686
+ "dataset_path": "blimp",
1687
+ "dataset_name": "regular_plural_subject_verb_agreement_1",
1688
+ "validation_split": "train",
1689
+ "doc_to_text": "",
1690
+ "doc_to_target": 0,
1691
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1692
+ "description": "",
1693
+ "target_delimiter": " ",
1694
+ "fewshot_delimiter": "\n\n",
1695
+ "num_fewshot": 0,
1696
+ "metric_list": [
1697
+ {
1698
+ "metric": "acc"
1699
+ }
1700
+ ],
1701
+ "output_type": "multiple_choice",
1702
+ "repeats": 1,
1703
+ "should_decontaminate": true,
1704
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1705
+ "metadata": {
1706
+ "version": 1.0
1707
+ }
1708
+ },
1709
+ "blimp_regular_plural_subject_verb_agreement_2": {
1710
+ "task": "blimp_regular_plural_subject_verb_agreement_2",
1711
+ "group": "blimp",
1712
+ "dataset_path": "blimp",
1713
+ "dataset_name": "regular_plural_subject_verb_agreement_2",
1714
+ "validation_split": "train",
1715
+ "doc_to_text": "",
1716
+ "doc_to_target": 0,
1717
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1718
+ "description": "",
1719
+ "target_delimiter": " ",
1720
+ "fewshot_delimiter": "\n\n",
1721
+ "num_fewshot": 0,
1722
+ "metric_list": [
1723
+ {
1724
+ "metric": "acc"
1725
+ }
1726
+ ],
1727
+ "output_type": "multiple_choice",
1728
+ "repeats": 1,
1729
+ "should_decontaminate": true,
1730
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1731
+ "metadata": {
1732
+ "version": 1.0
1733
+ }
1734
+ },
1735
+ "blimp_sentential_negation_npi_licensor_present": {
1736
+ "task": "blimp_sentential_negation_npi_licensor_present",
1737
+ "group": "blimp",
1738
+ "dataset_path": "blimp",
1739
+ "dataset_name": "sentential_negation_npi_licensor_present",
1740
+ "validation_split": "train",
1741
+ "doc_to_text": "",
1742
+ "doc_to_target": 0,
1743
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1744
+ "description": "",
1745
+ "target_delimiter": " ",
1746
+ "fewshot_delimiter": "\n\n",
1747
+ "num_fewshot": 0,
1748
+ "metric_list": [
1749
+ {
1750
+ "metric": "acc"
1751
+ }
1752
+ ],
1753
+ "output_type": "multiple_choice",
1754
+ "repeats": 1,
1755
+ "should_decontaminate": true,
1756
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1757
+ "metadata": {
1758
+ "version": 1.0
1759
+ }
1760
+ },
1761
+ "blimp_sentential_negation_npi_scope": {
1762
+ "task": "blimp_sentential_negation_npi_scope",
1763
+ "group": "blimp",
1764
+ "dataset_path": "blimp",
1765
+ "dataset_name": "sentential_negation_npi_scope",
1766
+ "validation_split": "train",
1767
+ "doc_to_text": "",
1768
+ "doc_to_target": 0,
1769
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1770
+ "description": "",
1771
+ "target_delimiter": " ",
1772
+ "fewshot_delimiter": "\n\n",
1773
+ "num_fewshot": 0,
1774
+ "metric_list": [
1775
+ {
1776
+ "metric": "acc"
1777
+ }
1778
+ ],
1779
+ "output_type": "multiple_choice",
1780
+ "repeats": 1,
1781
+ "should_decontaminate": true,
1782
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1783
+ "metadata": {
1784
+ "version": 1.0
1785
+ }
1786
+ },
1787
+ "blimp_sentential_subject_island": {
1788
+ "task": "blimp_sentential_subject_island",
1789
+ "group": "blimp",
1790
+ "dataset_path": "blimp",
1791
+ "dataset_name": "sentential_subject_island",
1792
+ "validation_split": "train",
1793
+ "doc_to_text": "",
1794
+ "doc_to_target": 0,
1795
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1796
+ "description": "",
1797
+ "target_delimiter": " ",
1798
+ "fewshot_delimiter": "\n\n",
1799
+ "num_fewshot": 0,
1800
+ "metric_list": [
1801
+ {
1802
+ "metric": "acc"
1803
+ }
1804
+ ],
1805
+ "output_type": "multiple_choice",
1806
+ "repeats": 1,
1807
+ "should_decontaminate": true,
1808
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1809
+ "metadata": {
1810
+ "version": 1.0
1811
+ }
1812
+ },
1813
+ "blimp_superlative_quantifiers_1": {
1814
+ "task": "blimp_superlative_quantifiers_1",
1815
+ "group": "blimp",
1816
+ "dataset_path": "blimp",
1817
+ "dataset_name": "superlative_quantifiers_1",
1818
+ "validation_split": "train",
1819
+ "doc_to_text": "",
1820
+ "doc_to_target": 0,
1821
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1822
+ "description": "",
1823
+ "target_delimiter": " ",
1824
+ "fewshot_delimiter": "\n\n",
1825
+ "num_fewshot": 0,
1826
+ "metric_list": [
1827
+ {
1828
+ "metric": "acc"
1829
+ }
1830
+ ],
1831
+ "output_type": "multiple_choice",
1832
+ "repeats": 1,
1833
+ "should_decontaminate": true,
1834
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1835
+ "metadata": {
1836
+ "version": 1.0
1837
+ }
1838
+ },
1839
+ "blimp_superlative_quantifiers_2": {
1840
+ "task": "blimp_superlative_quantifiers_2",
1841
+ "group": "blimp",
1842
+ "dataset_path": "blimp",
1843
+ "dataset_name": "superlative_quantifiers_2",
1844
+ "validation_split": "train",
1845
+ "doc_to_text": "",
1846
+ "doc_to_target": 0,
1847
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1848
+ "description": "",
1849
+ "target_delimiter": " ",
1850
+ "fewshot_delimiter": "\n\n",
1851
+ "num_fewshot": 0,
1852
+ "metric_list": [
1853
+ {
1854
+ "metric": "acc"
1855
+ }
1856
+ ],
1857
+ "output_type": "multiple_choice",
1858
+ "repeats": 1,
1859
+ "should_decontaminate": true,
1860
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1861
+ "metadata": {
1862
+ "version": 1.0
1863
+ }
1864
+ },
1865
+ "blimp_tough_vs_raising_1": {
1866
+ "task": "blimp_tough_vs_raising_1",
1867
+ "group": "blimp",
1868
+ "dataset_path": "blimp",
1869
+ "dataset_name": "tough_vs_raising_1",
1870
+ "validation_split": "train",
1871
+ "doc_to_text": "",
1872
+ "doc_to_target": 0,
1873
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1874
+ "description": "",
1875
+ "target_delimiter": " ",
1876
+ "fewshot_delimiter": "\n\n",
1877
+ "num_fewshot": 0,
1878
+ "metric_list": [
1879
+ {
1880
+ "metric": "acc"
1881
+ }
1882
+ ],
1883
+ "output_type": "multiple_choice",
1884
+ "repeats": 1,
1885
+ "should_decontaminate": true,
1886
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1887
+ "metadata": {
1888
+ "version": 1.0
1889
+ }
1890
+ },
1891
+ "blimp_tough_vs_raising_2": {
1892
+ "task": "blimp_tough_vs_raising_2",
1893
+ "group": "blimp",
1894
+ "dataset_path": "blimp",
1895
+ "dataset_name": "tough_vs_raising_2",
1896
+ "validation_split": "train",
1897
+ "doc_to_text": "",
1898
+ "doc_to_target": 0,
1899
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1900
+ "description": "",
1901
+ "target_delimiter": " ",
1902
+ "fewshot_delimiter": "\n\n",
1903
+ "num_fewshot": 0,
1904
+ "metric_list": [
1905
+ {
1906
+ "metric": "acc"
1907
+ }
1908
+ ],
1909
+ "output_type": "multiple_choice",
1910
+ "repeats": 1,
1911
+ "should_decontaminate": true,
1912
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1913
+ "metadata": {
1914
+ "version": 1.0
1915
+ }
1916
+ },
1917
+ "blimp_transitive": {
1918
+ "task": "blimp_transitive",
1919
+ "group": "blimp",
1920
+ "dataset_path": "blimp",
1921
+ "dataset_name": "transitive",
1922
+ "validation_split": "train",
1923
+ "doc_to_text": "",
1924
+ "doc_to_target": 0,
1925
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1926
+ "description": "",
1927
+ "target_delimiter": " ",
1928
+ "fewshot_delimiter": "\n\n",
1929
+ "num_fewshot": 0,
1930
+ "metric_list": [
1931
+ {
1932
+ "metric": "acc"
1933
+ }
1934
+ ],
1935
+ "output_type": "multiple_choice",
1936
+ "repeats": 1,
1937
+ "should_decontaminate": true,
1938
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1939
+ "metadata": {
1940
+ "version": 1.0
1941
+ }
1942
+ },
1943
+ "blimp_wh_island": {
1944
+ "task": "blimp_wh_island",
1945
+ "group": "blimp",
1946
+ "dataset_path": "blimp",
1947
+ "dataset_name": "wh_island",
1948
+ "validation_split": "train",
1949
+ "doc_to_text": "",
1950
+ "doc_to_target": 0,
1951
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1952
+ "description": "",
1953
+ "target_delimiter": " ",
1954
+ "fewshot_delimiter": "\n\n",
1955
+ "num_fewshot": 0,
1956
+ "metric_list": [
1957
+ {
1958
+ "metric": "acc"
1959
+ }
1960
+ ],
1961
+ "output_type": "multiple_choice",
1962
+ "repeats": 1,
1963
+ "should_decontaminate": true,
1964
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1965
+ "metadata": {
1966
+ "version": 1.0
1967
+ }
1968
+ },
1969
+ "blimp_wh_questions_object_gap": {
1970
+ "task": "blimp_wh_questions_object_gap",
1971
+ "group": "blimp",
1972
+ "dataset_path": "blimp",
1973
+ "dataset_name": "wh_questions_object_gap",
1974
+ "validation_split": "train",
1975
+ "doc_to_text": "",
1976
+ "doc_to_target": 0,
1977
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1978
+ "description": "",
1979
+ "target_delimiter": " ",
1980
+ "fewshot_delimiter": "\n\n",
1981
+ "num_fewshot": 0,
1982
+ "metric_list": [
1983
+ {
1984
+ "metric": "acc"
1985
+ }
1986
+ ],
1987
+ "output_type": "multiple_choice",
1988
+ "repeats": 1,
1989
+ "should_decontaminate": true,
1990
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1991
+ "metadata": {
1992
+ "version": 1.0
1993
+ }
1994
+ },
1995
+ "blimp_wh_questions_subject_gap": {
1996
+ "task": "blimp_wh_questions_subject_gap",
1997
+ "group": "blimp",
1998
+ "dataset_path": "blimp",
1999
+ "dataset_name": "wh_questions_subject_gap",
2000
+ "validation_split": "train",
2001
+ "doc_to_text": "",
2002
+ "doc_to_target": 0,
2003
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
2004
+ "description": "",
2005
+ "target_delimiter": " ",
2006
+ "fewshot_delimiter": "\n\n",
2007
+ "num_fewshot": 0,
2008
+ "metric_list": [
2009
+ {
2010
+ "metric": "acc"
2011
+ }
2012
+ ],
2013
+ "output_type": "multiple_choice",
2014
+ "repeats": 1,
2015
+ "should_decontaminate": true,
2016
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
2017
+ "metadata": {
2018
+ "version": 1.0
2019
+ }
2020
+ },
2021
+ "blimp_wh_questions_subject_gap_long_distance": {
2022
+ "task": "blimp_wh_questions_subject_gap_long_distance",
2023
+ "group": "blimp",
2024
+ "dataset_path": "blimp",
2025
+ "dataset_name": "wh_questions_subject_gap_long_distance",
2026
+ "validation_split": "train",
2027
+ "doc_to_text": "",
2028
+ "doc_to_target": 0,
2029
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
2030
+ "description": "",
2031
+ "target_delimiter": " ",
2032
+ "fewshot_delimiter": "\n\n",
2033
+ "num_fewshot": 0,
2034
+ "metric_list": [
2035
+ {
2036
+ "metric": "acc"
2037
+ }
2038
+ ],
2039
+ "output_type": "multiple_choice",
2040
+ "repeats": 1,
2041
+ "should_decontaminate": true,
2042
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
2043
+ "metadata": {
2044
+ "version": 1.0
2045
+ }
2046
+ },
2047
+ "blimp_wh_vs_that_no_gap": {
2048
+ "task": "blimp_wh_vs_that_no_gap",
2049
+ "group": "blimp",
2050
+ "dataset_path": "blimp",
2051
+ "dataset_name": "wh_vs_that_no_gap",
2052
+ "validation_split": "train",
2053
+ "doc_to_text": "",
2054
+ "doc_to_target": 0,
2055
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
2056
+ "description": "",
2057
+ "target_delimiter": " ",
2058
+ "fewshot_delimiter": "\n\n",
2059
+ "num_fewshot": 0,
2060
+ "metric_list": [
2061
+ {
2062
+ "metric": "acc"
2063
+ }
2064
+ ],
2065
+ "output_type": "multiple_choice",
2066
+ "repeats": 1,
2067
+ "should_decontaminate": true,
2068
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
2069
+ "metadata": {
2070
+ "version": 1.0
2071
+ }
2072
+ },
2073
+ "blimp_wh_vs_that_no_gap_long_distance": {
2074
+ "task": "blimp_wh_vs_that_no_gap_long_distance",
2075
+ "group": "blimp",
2076
+ "dataset_path": "blimp",
2077
+ "dataset_name": "wh_vs_that_no_gap_long_distance",
2078
+ "validation_split": "train",
2079
+ "doc_to_text": "",
2080
+ "doc_to_target": 0,
2081
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
2082
+ "description": "",
2083
+ "target_delimiter": " ",
2084
+ "fewshot_delimiter": "\n\n",
2085
+ "num_fewshot": 0,
2086
+ "metric_list": [
2087
+ {
2088
+ "metric": "acc"
2089
+ }
2090
+ ],
2091
+ "output_type": "multiple_choice",
2092
+ "repeats": 1,
2093
+ "should_decontaminate": true,
2094
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
2095
+ "metadata": {
2096
+ "version": 1.0
2097
+ }
2098
+ },
2099
+ "blimp_wh_vs_that_with_gap": {
2100
+ "task": "blimp_wh_vs_that_with_gap",
2101
+ "group": "blimp",
2102
+ "dataset_path": "blimp",
2103
+ "dataset_name": "wh_vs_that_with_gap",
2104
+ "validation_split": "train",
2105
+ "doc_to_text": "",
2106
+ "doc_to_target": 0,
2107
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
2108
+ "description": "",
2109
+ "target_delimiter": " ",
2110
+ "fewshot_delimiter": "\n\n",
2111
+ "num_fewshot": 0,
2112
+ "metric_list": [
2113
+ {
2114
+ "metric": "acc"
2115
+ }
2116
+ ],
2117
+ "output_type": "multiple_choice",
2118
+ "repeats": 1,
2119
+ "should_decontaminate": true,
2120
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
2121
+ "metadata": {
2122
+ "version": 1.0
2123
+ }
2124
+ },
2125
+ "blimp_wh_vs_that_with_gap_long_distance": {
2126
+ "task": "blimp_wh_vs_that_with_gap_long_distance",
2127
+ "group": "blimp",
2128
+ "dataset_path": "blimp",
2129
+ "dataset_name": "wh_vs_that_with_gap_long_distance",
2130
+ "validation_split": "train",
2131
+ "doc_to_text": "",
2132
+ "doc_to_target": 0,
2133
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
2134
+ "description": "",
2135
+ "target_delimiter": " ",
2136
+ "fewshot_delimiter": "\n\n",
2137
+ "num_fewshot": 0,
2138
+ "metric_list": [
2139
+ {
2140
+ "metric": "acc"
2141
+ }
2142
+ ],
2143
+ "output_type": "multiple_choice",
2144
+ "repeats": 1,
2145
+ "should_decontaminate": true,
2146
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
2147
+ "metadata": {
2148
+ "version": 1.0
2149
+ }
2150
+ }
2151
+ },
2152
+ "versions": {
2153
+ "blimp_adjunct_island": 1.0,
2154
+ "blimp_anaphor_gender_agreement": 1.0,
2155
+ "blimp_anaphor_number_agreement": 1.0,
2156
+ "blimp_animate_subject_passive": 1.0,
2157
+ "blimp_animate_subject_trans": 1.0,
2158
+ "blimp_causative": 1.0,
2159
+ "blimp_complex_NP_island": 1.0,
2160
+ "blimp_coordinate_structure_constraint_complex_left_branch": 1.0,
2161
+ "blimp_coordinate_structure_constraint_object_extraction": 1.0,
2162
+ "blimp_determiner_noun_agreement_1": 1.0,
2163
+ "blimp_determiner_noun_agreement_2": 1.0,
2164
+ "blimp_determiner_noun_agreement_irregular_1": 1.0,
2165
+ "blimp_determiner_noun_agreement_irregular_2": 1.0,
2166
+ "blimp_determiner_noun_agreement_with_adj_2": 1.0,
2167
+ "blimp_determiner_noun_agreement_with_adj_irregular_1": 1.0,
2168
+ "blimp_determiner_noun_agreement_with_adj_irregular_2": 1.0,
2169
+ "blimp_determiner_noun_agreement_with_adjective_1": 1.0,
2170
+ "blimp_distractor_agreement_relational_noun": 1.0,
2171
+ "blimp_distractor_agreement_relative_clause": 1.0,
2172
+ "blimp_drop_argument": 1.0,
2173
+ "blimp_ellipsis_n_bar_1": 1.0,
2174
+ "blimp_ellipsis_n_bar_2": 1.0,
2175
+ "blimp_existential_there_object_raising": 1.0,
2176
+ "blimp_existential_there_quantifiers_1": 1.0,
2177
+ "blimp_existential_there_quantifiers_2": 1.0,
2178
+ "blimp_existential_there_subject_raising": 1.0,
2179
+ "blimp_expletive_it_object_raising": 1.0,
2180
+ "blimp_inchoative": 1.0,
2181
+ "blimp_intransitive": 1.0,
2182
+ "blimp_irregular_past_participle_adjectives": 1.0,
2183
+ "blimp_irregular_past_participle_verbs": 1.0,
2184
+ "blimp_irregular_plural_subject_verb_agreement_1": 1.0,
2185
+ "blimp_irregular_plural_subject_verb_agreement_2": 1.0,
2186
+ "blimp_left_branch_island_echo_question": 1.0,
2187
+ "blimp_left_branch_island_simple_question": 1.0,
2188
+ "blimp_matrix_question_npi_licensor_present": 1.0,
2189
+ "blimp_npi_present_1": 1.0,
2190
+ "blimp_npi_present_2": 1.0,
2191
+ "blimp_only_npi_licensor_present": 1.0,
2192
+ "blimp_only_npi_scope": 1.0,
2193
+ "blimp_passive_1": 1.0,
2194
+ "blimp_passive_2": 1.0,
2195
+ "blimp_principle_A_c_command": 1.0,
2196
+ "blimp_principle_A_case_1": 1.0,
2197
+ "blimp_principle_A_case_2": 1.0,
2198
+ "blimp_principle_A_domain_1": 1.0,
2199
+ "blimp_principle_A_domain_2": 1.0,
2200
+ "blimp_principle_A_domain_3": 1.0,
2201
+ "blimp_principle_A_reconstruction": 1.0,
2202
+ "blimp_regular_plural_subject_verb_agreement_1": 1.0,
2203
+ "blimp_regular_plural_subject_verb_agreement_2": 1.0,
2204
+ "blimp_sentential_negation_npi_licensor_present": 1.0,
2205
+ "blimp_sentential_negation_npi_scope": 1.0,
2206
+ "blimp_sentential_subject_island": 1.0,
2207
+ "blimp_superlative_quantifiers_1": 1.0,
2208
+ "blimp_superlative_quantifiers_2": 1.0,
2209
+ "blimp_tough_vs_raising_1": 1.0,
2210
+ "blimp_tough_vs_raising_2": 1.0,
2211
+ "blimp_transitive": 1.0,
2212
+ "blimp_wh_island": 1.0,
2213
+ "blimp_wh_questions_object_gap": 1.0,
2214
+ "blimp_wh_questions_subject_gap": 1.0,
2215
+ "blimp_wh_questions_subject_gap_long_distance": 1.0,
2216
+ "blimp_wh_vs_that_no_gap": 1.0,
2217
+ "blimp_wh_vs_that_no_gap_long_distance": 1.0,
2218
+ "blimp_wh_vs_that_with_gap": 1.0,
2219
+ "blimp_wh_vs_that_with_gap_long_distance": 1.0
2220
+ },
2221
+ "n-shot": {
2222
+ "blimp_adjunct_island": 0,
2223
+ "blimp_anaphor_gender_agreement": 0,
2224
+ "blimp_anaphor_number_agreement": 0,
2225
+ "blimp_animate_subject_passive": 0,
2226
+ "blimp_animate_subject_trans": 0,
2227
+ "blimp_causative": 0,
2228
+ "blimp_complex_NP_island": 0,
2229
+ "blimp_coordinate_structure_constraint_complex_left_branch": 0,
2230
+ "blimp_coordinate_structure_constraint_object_extraction": 0,
2231
+ "blimp_determiner_noun_agreement_1": 0,
2232
+ "blimp_determiner_noun_agreement_2": 0,
2233
+ "blimp_determiner_noun_agreement_irregular_1": 0,
2234
+ "blimp_determiner_noun_agreement_irregular_2": 0,
2235
+ "blimp_determiner_noun_agreement_with_adj_2": 0,
2236
+ "blimp_determiner_noun_agreement_with_adj_irregular_1": 0,
2237
+ "blimp_determiner_noun_agreement_with_adj_irregular_2": 0,
2238
+ "blimp_determiner_noun_agreement_with_adjective_1": 0,
2239
+ "blimp_distractor_agreement_relational_noun": 0,
2240
+ "blimp_distractor_agreement_relative_clause": 0,
2241
+ "blimp_drop_argument": 0,
2242
+ "blimp_ellipsis_n_bar_1": 0,
2243
+ "blimp_ellipsis_n_bar_2": 0,
2244
+ "blimp_existential_there_object_raising": 0,
2245
+ "blimp_existential_there_quantifiers_1": 0,
2246
+ "blimp_existential_there_quantifiers_2": 0,
2247
+ "blimp_existential_there_subject_raising": 0,
2248
+ "blimp_expletive_it_object_raising": 0,
2249
+ "blimp_inchoative": 0,
2250
+ "blimp_intransitive": 0,
2251
+ "blimp_irregular_past_participle_adjectives": 0,
2252
+ "blimp_irregular_past_participle_verbs": 0,
2253
+ "blimp_irregular_plural_subject_verb_agreement_1": 0,
2254
+ "blimp_irregular_plural_subject_verb_agreement_2": 0,
2255
+ "blimp_left_branch_island_echo_question": 0,
2256
+ "blimp_left_branch_island_simple_question": 0,
2257
+ "blimp_matrix_question_npi_licensor_present": 0,
2258
+ "blimp_npi_present_1": 0,
2259
+ "blimp_npi_present_2": 0,
2260
+ "blimp_only_npi_licensor_present": 0,
2261
+ "blimp_only_npi_scope": 0,
2262
+ "blimp_passive_1": 0,
2263
+ "blimp_passive_2": 0,
2264
+ "blimp_principle_A_c_command": 0,
2265
+ "blimp_principle_A_case_1": 0,
2266
+ "blimp_principle_A_case_2": 0,
2267
+ "blimp_principle_A_domain_1": 0,
2268
+ "blimp_principle_A_domain_2": 0,
2269
+ "blimp_principle_A_domain_3": 0,
2270
+ "blimp_principle_A_reconstruction": 0,
2271
+ "blimp_regular_plural_subject_verb_agreement_1": 0,
2272
+ "blimp_regular_plural_subject_verb_agreement_2": 0,
2273
+ "blimp_sentential_negation_npi_licensor_present": 0,
2274
+ "blimp_sentential_negation_npi_scope": 0,
2275
+ "blimp_sentential_subject_island": 0,
2276
+ "blimp_superlative_quantifiers_1": 0,
2277
+ "blimp_superlative_quantifiers_2": 0,
2278
+ "blimp_tough_vs_raising_1": 0,
2279
+ "blimp_tough_vs_raising_2": 0,
2280
+ "blimp_transitive": 0,
2281
+ "blimp_wh_island": 0,
2282
+ "blimp_wh_questions_object_gap": 0,
2283
+ "blimp_wh_questions_subject_gap": 0,
2284
+ "blimp_wh_questions_subject_gap_long_distance": 0,
2285
+ "blimp_wh_vs_that_no_gap": 0,
2286
+ "blimp_wh_vs_that_no_gap_long_distance": 0,
2287
+ "blimp_wh_vs_that_with_gap": 0,
2288
+ "blimp_wh_vs_that_with_gap_long_distance": 0
2289
+ },
2290
+ "n-samples": {
2291
+ "blimp_wh_vs_that_with_gap_long_distance": {
2292
+ "original": 1000,
2293
+ "effective": 1000
2294
+ },
2295
+ "blimp_wh_vs_that_with_gap": {
2296
+ "original": 1000,
2297
+ "effective": 1000
2298
+ },
2299
+ "blimp_wh_vs_that_no_gap_long_distance": {
2300
+ "original": 1000,
2301
+ "effective": 1000
2302
+ },
2303
+ "blimp_wh_vs_that_no_gap": {
2304
+ "original": 1000,
2305
+ "effective": 1000
2306
+ },
2307
+ "blimp_wh_questions_subject_gap_long_distance": {
2308
+ "original": 1000,
2309
+ "effective": 1000
2310
+ },
2311
+ "blimp_wh_questions_subject_gap": {
2312
+ "original": 1000,
2313
+ "effective": 1000
2314
+ },
2315
+ "blimp_wh_questions_object_gap": {
2316
+ "original": 1000,
2317
+ "effective": 1000
2318
+ },
2319
+ "blimp_wh_island": {
2320
+ "original": 1000,
2321
+ "effective": 1000
2322
+ },
2323
+ "blimp_transitive": {
2324
+ "original": 1000,
2325
+ "effective": 1000
2326
+ },
2327
+ "blimp_tough_vs_raising_2": {
2328
+ "original": 1000,
2329
+ "effective": 1000
2330
+ },
2331
+ "blimp_tough_vs_raising_1": {
2332
+ "original": 1000,
2333
+ "effective": 1000
2334
+ },
2335
+ "blimp_superlative_quantifiers_2": {
2336
+ "original": 1000,
2337
+ "effective": 1000
2338
+ },
2339
+ "blimp_superlative_quantifiers_1": {
2340
+ "original": 1000,
2341
+ "effective": 1000
2342
+ },
2343
+ "blimp_sentential_subject_island": {
2344
+ "original": 1000,
2345
+ "effective": 1000
2346
+ },
2347
+ "blimp_sentential_negation_npi_scope": {
2348
+ "original": 1000,
2349
+ "effective": 1000
2350
+ },
2351
+ "blimp_sentential_negation_npi_licensor_present": {
2352
+ "original": 1000,
2353
+ "effective": 1000
2354
+ },
2355
+ "blimp_regular_plural_subject_verb_agreement_2": {
2356
+ "original": 1000,
2357
+ "effective": 1000
2358
+ },
2359
+ "blimp_regular_plural_subject_verb_agreement_1": {
2360
+ "original": 1000,
2361
+ "effective": 1000
2362
+ },
2363
+ "blimp_principle_A_reconstruction": {
2364
+ "original": 1000,
2365
+ "effective": 1000
2366
+ },
2367
+ "blimp_principle_A_domain_3": {
2368
+ "original": 1000,
2369
+ "effective": 1000
2370
+ },
2371
+ "blimp_principle_A_domain_2": {
2372
+ "original": 1000,
2373
+ "effective": 1000
2374
+ },
2375
+ "blimp_principle_A_domain_1": {
2376
+ "original": 1000,
2377
+ "effective": 1000
2378
+ },
2379
+ "blimp_principle_A_case_2": {
2380
+ "original": 1000,
2381
+ "effective": 1000
2382
+ },
2383
+ "blimp_principle_A_case_1": {
2384
+ "original": 1000,
2385
+ "effective": 1000
2386
+ },
2387
+ "blimp_principle_A_c_command": {
2388
+ "original": 1000,
2389
+ "effective": 1000
2390
+ },
2391
+ "blimp_passive_2": {
2392
+ "original": 1000,
2393
+ "effective": 1000
2394
+ },
2395
+ "blimp_passive_1": {
2396
+ "original": 1000,
2397
+ "effective": 1000
2398
+ },
2399
+ "blimp_only_npi_scope": {
2400
+ "original": 1000,
2401
+ "effective": 1000
2402
+ },
2403
+ "blimp_only_npi_licensor_present": {
2404
+ "original": 1000,
2405
+ "effective": 1000
2406
+ },
2407
+ "blimp_npi_present_2": {
2408
+ "original": 1000,
2409
+ "effective": 1000
2410
+ },
2411
+ "blimp_npi_present_1": {
2412
+ "original": 1000,
2413
+ "effective": 1000
2414
+ },
2415
+ "blimp_matrix_question_npi_licensor_present": {
2416
+ "original": 1000,
2417
+ "effective": 1000
2418
+ },
2419
+ "blimp_left_branch_island_simple_question": {
2420
+ "original": 1000,
2421
+ "effective": 1000
2422
+ },
2423
+ "blimp_left_branch_island_echo_question": {
2424
+ "original": 1000,
2425
+ "effective": 1000
2426
+ },
2427
+ "blimp_irregular_plural_subject_verb_agreement_2": {
2428
+ "original": 1000,
2429
+ "effective": 1000
2430
+ },
2431
+ "blimp_irregular_plural_subject_verb_agreement_1": {
2432
+ "original": 1000,
2433
+ "effective": 1000
2434
+ },
2435
+ "blimp_irregular_past_participle_verbs": {
2436
+ "original": 1000,
2437
+ "effective": 1000
2438
+ },
2439
+ "blimp_irregular_past_participle_adjectives": {
2440
+ "original": 1000,
2441
+ "effective": 1000
2442
+ },
2443
+ "blimp_intransitive": {
2444
+ "original": 1000,
2445
+ "effective": 1000
2446
+ },
2447
+ "blimp_inchoative": {
2448
+ "original": 1000,
2449
+ "effective": 1000
2450
+ },
2451
+ "blimp_expletive_it_object_raising": {
2452
+ "original": 1000,
2453
+ "effective": 1000
2454
+ },
2455
+ "blimp_existential_there_subject_raising": {
2456
+ "original": 1000,
2457
+ "effective": 1000
2458
+ },
2459
+ "blimp_existential_there_quantifiers_2": {
2460
+ "original": 1000,
2461
+ "effective": 1000
2462
+ },
2463
+ "blimp_existential_there_quantifiers_1": {
2464
+ "original": 1000,
2465
+ "effective": 1000
2466
+ },
2467
+ "blimp_existential_there_object_raising": {
2468
+ "original": 1000,
2469
+ "effective": 1000
2470
+ },
2471
+ "blimp_ellipsis_n_bar_2": {
2472
+ "original": 1000,
2473
+ "effective": 1000
2474
+ },
2475
+ "blimp_ellipsis_n_bar_1": {
2476
+ "original": 1000,
2477
+ "effective": 1000
2478
+ },
2479
+ "blimp_drop_argument": {
2480
+ "original": 1000,
2481
+ "effective": 1000
2482
+ },
2483
+ "blimp_distractor_agreement_relative_clause": {
2484
+ "original": 1000,
2485
+ "effective": 1000
2486
+ },
2487
+ "blimp_distractor_agreement_relational_noun": {
2488
+ "original": 1000,
2489
+ "effective": 1000
2490
+ },
2491
+ "blimp_determiner_noun_agreement_with_adjective_1": {
2492
+ "original": 1000,
2493
+ "effective": 1000
2494
+ },
2495
+ "blimp_determiner_noun_agreement_with_adj_irregular_2": {
2496
+ "original": 1000,
2497
+ "effective": 1000
2498
+ },
2499
+ "blimp_determiner_noun_agreement_with_adj_irregular_1": {
2500
+ "original": 1000,
2501
+ "effective": 1000
2502
+ },
2503
+ "blimp_determiner_noun_agreement_with_adj_2": {
2504
+ "original": 1000,
2505
+ "effective": 1000
2506
+ },
2507
+ "blimp_determiner_noun_agreement_irregular_2": {
2508
+ "original": 1000,
2509
+ "effective": 1000
2510
+ },
2511
+ "blimp_determiner_noun_agreement_irregular_1": {
2512
+ "original": 1000,
2513
+ "effective": 1000
2514
+ },
2515
+ "blimp_determiner_noun_agreement_2": {
2516
+ "original": 1000,
2517
+ "effective": 1000
2518
+ },
2519
+ "blimp_determiner_noun_agreement_1": {
2520
+ "original": 1000,
2521
+ "effective": 1000
2522
+ },
2523
+ "blimp_coordinate_structure_constraint_object_extraction": {
2524
+ "original": 1000,
2525
+ "effective": 1000
2526
+ },
2527
+ "blimp_coordinate_structure_constraint_complex_left_branch": {
2528
+ "original": 1000,
2529
+ "effective": 1000
2530
+ },
2531
+ "blimp_complex_NP_island": {
2532
+ "original": 1000,
2533
+ "effective": 1000
2534
+ },
2535
+ "blimp_causative": {
2536
+ "original": 1000,
2537
+ "effective": 1000
2538
+ },
2539
+ "blimp_animate_subject_trans": {
2540
+ "original": 1000,
2541
+ "effective": 1000
2542
+ },
2543
+ "blimp_animate_subject_passive": {
2544
+ "original": 1000,
2545
+ "effective": 1000
2546
+ },
2547
+ "blimp_anaphor_number_agreement": {
2548
+ "original": 1000,
2549
+ "effective": 1000
2550
+ },
2551
+ "blimp_anaphor_gender_agreement": {
2552
+ "original": 1000,
2553
+ "effective": 1000
2554
+ },
2555
+ "blimp_adjunct_island": {
2556
+ "original": 1000,
2557
+ "effective": 1000
2558
+ }
2559
+ },
2560
+ "config": {
2561
+ "model": "hf",
2562
+ "model_args": "pretrained=EleutherAI/pythia-14m-seed1,revision=step45000",
2563
+ "model_num_parameters": 14067712,
2564
+ "model_dtype": "torch.float16",
2565
+ "model_revision": "step45000",
2566
+ "model_sha": "6a56a88dbb56f315216c31814f5c54c2ebbd757f",
2567
+ "batch_size": "1024",
2568
+ "batch_sizes": [],
2569
+ "device": "cuda",
2570
+ "use_cache": null,
2571
+ "limit": null,
2572
+ "bootstrap_iters": 100000,
2573
+ "gen_kwargs": null,
2574
+ "random_seed": 0,
2575
+ "numpy_seed": 1234,
2576
+ "torch_seed": 1234,
2577
+ "fewshot_seed": 1234
2578
+ },
2579
+ "git_hash": "51a7ca9",
2580
+ "date": 1724071860.8847542,
2581
+ "pretty_env_info": "PyTorch version: 2.3.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: CentOS Linux release 7.9.2009 (Core) (x86_64)\nGCC version: (GCC) 12.1.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.17\n\nPython version: 3.12.1 (main, Jan 12 2024, 16:49:08) [GCC 12.1.0] (64-bit runtime)\nPython platform: Linux-3.10.0-1160.119.1.el7.tuxcare.els2.x86_64-x86_64-with-glibc2.17\nIs CUDA available: True\nCUDA runtime version: 12.4.99\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: GPU 0: NVIDIA GeForce RTX 2080 Ti\nNvidia driver version: 550.90.07\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nByte Order: Little Endian\nCPU(s): 32\nOn-line CPU(s) list: 0-31\nThread(s) per core: 1\nCore(s) per socket: 32\nSocket(s): 1\nNUMA node(s): 2\nVendor ID: AuthenticAMD\nCPU family: 23\nModel: 49\nModel name: AMD EPYC 7502P 32-Core Processor\nStepping: 0\nCPU MHz: 1500.000\nCPU max MHz: 2500.0000\nCPU min MHz: 1500.0000\nBogoMIPS: 5000.08\nVirtualization: AMD-V\nL1d cache: 32K\nL1i cache: 32K\nL2 cache: 512K\nL3 cache: 16384K\nNUMA node0 CPU(s): 0-15\nNUMA node1 CPU(s): 16-31\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc art rep_good nopl nonstop_tsc extd_apicid aperfmperf eagerfpu pni pclmulqdq monitor ssse3 fma cx16 sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw ibs skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_l2 cpb cat_l3 cdp_l3 hw_pstate sme ssbd ibrs ibpb stibp vmmcall fsgsbase bmi1 avx2 smep bmi2 cqm rdt_a rdseed adx smap clflushopt clwb sha_ni xsaveopt xsavec xgetbv1 cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local clzero irperf xsaveerptr arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif umip overflow_recov succor smca\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.3.0\n[conda] Could not collect",
2582
+ "transformers_version": "4.40.2",
2583
+ "upper_git_hash": null,
2584
+ "task_hashes": {},
2585
+ "model_source": "hf",
2586
+ "model_name": "EleutherAI/pythia-14m-seed1",
2587
+ "model_name_sanitized": "EleutherAI__pythia-14m-seed1",
2588
+ "start_time": 1526570.970456127,
2589
+ "end_time": 1526898.20736222,
2590
+ "total_evaluation_time_seconds": "327.23690609284677"
2591
+ }
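Each results_*.json in this commit follows the same lm-evaluation-harness layout shown above: a top-level "results" object keyed by BLiMP sub-task with "acc,none" and "acc_stderr,none" fields, an "n-samples" block (1000 examples per sub-task), and the run "config". Below is a minimal sketch for summarizing one such file, assuming only that layout; the file path is a placeholder, not a specific file from this commit.

import json
from statistics import mean

# Placeholder path: any results_*.json from this commit has the same structure.
path = "pythia-14m-seed1/step45000/EleutherAI__pythia-14m-seed1/results.json"

with open(path) as f:
    report = json.load(f)

# "results" maps each BLiMP sub-task to its metrics, e.g. "acc,none" and "acc_stderr,none".
accuracies = {task: metrics["acc,none"] for task, metrics in report["results"].items()}

for task, acc in sorted(accuracies.items()):
    print(f"{task}: {acc:.3f}")

# Unweighted average over all sub-tasks (each has 1000 examples per the "n-samples" block).
print("BLiMP mean accuracy:", round(mean(accuracies.values()), 4))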
pythia-14m-seed1/step46000/EleutherAI__pythia-14m-seed1/results_2024-08-19T06-01-48.537526.json ADDED
@@ -0,0 +1,2591 @@
1
+ {
2
+ "results": {
3
+ "blimp_wh_vs_that_with_gap_long_distance": {
4
+ "acc,none": 0.105,
5
+ "acc_stderr,none": 0.00969892102602496,
6
+ "alias": "blimp_wh_vs_that_with_gap_long_distance"
7
+ },
8
+ "blimp_wh_vs_that_with_gap": {
9
+ "acc,none": 0.227,
10
+ "acc_stderr,none": 0.013253174964763978,
11
+ "alias": "blimp_wh_vs_that_with_gap"
12
+ },
13
+ "blimp_wh_vs_that_no_gap_long_distance": {
14
+ "acc,none": 0.961,
15
+ "acc_stderr,none": 0.006125072776426131,
16
+ "alias": "blimp_wh_vs_that_no_gap_long_distance"
17
+ },
18
+ "blimp_wh_vs_that_no_gap": {
19
+ "acc,none": 0.941,
20
+ "acc_stderr,none": 0.007454835650406693,
21
+ "alias": "blimp_wh_vs_that_no_gap"
22
+ },
23
+ "blimp_wh_questions_subject_gap_long_distance": {
24
+ "acc,none": 0.925,
25
+ "acc_stderr,none": 0.008333333333333333,
26
+ "alias": "blimp_wh_questions_subject_gap_long_distance"
27
+ },
28
+ "blimp_wh_questions_subject_gap": {
29
+ "acc,none": 0.885,
30
+ "acc_stderr,none": 0.010093407594904551,
31
+ "alias": "blimp_wh_questions_subject_gap"
32
+ },
33
+ "blimp_wh_questions_object_gap": {
34
+ "acc,none": 0.483,
35
+ "acc_stderr,none": 0.015810153729833274,
36
+ "alias": "blimp_wh_questions_object_gap"
37
+ },
38
+ "blimp_wh_island": {
39
+ "acc,none": 0.664,
40
+ "acc_stderr,none": 0.014944140233794895,
41
+ "alias": "blimp_wh_island"
42
+ },
43
+ "blimp_transitive": {
44
+ "acc,none": 0.83,
45
+ "acc_stderr,none": 0.011884495834541793,
46
+ "alias": "blimp_transitive"
47
+ },
48
+ "blimp_tough_vs_raising_2": {
49
+ "acc,none": 0.74,
50
+ "acc_stderr,none": 0.013877773329774218,
51
+ "alias": "blimp_tough_vs_raising_2"
52
+ },
53
+ "blimp_tough_vs_raising_1": {
54
+ "acc,none": 0.416,
55
+ "acc_stderr,none": 0.015594460144140522,
56
+ "alias": "blimp_tough_vs_raising_1"
57
+ },
58
+ "blimp_superlative_quantifiers_2": {
59
+ "acc,none": 0.243,
60
+ "acc_stderr,none": 0.013569640199177543,
61
+ "alias": "blimp_superlative_quantifiers_2"
62
+ },
63
+ "blimp_superlative_quantifiers_1": {
64
+ "acc,none": 0.094,
65
+ "acc_stderr,none": 0.009233052000787672,
66
+ "alias": "blimp_superlative_quantifiers_1"
67
+ },
68
+ "blimp_sentential_subject_island": {
69
+ "acc,none": 0.361,
70
+ "acc_stderr,none": 0.015195720118175049,
71
+ "alias": "blimp_sentential_subject_island"
72
+ },
73
+ "blimp_sentential_negation_npi_scope": {
74
+ "acc,none": 0.501,
75
+ "acc_stderr,none": 0.015819268290576817,
76
+ "alias": "blimp_sentential_negation_npi_scope"
77
+ },
78
+ "blimp_sentential_negation_npi_licensor_present": {
79
+ "acc,none": 0.979,
80
+ "acc_stderr,none": 0.0045364721513065165,
81
+ "alias": "blimp_sentential_negation_npi_licensor_present"
82
+ },
83
+ "blimp_regular_plural_subject_verb_agreement_2": {
84
+ "acc,none": 0.795,
85
+ "acc_stderr,none": 0.012772554096113201,
86
+ "alias": "blimp_regular_plural_subject_verb_agreement_2"
87
+ },
88
+ "blimp_regular_plural_subject_verb_agreement_1": {
89
+ "acc,none": 0.861,
90
+ "acc_stderr,none": 0.010945263761042892,
91
+ "alias": "blimp_regular_plural_subject_verb_agreement_1"
92
+ },
93
+ "blimp_principle_A_reconstruction": {
94
+ "acc,none": 0.422,
95
+ "acc_stderr,none": 0.015625625112620622,
96
+ "alias": "blimp_principle_A_reconstruction"
97
+ },
98
+ "blimp_principle_A_domain_3": {
99
+ "acc,none": 0.628,
100
+ "acc_stderr,none": 0.01529214994204052,
101
+ "alias": "blimp_principle_A_domain_3"
102
+ },
103
+ "blimp_principle_A_domain_2": {
104
+ "acc,none": 0.627,
105
+ "acc_stderr,none": 0.015300493622922927,
106
+ "alias": "blimp_principle_A_domain_2"
107
+ },
108
+ "blimp_principle_A_domain_1": {
109
+ "acc,none": 0.959,
110
+ "acc_stderr,none": 0.006273624021118764,
111
+ "alias": "blimp_principle_A_domain_1"
112
+ },
113
+ "blimp_principle_A_case_2": {
114
+ "acc,none": 0.808,
115
+ "acc_stderr,none": 0.012461592646660028,
116
+ "alias": "blimp_principle_A_case_2"
117
+ },
118
+ "blimp_principle_A_case_1": {
119
+ "acc,none": 1.0,
120
+ "acc_stderr,none": 0.0,
121
+ "alias": "blimp_principle_A_case_1"
122
+ },
123
+ "blimp_principle_A_c_command": {
124
+ "acc,none": 0.561,
125
+ "acc_stderr,none": 0.015701131345400736,
126
+ "alias": "blimp_principle_A_c_command"
127
+ },
128
+ "blimp_passive_2": {
129
+ "acc,none": 0.884,
130
+ "acc_stderr,none": 0.010131468138756922,
131
+ "alias": "blimp_passive_2"
132
+ },
133
+ "blimp_passive_1": {
134
+ "acc,none": 0.902,
135
+ "acc_stderr,none": 0.009406619184621247,
136
+ "alias": "blimp_passive_1"
137
+ },
138
+ "blimp_only_npi_scope": {
139
+ "acc,none": 0.683,
140
+ "acc_stderr,none": 0.014721675438880174,
141
+ "alias": "blimp_only_npi_scope"
142
+ },
143
+ "blimp_only_npi_licensor_present": {
144
+ "acc,none": 0.972,
145
+ "acc_stderr,none": 0.005219506034410081,
146
+ "alias": "blimp_only_npi_licensor_present"
147
+ },
148
+ "blimp_npi_present_2": {
149
+ "acc,none": 0.38,
150
+ "acc_stderr,none": 0.015356947477797658,
151
+ "alias": "blimp_npi_present_2"
152
+ },
153
+ "blimp_npi_present_1": {
154
+ "acc,none": 0.344,
155
+ "acc_stderr,none": 0.015029633724408919,
156
+ "alias": "blimp_npi_present_1"
157
+ },
158
+ "blimp_matrix_question_npi_licensor_present": {
159
+ "acc,none": 0.138,
160
+ "acc_stderr,none": 0.010912152632504508,
161
+ "alias": "blimp_matrix_question_npi_licensor_present"
162
+ },
163
+ "blimp_left_branch_island_simple_question": {
164
+ "acc,none": 0.311,
165
+ "acc_stderr,none": 0.014645596385722692,
166
+ "alias": "blimp_left_branch_island_simple_question"
167
+ },
168
+ "blimp_left_branch_island_echo_question": {
169
+ "acc,none": 0.398,
170
+ "acc_stderr,none": 0.015486634102859016,
171
+ "alias": "blimp_left_branch_island_echo_question"
172
+ },
173
+ "blimp_irregular_plural_subject_verb_agreement_2": {
174
+ "acc,none": 0.845,
175
+ "acc_stderr,none": 0.011450157470799522,
176
+ "alias": "blimp_irregular_plural_subject_verb_agreement_2"
177
+ },
178
+ "blimp_irregular_plural_subject_verb_agreement_1": {
179
+ "acc,none": 0.775,
180
+ "acc_stderr,none": 0.013211720158614833,
181
+ "alias": "blimp_irregular_plural_subject_verb_agreement_1"
182
+ },
183
+ "blimp_irregular_past_participle_verbs": {
184
+ "acc,none": 0.907,
185
+ "acc_stderr,none": 0.009188875634996669,
186
+ "alias": "blimp_irregular_past_participle_verbs"
187
+ },
188
+ "blimp_irregular_past_participle_adjectives": {
189
+ "acc,none": 0.999,
190
+ "acc_stderr,none": 0.001,
191
+ "alias": "blimp_irregular_past_participle_adjectives"
192
+ },
193
+ "blimp_intransitive": {
194
+ "acc,none": 0.635,
195
+ "acc_stderr,none": 0.015231776226264848,
196
+ "alias": "blimp_intransitive"
197
+ },
198
+ "blimp_inchoative": {
199
+ "acc,none": 0.504,
200
+ "acc_stderr,none": 0.01581879370351084,
201
+ "alias": "blimp_inchoative"
202
+ },
203
+ "blimp_expletive_it_object_raising": {
204
+ "acc,none": 0.751,
205
+ "acc_stderr,none": 0.013681600278702275,
206
+ "alias": "blimp_expletive_it_object_raising"
207
+ },
208
+ "blimp_existential_there_subject_raising": {
209
+ "acc,none": 0.771,
210
+ "acc_stderr,none": 0.013294199326613684,
211
+ "alias": "blimp_existential_there_subject_raising"
212
+ },
213
+ "blimp_existential_there_quantifiers_2": {
214
+ "acc,none": 0.28,
215
+ "acc_stderr,none": 0.014205696104091548,
216
+ "alias": "blimp_existential_there_quantifiers_2"
217
+ },
218
+ "blimp_existential_there_quantifiers_1": {
219
+ "acc,none": 0.955,
220
+ "acc_stderr,none": 0.006558812241406063,
221
+ "alias": "blimp_existential_there_quantifiers_1"
222
+ },
223
+ "blimp_existential_there_object_raising": {
224
+ "acc,none": 0.804,
225
+ "acc_stderr,none": 0.012559527926707347,
226
+ "alias": "blimp_existential_there_object_raising"
227
+ },
228
+ "blimp_ellipsis_n_bar_2": {
229
+ "acc,none": 0.758,
230
+ "acc_stderr,none": 0.013550631705556003,
231
+ "alias": "blimp_ellipsis_n_bar_2"
232
+ },
233
+ "blimp_ellipsis_n_bar_1": {
234
+ "acc,none": 0.546,
235
+ "acc_stderr,none": 0.01575221038877186,
236
+ "alias": "blimp_ellipsis_n_bar_1"
237
+ },
238
+ "blimp_drop_argument": {
239
+ "acc,none": 0.76,
240
+ "acc_stderr,none": 0.013512312258920847,
241
+ "alias": "blimp_drop_argument"
242
+ },
243
+ "blimp_distractor_agreement_relative_clause": {
244
+ "acc,none": 0.262,
245
+ "acc_stderr,none": 0.013912208651021217,
246
+ "alias": "blimp_distractor_agreement_relative_clause"
247
+ },
248
+ "blimp_distractor_agreement_relational_noun": {
249
+ "acc,none": 0.361,
250
+ "acc_stderr,none": 0.015195720118175049,
251
+ "alias": "blimp_distractor_agreement_relational_noun"
252
+ },
253
+ "blimp_determiner_noun_agreement_with_adjective_1": {
254
+ "acc,none": 0.864,
255
+ "acc_stderr,none": 0.01084535023047304,
256
+ "alias": "blimp_determiner_noun_agreement_with_adjective_1"
257
+ },
258
+ "blimp_determiner_noun_agreement_with_adj_irregular_2": {
259
+ "acc,none": 0.809,
260
+ "acc_stderr,none": 0.012436787112179482,
261
+ "alias": "blimp_determiner_noun_agreement_with_adj_irregular_2"
262
+ },
263
+ "blimp_determiner_noun_agreement_with_adj_irregular_1": {
264
+ "acc,none": 0.743,
265
+ "acc_stderr,none": 0.013825416526895026,
266
+ "alias": "blimp_determiner_noun_agreement_with_adj_irregular_1"
267
+ },
268
+ "blimp_determiner_noun_agreement_with_adj_2": {
269
+ "acc,none": 0.842,
270
+ "acc_stderr,none": 0.011539894677559635,
271
+ "alias": "blimp_determiner_noun_agreement_with_adj_2"
272
+ },
273
+ "blimp_determiner_noun_agreement_irregular_2": {
274
+ "acc,none": 0.852,
275
+ "acc_stderr,none": 0.011234866364235145,
276
+ "alias": "blimp_determiner_noun_agreement_irregular_2"
277
+ },
278
+ "blimp_determiner_noun_agreement_irregular_1": {
279
+ "acc,none": 0.78,
280
+ "acc_stderr,none": 0.013106173040661862,
281
+ "alias": "blimp_determiner_noun_agreement_irregular_1"
282
+ },
283
+ "blimp_determiner_noun_agreement_2": {
284
+ "acc,none": 0.93,
285
+ "acc_stderr,none": 0.008072494358323525,
286
+ "alias": "blimp_determiner_noun_agreement_2"
287
+ },
288
+ "blimp_determiner_noun_agreement_1": {
289
+ "acc,none": 0.914,
290
+ "acc_stderr,none": 0.008870325962594761,
291
+ "alias": "blimp_determiner_noun_agreement_1"
292
+ },
293
+ "blimp_coordinate_structure_constraint_object_extraction": {
294
+ "acc,none": 0.522,
295
+ "acc_stderr,none": 0.01580397942816194,
296
+ "alias": "blimp_coordinate_structure_constraint_object_extraction"
297
+ },
298
+ "blimp_coordinate_structure_constraint_complex_left_branch": {
299
+ "acc,none": 0.357,
300
+ "acc_stderr,none": 0.01515852172148659,
301
+ "alias": "blimp_coordinate_structure_constraint_complex_left_branch"
302
+ },
303
+ "blimp_complex_NP_island": {
304
+ "acc,none": 0.388,
305
+ "acc_stderr,none": 0.015417317979911216,
306
+ "alias": "blimp_complex_NP_island"
307
+ },
308
+ "blimp_causative": {
309
+ "acc,none": 0.629,
310
+ "acc_stderr,none": 0.015283736211823096,
311
+ "alias": "blimp_causative"
312
+ },
313
+ "blimp_animate_subject_trans": {
314
+ "acc,none": 0.872,
315
+ "acc_stderr,none": 0.010570133761108595,
316
+ "alias": "blimp_animate_subject_trans"
317
+ },
318
+ "blimp_animate_subject_passive": {
319
+ "acc,none": 0.755,
320
+ "acc_stderr,none": 0.01360735683959821,
321
+ "alias": "blimp_animate_subject_passive"
322
+ },
323
+ "blimp_anaphor_number_agreement": {
324
+ "acc,none": 0.946,
325
+ "acc_stderr,none": 0.007150883521295473,
326
+ "alias": "blimp_anaphor_number_agreement"
327
+ },
328
+ "blimp_anaphor_gender_agreement": {
329
+ "acc,none": 0.749,
330
+ "acc_stderr,none": 0.013718133516888775,
331
+ "alias": "blimp_anaphor_gender_agreement"
332
+ },
333
+ "blimp_adjunct_island": {
334
+ "acc,none": 0.775,
335
+ "acc_stderr,none": 0.013211720158614833,
336
+ "alias": "blimp_adjunct_island"
337
+ }
338
+ },
339
+ "group_subtasks": {
340
+ "blimp_adjunct_island": [],
341
+ "blimp_anaphor_gender_agreement": [],
342
+ "blimp_anaphor_number_agreement": [],
343
+ "blimp_animate_subject_passive": [],
344
+ "blimp_animate_subject_trans": [],
345
+ "blimp_causative": [],
346
+ "blimp_complex_NP_island": [],
347
+ "blimp_coordinate_structure_constraint_complex_left_branch": [],
348
+ "blimp_coordinate_structure_constraint_object_extraction": [],
349
+ "blimp_determiner_noun_agreement_1": [],
350
+ "blimp_determiner_noun_agreement_2": [],
351
+ "blimp_determiner_noun_agreement_irregular_1": [],
352
+ "blimp_determiner_noun_agreement_irregular_2": [],
353
+ "blimp_determiner_noun_agreement_with_adj_2": [],
354
+ "blimp_determiner_noun_agreement_with_adj_irregular_1": [],
355
+ "blimp_determiner_noun_agreement_with_adj_irregular_2": [],
356
+ "blimp_determiner_noun_agreement_with_adjective_1": [],
357
+ "blimp_distractor_agreement_relational_noun": [],
358
+ "blimp_distractor_agreement_relative_clause": [],
359
+ "blimp_drop_argument": [],
360
+ "blimp_ellipsis_n_bar_1": [],
361
+ "blimp_ellipsis_n_bar_2": [],
362
+ "blimp_existential_there_object_raising": [],
363
+ "blimp_existential_there_quantifiers_1": [],
364
+ "blimp_existential_there_quantifiers_2": [],
365
+ "blimp_existential_there_subject_raising": [],
366
+ "blimp_expletive_it_object_raising": [],
367
+ "blimp_inchoative": [],
368
+ "blimp_intransitive": [],
369
+ "blimp_irregular_past_participle_adjectives": [],
370
+ "blimp_irregular_past_participle_verbs": [],
371
+ "blimp_irregular_plural_subject_verb_agreement_1": [],
372
+ "blimp_irregular_plural_subject_verb_agreement_2": [],
373
+ "blimp_left_branch_island_echo_question": [],
374
+ "blimp_left_branch_island_simple_question": [],
375
+ "blimp_matrix_question_npi_licensor_present": [],
376
+ "blimp_npi_present_1": [],
377
+ "blimp_npi_present_2": [],
378
+ "blimp_only_npi_licensor_present": [],
379
+ "blimp_only_npi_scope": [],
380
+ "blimp_passive_1": [],
381
+ "blimp_passive_2": [],
382
+ "blimp_principle_A_c_command": [],
383
+ "blimp_principle_A_case_1": [],
384
+ "blimp_principle_A_case_2": [],
385
+ "blimp_principle_A_domain_1": [],
386
+ "blimp_principle_A_domain_2": [],
387
+ "blimp_principle_A_domain_3": [],
388
+ "blimp_principle_A_reconstruction": [],
389
+ "blimp_regular_plural_subject_verb_agreement_1": [],
390
+ "blimp_regular_plural_subject_verb_agreement_2": [],
391
+ "blimp_sentential_negation_npi_licensor_present": [],
392
+ "blimp_sentential_negation_npi_scope": [],
393
+ "blimp_sentential_subject_island": [],
394
+ "blimp_superlative_quantifiers_1": [],
395
+ "blimp_superlative_quantifiers_2": [],
396
+ "blimp_tough_vs_raising_1": [],
397
+ "blimp_tough_vs_raising_2": [],
398
+ "blimp_transitive": [],
399
+ "blimp_wh_island": [],
400
+ "blimp_wh_questions_object_gap": [],
401
+ "blimp_wh_questions_subject_gap": [],
402
+ "blimp_wh_questions_subject_gap_long_distance": [],
403
+ "blimp_wh_vs_that_no_gap": [],
404
+ "blimp_wh_vs_that_no_gap_long_distance": [],
405
+ "blimp_wh_vs_that_with_gap": [],
406
+ "blimp_wh_vs_that_with_gap_long_distance": []
407
+ },
408
+ "configs": {
409
+ "blimp_adjunct_island": {
410
+ "task": "blimp_adjunct_island",
411
+ "group": "blimp",
412
+ "dataset_path": "blimp",
413
+ "dataset_name": "adjunct_island",
414
+ "validation_split": "train",
415
+ "doc_to_text": "",
416
+ "doc_to_target": 0,
417
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
418
+ "description": "",
419
+ "target_delimiter": " ",
420
+ "fewshot_delimiter": "\n\n",
421
+ "num_fewshot": 0,
422
+ "metric_list": [
423
+ {
424
+ "metric": "acc"
425
+ }
426
+ ],
427
+ "output_type": "multiple_choice",
428
+ "repeats": 1,
429
+ "should_decontaminate": true,
430
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
431
+ "metadata": {
432
+ "version": 1.0
433
+ }
434
+ },
435
+ "blimp_anaphor_gender_agreement": {
436
+ "task": "blimp_anaphor_gender_agreement",
437
+ "group": "blimp",
438
+ "dataset_path": "blimp",
439
+ "dataset_name": "anaphor_gender_agreement",
440
+ "validation_split": "train",
441
+ "doc_to_text": "",
442
+ "doc_to_target": 0,
443
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
444
+ "description": "",
445
+ "target_delimiter": " ",
446
+ "fewshot_delimiter": "\n\n",
447
+ "num_fewshot": 0,
448
+ "metric_list": [
449
+ {
450
+ "metric": "acc"
451
+ }
452
+ ],
453
+ "output_type": "multiple_choice",
454
+ "repeats": 1,
455
+ "should_decontaminate": true,
456
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
457
+ "metadata": {
458
+ "version": 1.0
459
+ }
460
+ },
461
+ "blimp_anaphor_number_agreement": {
462
+ "task": "blimp_anaphor_number_agreement",
463
+ "group": "blimp",
464
+ "dataset_path": "blimp",
465
+ "dataset_name": "anaphor_number_agreement",
466
+ "validation_split": "train",
467
+ "doc_to_text": "",
468
+ "doc_to_target": 0,
469
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
470
+ "description": "",
471
+ "target_delimiter": " ",
472
+ "fewshot_delimiter": "\n\n",
473
+ "num_fewshot": 0,
474
+ "metric_list": [
475
+ {
476
+ "metric": "acc"
477
+ }
478
+ ],
479
+ "output_type": "multiple_choice",
480
+ "repeats": 1,
481
+ "should_decontaminate": true,
482
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
483
+ "metadata": {
484
+ "version": 1.0
485
+ }
486
+ },
487
+ "blimp_animate_subject_passive": {
488
+ "task": "blimp_animate_subject_passive",
489
+ "group": "blimp",
490
+ "dataset_path": "blimp",
491
+ "dataset_name": "animate_subject_passive",
492
+ "validation_split": "train",
493
+ "doc_to_text": "",
494
+ "doc_to_target": 0,
495
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
496
+ "description": "",
497
+ "target_delimiter": " ",
498
+ "fewshot_delimiter": "\n\n",
499
+ "num_fewshot": 0,
500
+ "metric_list": [
501
+ {
502
+ "metric": "acc"
503
+ }
504
+ ],
505
+ "output_type": "multiple_choice",
506
+ "repeats": 1,
507
+ "should_decontaminate": true,
508
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
509
+ "metadata": {
510
+ "version": 1.0
511
+ }
512
+ },
513
+ "blimp_animate_subject_trans": {
514
+ "task": "blimp_animate_subject_trans",
515
+ "group": "blimp",
516
+ "dataset_path": "blimp",
517
+ "dataset_name": "animate_subject_trans",
518
+ "validation_split": "train",
519
+ "doc_to_text": "",
520
+ "doc_to_target": 0,
521
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
522
+ "description": "",
523
+ "target_delimiter": " ",
524
+ "fewshot_delimiter": "\n\n",
525
+ "num_fewshot": 0,
526
+ "metric_list": [
527
+ {
528
+ "metric": "acc"
529
+ }
530
+ ],
531
+ "output_type": "multiple_choice",
532
+ "repeats": 1,
533
+ "should_decontaminate": true,
534
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
535
+ "metadata": {
536
+ "version": 1.0
537
+ }
538
+ },
539
+ "blimp_causative": {
540
+ "task": "blimp_causative",
541
+ "group": "blimp",
542
+ "dataset_path": "blimp",
543
+ "dataset_name": "causative",
544
+ "validation_split": "train",
545
+ "doc_to_text": "",
546
+ "doc_to_target": 0,
547
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
548
+ "description": "",
549
+ "target_delimiter": " ",
550
+ "fewshot_delimiter": "\n\n",
551
+ "num_fewshot": 0,
552
+ "metric_list": [
553
+ {
554
+ "metric": "acc"
555
+ }
556
+ ],
557
+ "output_type": "multiple_choice",
558
+ "repeats": 1,
559
+ "should_decontaminate": true,
560
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
561
+ "metadata": {
562
+ "version": 1.0
563
+ }
564
+ },
565
+ "blimp_complex_NP_island": {
566
+ "task": "blimp_complex_NP_island",
567
+ "group": "blimp",
568
+ "dataset_path": "blimp",
569
+ "dataset_name": "complex_NP_island",
570
+ "validation_split": "train",
571
+ "doc_to_text": "",
572
+ "doc_to_target": 0,
573
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
574
+ "description": "",
575
+ "target_delimiter": " ",
576
+ "fewshot_delimiter": "\n\n",
577
+ "num_fewshot": 0,
578
+ "metric_list": [
579
+ {
580
+ "metric": "acc"
581
+ }
582
+ ],
583
+ "output_type": "multiple_choice",
584
+ "repeats": 1,
585
+ "should_decontaminate": true,
586
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
587
+ "metadata": {
588
+ "version": 1.0
589
+ }
590
+ },
591
+ "blimp_coordinate_structure_constraint_complex_left_branch": {
592
+ "task": "blimp_coordinate_structure_constraint_complex_left_branch",
593
+ "group": "blimp",
594
+ "dataset_path": "blimp",
595
+ "dataset_name": "coordinate_structure_constraint_complex_left_branch",
596
+ "validation_split": "train",
597
+ "doc_to_text": "",
598
+ "doc_to_target": 0,
599
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
600
+ "description": "",
601
+ "target_delimiter": " ",
602
+ "fewshot_delimiter": "\n\n",
603
+ "num_fewshot": 0,
604
+ "metric_list": [
605
+ {
606
+ "metric": "acc"
607
+ }
608
+ ],
609
+ "output_type": "multiple_choice",
610
+ "repeats": 1,
611
+ "should_decontaminate": true,
612
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
613
+ "metadata": {
614
+ "version": 1.0
615
+ }
616
+ },
617
+ "blimp_coordinate_structure_constraint_object_extraction": {
618
+ "task": "blimp_coordinate_structure_constraint_object_extraction",
619
+ "group": "blimp",
620
+ "dataset_path": "blimp",
621
+ "dataset_name": "coordinate_structure_constraint_object_extraction",
622
+ "validation_split": "train",
623
+ "doc_to_text": "",
624
+ "doc_to_target": 0,
625
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
626
+ "description": "",
627
+ "target_delimiter": " ",
628
+ "fewshot_delimiter": "\n\n",
629
+ "num_fewshot": 0,
630
+ "metric_list": [
631
+ {
632
+ "metric": "acc"
633
+ }
634
+ ],
635
+ "output_type": "multiple_choice",
636
+ "repeats": 1,
637
+ "should_decontaminate": true,
638
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
639
+ "metadata": {
640
+ "version": 1.0
641
+ }
642
+ },
643
+ "blimp_determiner_noun_agreement_1": {
644
+ "task": "blimp_determiner_noun_agreement_1",
645
+ "group": "blimp",
646
+ "dataset_path": "blimp",
647
+ "dataset_name": "determiner_noun_agreement_1",
648
+ "validation_split": "train",
649
+ "doc_to_text": "",
650
+ "doc_to_target": 0,
651
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
652
+ "description": "",
653
+ "target_delimiter": " ",
654
+ "fewshot_delimiter": "\n\n",
655
+ "num_fewshot": 0,
656
+ "metric_list": [
657
+ {
658
+ "metric": "acc"
659
+ }
660
+ ],
661
+ "output_type": "multiple_choice",
662
+ "repeats": 1,
663
+ "should_decontaminate": true,
664
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
665
+ "metadata": {
666
+ "version": 1.0
667
+ }
668
+ },
669
+ "blimp_determiner_noun_agreement_2": {
670
+ "task": "blimp_determiner_noun_agreement_2",
671
+ "group": "blimp",
672
+ "dataset_path": "blimp",
673
+ "dataset_name": "determiner_noun_agreement_2",
674
+ "validation_split": "train",
675
+ "doc_to_text": "",
676
+ "doc_to_target": 0,
677
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
678
+ "description": "",
679
+ "target_delimiter": " ",
680
+ "fewshot_delimiter": "\n\n",
681
+ "num_fewshot": 0,
682
+ "metric_list": [
683
+ {
684
+ "metric": "acc"
685
+ }
686
+ ],
687
+ "output_type": "multiple_choice",
688
+ "repeats": 1,
689
+ "should_decontaminate": true,
690
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
691
+ "metadata": {
692
+ "version": 1.0
693
+ }
694
+ },
695
+ "blimp_determiner_noun_agreement_irregular_1": {
696
+ "task": "blimp_determiner_noun_agreement_irregular_1",
697
+ "group": "blimp",
698
+ "dataset_path": "blimp",
699
+ "dataset_name": "determiner_noun_agreement_irregular_1",
700
+ "validation_split": "train",
701
+ "doc_to_text": "",
702
+ "doc_to_target": 0,
703
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
704
+ "description": "",
705
+ "target_delimiter": " ",
706
+ "fewshot_delimiter": "\n\n",
707
+ "num_fewshot": 0,
708
+ "metric_list": [
709
+ {
710
+ "metric": "acc"
711
+ }
712
+ ],
713
+ "output_type": "multiple_choice",
714
+ "repeats": 1,
715
+ "should_decontaminate": true,
716
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
717
+ "metadata": {
718
+ "version": 1.0
719
+ }
720
+ },
721
+ "blimp_determiner_noun_agreement_irregular_2": {
722
+ "task": "blimp_determiner_noun_agreement_irregular_2",
723
+ "group": "blimp",
724
+ "dataset_path": "blimp",
725
+ "dataset_name": "determiner_noun_agreement_irregular_2",
726
+ "validation_split": "train",
727
+ "doc_to_text": "",
728
+ "doc_to_target": 0,
729
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
730
+ "description": "",
731
+ "target_delimiter": " ",
732
+ "fewshot_delimiter": "\n\n",
733
+ "num_fewshot": 0,
734
+ "metric_list": [
735
+ {
736
+ "metric": "acc"
737
+ }
738
+ ],
739
+ "output_type": "multiple_choice",
740
+ "repeats": 1,
741
+ "should_decontaminate": true,
742
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
743
+ "metadata": {
744
+ "version": 1.0
745
+ }
746
+ },
747
+ "blimp_determiner_noun_agreement_with_adj_2": {
748
+ "task": "blimp_determiner_noun_agreement_with_adj_2",
749
+ "group": "blimp",
750
+ "dataset_path": "blimp",
751
+ "dataset_name": "determiner_noun_agreement_with_adj_2",
752
+ "validation_split": "train",
753
+ "doc_to_text": "",
754
+ "doc_to_target": 0,
755
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
756
+ "description": "",
757
+ "target_delimiter": " ",
758
+ "fewshot_delimiter": "\n\n",
759
+ "num_fewshot": 0,
760
+ "metric_list": [
761
+ {
762
+ "metric": "acc"
763
+ }
764
+ ],
765
+ "output_type": "multiple_choice",
766
+ "repeats": 1,
767
+ "should_decontaminate": true,
768
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
769
+ "metadata": {
770
+ "version": 1.0
771
+ }
772
+ },
773
+ "blimp_determiner_noun_agreement_with_adj_irregular_1": {
774
+ "task": "blimp_determiner_noun_agreement_with_adj_irregular_1",
775
+ "group": "blimp",
776
+ "dataset_path": "blimp",
777
+ "dataset_name": "determiner_noun_agreement_with_adj_irregular_1",
778
+ "validation_split": "train",
779
+ "doc_to_text": "",
780
+ "doc_to_target": 0,
781
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
782
+ "description": "",
783
+ "target_delimiter": " ",
784
+ "fewshot_delimiter": "\n\n",
785
+ "num_fewshot": 0,
786
+ "metric_list": [
787
+ {
788
+ "metric": "acc"
789
+ }
790
+ ],
791
+ "output_type": "multiple_choice",
792
+ "repeats": 1,
793
+ "should_decontaminate": true,
794
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
795
+ "metadata": {
796
+ "version": 1.0
797
+ }
798
+ },
799
+ "blimp_determiner_noun_agreement_with_adj_irregular_2": {
800
+ "task": "blimp_determiner_noun_agreement_with_adj_irregular_2",
801
+ "group": "blimp",
802
+ "dataset_path": "blimp",
803
+ "dataset_name": "determiner_noun_agreement_with_adj_irregular_2",
804
+ "validation_split": "train",
805
+ "doc_to_text": "",
806
+ "doc_to_target": 0,
807
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
808
+ "description": "",
809
+ "target_delimiter": " ",
810
+ "fewshot_delimiter": "\n\n",
811
+ "num_fewshot": 0,
812
+ "metric_list": [
813
+ {
814
+ "metric": "acc"
815
+ }
816
+ ],
817
+ "output_type": "multiple_choice",
818
+ "repeats": 1,
819
+ "should_decontaminate": true,
820
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
821
+ "metadata": {
822
+ "version": 1.0
823
+ }
824
+ },
825
+ "blimp_determiner_noun_agreement_with_adjective_1": {
826
+ "task": "blimp_determiner_noun_agreement_with_adjective_1",
827
+ "group": "blimp",
828
+ "dataset_path": "blimp",
829
+ "dataset_name": "determiner_noun_agreement_with_adjective_1",
830
+ "validation_split": "train",
831
+ "doc_to_text": "",
832
+ "doc_to_target": 0,
833
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
834
+ "description": "",
835
+ "target_delimiter": " ",
836
+ "fewshot_delimiter": "\n\n",
837
+ "num_fewshot": 0,
838
+ "metric_list": [
839
+ {
840
+ "metric": "acc"
841
+ }
842
+ ],
843
+ "output_type": "multiple_choice",
844
+ "repeats": 1,
845
+ "should_decontaminate": true,
846
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
847
+ "metadata": {
848
+ "version": 1.0
849
+ }
850
+ },
851
+ "blimp_distractor_agreement_relational_noun": {
852
+ "task": "blimp_distractor_agreement_relational_noun",
853
+ "group": "blimp",
854
+ "dataset_path": "blimp",
855
+ "dataset_name": "distractor_agreement_relational_noun",
856
+ "validation_split": "train",
857
+ "doc_to_text": "",
858
+ "doc_to_target": 0,
859
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
860
+ "description": "",
861
+ "target_delimiter": " ",
862
+ "fewshot_delimiter": "\n\n",
863
+ "num_fewshot": 0,
864
+ "metric_list": [
865
+ {
866
+ "metric": "acc"
867
+ }
868
+ ],
869
+ "output_type": "multiple_choice",
870
+ "repeats": 1,
871
+ "should_decontaminate": true,
872
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
873
+ "metadata": {
874
+ "version": 1.0
875
+ }
876
+ },
877
+ "blimp_distractor_agreement_relative_clause": {
878
+ "task": "blimp_distractor_agreement_relative_clause",
879
+ "group": "blimp",
880
+ "dataset_path": "blimp",
881
+ "dataset_name": "distractor_agreement_relative_clause",
882
+ "validation_split": "train",
883
+ "doc_to_text": "",
884
+ "doc_to_target": 0,
885
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
886
+ "description": "",
887
+ "target_delimiter": " ",
888
+ "fewshot_delimiter": "\n\n",
889
+ "num_fewshot": 0,
890
+ "metric_list": [
891
+ {
892
+ "metric": "acc"
893
+ }
894
+ ],
895
+ "output_type": "multiple_choice",
896
+ "repeats": 1,
897
+ "should_decontaminate": true,
898
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
899
+ "metadata": {
900
+ "version": 1.0
901
+ }
902
+ },
903
+ "blimp_drop_argument": {
904
+ "task": "blimp_drop_argument",
905
+ "group": "blimp",
906
+ "dataset_path": "blimp",
907
+ "dataset_name": "drop_argument",
908
+ "validation_split": "train",
909
+ "doc_to_text": "",
910
+ "doc_to_target": 0,
911
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
912
+ "description": "",
913
+ "target_delimiter": " ",
914
+ "fewshot_delimiter": "\n\n",
915
+ "num_fewshot": 0,
916
+ "metric_list": [
917
+ {
918
+ "metric": "acc"
919
+ }
920
+ ],
921
+ "output_type": "multiple_choice",
922
+ "repeats": 1,
923
+ "should_decontaminate": true,
924
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
925
+ "metadata": {
926
+ "version": 1.0
927
+ }
928
+ },
929
+ "blimp_ellipsis_n_bar_1": {
930
+ "task": "blimp_ellipsis_n_bar_1",
931
+ "group": "blimp",
932
+ "dataset_path": "blimp",
933
+ "dataset_name": "ellipsis_n_bar_1",
934
+ "validation_split": "train",
935
+ "doc_to_text": "",
936
+ "doc_to_target": 0,
937
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
938
+ "description": "",
939
+ "target_delimiter": " ",
940
+ "fewshot_delimiter": "\n\n",
941
+ "num_fewshot": 0,
942
+ "metric_list": [
943
+ {
944
+ "metric": "acc"
945
+ }
946
+ ],
947
+ "output_type": "multiple_choice",
948
+ "repeats": 1,
949
+ "should_decontaminate": true,
950
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
951
+ "metadata": {
952
+ "version": 1.0
953
+ }
954
+ },
955
+ "blimp_ellipsis_n_bar_2": {
956
+ "task": "blimp_ellipsis_n_bar_2",
957
+ "group": "blimp",
958
+ "dataset_path": "blimp",
959
+ "dataset_name": "ellipsis_n_bar_2",
960
+ "validation_split": "train",
961
+ "doc_to_text": "",
962
+ "doc_to_target": 0,
963
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
964
+ "description": "",
965
+ "target_delimiter": " ",
966
+ "fewshot_delimiter": "\n\n",
967
+ "num_fewshot": 0,
968
+ "metric_list": [
969
+ {
970
+ "metric": "acc"
971
+ }
972
+ ],
973
+ "output_type": "multiple_choice",
974
+ "repeats": 1,
975
+ "should_decontaminate": true,
976
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
977
+ "metadata": {
978
+ "version": 1.0
979
+ }
980
+ },
981
+ "blimp_existential_there_object_raising": {
982
+ "task": "blimp_existential_there_object_raising",
983
+ "group": "blimp",
984
+ "dataset_path": "blimp",
985
+ "dataset_name": "existential_there_object_raising",
986
+ "validation_split": "train",
987
+ "doc_to_text": "",
988
+ "doc_to_target": 0,
989
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
990
+ "description": "",
991
+ "target_delimiter": " ",
992
+ "fewshot_delimiter": "\n\n",
993
+ "num_fewshot": 0,
994
+ "metric_list": [
995
+ {
996
+ "metric": "acc"
997
+ }
998
+ ],
999
+ "output_type": "multiple_choice",
1000
+ "repeats": 1,
1001
+ "should_decontaminate": true,
1002
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1003
+ "metadata": {
1004
+ "version": 1.0
1005
+ }
1006
+ },
1007
+ "blimp_existential_there_quantifiers_1": {
1008
+ "task": "blimp_existential_there_quantifiers_1",
1009
+ "group": "blimp",
1010
+ "dataset_path": "blimp",
1011
+ "dataset_name": "existential_there_quantifiers_1",
1012
+ "validation_split": "train",
1013
+ "doc_to_text": "",
1014
+ "doc_to_target": 0,
1015
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1016
+ "description": "",
1017
+ "target_delimiter": " ",
1018
+ "fewshot_delimiter": "\n\n",
1019
+ "num_fewshot": 0,
1020
+ "metric_list": [
1021
+ {
1022
+ "metric": "acc"
1023
+ }
1024
+ ],
1025
+ "output_type": "multiple_choice",
1026
+ "repeats": 1,
1027
+ "should_decontaminate": true,
1028
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1029
+ "metadata": {
1030
+ "version": 1.0
1031
+ }
1032
+ },
1033
+ "blimp_existential_there_quantifiers_2": {
1034
+ "task": "blimp_existential_there_quantifiers_2",
1035
+ "group": "blimp",
1036
+ "dataset_path": "blimp",
1037
+ "dataset_name": "existential_there_quantifiers_2",
1038
+ "validation_split": "train",
1039
+ "doc_to_text": "",
1040
+ "doc_to_target": 0,
1041
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1042
+ "description": "",
1043
+ "target_delimiter": " ",
1044
+ "fewshot_delimiter": "\n\n",
1045
+ "num_fewshot": 0,
1046
+ "metric_list": [
1047
+ {
1048
+ "metric": "acc"
1049
+ }
1050
+ ],
1051
+ "output_type": "multiple_choice",
1052
+ "repeats": 1,
1053
+ "should_decontaminate": true,
1054
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1055
+ "metadata": {
1056
+ "version": 1.0
1057
+ }
1058
+ },
1059
+ "blimp_existential_there_subject_raising": {
1060
+ "task": "blimp_existential_there_subject_raising",
1061
+ "group": "blimp",
1062
+ "dataset_path": "blimp",
1063
+ "dataset_name": "existential_there_subject_raising",
1064
+ "validation_split": "train",
1065
+ "doc_to_text": "",
1066
+ "doc_to_target": 0,
1067
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1068
+ "description": "",
1069
+ "target_delimiter": " ",
1070
+ "fewshot_delimiter": "\n\n",
1071
+ "num_fewshot": 0,
1072
+ "metric_list": [
1073
+ {
1074
+ "metric": "acc"
1075
+ }
1076
+ ],
1077
+ "output_type": "multiple_choice",
1078
+ "repeats": 1,
1079
+ "should_decontaminate": true,
1080
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1081
+ "metadata": {
1082
+ "version": 1.0
1083
+ }
1084
+ },
1085
+ "blimp_expletive_it_object_raising": {
1086
+ "task": "blimp_expletive_it_object_raising",
1087
+ "group": "blimp",
1088
+ "dataset_path": "blimp",
1089
+ "dataset_name": "expletive_it_object_raising",
1090
+ "validation_split": "train",
1091
+ "doc_to_text": "",
1092
+ "doc_to_target": 0,
1093
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1094
+ "description": "",
1095
+ "target_delimiter": " ",
1096
+ "fewshot_delimiter": "\n\n",
1097
+ "num_fewshot": 0,
1098
+ "metric_list": [
1099
+ {
1100
+ "metric": "acc"
1101
+ }
1102
+ ],
1103
+ "output_type": "multiple_choice",
1104
+ "repeats": 1,
1105
+ "should_decontaminate": true,
1106
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1107
+ "metadata": {
1108
+ "version": 1.0
1109
+ }
1110
+ },
1111
+ "blimp_inchoative": {
1112
+ "task": "blimp_inchoative",
1113
+ "group": "blimp",
1114
+ "dataset_path": "blimp",
1115
+ "dataset_name": "inchoative",
1116
+ "validation_split": "train",
1117
+ "doc_to_text": "",
1118
+ "doc_to_target": 0,
1119
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1120
+ "description": "",
1121
+ "target_delimiter": " ",
1122
+ "fewshot_delimiter": "\n\n",
1123
+ "num_fewshot": 0,
1124
+ "metric_list": [
1125
+ {
1126
+ "metric": "acc"
1127
+ }
1128
+ ],
1129
+ "output_type": "multiple_choice",
1130
+ "repeats": 1,
1131
+ "should_decontaminate": true,
1132
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1133
+ "metadata": {
1134
+ "version": 1.0
1135
+ }
1136
+ },
1137
+ "blimp_intransitive": {
1138
+ "task": "blimp_intransitive",
1139
+ "group": "blimp",
1140
+ "dataset_path": "blimp",
1141
+ "dataset_name": "intransitive",
1142
+ "validation_split": "train",
1143
+ "doc_to_text": "",
1144
+ "doc_to_target": 0,
1145
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1146
+ "description": "",
1147
+ "target_delimiter": " ",
1148
+ "fewshot_delimiter": "\n\n",
1149
+ "num_fewshot": 0,
1150
+ "metric_list": [
1151
+ {
1152
+ "metric": "acc"
1153
+ }
1154
+ ],
1155
+ "output_type": "multiple_choice",
1156
+ "repeats": 1,
1157
+ "should_decontaminate": true,
1158
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1159
+ "metadata": {
1160
+ "version": 1.0
1161
+ }
1162
+ },
1163
+ "blimp_irregular_past_participle_adjectives": {
1164
+ "task": "blimp_irregular_past_participle_adjectives",
1165
+ "group": "blimp",
1166
+ "dataset_path": "blimp",
1167
+ "dataset_name": "irregular_past_participle_adjectives",
1168
+ "validation_split": "train",
1169
+ "doc_to_text": "",
1170
+ "doc_to_target": 0,
1171
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1172
+ "description": "",
1173
+ "target_delimiter": " ",
1174
+ "fewshot_delimiter": "\n\n",
1175
+ "num_fewshot": 0,
1176
+ "metric_list": [
1177
+ {
1178
+ "metric": "acc"
1179
+ }
1180
+ ],
1181
+ "output_type": "multiple_choice",
1182
+ "repeats": 1,
1183
+ "should_decontaminate": true,
1184
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1185
+ "metadata": {
1186
+ "version": 1.0
1187
+ }
1188
+ },
1189
+ "blimp_irregular_past_participle_verbs": {
1190
+ "task": "blimp_irregular_past_participle_verbs",
1191
+ "group": "blimp",
1192
+ "dataset_path": "blimp",
1193
+ "dataset_name": "irregular_past_participle_verbs",
1194
+ "validation_split": "train",
1195
+ "doc_to_text": "",
1196
+ "doc_to_target": 0,
1197
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1198
+ "description": "",
1199
+ "target_delimiter": " ",
1200
+ "fewshot_delimiter": "\n\n",
1201
+ "num_fewshot": 0,
1202
+ "metric_list": [
1203
+ {
1204
+ "metric": "acc"
1205
+ }
1206
+ ],
1207
+ "output_type": "multiple_choice",
1208
+ "repeats": 1,
1209
+ "should_decontaminate": true,
1210
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1211
+ "metadata": {
1212
+ "version": 1.0
1213
+ }
1214
+ },
1215
+ "blimp_irregular_plural_subject_verb_agreement_1": {
1216
+ "task": "blimp_irregular_plural_subject_verb_agreement_1",
1217
+ "group": "blimp",
1218
+ "dataset_path": "blimp",
1219
+ "dataset_name": "irregular_plural_subject_verb_agreement_1",
1220
+ "validation_split": "train",
1221
+ "doc_to_text": "",
1222
+ "doc_to_target": 0,
1223
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1224
+ "description": "",
1225
+ "target_delimiter": " ",
1226
+ "fewshot_delimiter": "\n\n",
1227
+ "num_fewshot": 0,
1228
+ "metric_list": [
1229
+ {
1230
+ "metric": "acc"
1231
+ }
1232
+ ],
1233
+ "output_type": "multiple_choice",
1234
+ "repeats": 1,
1235
+ "should_decontaminate": true,
1236
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1237
+ "metadata": {
1238
+ "version": 1.0
1239
+ }
1240
+ },
1241
+ "blimp_irregular_plural_subject_verb_agreement_2": {
1242
+ "task": "blimp_irregular_plural_subject_verb_agreement_2",
1243
+ "group": "blimp",
1244
+ "dataset_path": "blimp",
1245
+ "dataset_name": "irregular_plural_subject_verb_agreement_2",
1246
+ "validation_split": "train",
1247
+ "doc_to_text": "",
1248
+ "doc_to_target": 0,
1249
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1250
+ "description": "",
1251
+ "target_delimiter": " ",
1252
+ "fewshot_delimiter": "\n\n",
1253
+ "num_fewshot": 0,
1254
+ "metric_list": [
1255
+ {
1256
+ "metric": "acc"
1257
+ }
1258
+ ],
1259
+ "output_type": "multiple_choice",
1260
+ "repeats": 1,
1261
+ "should_decontaminate": true,
1262
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1263
+ "metadata": {
1264
+ "version": 1.0
1265
+ }
1266
+ },
1267
+ "blimp_left_branch_island_echo_question": {
1268
+ "task": "blimp_left_branch_island_echo_question",
1269
+ "group": "blimp",
1270
+ "dataset_path": "blimp",
1271
+ "dataset_name": "left_branch_island_echo_question",
1272
+ "validation_split": "train",
1273
+ "doc_to_text": "",
1274
+ "doc_to_target": 0,
1275
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1276
+ "description": "",
1277
+ "target_delimiter": " ",
1278
+ "fewshot_delimiter": "\n\n",
1279
+ "num_fewshot": 0,
1280
+ "metric_list": [
1281
+ {
1282
+ "metric": "acc"
1283
+ }
1284
+ ],
1285
+ "output_type": "multiple_choice",
1286
+ "repeats": 1,
1287
+ "should_decontaminate": true,
1288
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1289
+ "metadata": {
1290
+ "version": 1.0
1291
+ }
1292
+ },
1293
+ "blimp_left_branch_island_simple_question": {
1294
+ "task": "blimp_left_branch_island_simple_question",
1295
+ "group": "blimp",
1296
+ "dataset_path": "blimp",
1297
+ "dataset_name": "left_branch_island_simple_question",
1298
+ "validation_split": "train",
1299
+ "doc_to_text": "",
1300
+ "doc_to_target": 0,
1301
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1302
+ "description": "",
1303
+ "target_delimiter": " ",
1304
+ "fewshot_delimiter": "\n\n",
1305
+ "num_fewshot": 0,
1306
+ "metric_list": [
1307
+ {
1308
+ "metric": "acc"
1309
+ }
1310
+ ],
1311
+ "output_type": "multiple_choice",
1312
+ "repeats": 1,
1313
+ "should_decontaminate": true,
1314
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1315
+ "metadata": {
1316
+ "version": 1.0
1317
+ }
1318
+ },
1319
+ "blimp_matrix_question_npi_licensor_present": {
1320
+ "task": "blimp_matrix_question_npi_licensor_present",
1321
+ "group": "blimp",
1322
+ "dataset_path": "blimp",
1323
+ "dataset_name": "matrix_question_npi_licensor_present",
1324
+ "validation_split": "train",
1325
+ "doc_to_text": "",
1326
+ "doc_to_target": 0,
1327
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1328
+ "description": "",
1329
+ "target_delimiter": " ",
1330
+ "fewshot_delimiter": "\n\n",
1331
+ "num_fewshot": 0,
1332
+ "metric_list": [
1333
+ {
1334
+ "metric": "acc"
1335
+ }
1336
+ ],
1337
+ "output_type": "multiple_choice",
1338
+ "repeats": 1,
1339
+ "should_decontaminate": true,
1340
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1341
+ "metadata": {
1342
+ "version": 1.0
1343
+ }
1344
+ },
1345
+ "blimp_npi_present_1": {
1346
+ "task": "blimp_npi_present_1",
1347
+ "group": "blimp",
1348
+ "dataset_path": "blimp",
1349
+ "dataset_name": "npi_present_1",
1350
+ "validation_split": "train",
1351
+ "doc_to_text": "",
1352
+ "doc_to_target": 0,
1353
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1354
+ "description": "",
1355
+ "target_delimiter": " ",
1356
+ "fewshot_delimiter": "\n\n",
1357
+ "num_fewshot": 0,
1358
+ "metric_list": [
1359
+ {
1360
+ "metric": "acc"
1361
+ }
1362
+ ],
1363
+ "output_type": "multiple_choice",
1364
+ "repeats": 1,
1365
+ "should_decontaminate": true,
1366
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1367
+ "metadata": {
1368
+ "version": 1.0
1369
+ }
1370
+ },
1371
+ "blimp_npi_present_2": {
1372
+ "task": "blimp_npi_present_2",
1373
+ "group": "blimp",
1374
+ "dataset_path": "blimp",
1375
+ "dataset_name": "npi_present_2",
1376
+ "validation_split": "train",
1377
+ "doc_to_text": "",
1378
+ "doc_to_target": 0,
1379
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1380
+ "description": "",
1381
+ "target_delimiter": " ",
1382
+ "fewshot_delimiter": "\n\n",
1383
+ "num_fewshot": 0,
1384
+ "metric_list": [
1385
+ {
1386
+ "metric": "acc"
1387
+ }
1388
+ ],
1389
+ "output_type": "multiple_choice",
1390
+ "repeats": 1,
1391
+ "should_decontaminate": true,
1392
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1393
+ "metadata": {
1394
+ "version": 1.0
1395
+ }
1396
+ },
1397
+ "blimp_only_npi_licensor_present": {
1398
+ "task": "blimp_only_npi_licensor_present",
1399
+ "group": "blimp",
1400
+ "dataset_path": "blimp",
1401
+ "dataset_name": "only_npi_licensor_present",
1402
+ "validation_split": "train",
1403
+ "doc_to_text": "",
1404
+ "doc_to_target": 0,
1405
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1406
+ "description": "",
1407
+ "target_delimiter": " ",
1408
+ "fewshot_delimiter": "\n\n",
1409
+ "num_fewshot": 0,
1410
+ "metric_list": [
1411
+ {
1412
+ "metric": "acc"
1413
+ }
1414
+ ],
1415
+ "output_type": "multiple_choice",
1416
+ "repeats": 1,
1417
+ "should_decontaminate": true,
1418
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1419
+ "metadata": {
1420
+ "version": 1.0
1421
+ }
1422
+ },
1423
+ "blimp_only_npi_scope": {
1424
+ "task": "blimp_only_npi_scope",
1425
+ "group": "blimp",
1426
+ "dataset_path": "blimp",
1427
+ "dataset_name": "only_npi_scope",
1428
+ "validation_split": "train",
1429
+ "doc_to_text": "",
1430
+ "doc_to_target": 0,
1431
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1432
+ "description": "",
1433
+ "target_delimiter": " ",
1434
+ "fewshot_delimiter": "\n\n",
1435
+ "num_fewshot": 0,
1436
+ "metric_list": [
1437
+ {
1438
+ "metric": "acc"
1439
+ }
1440
+ ],
1441
+ "output_type": "multiple_choice",
1442
+ "repeats": 1,
1443
+ "should_decontaminate": true,
1444
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1445
+ "metadata": {
1446
+ "version": 1.0
1447
+ }
1448
+ },
1449
+ "blimp_passive_1": {
1450
+ "task": "blimp_passive_1",
1451
+ "group": "blimp",
1452
+ "dataset_path": "blimp",
1453
+ "dataset_name": "passive_1",
1454
+ "validation_split": "train",
1455
+ "doc_to_text": "",
1456
+ "doc_to_target": 0,
1457
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1458
+ "description": "",
1459
+ "target_delimiter": " ",
1460
+ "fewshot_delimiter": "\n\n",
1461
+ "num_fewshot": 0,
1462
+ "metric_list": [
1463
+ {
1464
+ "metric": "acc"
1465
+ }
1466
+ ],
1467
+ "output_type": "multiple_choice",
1468
+ "repeats": 1,
1469
+ "should_decontaminate": true,
1470
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1471
+ "metadata": {
1472
+ "version": 1.0
1473
+ }
1474
+ },
1475
+ "blimp_passive_2": {
1476
+ "task": "blimp_passive_2",
1477
+ "group": "blimp",
1478
+ "dataset_path": "blimp",
1479
+ "dataset_name": "passive_2",
1480
+ "validation_split": "train",
1481
+ "doc_to_text": "",
1482
+ "doc_to_target": 0,
1483
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1484
+ "description": "",
1485
+ "target_delimiter": " ",
1486
+ "fewshot_delimiter": "\n\n",
1487
+ "num_fewshot": 0,
1488
+ "metric_list": [
1489
+ {
1490
+ "metric": "acc"
1491
+ }
1492
+ ],
1493
+ "output_type": "multiple_choice",
1494
+ "repeats": 1,
1495
+ "should_decontaminate": true,
1496
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1497
+ "metadata": {
1498
+ "version": 1.0
1499
+ }
1500
+ },
1501
+ "blimp_principle_A_c_command": {
1502
+ "task": "blimp_principle_A_c_command",
1503
+ "group": "blimp",
1504
+ "dataset_path": "blimp",
1505
+ "dataset_name": "principle_A_c_command",
1506
+ "validation_split": "train",
1507
+ "doc_to_text": "",
1508
+ "doc_to_target": 0,
1509
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1510
+ "description": "",
1511
+ "target_delimiter": " ",
1512
+ "fewshot_delimiter": "\n\n",
1513
+ "num_fewshot": 0,
1514
+ "metric_list": [
1515
+ {
1516
+ "metric": "acc"
1517
+ }
1518
+ ],
1519
+ "output_type": "multiple_choice",
1520
+ "repeats": 1,
1521
+ "should_decontaminate": true,
1522
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1523
+ "metadata": {
1524
+ "version": 1.0
1525
+ }
1526
+ },
1527
+ "blimp_principle_A_case_1": {
1528
+ "task": "blimp_principle_A_case_1",
1529
+ "group": "blimp",
1530
+ "dataset_path": "blimp",
1531
+ "dataset_name": "principle_A_case_1",
1532
+ "validation_split": "train",
1533
+ "doc_to_text": "",
1534
+ "doc_to_target": 0,
1535
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1536
+ "description": "",
1537
+ "target_delimiter": " ",
1538
+ "fewshot_delimiter": "\n\n",
1539
+ "num_fewshot": 0,
1540
+ "metric_list": [
1541
+ {
1542
+ "metric": "acc"
1543
+ }
1544
+ ],
1545
+ "output_type": "multiple_choice",
1546
+ "repeats": 1,
1547
+ "should_decontaminate": true,
1548
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1549
+ "metadata": {
1550
+ "version": 1.0
1551
+ }
1552
+ },
1553
+ "blimp_principle_A_case_2": {
1554
+ "task": "blimp_principle_A_case_2",
1555
+ "group": "blimp",
1556
+ "dataset_path": "blimp",
1557
+ "dataset_name": "principle_A_case_2",
1558
+ "validation_split": "train",
1559
+ "doc_to_text": "",
1560
+ "doc_to_target": 0,
1561
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1562
+ "description": "",
1563
+ "target_delimiter": " ",
1564
+ "fewshot_delimiter": "\n\n",
1565
+ "num_fewshot": 0,
1566
+ "metric_list": [
1567
+ {
1568
+ "metric": "acc"
1569
+ }
1570
+ ],
1571
+ "output_type": "multiple_choice",
1572
+ "repeats": 1,
1573
+ "should_decontaminate": true,
1574
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1575
+ "metadata": {
1576
+ "version": 1.0
1577
+ }
1578
+ },
1579
+ "blimp_principle_A_domain_1": {
1580
+ "task": "blimp_principle_A_domain_1",
1581
+ "group": "blimp",
1582
+ "dataset_path": "blimp",
1583
+ "dataset_name": "principle_A_domain_1",
1584
+ "validation_split": "train",
1585
+ "doc_to_text": "",
1586
+ "doc_to_target": 0,
1587
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1588
+ "description": "",
1589
+ "target_delimiter": " ",
1590
+ "fewshot_delimiter": "\n\n",
1591
+ "num_fewshot": 0,
1592
+ "metric_list": [
1593
+ {
1594
+ "metric": "acc"
1595
+ }
1596
+ ],
1597
+ "output_type": "multiple_choice",
1598
+ "repeats": 1,
1599
+ "should_decontaminate": true,
1600
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1601
+ "metadata": {
1602
+ "version": 1.0
1603
+ }
1604
+ },
1605
+ "blimp_principle_A_domain_2": {
1606
+ "task": "blimp_principle_A_domain_2",
1607
+ "group": "blimp",
1608
+ "dataset_path": "blimp",
1609
+ "dataset_name": "principle_A_domain_2",
1610
+ "validation_split": "train",
1611
+ "doc_to_text": "",
1612
+ "doc_to_target": 0,
1613
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1614
+ "description": "",
1615
+ "target_delimiter": " ",
1616
+ "fewshot_delimiter": "\n\n",
1617
+ "num_fewshot": 0,
1618
+ "metric_list": [
1619
+ {
1620
+ "metric": "acc"
1621
+ }
1622
+ ],
1623
+ "output_type": "multiple_choice",
1624
+ "repeats": 1,
1625
+ "should_decontaminate": true,
1626
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1627
+ "metadata": {
1628
+ "version": 1.0
1629
+ }
1630
+ },
1631
+ "blimp_principle_A_domain_3": {
1632
+ "task": "blimp_principle_A_domain_3",
1633
+ "group": "blimp",
1634
+ "dataset_path": "blimp",
1635
+ "dataset_name": "principle_A_domain_3",
1636
+ "validation_split": "train",
1637
+ "doc_to_text": "",
1638
+ "doc_to_target": 0,
1639
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1640
+ "description": "",
1641
+ "target_delimiter": " ",
1642
+ "fewshot_delimiter": "\n\n",
1643
+ "num_fewshot": 0,
1644
+ "metric_list": [
1645
+ {
1646
+ "metric": "acc"
1647
+ }
1648
+ ],
1649
+ "output_type": "multiple_choice",
1650
+ "repeats": 1,
1651
+ "should_decontaminate": true,
1652
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1653
+ "metadata": {
1654
+ "version": 1.0
1655
+ }
1656
+ },
1657
+ "blimp_principle_A_reconstruction": {
1658
+ "task": "blimp_principle_A_reconstruction",
1659
+ "group": "blimp",
1660
+ "dataset_path": "blimp",
1661
+ "dataset_name": "principle_A_reconstruction",
1662
+ "validation_split": "train",
1663
+ "doc_to_text": "",
1664
+ "doc_to_target": 0,
1665
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1666
+ "description": "",
1667
+ "target_delimiter": " ",
1668
+ "fewshot_delimiter": "\n\n",
1669
+ "num_fewshot": 0,
1670
+ "metric_list": [
1671
+ {
1672
+ "metric": "acc"
1673
+ }
1674
+ ],
1675
+ "output_type": "multiple_choice",
1676
+ "repeats": 1,
1677
+ "should_decontaminate": true,
1678
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1679
+ "metadata": {
1680
+ "version": 1.0
1681
+ }
1682
+ },
1683
+ "blimp_regular_plural_subject_verb_agreement_1": {
1684
+ "task": "blimp_regular_plural_subject_verb_agreement_1",
1685
+ "group": "blimp",
1686
+ "dataset_path": "blimp",
1687
+ "dataset_name": "regular_plural_subject_verb_agreement_1",
1688
+ "validation_split": "train",
1689
+ "doc_to_text": "",
1690
+ "doc_to_target": 0,
1691
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1692
+ "description": "",
1693
+ "target_delimiter": " ",
1694
+ "fewshot_delimiter": "\n\n",
1695
+ "num_fewshot": 0,
1696
+ "metric_list": [
1697
+ {
1698
+ "metric": "acc"
1699
+ }
1700
+ ],
1701
+ "output_type": "multiple_choice",
1702
+ "repeats": 1,
1703
+ "should_decontaminate": true,
1704
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1705
+ "metadata": {
1706
+ "version": 1.0
1707
+ }
1708
+ },
1709
+ "blimp_regular_plural_subject_verb_agreement_2": {
1710
+ "task": "blimp_regular_plural_subject_verb_agreement_2",
1711
+ "group": "blimp",
1712
+ "dataset_path": "blimp",
1713
+ "dataset_name": "regular_plural_subject_verb_agreement_2",
1714
+ "validation_split": "train",
1715
+ "doc_to_text": "",
1716
+ "doc_to_target": 0,
1717
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1718
+ "description": "",
1719
+ "target_delimiter": " ",
1720
+ "fewshot_delimiter": "\n\n",
1721
+ "num_fewshot": 0,
1722
+ "metric_list": [
1723
+ {
1724
+ "metric": "acc"
1725
+ }
1726
+ ],
1727
+ "output_type": "multiple_choice",
1728
+ "repeats": 1,
1729
+ "should_decontaminate": true,
1730
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1731
+ "metadata": {
1732
+ "version": 1.0
1733
+ }
1734
+ },
1735
+ "blimp_sentential_negation_npi_licensor_present": {
1736
+ "task": "blimp_sentential_negation_npi_licensor_present",
1737
+ "group": "blimp",
1738
+ "dataset_path": "blimp",
1739
+ "dataset_name": "sentential_negation_npi_licensor_present",
1740
+ "validation_split": "train",
1741
+ "doc_to_text": "",
1742
+ "doc_to_target": 0,
1743
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1744
+ "description": "",
1745
+ "target_delimiter": " ",
1746
+ "fewshot_delimiter": "\n\n",
1747
+ "num_fewshot": 0,
1748
+ "metric_list": [
1749
+ {
1750
+ "metric": "acc"
1751
+ }
1752
+ ],
1753
+ "output_type": "multiple_choice",
1754
+ "repeats": 1,
1755
+ "should_decontaminate": true,
1756
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1757
+ "metadata": {
1758
+ "version": 1.0
1759
+ }
1760
+ },
1761
+ "blimp_sentential_negation_npi_scope": {
1762
+ "task": "blimp_sentential_negation_npi_scope",
1763
+ "group": "blimp",
1764
+ "dataset_path": "blimp",
1765
+ "dataset_name": "sentential_negation_npi_scope",
1766
+ "validation_split": "train",
1767
+ "doc_to_text": "",
1768
+ "doc_to_target": 0,
1769
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1770
+ "description": "",
1771
+ "target_delimiter": " ",
1772
+ "fewshot_delimiter": "\n\n",
1773
+ "num_fewshot": 0,
1774
+ "metric_list": [
1775
+ {
1776
+ "metric": "acc"
1777
+ }
1778
+ ],
1779
+ "output_type": "multiple_choice",
1780
+ "repeats": 1,
1781
+ "should_decontaminate": true,
1782
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1783
+ "metadata": {
1784
+ "version": 1.0
1785
+ }
1786
+ },
1787
+ "blimp_sentential_subject_island": {
1788
+ "task": "blimp_sentential_subject_island",
1789
+ "group": "blimp",
1790
+ "dataset_path": "blimp",
1791
+ "dataset_name": "sentential_subject_island",
1792
+ "validation_split": "train",
1793
+ "doc_to_text": "",
1794
+ "doc_to_target": 0,
1795
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1796
+ "description": "",
1797
+ "target_delimiter": " ",
1798
+ "fewshot_delimiter": "\n\n",
1799
+ "num_fewshot": 0,
1800
+ "metric_list": [
1801
+ {
1802
+ "metric": "acc"
1803
+ }
1804
+ ],
1805
+ "output_type": "multiple_choice",
1806
+ "repeats": 1,
1807
+ "should_decontaminate": true,
1808
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1809
+ "metadata": {
1810
+ "version": 1.0
1811
+ }
1812
+ },
1813
+ "blimp_superlative_quantifiers_1": {
1814
+ "task": "blimp_superlative_quantifiers_1",
1815
+ "group": "blimp",
1816
+ "dataset_path": "blimp",
1817
+ "dataset_name": "superlative_quantifiers_1",
1818
+ "validation_split": "train",
1819
+ "doc_to_text": "",
1820
+ "doc_to_target": 0,
1821
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1822
+ "description": "",
1823
+ "target_delimiter": " ",
1824
+ "fewshot_delimiter": "\n\n",
1825
+ "num_fewshot": 0,
1826
+ "metric_list": [
1827
+ {
1828
+ "metric": "acc"
1829
+ }
1830
+ ],
1831
+ "output_type": "multiple_choice",
1832
+ "repeats": 1,
1833
+ "should_decontaminate": true,
1834
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1835
+ "metadata": {
1836
+ "version": 1.0
1837
+ }
1838
+ },
1839
+ "blimp_superlative_quantifiers_2": {
1840
+ "task": "blimp_superlative_quantifiers_2",
1841
+ "group": "blimp",
1842
+ "dataset_path": "blimp",
1843
+ "dataset_name": "superlative_quantifiers_2",
1844
+ "validation_split": "train",
1845
+ "doc_to_text": "",
1846
+ "doc_to_target": 0,
1847
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1848
+ "description": "",
1849
+ "target_delimiter": " ",
1850
+ "fewshot_delimiter": "\n\n",
1851
+ "num_fewshot": 0,
1852
+ "metric_list": [
1853
+ {
1854
+ "metric": "acc"
1855
+ }
1856
+ ],
1857
+ "output_type": "multiple_choice",
1858
+ "repeats": 1,
1859
+ "should_decontaminate": true,
1860
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1861
+ "metadata": {
1862
+ "version": 1.0
1863
+ }
1864
+ },
1865
+ "blimp_tough_vs_raising_1": {
1866
+ "task": "blimp_tough_vs_raising_1",
1867
+ "group": "blimp",
1868
+ "dataset_path": "blimp",
1869
+ "dataset_name": "tough_vs_raising_1",
1870
+ "validation_split": "train",
1871
+ "doc_to_text": "",
1872
+ "doc_to_target": 0,
1873
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1874
+ "description": "",
1875
+ "target_delimiter": " ",
1876
+ "fewshot_delimiter": "\n\n",
1877
+ "num_fewshot": 0,
1878
+ "metric_list": [
1879
+ {
1880
+ "metric": "acc"
1881
+ }
1882
+ ],
1883
+ "output_type": "multiple_choice",
1884
+ "repeats": 1,
1885
+ "should_decontaminate": true,
1886
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1887
+ "metadata": {
1888
+ "version": 1.0
1889
+ }
1890
+ },
1891
+ "blimp_tough_vs_raising_2": {
1892
+ "task": "blimp_tough_vs_raising_2",
1893
+ "group": "blimp",
1894
+ "dataset_path": "blimp",
1895
+ "dataset_name": "tough_vs_raising_2",
1896
+ "validation_split": "train",
1897
+ "doc_to_text": "",
1898
+ "doc_to_target": 0,
1899
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1900
+ "description": "",
1901
+ "target_delimiter": " ",
1902
+ "fewshot_delimiter": "\n\n",
1903
+ "num_fewshot": 0,
1904
+ "metric_list": [
1905
+ {
1906
+ "metric": "acc"
1907
+ }
1908
+ ],
1909
+ "output_type": "multiple_choice",
1910
+ "repeats": 1,
1911
+ "should_decontaminate": true,
1912
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1913
+ "metadata": {
1914
+ "version": 1.0
1915
+ }
1916
+ },
1917
+ "blimp_transitive": {
1918
+ "task": "blimp_transitive",
1919
+ "group": "blimp",
1920
+ "dataset_path": "blimp",
1921
+ "dataset_name": "transitive",
1922
+ "validation_split": "train",
1923
+ "doc_to_text": "",
1924
+ "doc_to_target": 0,
1925
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1926
+ "description": "",
1927
+ "target_delimiter": " ",
1928
+ "fewshot_delimiter": "\n\n",
1929
+ "num_fewshot": 0,
1930
+ "metric_list": [
1931
+ {
1932
+ "metric": "acc"
1933
+ }
1934
+ ],
1935
+ "output_type": "multiple_choice",
1936
+ "repeats": 1,
1937
+ "should_decontaminate": true,
1938
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1939
+ "metadata": {
1940
+ "version": 1.0
1941
+ }
1942
+ },
1943
+ "blimp_wh_island": {
1944
+ "task": "blimp_wh_island",
1945
+ "group": "blimp",
1946
+ "dataset_path": "blimp",
1947
+ "dataset_name": "wh_island",
1948
+ "validation_split": "train",
1949
+ "doc_to_text": "",
1950
+ "doc_to_target": 0,
1951
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1952
+ "description": "",
1953
+ "target_delimiter": " ",
1954
+ "fewshot_delimiter": "\n\n",
1955
+ "num_fewshot": 0,
1956
+ "metric_list": [
1957
+ {
1958
+ "metric": "acc"
1959
+ }
1960
+ ],
1961
+ "output_type": "multiple_choice",
1962
+ "repeats": 1,
1963
+ "should_decontaminate": true,
1964
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1965
+ "metadata": {
1966
+ "version": 1.0
1967
+ }
1968
+ },
1969
+ "blimp_wh_questions_object_gap": {
1970
+ "task": "blimp_wh_questions_object_gap",
1971
+ "group": "blimp",
1972
+ "dataset_path": "blimp",
1973
+ "dataset_name": "wh_questions_object_gap",
1974
+ "validation_split": "train",
1975
+ "doc_to_text": "",
1976
+ "doc_to_target": 0,
1977
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1978
+ "description": "",
1979
+ "target_delimiter": " ",
1980
+ "fewshot_delimiter": "\n\n",
1981
+ "num_fewshot": 0,
1982
+ "metric_list": [
1983
+ {
1984
+ "metric": "acc"
1985
+ }
1986
+ ],
1987
+ "output_type": "multiple_choice",
1988
+ "repeats": 1,
1989
+ "should_decontaminate": true,
1990
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1991
+ "metadata": {
1992
+ "version": 1.0
1993
+ }
1994
+ },
1995
+ "blimp_wh_questions_subject_gap": {
1996
+ "task": "blimp_wh_questions_subject_gap",
1997
+ "group": "blimp",
1998
+ "dataset_path": "blimp",
1999
+ "dataset_name": "wh_questions_subject_gap",
2000
+ "validation_split": "train",
2001
+ "doc_to_text": "",
2002
+ "doc_to_target": 0,
2003
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
2004
+ "description": "",
2005
+ "target_delimiter": " ",
2006
+ "fewshot_delimiter": "\n\n",
2007
+ "num_fewshot": 0,
2008
+ "metric_list": [
2009
+ {
2010
+ "metric": "acc"
2011
+ }
2012
+ ],
2013
+ "output_type": "multiple_choice",
2014
+ "repeats": 1,
2015
+ "should_decontaminate": true,
2016
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
2017
+ "metadata": {
2018
+ "version": 1.0
2019
+ }
2020
+ },
2021
+ "blimp_wh_questions_subject_gap_long_distance": {
2022
+ "task": "blimp_wh_questions_subject_gap_long_distance",
2023
+ "group": "blimp",
2024
+ "dataset_path": "blimp",
2025
+ "dataset_name": "wh_questions_subject_gap_long_distance",
2026
+ "validation_split": "train",
2027
+ "doc_to_text": "",
2028
+ "doc_to_target": 0,
2029
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
2030
+ "description": "",
2031
+ "target_delimiter": " ",
2032
+ "fewshot_delimiter": "\n\n",
2033
+ "num_fewshot": 0,
2034
+ "metric_list": [
2035
+ {
2036
+ "metric": "acc"
2037
+ }
2038
+ ],
2039
+ "output_type": "multiple_choice",
2040
+ "repeats": 1,
2041
+ "should_decontaminate": true,
2042
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
2043
+ "metadata": {
2044
+ "version": 1.0
2045
+ }
2046
+ },
2047
+ "blimp_wh_vs_that_no_gap": {
2048
+ "task": "blimp_wh_vs_that_no_gap",
2049
+ "group": "blimp",
2050
+ "dataset_path": "blimp",
2051
+ "dataset_name": "wh_vs_that_no_gap",
2052
+ "validation_split": "train",
2053
+ "doc_to_text": "",
2054
+ "doc_to_target": 0,
2055
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
2056
+ "description": "",
2057
+ "target_delimiter": " ",
2058
+ "fewshot_delimiter": "\n\n",
2059
+ "num_fewshot": 0,
2060
+ "metric_list": [
2061
+ {
2062
+ "metric": "acc"
2063
+ }
2064
+ ],
2065
+ "output_type": "multiple_choice",
2066
+ "repeats": 1,
2067
+ "should_decontaminate": true,
2068
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
2069
+ "metadata": {
2070
+ "version": 1.0
2071
+ }
2072
+ },
2073
+ "blimp_wh_vs_that_no_gap_long_distance": {
2074
+ "task": "blimp_wh_vs_that_no_gap_long_distance",
2075
+ "group": "blimp",
2076
+ "dataset_path": "blimp",
2077
+ "dataset_name": "wh_vs_that_no_gap_long_distance",
2078
+ "validation_split": "train",
2079
+ "doc_to_text": "",
2080
+ "doc_to_target": 0,
2081
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
2082
+ "description": "",
2083
+ "target_delimiter": " ",
2084
+ "fewshot_delimiter": "\n\n",
2085
+ "num_fewshot": 0,
2086
+ "metric_list": [
2087
+ {
2088
+ "metric": "acc"
2089
+ }
2090
+ ],
2091
+ "output_type": "multiple_choice",
2092
+ "repeats": 1,
2093
+ "should_decontaminate": true,
2094
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
2095
+ "metadata": {
2096
+ "version": 1.0
2097
+ }
2098
+ },
2099
+ "blimp_wh_vs_that_with_gap": {
2100
+ "task": "blimp_wh_vs_that_with_gap",
2101
+ "group": "blimp",
2102
+ "dataset_path": "blimp",
2103
+ "dataset_name": "wh_vs_that_with_gap",
2104
+ "validation_split": "train",
2105
+ "doc_to_text": "",
2106
+ "doc_to_target": 0,
2107
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
2108
+ "description": "",
2109
+ "target_delimiter": " ",
2110
+ "fewshot_delimiter": "\n\n",
2111
+ "num_fewshot": 0,
2112
+ "metric_list": [
2113
+ {
2114
+ "metric": "acc"
2115
+ }
2116
+ ],
2117
+ "output_type": "multiple_choice",
2118
+ "repeats": 1,
2119
+ "should_decontaminate": true,
2120
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
2121
+ "metadata": {
2122
+ "version": 1.0
2123
+ }
2124
+ },
2125
+ "blimp_wh_vs_that_with_gap_long_distance": {
2126
+ "task": "blimp_wh_vs_that_with_gap_long_distance",
2127
+ "group": "blimp",
2128
+ "dataset_path": "blimp",
2129
+ "dataset_name": "wh_vs_that_with_gap_long_distance",
2130
+ "validation_split": "train",
2131
+ "doc_to_text": "",
2132
+ "doc_to_target": 0,
2133
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
2134
+ "description": "",
2135
+ "target_delimiter": " ",
2136
+ "fewshot_delimiter": "\n\n",
2137
+ "num_fewshot": 0,
2138
+ "metric_list": [
2139
+ {
2140
+ "metric": "acc"
2141
+ }
2142
+ ],
2143
+ "output_type": "multiple_choice",
2144
+ "repeats": 1,
2145
+ "should_decontaminate": true,
2146
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
2147
+ "metadata": {
2148
+ "version": 1.0
2149
+ }
2150
+ }
2151
+ },
2152
+ "versions": {
2153
+ "blimp_adjunct_island": 1.0,
2154
+ "blimp_anaphor_gender_agreement": 1.0,
2155
+ "blimp_anaphor_number_agreement": 1.0,
2156
+ "blimp_animate_subject_passive": 1.0,
2157
+ "blimp_animate_subject_trans": 1.0,
2158
+ "blimp_causative": 1.0,
2159
+ "blimp_complex_NP_island": 1.0,
2160
+ "blimp_coordinate_structure_constraint_complex_left_branch": 1.0,
2161
+ "blimp_coordinate_structure_constraint_object_extraction": 1.0,
2162
+ "blimp_determiner_noun_agreement_1": 1.0,
2163
+ "blimp_determiner_noun_agreement_2": 1.0,
2164
+ "blimp_determiner_noun_agreement_irregular_1": 1.0,
2165
+ "blimp_determiner_noun_agreement_irregular_2": 1.0,
2166
+ "blimp_determiner_noun_agreement_with_adj_2": 1.0,
2167
+ "blimp_determiner_noun_agreement_with_adj_irregular_1": 1.0,
2168
+ "blimp_determiner_noun_agreement_with_adj_irregular_2": 1.0,
2169
+ "blimp_determiner_noun_agreement_with_adjective_1": 1.0,
2170
+ "blimp_distractor_agreement_relational_noun": 1.0,
2171
+ "blimp_distractor_agreement_relative_clause": 1.0,
2172
+ "blimp_drop_argument": 1.0,
2173
+ "blimp_ellipsis_n_bar_1": 1.0,
2174
+ "blimp_ellipsis_n_bar_2": 1.0,
2175
+ "blimp_existential_there_object_raising": 1.0,
2176
+ "blimp_existential_there_quantifiers_1": 1.0,
2177
+ "blimp_existential_there_quantifiers_2": 1.0,
2178
+ "blimp_existential_there_subject_raising": 1.0,
2179
+ "blimp_expletive_it_object_raising": 1.0,
2180
+ "blimp_inchoative": 1.0,
2181
+ "blimp_intransitive": 1.0,
2182
+ "blimp_irregular_past_participle_adjectives": 1.0,
2183
+ "blimp_irregular_past_participle_verbs": 1.0,
2184
+ "blimp_irregular_plural_subject_verb_agreement_1": 1.0,
2185
+ "blimp_irregular_plural_subject_verb_agreement_2": 1.0,
2186
+ "blimp_left_branch_island_echo_question": 1.0,
2187
+ "blimp_left_branch_island_simple_question": 1.0,
2188
+ "blimp_matrix_question_npi_licensor_present": 1.0,
2189
+ "blimp_npi_present_1": 1.0,
2190
+ "blimp_npi_present_2": 1.0,
2191
+ "blimp_only_npi_licensor_present": 1.0,
2192
+ "blimp_only_npi_scope": 1.0,
2193
+ "blimp_passive_1": 1.0,
2194
+ "blimp_passive_2": 1.0,
2195
+ "blimp_principle_A_c_command": 1.0,
2196
+ "blimp_principle_A_case_1": 1.0,
2197
+ "blimp_principle_A_case_2": 1.0,
2198
+ "blimp_principle_A_domain_1": 1.0,
2199
+ "blimp_principle_A_domain_2": 1.0,
2200
+ "blimp_principle_A_domain_3": 1.0,
2201
+ "blimp_principle_A_reconstruction": 1.0,
2202
+ "blimp_regular_plural_subject_verb_agreement_1": 1.0,
2203
+ "blimp_regular_plural_subject_verb_agreement_2": 1.0,
2204
+ "blimp_sentential_negation_npi_licensor_present": 1.0,
2205
+ "blimp_sentential_negation_npi_scope": 1.0,
2206
+ "blimp_sentential_subject_island": 1.0,
2207
+ "blimp_superlative_quantifiers_1": 1.0,
2208
+ "blimp_superlative_quantifiers_2": 1.0,
2209
+ "blimp_tough_vs_raising_1": 1.0,
2210
+ "blimp_tough_vs_raising_2": 1.0,
2211
+ "blimp_transitive": 1.0,
2212
+ "blimp_wh_island": 1.0,
2213
+ "blimp_wh_questions_object_gap": 1.0,
2214
+ "blimp_wh_questions_subject_gap": 1.0,
2215
+ "blimp_wh_questions_subject_gap_long_distance": 1.0,
2216
+ "blimp_wh_vs_that_no_gap": 1.0,
2217
+ "blimp_wh_vs_that_no_gap_long_distance": 1.0,
2218
+ "blimp_wh_vs_that_with_gap": 1.0,
2219
+ "blimp_wh_vs_that_with_gap_long_distance": 1.0
2220
+ },
2221
+ "n-shot": {
2222
+ "blimp_adjunct_island": 0,
2223
+ "blimp_anaphor_gender_agreement": 0,
2224
+ "blimp_anaphor_number_agreement": 0,
2225
+ "blimp_animate_subject_passive": 0,
2226
+ "blimp_animate_subject_trans": 0,
2227
+ "blimp_causative": 0,
2228
+ "blimp_complex_NP_island": 0,
2229
+ "blimp_coordinate_structure_constraint_complex_left_branch": 0,
2230
+ "blimp_coordinate_structure_constraint_object_extraction": 0,
2231
+ "blimp_determiner_noun_agreement_1": 0,
2232
+ "blimp_determiner_noun_agreement_2": 0,
2233
+ "blimp_determiner_noun_agreement_irregular_1": 0,
2234
+ "blimp_determiner_noun_agreement_irregular_2": 0,
2235
+ "blimp_determiner_noun_agreement_with_adj_2": 0,
2236
+ "blimp_determiner_noun_agreement_with_adj_irregular_1": 0,
2237
+ "blimp_determiner_noun_agreement_with_adj_irregular_2": 0,
2238
+ "blimp_determiner_noun_agreement_with_adjective_1": 0,
2239
+ "blimp_distractor_agreement_relational_noun": 0,
2240
+ "blimp_distractor_agreement_relative_clause": 0,
2241
+ "blimp_drop_argument": 0,
2242
+ "blimp_ellipsis_n_bar_1": 0,
2243
+ "blimp_ellipsis_n_bar_2": 0,
2244
+ "blimp_existential_there_object_raising": 0,
2245
+ "blimp_existential_there_quantifiers_1": 0,
2246
+ "blimp_existential_there_quantifiers_2": 0,
2247
+ "blimp_existential_there_subject_raising": 0,
2248
+ "blimp_expletive_it_object_raising": 0,
2249
+ "blimp_inchoative": 0,
2250
+ "blimp_intransitive": 0,
2251
+ "blimp_irregular_past_participle_adjectives": 0,
2252
+ "blimp_irregular_past_participle_verbs": 0,
2253
+ "blimp_irregular_plural_subject_verb_agreement_1": 0,
2254
+ "blimp_irregular_plural_subject_verb_agreement_2": 0,
2255
+ "blimp_left_branch_island_echo_question": 0,
2256
+ "blimp_left_branch_island_simple_question": 0,
2257
+ "blimp_matrix_question_npi_licensor_present": 0,
2258
+ "blimp_npi_present_1": 0,
2259
+ "blimp_npi_present_2": 0,
2260
+ "blimp_only_npi_licensor_present": 0,
2261
+ "blimp_only_npi_scope": 0,
2262
+ "blimp_passive_1": 0,
2263
+ "blimp_passive_2": 0,
2264
+ "blimp_principle_A_c_command": 0,
2265
+ "blimp_principle_A_case_1": 0,
2266
+ "blimp_principle_A_case_2": 0,
2267
+ "blimp_principle_A_domain_1": 0,
2268
+ "blimp_principle_A_domain_2": 0,
2269
+ "blimp_principle_A_domain_3": 0,
2270
+ "blimp_principle_A_reconstruction": 0,
2271
+ "blimp_regular_plural_subject_verb_agreement_1": 0,
2272
+ "blimp_regular_plural_subject_verb_agreement_2": 0,
2273
+ "blimp_sentential_negation_npi_licensor_present": 0,
2274
+ "blimp_sentential_negation_npi_scope": 0,
2275
+ "blimp_sentential_subject_island": 0,
2276
+ "blimp_superlative_quantifiers_1": 0,
2277
+ "blimp_superlative_quantifiers_2": 0,
2278
+ "blimp_tough_vs_raising_1": 0,
2279
+ "blimp_tough_vs_raising_2": 0,
2280
+ "blimp_transitive": 0,
2281
+ "blimp_wh_island": 0,
2282
+ "blimp_wh_questions_object_gap": 0,
2283
+ "blimp_wh_questions_subject_gap": 0,
2284
+ "blimp_wh_questions_subject_gap_long_distance": 0,
2285
+ "blimp_wh_vs_that_no_gap": 0,
2286
+ "blimp_wh_vs_that_no_gap_long_distance": 0,
2287
+ "blimp_wh_vs_that_with_gap": 0,
2288
+ "blimp_wh_vs_that_with_gap_long_distance": 0
2289
+ },
2290
+ "n-samples": {
2291
+ "blimp_wh_vs_that_with_gap_long_distance": {
2292
+ "original": 1000,
2293
+ "effective": 1000
2294
+ },
2295
+ "blimp_wh_vs_that_with_gap": {
2296
+ "original": 1000,
2297
+ "effective": 1000
2298
+ },
2299
+ "blimp_wh_vs_that_no_gap_long_distance": {
2300
+ "original": 1000,
2301
+ "effective": 1000
2302
+ },
2303
+ "blimp_wh_vs_that_no_gap": {
2304
+ "original": 1000,
2305
+ "effective": 1000
2306
+ },
2307
+ "blimp_wh_questions_subject_gap_long_distance": {
2308
+ "original": 1000,
2309
+ "effective": 1000
2310
+ },
2311
+ "blimp_wh_questions_subject_gap": {
2312
+ "original": 1000,
2313
+ "effective": 1000
2314
+ },
2315
+ "blimp_wh_questions_object_gap": {
2316
+ "original": 1000,
2317
+ "effective": 1000
2318
+ },
2319
+ "blimp_wh_island": {
2320
+ "original": 1000,
2321
+ "effective": 1000
2322
+ },
2323
+ "blimp_transitive": {
2324
+ "original": 1000,
2325
+ "effective": 1000
2326
+ },
2327
+ "blimp_tough_vs_raising_2": {
2328
+ "original": 1000,
2329
+ "effective": 1000
2330
+ },
2331
+ "blimp_tough_vs_raising_1": {
2332
+ "original": 1000,
2333
+ "effective": 1000
2334
+ },
2335
+ "blimp_superlative_quantifiers_2": {
2336
+ "original": 1000,
2337
+ "effective": 1000
2338
+ },
2339
+ "blimp_superlative_quantifiers_1": {
2340
+ "original": 1000,
2341
+ "effective": 1000
2342
+ },
2343
+ "blimp_sentential_subject_island": {
2344
+ "original": 1000,
2345
+ "effective": 1000
2346
+ },
2347
+ "blimp_sentential_negation_npi_scope": {
2348
+ "original": 1000,
2349
+ "effective": 1000
2350
+ },
2351
+ "blimp_sentential_negation_npi_licensor_present": {
2352
+ "original": 1000,
2353
+ "effective": 1000
2354
+ },
2355
+ "blimp_regular_plural_subject_verb_agreement_2": {
2356
+ "original": 1000,
2357
+ "effective": 1000
2358
+ },
2359
+ "blimp_regular_plural_subject_verb_agreement_1": {
2360
+ "original": 1000,
2361
+ "effective": 1000
2362
+ },
2363
+ "blimp_principle_A_reconstruction": {
2364
+ "original": 1000,
2365
+ "effective": 1000
2366
+ },
2367
+ "blimp_principle_A_domain_3": {
2368
+ "original": 1000,
2369
+ "effective": 1000
2370
+ },
2371
+ "blimp_principle_A_domain_2": {
2372
+ "original": 1000,
2373
+ "effective": 1000
2374
+ },
2375
+ "blimp_principle_A_domain_1": {
2376
+ "original": 1000,
2377
+ "effective": 1000
2378
+ },
2379
+ "blimp_principle_A_case_2": {
2380
+ "original": 1000,
2381
+ "effective": 1000
2382
+ },
2383
+ "blimp_principle_A_case_1": {
2384
+ "original": 1000,
2385
+ "effective": 1000
2386
+ },
2387
+ "blimp_principle_A_c_command": {
2388
+ "original": 1000,
2389
+ "effective": 1000
2390
+ },
2391
+ "blimp_passive_2": {
2392
+ "original": 1000,
2393
+ "effective": 1000
2394
+ },
2395
+ "blimp_passive_1": {
2396
+ "original": 1000,
2397
+ "effective": 1000
2398
+ },
2399
+ "blimp_only_npi_scope": {
2400
+ "original": 1000,
2401
+ "effective": 1000
2402
+ },
2403
+ "blimp_only_npi_licensor_present": {
2404
+ "original": 1000,
2405
+ "effective": 1000
2406
+ },
2407
+ "blimp_npi_present_2": {
2408
+ "original": 1000,
2409
+ "effective": 1000
2410
+ },
2411
+ "blimp_npi_present_1": {
2412
+ "original": 1000,
2413
+ "effective": 1000
2414
+ },
2415
+ "blimp_matrix_question_npi_licensor_present": {
2416
+ "original": 1000,
2417
+ "effective": 1000
2418
+ },
2419
+ "blimp_left_branch_island_simple_question": {
2420
+ "original": 1000,
2421
+ "effective": 1000
2422
+ },
2423
+ "blimp_left_branch_island_echo_question": {
2424
+ "original": 1000,
2425
+ "effective": 1000
2426
+ },
2427
+ "blimp_irregular_plural_subject_verb_agreement_2": {
2428
+ "original": 1000,
2429
+ "effective": 1000
2430
+ },
2431
+ "blimp_irregular_plural_subject_verb_agreement_1": {
2432
+ "original": 1000,
2433
+ "effective": 1000
2434
+ },
2435
+ "blimp_irregular_past_participle_verbs": {
2436
+ "original": 1000,
2437
+ "effective": 1000
2438
+ },
2439
+ "blimp_irregular_past_participle_adjectives": {
2440
+ "original": 1000,
2441
+ "effective": 1000
2442
+ },
2443
+ "blimp_intransitive": {
2444
+ "original": 1000,
2445
+ "effective": 1000
2446
+ },
2447
+ "blimp_inchoative": {
2448
+ "original": 1000,
2449
+ "effective": 1000
2450
+ },
2451
+ "blimp_expletive_it_object_raising": {
2452
+ "original": 1000,
2453
+ "effective": 1000
2454
+ },
2455
+ "blimp_existential_there_subject_raising": {
2456
+ "original": 1000,
2457
+ "effective": 1000
2458
+ },
2459
+ "blimp_existential_there_quantifiers_2": {
2460
+ "original": 1000,
2461
+ "effective": 1000
2462
+ },
2463
+ "blimp_existential_there_quantifiers_1": {
2464
+ "original": 1000,
2465
+ "effective": 1000
2466
+ },
2467
+ "blimp_existential_there_object_raising": {
2468
+ "original": 1000,
2469
+ "effective": 1000
2470
+ },
2471
+ "blimp_ellipsis_n_bar_2": {
2472
+ "original": 1000,
2473
+ "effective": 1000
2474
+ },
2475
+ "blimp_ellipsis_n_bar_1": {
2476
+ "original": 1000,
2477
+ "effective": 1000
2478
+ },
2479
+ "blimp_drop_argument": {
2480
+ "original": 1000,
2481
+ "effective": 1000
2482
+ },
2483
+ "blimp_distractor_agreement_relative_clause": {
2484
+ "original": 1000,
2485
+ "effective": 1000
2486
+ },
2487
+ "blimp_distractor_agreement_relational_noun": {
2488
+ "original": 1000,
2489
+ "effective": 1000
2490
+ },
2491
+ "blimp_determiner_noun_agreement_with_adjective_1": {
2492
+ "original": 1000,
2493
+ "effective": 1000
2494
+ },
2495
+ "blimp_determiner_noun_agreement_with_adj_irregular_2": {
2496
+ "original": 1000,
2497
+ "effective": 1000
2498
+ },
2499
+ "blimp_determiner_noun_agreement_with_adj_irregular_1": {
2500
+ "original": 1000,
2501
+ "effective": 1000
2502
+ },
2503
+ "blimp_determiner_noun_agreement_with_adj_2": {
2504
+ "original": 1000,
2505
+ "effective": 1000
2506
+ },
2507
+ "blimp_determiner_noun_agreement_irregular_2": {
2508
+ "original": 1000,
2509
+ "effective": 1000
2510
+ },
2511
+ "blimp_determiner_noun_agreement_irregular_1": {
2512
+ "original": 1000,
2513
+ "effective": 1000
2514
+ },
2515
+ "blimp_determiner_noun_agreement_2": {
2516
+ "original": 1000,
2517
+ "effective": 1000
2518
+ },
2519
+ "blimp_determiner_noun_agreement_1": {
2520
+ "original": 1000,
2521
+ "effective": 1000
2522
+ },
2523
+ "blimp_coordinate_structure_constraint_object_extraction": {
2524
+ "original": 1000,
2525
+ "effective": 1000
2526
+ },
2527
+ "blimp_coordinate_structure_constraint_complex_left_branch": {
2528
+ "original": 1000,
2529
+ "effective": 1000
2530
+ },
2531
+ "blimp_complex_NP_island": {
2532
+ "original": 1000,
2533
+ "effective": 1000
2534
+ },
2535
+ "blimp_causative": {
2536
+ "original": 1000,
2537
+ "effective": 1000
2538
+ },
2539
+ "blimp_animate_subject_trans": {
2540
+ "original": 1000,
2541
+ "effective": 1000
2542
+ },
2543
+ "blimp_animate_subject_passive": {
2544
+ "original": 1000,
2545
+ "effective": 1000
2546
+ },
2547
+ "blimp_anaphor_number_agreement": {
2548
+ "original": 1000,
2549
+ "effective": 1000
2550
+ },
2551
+ "blimp_anaphor_gender_agreement": {
2552
+ "original": 1000,
2553
+ "effective": 1000
2554
+ },
2555
+ "blimp_adjunct_island": {
2556
+ "original": 1000,
2557
+ "effective": 1000
2558
+ }
2559
+ },
2560
+ "config": {
2561
+ "model": "hf",
2562
+ "model_args": "pretrained=EleutherAI/pythia-14m-seed1,revision=step46000",
2563
+ "model_num_parameters": 14067712,
2564
+ "model_dtype": "torch.float16",
2565
+ "model_revision": "step46000",
2566
+ "model_sha": "ea504dda5ef5ad3fe7d86b864c69515f12b56015",
2567
+ "batch_size": "1024",
2568
+ "batch_sizes": [],
2569
+ "device": "cuda",
2570
+ "use_cache": null,
2571
+ "limit": null,
2572
+ "bootstrap_iters": 100000,
2573
+ "gen_kwargs": null,
2574
+ "random_seed": 0,
2575
+ "numpy_seed": 1234,
2576
+ "torch_seed": 1234,
2577
+ "fewshot_seed": 1234
2578
+ },
2579
+ "git_hash": "51a7ca9",
2580
+ "date": 1724072196.011075,
2581
+ "pretty_env_info": "PyTorch version: 2.3.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: CentOS Linux release 7.9.2009 (Core) (x86_64)\nGCC version: (GCC) 12.1.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.17\n\nPython version: 3.12.1 (main, Jan 12 2024, 16:49:08) [GCC 12.1.0] (64-bit runtime)\nPython platform: Linux-3.10.0-1160.119.1.el7.tuxcare.els2.x86_64-x86_64-with-glibc2.17\nIs CUDA available: True\nCUDA runtime version: 12.4.99\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: GPU 0: NVIDIA GeForce RTX 2080 Ti\nNvidia driver version: 550.90.07\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nByte Order: Little Endian\nCPU(s): 32\nOn-line CPU(s) list: 0-31\nThread(s) per core: 1\nCore(s) per socket: 32\nSocket(s): 1\nNUMA node(s): 2\nVendor ID: AuthenticAMD\nCPU family: 23\nModel: 49\nModel name: AMD EPYC 7502P 32-Core Processor\nStepping: 0\nCPU MHz: 2500.000\nCPU max MHz: 2500.0000\nCPU min MHz: 1500.0000\nBogoMIPS: 5000.08\nVirtualization: AMD-V\nL1d cache: 32K\nL1i cache: 32K\nL2 cache: 512K\nL3 cache: 16384K\nNUMA node0 CPU(s): 0-15\nNUMA node1 CPU(s): 16-31\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc art rep_good nopl nonstop_tsc extd_apicid aperfmperf eagerfpu pni pclmulqdq monitor ssse3 fma cx16 sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw ibs skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_l2 cpb cat_l3 cdp_l3 hw_pstate sme ssbd ibrs ibpb stibp vmmcall fsgsbase bmi1 avx2 smep bmi2 cqm rdt_a rdseed adx smap clflushopt clwb sha_ni xsaveopt xsavec xgetbv1 cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local clzero irperf xsaveerptr arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif umip overflow_recov succor smca\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.3.0\n[conda] Could not collect",
2582
+ "transformers_version": "4.40.2",
2583
+ "upper_git_hash": null,
2584
+ "task_hashes": {},
2585
+ "model_source": "hf",
2586
+ "model_name": "EleutherAI/pythia-14m-seed1",
2587
+ "model_name_sanitized": "EleutherAI__pythia-14m-seed1",
2588
+ "start_time": 1526906.28702774,
2589
+ "end_time": 1527231.980429371,
2590
+ "total_evaluation_time_seconds": "325.69340163096786"
2591
+ }
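Since every results file added in this commit follows the same schema (a top-level "results" object whose BLiMP sub-task entries each carry an "acc,none" accuracy), a quick way to sanity-check a checkpoint is to load one file and average those accuracies. The following is a minimal sketch, assuming the repository has been checked out locally; the file path below is just one of the results_*.json files from this commit and the mean is an illustrative summary, not a metric reported in the files themselves.

import json
from statistics import mean

# One of the per-checkpoint result files added in this commit
# (any results_*.json here follows the same schema).
path = "pythia-14m-seed1/step47000/EleutherAI__pythia-14m-seed1/results_2024-08-19T06-07-21.455136.json"

with open(path) as f:
    data = json.load(f)

# Each BLiMP sub-task reports zero-shot accuracy under the "acc,none" key.
accs = {task: res["acc,none"] for task, res in data["results"].items()}
print(f"{len(accs)} BLiMP sub-tasks, mean accuracy = {mean(accs.values()):.3f}")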
pythia-14m-seed1/step47000/EleutherAI__pythia-14m-seed1/results_2024-08-19T06-07-21.455136.json ADDED
@@ -0,0 +1,2591 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "results": {
3
+ "blimp_wh_vs_that_with_gap_long_distance": {
4
+ "acc,none": 0.094,
5
+ "acc_stderr,none": 0.009233052000787672,
6
+ "alias": "blimp_wh_vs_that_with_gap_long_distance"
7
+ },
8
+ "blimp_wh_vs_that_with_gap": {
9
+ "acc,none": 0.229,
10
+ "acc_stderr,none": 0.013294199326613684,
11
+ "alias": "blimp_wh_vs_that_with_gap"
12
+ },
13
+ "blimp_wh_vs_that_no_gap_long_distance": {
14
+ "acc,none": 0.956,
15
+ "acc_stderr,none": 0.006488921798427387,
16
+ "alias": "blimp_wh_vs_that_no_gap_long_distance"
17
+ },
18
+ "blimp_wh_vs_that_no_gap": {
19
+ "acc,none": 0.93,
20
+ "acc_stderr,none": 0.008072494358323525,
21
+ "alias": "blimp_wh_vs_that_no_gap"
22
+ },
23
+ "blimp_wh_questions_subject_gap_long_distance": {
24
+ "acc,none": 0.925,
25
+ "acc_stderr,none": 0.008333333333333333,
26
+ "alias": "blimp_wh_questions_subject_gap_long_distance"
27
+ },
28
+ "blimp_wh_questions_subject_gap": {
29
+ "acc,none": 0.859,
30
+ "acc_stderr,none": 0.01101091459599248,
31
+ "alias": "blimp_wh_questions_subject_gap"
32
+ },
33
+ "blimp_wh_questions_object_gap": {
34
+ "acc,none": 0.415,
35
+ "acc_stderr,none": 0.015589035185604594,
36
+ "alias": "blimp_wh_questions_object_gap"
37
+ },
38
+ "blimp_wh_island": {
39
+ "acc,none": 0.676,
40
+ "acc_stderr,none": 0.014806864733738986,
41
+ "alias": "blimp_wh_island"
42
+ },
43
+ "blimp_transitive": {
44
+ "acc,none": 0.818,
45
+ "acc_stderr,none": 0.0122075806376622,
46
+ "alias": "blimp_transitive"
47
+ },
48
+ "blimp_tough_vs_raising_2": {
49
+ "acc,none": 0.738,
50
+ "acc_stderr,none": 0.013912208651021217,
51
+ "alias": "blimp_tough_vs_raising_2"
52
+ },
53
+ "blimp_tough_vs_raising_1": {
54
+ "acc,none": 0.373,
55
+ "acc_stderr,none": 0.015300493622922927,
56
+ "alias": "blimp_tough_vs_raising_1"
57
+ },
58
+ "blimp_superlative_quantifiers_2": {
59
+ "acc,none": 0.231,
60
+ "acc_stderr,none": 0.013334797216936478,
61
+ "alias": "blimp_superlative_quantifiers_2"
62
+ },
63
+ "blimp_superlative_quantifiers_1": {
64
+ "acc,none": 0.163,
65
+ "acc_stderr,none": 0.011686212712746913,
66
+ "alias": "blimp_superlative_quantifiers_1"
67
+ },
68
+ "blimp_sentential_subject_island": {
69
+ "acc,none": 0.392,
70
+ "acc_stderr,none": 0.015445859463771338,
71
+ "alias": "blimp_sentential_subject_island"
72
+ },
73
+ "blimp_sentential_negation_npi_scope": {
74
+ "acc,none": 0.436,
75
+ "acc_stderr,none": 0.015689173023144022,
76
+ "alias": "blimp_sentential_negation_npi_scope"
77
+ },
78
+ "blimp_sentential_negation_npi_licensor_present": {
79
+ "acc,none": 0.987,
80
+ "acc_stderr,none": 0.003583830889403662,
81
+ "alias": "blimp_sentential_negation_npi_licensor_present"
82
+ },
83
+ "blimp_regular_plural_subject_verb_agreement_2": {
84
+ "acc,none": 0.817,
85
+ "acc_stderr,none": 0.012233587399477745,
86
+ "alias": "blimp_regular_plural_subject_verb_agreement_2"
87
+ },
88
+ "blimp_regular_plural_subject_verb_agreement_1": {
89
+ "acc,none": 0.858,
90
+ "acc_stderr,none": 0.011043457699378305,
91
+ "alias": "blimp_regular_plural_subject_verb_agreement_1"
92
+ },
93
+ "blimp_principle_A_reconstruction": {
94
+ "acc,none": 0.469,
95
+ "acc_stderr,none": 0.015788865959538965,
96
+ "alias": "blimp_principle_A_reconstruction"
97
+ },
98
+ "blimp_principle_A_domain_3": {
99
+ "acc,none": 0.634,
100
+ "acc_stderr,none": 0.0152406127264056,
101
+ "alias": "blimp_principle_A_domain_3"
102
+ },
103
+ "blimp_principle_A_domain_2": {
104
+ "acc,none": 0.632,
105
+ "acc_stderr,none": 0.015258073561521743,
106
+ "alias": "blimp_principle_A_domain_2"
107
+ },
108
+ "blimp_principle_A_domain_1": {
109
+ "acc,none": 0.917,
110
+ "acc_stderr,none": 0.008728527206074756,
111
+ "alias": "blimp_principle_A_domain_1"
112
+ },
113
+ "blimp_principle_A_case_2": {
114
+ "acc,none": 0.786,
115
+ "acc_stderr,none": 0.012975838021968847,
116
+ "alias": "blimp_principle_A_case_2"
117
+ },
118
+ "blimp_principle_A_case_1": {
119
+ "acc,none": 1.0,
120
+ "acc_stderr,none": 0.0,
121
+ "alias": "blimp_principle_A_case_1"
122
+ },
123
+ "blimp_principle_A_c_command": {
124
+ "acc,none": 0.518,
125
+ "acc_stderr,none": 0.01580904569940659,
126
+ "alias": "blimp_principle_A_c_command"
127
+ },
128
+ "blimp_passive_2": {
129
+ "acc,none": 0.879,
130
+ "acc_stderr,none": 0.010318210380946179,
131
+ "alias": "blimp_passive_2"
132
+ },
133
+ "blimp_passive_1": {
134
+ "acc,none": 0.875,
135
+ "acc_stderr,none": 0.010463483381956722,
136
+ "alias": "blimp_passive_1"
137
+ },
138
+ "blimp_only_npi_scope": {
139
+ "acc,none": 0.749,
140
+ "acc_stderr,none": 0.013718133516888775,
141
+ "alias": "blimp_only_npi_scope"
142
+ },
143
+ "blimp_only_npi_licensor_present": {
144
+ "acc,none": 0.979,
145
+ "acc_stderr,none": 0.0045364721513065165,
146
+ "alias": "blimp_only_npi_licensor_present"
147
+ },
148
+ "blimp_npi_present_2": {
149
+ "acc,none": 0.402,
150
+ "acc_stderr,none": 0.015512467135714959,
151
+ "alias": "blimp_npi_present_2"
152
+ },
153
+ "blimp_npi_present_1": {
154
+ "acc,none": 0.374,
155
+ "acc_stderr,none": 0.015308767369006505,
156
+ "alias": "blimp_npi_present_1"
157
+ },
158
+ "blimp_matrix_question_npi_licensor_present": {
159
+ "acc,none": 0.143,
160
+ "acc_stderr,none": 0.011075814808567074,
161
+ "alias": "blimp_matrix_question_npi_licensor_present"
162
+ },
163
+ "blimp_left_branch_island_simple_question": {
164
+ "acc,none": 0.339,
165
+ "acc_stderr,none": 0.014976758771620224,
166
+ "alias": "blimp_left_branch_island_simple_question"
167
+ },
168
+ "blimp_left_branch_island_echo_question": {
169
+ "acc,none": 0.443,
170
+ "acc_stderr,none": 0.015716169953204184,
171
+ "alias": "blimp_left_branch_island_echo_question"
172
+ },
173
+ "blimp_irregular_plural_subject_verb_agreement_2": {
174
+ "acc,none": 0.845,
175
+ "acc_stderr,none": 0.011450157470799522,
176
+ "alias": "blimp_irregular_plural_subject_verb_agreement_2"
177
+ },
178
+ "blimp_irregular_plural_subject_verb_agreement_1": {
179
+ "acc,none": 0.758,
180
+ "acc_stderr,none": 0.013550631705556003,
181
+ "alias": "blimp_irregular_plural_subject_verb_agreement_1"
182
+ },
183
+ "blimp_irregular_past_participle_verbs": {
184
+ "acc,none": 0.88,
185
+ "acc_stderr,none": 0.010281328012747462,
186
+ "alias": "blimp_irregular_past_participle_verbs"
187
+ },
188
+ "blimp_irregular_past_participle_adjectives": {
189
+ "acc,none": 0.993,
190
+ "acc_stderr,none": 0.002637794146243781,
191
+ "alias": "blimp_irregular_past_participle_adjectives"
192
+ },
193
+ "blimp_intransitive": {
194
+ "acc,none": 0.627,
195
+ "acc_stderr,none": 0.015300493622922927,
196
+ "alias": "blimp_intransitive"
197
+ },
198
+ "blimp_inchoative": {
199
+ "acc,none": 0.499,
200
+ "acc_stderr,none": 0.015819268290576817,
201
+ "alias": "blimp_inchoative"
202
+ },
203
+ "blimp_expletive_it_object_raising": {
204
+ "acc,none": 0.732,
205
+ "acc_stderr,none": 0.014013292702729519,
206
+ "alias": "blimp_expletive_it_object_raising"
207
+ },
208
+ "blimp_existential_there_subject_raising": {
209
+ "acc,none": 0.781,
210
+ "acc_stderr,none": 0.013084731950262116,
211
+ "alias": "blimp_existential_there_subject_raising"
212
+ },
213
+ "blimp_existential_there_quantifiers_2": {
214
+ "acc,none": 0.289,
215
+ "acc_stderr,none": 0.014341711358296287,
216
+ "alias": "blimp_existential_there_quantifiers_2"
217
+ },
218
+ "blimp_existential_there_quantifiers_1": {
219
+ "acc,none": 0.955,
220
+ "acc_stderr,none": 0.006558812241406063,
221
+ "alias": "blimp_existential_there_quantifiers_1"
222
+ },
223
+ "blimp_existential_there_object_raising": {
224
+ "acc,none": 0.807,
225
+ "acc_stderr,none": 0.012486268734370044,
226
+ "alias": "blimp_existential_there_object_raising"
227
+ },
228
+ "blimp_ellipsis_n_bar_2": {
229
+ "acc,none": 0.81,
230
+ "acc_stderr,none": 0.012411851354816254,
231
+ "alias": "blimp_ellipsis_n_bar_2"
232
+ },
233
+ "blimp_ellipsis_n_bar_1": {
234
+ "acc,none": 0.556,
235
+ "acc_stderr,none": 0.015719768163402127,
236
+ "alias": "blimp_ellipsis_n_bar_1"
237
+ },
238
+ "blimp_drop_argument": {
239
+ "acc,none": 0.75,
240
+ "acc_stderr,none": 0.013699915608779773,
241
+ "alias": "blimp_drop_argument"
242
+ },
243
+ "blimp_distractor_agreement_relative_clause": {
244
+ "acc,none": 0.256,
245
+ "acc_stderr,none": 0.013807775152234145,
246
+ "alias": "blimp_distractor_agreement_relative_clause"
247
+ },
248
+ "blimp_distractor_agreement_relational_noun": {
249
+ "acc,none": 0.34,
250
+ "acc_stderr,none": 0.014987482264363972,
251
+ "alias": "blimp_distractor_agreement_relational_noun"
252
+ },
253
+ "blimp_determiner_noun_agreement_with_adjective_1": {
254
+ "acc,none": 0.887,
255
+ "acc_stderr,none": 0.010016552866696773,
256
+ "alias": "blimp_determiner_noun_agreement_with_adjective_1"
257
+ },
258
+ "blimp_determiner_noun_agreement_with_adj_irregular_2": {
259
+ "acc,none": 0.824,
260
+ "acc_stderr,none": 0.012048616898597498,
261
+ "alias": "blimp_determiner_noun_agreement_with_adj_irregular_2"
262
+ },
263
+ "blimp_determiner_noun_agreement_with_adj_irregular_1": {
264
+ "acc,none": 0.762,
265
+ "acc_stderr,none": 0.013473586661967093,
266
+ "alias": "blimp_determiner_noun_agreement_with_adj_irregular_1"
267
+ },
268
+ "blimp_determiner_noun_agreement_with_adj_2": {
269
+ "acc,none": 0.851,
270
+ "acc_stderr,none": 0.011266140684632246,
271
+ "alias": "blimp_determiner_noun_agreement_with_adj_2"
272
+ },
273
+ "blimp_determiner_noun_agreement_irregular_2": {
274
+ "acc,none": 0.849,
275
+ "acc_stderr,none": 0.011328165223341657,
276
+ "alias": "blimp_determiner_noun_agreement_irregular_2"
277
+ },
278
+ "blimp_determiner_noun_agreement_irregular_1": {
279
+ "acc,none": 0.769,
280
+ "acc_stderr,none": 0.013334797216936478,
281
+ "alias": "blimp_determiner_noun_agreement_irregular_1"
282
+ },
283
+ "blimp_determiner_noun_agreement_2": {
284
+ "acc,none": 0.938,
285
+ "acc_stderr,none": 0.007629823996280269,
286
+ "alias": "blimp_determiner_noun_agreement_2"
287
+ },
288
+ "blimp_determiner_noun_agreement_1": {
289
+ "acc,none": 0.923,
290
+ "acc_stderr,none": 0.00843458014024066,
291
+ "alias": "blimp_determiner_noun_agreement_1"
292
+ },
293
+ "blimp_coordinate_structure_constraint_object_extraction": {
294
+ "acc,none": 0.509,
295
+ "acc_stderr,none": 0.0158167369950053,
296
+ "alias": "blimp_coordinate_structure_constraint_object_extraction"
297
+ },
298
+ "blimp_coordinate_structure_constraint_complex_left_branch": {
299
+ "acc,none": 0.395,
300
+ "acc_stderr,none": 0.015466551464829328,
301
+ "alias": "blimp_coordinate_structure_constraint_complex_left_branch"
302
+ },
303
+ "blimp_complex_NP_island": {
304
+ "acc,none": 0.367,
305
+ "acc_stderr,none": 0.015249378464171699,
306
+ "alias": "blimp_complex_NP_island"
307
+ },
308
+ "blimp_causative": {
309
+ "acc,none": 0.602,
310
+ "acc_stderr,none": 0.015486634102859016,
311
+ "alias": "blimp_causative"
312
+ },
313
+ "blimp_animate_subject_trans": {
314
+ "acc,none": 0.846,
315
+ "acc_stderr,none": 0.011419913065098805,
316
+ "alias": "blimp_animate_subject_trans"
317
+ },
318
+ "blimp_animate_subject_passive": {
319
+ "acc,none": 0.749,
320
+ "acc_stderr,none": 0.013718133516888775,
321
+ "alias": "blimp_animate_subject_passive"
322
+ },
323
+ "blimp_anaphor_number_agreement": {
324
+ "acc,none": 0.954,
325
+ "acc_stderr,none": 0.006627814717380679,
326
+ "alias": "blimp_anaphor_number_agreement"
327
+ },
328
+ "blimp_anaphor_gender_agreement": {
329
+ "acc,none": 0.783,
330
+ "acc_stderr,none": 0.013041513757270706,
331
+ "alias": "blimp_anaphor_gender_agreement"
332
+ },
333
+ "blimp_adjunct_island": {
334
+ "acc,none": 0.777,
335
+ "acc_stderr,none": 0.013169830843425608,
336
+ "alias": "blimp_adjunct_island"
337
+ }
338
+ },
339
+ "group_subtasks": {
340
+ "blimp_adjunct_island": [],
341
+ "blimp_anaphor_gender_agreement": [],
342
+ "blimp_anaphor_number_agreement": [],
343
+ "blimp_animate_subject_passive": [],
344
+ "blimp_animate_subject_trans": [],
345
+ "blimp_causative": [],
346
+ "blimp_complex_NP_island": [],
347
+ "blimp_coordinate_structure_constraint_complex_left_branch": [],
348
+ "blimp_coordinate_structure_constraint_object_extraction": [],
349
+ "blimp_determiner_noun_agreement_1": [],
350
+ "blimp_determiner_noun_agreement_2": [],
351
+ "blimp_determiner_noun_agreement_irregular_1": [],
352
+ "blimp_determiner_noun_agreement_irregular_2": [],
353
+ "blimp_determiner_noun_agreement_with_adj_2": [],
354
+ "blimp_determiner_noun_agreement_with_adj_irregular_1": [],
355
+ "blimp_determiner_noun_agreement_with_adj_irregular_2": [],
356
+ "blimp_determiner_noun_agreement_with_adjective_1": [],
357
+ "blimp_distractor_agreement_relational_noun": [],
358
+ "blimp_distractor_agreement_relative_clause": [],
359
+ "blimp_drop_argument": [],
360
+ "blimp_ellipsis_n_bar_1": [],
361
+ "blimp_ellipsis_n_bar_2": [],
362
+ "blimp_existential_there_object_raising": [],
363
+ "blimp_existential_there_quantifiers_1": [],
364
+ "blimp_existential_there_quantifiers_2": [],
365
+ "blimp_existential_there_subject_raising": [],
366
+ "blimp_expletive_it_object_raising": [],
367
+ "blimp_inchoative": [],
368
+ "blimp_intransitive": [],
369
+ "blimp_irregular_past_participle_adjectives": [],
370
+ "blimp_irregular_past_participle_verbs": [],
371
+ "blimp_irregular_plural_subject_verb_agreement_1": [],
372
+ "blimp_irregular_plural_subject_verb_agreement_2": [],
373
+ "blimp_left_branch_island_echo_question": [],
374
+ "blimp_left_branch_island_simple_question": [],
375
+ "blimp_matrix_question_npi_licensor_present": [],
376
+ "blimp_npi_present_1": [],
377
+ "blimp_npi_present_2": [],
378
+ "blimp_only_npi_licensor_present": [],
379
+ "blimp_only_npi_scope": [],
380
+ "blimp_passive_1": [],
381
+ "blimp_passive_2": [],
382
+ "blimp_principle_A_c_command": [],
383
+ "blimp_principle_A_case_1": [],
384
+ "blimp_principle_A_case_2": [],
385
+ "blimp_principle_A_domain_1": [],
386
+ "blimp_principle_A_domain_2": [],
387
+ "blimp_principle_A_domain_3": [],
388
+ "blimp_principle_A_reconstruction": [],
389
+ "blimp_regular_plural_subject_verb_agreement_1": [],
390
+ "blimp_regular_plural_subject_verb_agreement_2": [],
391
+ "blimp_sentential_negation_npi_licensor_present": [],
392
+ "blimp_sentential_negation_npi_scope": [],
393
+ "blimp_sentential_subject_island": [],
394
+ "blimp_superlative_quantifiers_1": [],
395
+ "blimp_superlative_quantifiers_2": [],
396
+ "blimp_tough_vs_raising_1": [],
397
+ "blimp_tough_vs_raising_2": [],
398
+ "blimp_transitive": [],
399
+ "blimp_wh_island": [],
400
+ "blimp_wh_questions_object_gap": [],
401
+ "blimp_wh_questions_subject_gap": [],
402
+ "blimp_wh_questions_subject_gap_long_distance": [],
403
+ "blimp_wh_vs_that_no_gap": [],
404
+ "blimp_wh_vs_that_no_gap_long_distance": [],
405
+ "blimp_wh_vs_that_with_gap": [],
406
+ "blimp_wh_vs_that_with_gap_long_distance": []
407
+ },
408
+ "configs": {
409
+ "blimp_adjunct_island": {
410
+ "task": "blimp_adjunct_island",
411
+ "group": "blimp",
412
+ "dataset_path": "blimp",
413
+ "dataset_name": "adjunct_island",
414
+ "validation_split": "train",
415
+ "doc_to_text": "",
416
+ "doc_to_target": 0,
417
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
418
+ "description": "",
419
+ "target_delimiter": " ",
420
+ "fewshot_delimiter": "\n\n",
421
+ "num_fewshot": 0,
422
+ "metric_list": [
423
+ {
424
+ "metric": "acc"
425
+ }
426
+ ],
427
+ "output_type": "multiple_choice",
428
+ "repeats": 1,
429
+ "should_decontaminate": true,
430
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
431
+ "metadata": {
432
+ "version": 1.0
433
+ }
434
+ },
435
+ "blimp_anaphor_gender_agreement": {
436
+ "task": "blimp_anaphor_gender_agreement",
437
+ "group": "blimp",
438
+ "dataset_path": "blimp",
439
+ "dataset_name": "anaphor_gender_agreement",
440
+ "validation_split": "train",
441
+ "doc_to_text": "",
442
+ "doc_to_target": 0,
443
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
444
+ "description": "",
445
+ "target_delimiter": " ",
446
+ "fewshot_delimiter": "\n\n",
447
+ "num_fewshot": 0,
448
+ "metric_list": [
449
+ {
450
+ "metric": "acc"
451
+ }
452
+ ],
453
+ "output_type": "multiple_choice",
454
+ "repeats": 1,
455
+ "should_decontaminate": true,
456
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
457
+ "metadata": {
458
+ "version": 1.0
459
+ }
460
+ },
461
+ "blimp_anaphor_number_agreement": {
462
+ "task": "blimp_anaphor_number_agreement",
463
+ "group": "blimp",
464
+ "dataset_path": "blimp",
465
+ "dataset_name": "anaphor_number_agreement",
466
+ "validation_split": "train",
467
+ "doc_to_text": "",
468
+ "doc_to_target": 0,
469
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
470
+ "description": "",
471
+ "target_delimiter": " ",
472
+ "fewshot_delimiter": "\n\n",
473
+ "num_fewshot": 0,
474
+ "metric_list": [
475
+ {
476
+ "metric": "acc"
477
+ }
478
+ ],
479
+ "output_type": "multiple_choice",
480
+ "repeats": 1,
481
+ "should_decontaminate": true,
482
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
483
+ "metadata": {
484
+ "version": 1.0
485
+ }
486
+ },
487
+ "blimp_animate_subject_passive": {
488
+ "task": "blimp_animate_subject_passive",
489
+ "group": "blimp",
490
+ "dataset_path": "blimp",
491
+ "dataset_name": "animate_subject_passive",
492
+ "validation_split": "train",
493
+ "doc_to_text": "",
494
+ "doc_to_target": 0,
495
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
496
+ "description": "",
497
+ "target_delimiter": " ",
498
+ "fewshot_delimiter": "\n\n",
499
+ "num_fewshot": 0,
500
+ "metric_list": [
501
+ {
502
+ "metric": "acc"
503
+ }
504
+ ],
505
+ "output_type": "multiple_choice",
506
+ "repeats": 1,
507
+ "should_decontaminate": true,
508
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
509
+ "metadata": {
510
+ "version": 1.0
511
+ }
512
+ },
513
+ "blimp_animate_subject_trans": {
514
+ "task": "blimp_animate_subject_trans",
515
+ "group": "blimp",
516
+ "dataset_path": "blimp",
517
+ "dataset_name": "animate_subject_trans",
518
+ "validation_split": "train",
519
+ "doc_to_text": "",
520
+ "doc_to_target": 0,
521
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
522
+ "description": "",
523
+ "target_delimiter": " ",
524
+ "fewshot_delimiter": "\n\n",
525
+ "num_fewshot": 0,
526
+ "metric_list": [
527
+ {
528
+ "metric": "acc"
529
+ }
530
+ ],
531
+ "output_type": "multiple_choice",
532
+ "repeats": 1,
533
+ "should_decontaminate": true,
534
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
535
+ "metadata": {
536
+ "version": 1.0
537
+ }
538
+ },
539
+ "blimp_causative": {
540
+ "task": "blimp_causative",
541
+ "group": "blimp",
542
+ "dataset_path": "blimp",
543
+ "dataset_name": "causative",
544
+ "validation_split": "train",
545
+ "doc_to_text": "",
546
+ "doc_to_target": 0,
547
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
548
+ "description": "",
549
+ "target_delimiter": " ",
550
+ "fewshot_delimiter": "\n\n",
551
+ "num_fewshot": 0,
552
+ "metric_list": [
553
+ {
554
+ "metric": "acc"
555
+ }
556
+ ],
557
+ "output_type": "multiple_choice",
558
+ "repeats": 1,
559
+ "should_decontaminate": true,
560
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
561
+ "metadata": {
562
+ "version": 1.0
563
+ }
564
+ },
565
+ "blimp_complex_NP_island": {
566
+ "task": "blimp_complex_NP_island",
567
+ "group": "blimp",
568
+ "dataset_path": "blimp",
569
+ "dataset_name": "complex_NP_island",
570
+ "validation_split": "train",
571
+ "doc_to_text": "",
572
+ "doc_to_target": 0,
573
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
574
+ "description": "",
575
+ "target_delimiter": " ",
576
+ "fewshot_delimiter": "\n\n",
577
+ "num_fewshot": 0,
578
+ "metric_list": [
579
+ {
580
+ "metric": "acc"
581
+ }
582
+ ],
583
+ "output_type": "multiple_choice",
584
+ "repeats": 1,
585
+ "should_decontaminate": true,
586
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
587
+ "metadata": {
588
+ "version": 1.0
589
+ }
590
+ },
591
+ "blimp_coordinate_structure_constraint_complex_left_branch": {
592
+ "task": "blimp_coordinate_structure_constraint_complex_left_branch",
593
+ "group": "blimp",
594
+ "dataset_path": "blimp",
595
+ "dataset_name": "coordinate_structure_constraint_complex_left_branch",
596
+ "validation_split": "train",
597
+ "doc_to_text": "",
598
+ "doc_to_target": 0,
599
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
600
+ "description": "",
601
+ "target_delimiter": " ",
602
+ "fewshot_delimiter": "\n\n",
603
+ "num_fewshot": 0,
604
+ "metric_list": [
605
+ {
606
+ "metric": "acc"
607
+ }
608
+ ],
609
+ "output_type": "multiple_choice",
610
+ "repeats": 1,
611
+ "should_decontaminate": true,
612
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
613
+ "metadata": {
614
+ "version": 1.0
615
+ }
616
+ },
617
+ "blimp_coordinate_structure_constraint_object_extraction": {
618
+ "task": "blimp_coordinate_structure_constraint_object_extraction",
619
+ "group": "blimp",
620
+ "dataset_path": "blimp",
621
+ "dataset_name": "coordinate_structure_constraint_object_extraction",
622
+ "validation_split": "train",
623
+ "doc_to_text": "",
624
+ "doc_to_target": 0,
625
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
626
+ "description": "",
627
+ "target_delimiter": " ",
628
+ "fewshot_delimiter": "\n\n",
629
+ "num_fewshot": 0,
630
+ "metric_list": [
631
+ {
632
+ "metric": "acc"
633
+ }
634
+ ],
635
+ "output_type": "multiple_choice",
636
+ "repeats": 1,
637
+ "should_decontaminate": true,
638
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
639
+ "metadata": {
640
+ "version": 1.0
641
+ }
642
+ },
643
+ "blimp_determiner_noun_agreement_1": {
644
+ "task": "blimp_determiner_noun_agreement_1",
645
+ "group": "blimp",
646
+ "dataset_path": "blimp",
647
+ "dataset_name": "determiner_noun_agreement_1",
648
+ "validation_split": "train",
649
+ "doc_to_text": "",
650
+ "doc_to_target": 0,
651
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
652
+ "description": "",
653
+ "target_delimiter": " ",
654
+ "fewshot_delimiter": "\n\n",
655
+ "num_fewshot": 0,
656
+ "metric_list": [
657
+ {
658
+ "metric": "acc"
659
+ }
660
+ ],
661
+ "output_type": "multiple_choice",
662
+ "repeats": 1,
663
+ "should_decontaminate": true,
664
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
665
+ "metadata": {
666
+ "version": 1.0
667
+ }
668
+ },
669
+ "blimp_determiner_noun_agreement_2": {
670
+ "task": "blimp_determiner_noun_agreement_2",
671
+ "group": "blimp",
672
+ "dataset_path": "blimp",
673
+ "dataset_name": "determiner_noun_agreement_2",
674
+ "validation_split": "train",
675
+ "doc_to_text": "",
676
+ "doc_to_target": 0,
677
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
678
+ "description": "",
679
+ "target_delimiter": " ",
680
+ "fewshot_delimiter": "\n\n",
681
+ "num_fewshot": 0,
682
+ "metric_list": [
683
+ {
684
+ "metric": "acc"
685
+ }
686
+ ],
687
+ "output_type": "multiple_choice",
688
+ "repeats": 1,
689
+ "should_decontaminate": true,
690
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
691
+ "metadata": {
692
+ "version": 1.0
693
+ }
694
+ },
695
+ "blimp_determiner_noun_agreement_irregular_1": {
696
+ "task": "blimp_determiner_noun_agreement_irregular_1",
697
+ "group": "blimp",
698
+ "dataset_path": "blimp",
699
+ "dataset_name": "determiner_noun_agreement_irregular_1",
700
+ "validation_split": "train",
701
+ "doc_to_text": "",
702
+ "doc_to_target": 0,
703
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
704
+ "description": "",
705
+ "target_delimiter": " ",
706
+ "fewshot_delimiter": "\n\n",
707
+ "num_fewshot": 0,
708
+ "metric_list": [
709
+ {
710
+ "metric": "acc"
711
+ }
712
+ ],
713
+ "output_type": "multiple_choice",
714
+ "repeats": 1,
715
+ "should_decontaminate": true,
716
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
717
+ "metadata": {
718
+ "version": 1.0
719
+ }
720
+ },
721
+ "blimp_determiner_noun_agreement_irregular_2": {
722
+ "task": "blimp_determiner_noun_agreement_irregular_2",
723
+ "group": "blimp",
724
+ "dataset_path": "blimp",
725
+ "dataset_name": "determiner_noun_agreement_irregular_2",
726
+ "validation_split": "train",
727
+ "doc_to_text": "",
728
+ "doc_to_target": 0,
729
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
730
+ "description": "",
731
+ "target_delimiter": " ",
732
+ "fewshot_delimiter": "\n\n",
733
+ "num_fewshot": 0,
734
+ "metric_list": [
735
+ {
736
+ "metric": "acc"
737
+ }
738
+ ],
739
+ "output_type": "multiple_choice",
740
+ "repeats": 1,
741
+ "should_decontaminate": true,
742
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
743
+ "metadata": {
744
+ "version": 1.0
745
+ }
746
+ },
747
+ "blimp_determiner_noun_agreement_with_adj_2": {
748
+ "task": "blimp_determiner_noun_agreement_with_adj_2",
749
+ "group": "blimp",
750
+ "dataset_path": "blimp",
751
+ "dataset_name": "determiner_noun_agreement_with_adj_2",
752
+ "validation_split": "train",
753
+ "doc_to_text": "",
754
+ "doc_to_target": 0,
755
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
756
+ "description": "",
757
+ "target_delimiter": " ",
758
+ "fewshot_delimiter": "\n\n",
759
+ "num_fewshot": 0,
760
+ "metric_list": [
761
+ {
762
+ "metric": "acc"
763
+ }
764
+ ],
765
+ "output_type": "multiple_choice",
766
+ "repeats": 1,
767
+ "should_decontaminate": true,
768
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
769
+ "metadata": {
770
+ "version": 1.0
771
+ }
772
+ },
773
+ "blimp_determiner_noun_agreement_with_adj_irregular_1": {
774
+ "task": "blimp_determiner_noun_agreement_with_adj_irregular_1",
775
+ "group": "blimp",
776
+ "dataset_path": "blimp",
777
+ "dataset_name": "determiner_noun_agreement_with_adj_irregular_1",
778
+ "validation_split": "train",
779
+ "doc_to_text": "",
780
+ "doc_to_target": 0,
781
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
782
+ "description": "",
783
+ "target_delimiter": " ",
784
+ "fewshot_delimiter": "\n\n",
785
+ "num_fewshot": 0,
786
+ "metric_list": [
787
+ {
788
+ "metric": "acc"
789
+ }
790
+ ],
791
+ "output_type": "multiple_choice",
792
+ "repeats": 1,
793
+ "should_decontaminate": true,
794
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
795
+ "metadata": {
796
+ "version": 1.0
797
+ }
798
+ },
799
+ "blimp_determiner_noun_agreement_with_adj_irregular_2": {
800
+ "task": "blimp_determiner_noun_agreement_with_adj_irregular_2",
801
+ "group": "blimp",
802
+ "dataset_path": "blimp",
803
+ "dataset_name": "determiner_noun_agreement_with_adj_irregular_2",
804
+ "validation_split": "train",
805
+ "doc_to_text": "",
806
+ "doc_to_target": 0,
807
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
808
+ "description": "",
809
+ "target_delimiter": " ",
810
+ "fewshot_delimiter": "\n\n",
811
+ "num_fewshot": 0,
812
+ "metric_list": [
813
+ {
814
+ "metric": "acc"
815
+ }
816
+ ],
817
+ "output_type": "multiple_choice",
818
+ "repeats": 1,
819
+ "should_decontaminate": true,
820
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
821
+ "metadata": {
822
+ "version": 1.0
823
+ }
824
+ },
825
+ "blimp_determiner_noun_agreement_with_adjective_1": {
826
+ "task": "blimp_determiner_noun_agreement_with_adjective_1",
827
+ "group": "blimp",
828
+ "dataset_path": "blimp",
829
+ "dataset_name": "determiner_noun_agreement_with_adjective_1",
830
+ "validation_split": "train",
831
+ "doc_to_text": "",
832
+ "doc_to_target": 0,
833
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
834
+ "description": "",
835
+ "target_delimiter": " ",
836
+ "fewshot_delimiter": "\n\n",
837
+ "num_fewshot": 0,
838
+ "metric_list": [
839
+ {
840
+ "metric": "acc"
841
+ }
842
+ ],
843
+ "output_type": "multiple_choice",
844
+ "repeats": 1,
845
+ "should_decontaminate": true,
846
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
847
+ "metadata": {
848
+ "version": 1.0
849
+ }
850
+ },
851
+ "blimp_distractor_agreement_relational_noun": {
852
+ "task": "blimp_distractor_agreement_relational_noun",
853
+ "group": "blimp",
854
+ "dataset_path": "blimp",
855
+ "dataset_name": "distractor_agreement_relational_noun",
856
+ "validation_split": "train",
857
+ "doc_to_text": "",
858
+ "doc_to_target": 0,
859
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
860
+ "description": "",
861
+ "target_delimiter": " ",
862
+ "fewshot_delimiter": "\n\n",
863
+ "num_fewshot": 0,
864
+ "metric_list": [
865
+ {
866
+ "metric": "acc"
867
+ }
868
+ ],
869
+ "output_type": "multiple_choice",
870
+ "repeats": 1,
871
+ "should_decontaminate": true,
872
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
873
+ "metadata": {
874
+ "version": 1.0
875
+ }
876
+ },
877
+ "blimp_distractor_agreement_relative_clause": {
878
+ "task": "blimp_distractor_agreement_relative_clause",
879
+ "group": "blimp",
880
+ "dataset_path": "blimp",
881
+ "dataset_name": "distractor_agreement_relative_clause",
882
+ "validation_split": "train",
883
+ "doc_to_text": "",
884
+ "doc_to_target": 0,
885
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
886
+ "description": "",
887
+ "target_delimiter": " ",
888
+ "fewshot_delimiter": "\n\n",
889
+ "num_fewshot": 0,
890
+ "metric_list": [
891
+ {
892
+ "metric": "acc"
893
+ }
894
+ ],
895
+ "output_type": "multiple_choice",
896
+ "repeats": 1,
897
+ "should_decontaminate": true,
898
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
899
+ "metadata": {
900
+ "version": 1.0
901
+ }
902
+ },
903
+ "blimp_drop_argument": {
904
+ "task": "blimp_drop_argument",
905
+ "group": "blimp",
906
+ "dataset_path": "blimp",
907
+ "dataset_name": "drop_argument",
908
+ "validation_split": "train",
909
+ "doc_to_text": "",
910
+ "doc_to_target": 0,
911
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
912
+ "description": "",
913
+ "target_delimiter": " ",
914
+ "fewshot_delimiter": "\n\n",
915
+ "num_fewshot": 0,
916
+ "metric_list": [
917
+ {
918
+ "metric": "acc"
919
+ }
920
+ ],
921
+ "output_type": "multiple_choice",
922
+ "repeats": 1,
923
+ "should_decontaminate": true,
924
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
925
+ "metadata": {
926
+ "version": 1.0
927
+ }
928
+ },
929
+ "blimp_ellipsis_n_bar_1": {
930
+ "task": "blimp_ellipsis_n_bar_1",
931
+ "group": "blimp",
932
+ "dataset_path": "blimp",
933
+ "dataset_name": "ellipsis_n_bar_1",
934
+ "validation_split": "train",
935
+ "doc_to_text": "",
936
+ "doc_to_target": 0,
937
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
938
+ "description": "",
939
+ "target_delimiter": " ",
940
+ "fewshot_delimiter": "\n\n",
941
+ "num_fewshot": 0,
942
+ "metric_list": [
943
+ {
944
+ "metric": "acc"
945
+ }
946
+ ],
947
+ "output_type": "multiple_choice",
948
+ "repeats": 1,
949
+ "should_decontaminate": true,
950
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
951
+ "metadata": {
952
+ "version": 1.0
953
+ }
954
+ },
955
+ "blimp_ellipsis_n_bar_2": {
956
+ "task": "blimp_ellipsis_n_bar_2",
957
+ "group": "blimp",
958
+ "dataset_path": "blimp",
959
+ "dataset_name": "ellipsis_n_bar_2",
960
+ "validation_split": "train",
961
+ "doc_to_text": "",
962
+ "doc_to_target": 0,
963
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
964
+ "description": "",
965
+ "target_delimiter": " ",
966
+ "fewshot_delimiter": "\n\n",
967
+ "num_fewshot": 0,
968
+ "metric_list": [
969
+ {
970
+ "metric": "acc"
971
+ }
972
+ ],
973
+ "output_type": "multiple_choice",
974
+ "repeats": 1,
975
+ "should_decontaminate": true,
976
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
977
+ "metadata": {
978
+ "version": 1.0
979
+ }
980
+ },
981
+ "blimp_existential_there_object_raising": {
982
+ "task": "blimp_existential_there_object_raising",
983
+ "group": "blimp",
984
+ "dataset_path": "blimp",
985
+ "dataset_name": "existential_there_object_raising",
986
+ "validation_split": "train",
987
+ "doc_to_text": "",
988
+ "doc_to_target": 0,
989
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
990
+ "description": "",
991
+ "target_delimiter": " ",
992
+ "fewshot_delimiter": "\n\n",
993
+ "num_fewshot": 0,
994
+ "metric_list": [
995
+ {
996
+ "metric": "acc"
997
+ }
998
+ ],
999
+ "output_type": "multiple_choice",
1000
+ "repeats": 1,
1001
+ "should_decontaminate": true,
1002
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1003
+ "metadata": {
1004
+ "version": 1.0
1005
+ }
1006
+ },
1007
+ "blimp_existential_there_quantifiers_1": {
1008
+ "task": "blimp_existential_there_quantifiers_1",
1009
+ "group": "blimp",
1010
+ "dataset_path": "blimp",
1011
+ "dataset_name": "existential_there_quantifiers_1",
1012
+ "validation_split": "train",
1013
+ "doc_to_text": "",
1014
+ "doc_to_target": 0,
1015
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1016
+ "description": "",
1017
+ "target_delimiter": " ",
1018
+ "fewshot_delimiter": "\n\n",
1019
+ "num_fewshot": 0,
1020
+ "metric_list": [
1021
+ {
1022
+ "metric": "acc"
1023
+ }
1024
+ ],
1025
+ "output_type": "multiple_choice",
1026
+ "repeats": 1,
1027
+ "should_decontaminate": true,
1028
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1029
+ "metadata": {
1030
+ "version": 1.0
1031
+ }
1032
+ },
1033
+ "blimp_existential_there_quantifiers_2": {
1034
+ "task": "blimp_existential_there_quantifiers_2",
1035
+ "group": "blimp",
1036
+ "dataset_path": "blimp",
1037
+ "dataset_name": "existential_there_quantifiers_2",
1038
+ "validation_split": "train",
1039
+ "doc_to_text": "",
1040
+ "doc_to_target": 0,
1041
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1042
+ "description": "",
1043
+ "target_delimiter": " ",
1044
+ "fewshot_delimiter": "\n\n",
1045
+ "num_fewshot": 0,
1046
+ "metric_list": [
1047
+ {
1048
+ "metric": "acc"
1049
+ }
1050
+ ],
1051
+ "output_type": "multiple_choice",
1052
+ "repeats": 1,
1053
+ "should_decontaminate": true,
1054
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1055
+ "metadata": {
1056
+ "version": 1.0
1057
+ }
1058
+ },
1059
+ "blimp_existential_there_subject_raising": {
1060
+ "task": "blimp_existential_there_subject_raising",
1061
+ "group": "blimp",
1062
+ "dataset_path": "blimp",
1063
+ "dataset_name": "existential_there_subject_raising",
1064
+ "validation_split": "train",
1065
+ "doc_to_text": "",
1066
+ "doc_to_target": 0,
1067
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1068
+ "description": "",
1069
+ "target_delimiter": " ",
1070
+ "fewshot_delimiter": "\n\n",
1071
+ "num_fewshot": 0,
1072
+ "metric_list": [
1073
+ {
1074
+ "metric": "acc"
1075
+ }
1076
+ ],
1077
+ "output_type": "multiple_choice",
1078
+ "repeats": 1,
1079
+ "should_decontaminate": true,
1080
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1081
+ "metadata": {
1082
+ "version": 1.0
1083
+ }
1084
+ },
1085
+ "blimp_expletive_it_object_raising": {
1086
+ "task": "blimp_expletive_it_object_raising",
1087
+ "group": "blimp",
1088
+ "dataset_path": "blimp",
1089
+ "dataset_name": "expletive_it_object_raising",
1090
+ "validation_split": "train",
1091
+ "doc_to_text": "",
1092
+ "doc_to_target": 0,
1093
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1094
+ "description": "",
1095
+ "target_delimiter": " ",
1096
+ "fewshot_delimiter": "\n\n",
1097
+ "num_fewshot": 0,
1098
+ "metric_list": [
1099
+ {
1100
+ "metric": "acc"
1101
+ }
1102
+ ],
1103
+ "output_type": "multiple_choice",
1104
+ "repeats": 1,
1105
+ "should_decontaminate": true,
1106
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1107
+ "metadata": {
1108
+ "version": 1.0
1109
+ }
1110
+ },
1111
+ "blimp_inchoative": {
1112
+ "task": "blimp_inchoative",
1113
+ "group": "blimp",
1114
+ "dataset_path": "blimp",
1115
+ "dataset_name": "inchoative",
1116
+ "validation_split": "train",
1117
+ "doc_to_text": "",
1118
+ "doc_to_target": 0,
1119
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1120
+ "description": "",
1121
+ "target_delimiter": " ",
1122
+ "fewshot_delimiter": "\n\n",
1123
+ "num_fewshot": 0,
1124
+ "metric_list": [
1125
+ {
1126
+ "metric": "acc"
1127
+ }
1128
+ ],
1129
+ "output_type": "multiple_choice",
1130
+ "repeats": 1,
1131
+ "should_decontaminate": true,
1132
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1133
+ "metadata": {
1134
+ "version": 1.0
1135
+ }
1136
+ },
1137
+ "blimp_intransitive": {
1138
+ "task": "blimp_intransitive",
1139
+ "group": "blimp",
1140
+ "dataset_path": "blimp",
1141
+ "dataset_name": "intransitive",
1142
+ "validation_split": "train",
1143
+ "doc_to_text": "",
1144
+ "doc_to_target": 0,
1145
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1146
+ "description": "",
1147
+ "target_delimiter": " ",
1148
+ "fewshot_delimiter": "\n\n",
1149
+ "num_fewshot": 0,
1150
+ "metric_list": [
1151
+ {
1152
+ "metric": "acc"
1153
+ }
1154
+ ],
1155
+ "output_type": "multiple_choice",
1156
+ "repeats": 1,
1157
+ "should_decontaminate": true,
1158
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1159
+ "metadata": {
1160
+ "version": 1.0
1161
+ }
1162
+ },
1163
+ "blimp_irregular_past_participle_adjectives": {
1164
+ "task": "blimp_irregular_past_participle_adjectives",
1165
+ "group": "blimp",
1166
+ "dataset_path": "blimp",
1167
+ "dataset_name": "irregular_past_participle_adjectives",
1168
+ "validation_split": "train",
1169
+ "doc_to_text": "",
1170
+ "doc_to_target": 0,
1171
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1172
+ "description": "",
1173
+ "target_delimiter": " ",
1174
+ "fewshot_delimiter": "\n\n",
1175
+ "num_fewshot": 0,
1176
+ "metric_list": [
1177
+ {
1178
+ "metric": "acc"
1179
+ }
1180
+ ],
1181
+ "output_type": "multiple_choice",
1182
+ "repeats": 1,
1183
+ "should_decontaminate": true,
1184
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1185
+ "metadata": {
1186
+ "version": 1.0
1187
+ }
1188
+ },
1189
+ "blimp_irregular_past_participle_verbs": {
1190
+ "task": "blimp_irregular_past_participle_verbs",
1191
+ "group": "blimp",
1192
+ "dataset_path": "blimp",
1193
+ "dataset_name": "irregular_past_participle_verbs",
1194
+ "validation_split": "train",
1195
+ "doc_to_text": "",
1196
+ "doc_to_target": 0,
1197
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1198
+ "description": "",
1199
+ "target_delimiter": " ",
1200
+ "fewshot_delimiter": "\n\n",
1201
+ "num_fewshot": 0,
1202
+ "metric_list": [
1203
+ {
1204
+ "metric": "acc"
1205
+ }
1206
+ ],
1207
+ "output_type": "multiple_choice",
1208
+ "repeats": 1,
1209
+ "should_decontaminate": true,
1210
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1211
+ "metadata": {
1212
+ "version": 1.0
1213
+ }
1214
+ },
1215
+ "blimp_irregular_plural_subject_verb_agreement_1": {
1216
+ "task": "blimp_irregular_plural_subject_verb_agreement_1",
1217
+ "group": "blimp",
1218
+ "dataset_path": "blimp",
1219
+ "dataset_name": "irregular_plural_subject_verb_agreement_1",
1220
+ "validation_split": "train",
1221
+ "doc_to_text": "",
1222
+ "doc_to_target": 0,
1223
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1224
+ "description": "",
1225
+ "target_delimiter": " ",
1226
+ "fewshot_delimiter": "\n\n",
1227
+ "num_fewshot": 0,
1228
+ "metric_list": [
1229
+ {
1230
+ "metric": "acc"
1231
+ }
1232
+ ],
1233
+ "output_type": "multiple_choice",
1234
+ "repeats": 1,
1235
+ "should_decontaminate": true,
1236
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1237
+ "metadata": {
1238
+ "version": 1.0
1239
+ }
1240
+ },
1241
+ "blimp_irregular_plural_subject_verb_agreement_2": {
1242
+ "task": "blimp_irregular_plural_subject_verb_agreement_2",
1243
+ "group": "blimp",
1244
+ "dataset_path": "blimp",
1245
+ "dataset_name": "irregular_plural_subject_verb_agreement_2",
1246
+ "validation_split": "train",
1247
+ "doc_to_text": "",
1248
+ "doc_to_target": 0,
1249
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1250
+ "description": "",
1251
+ "target_delimiter": " ",
1252
+ "fewshot_delimiter": "\n\n",
1253
+ "num_fewshot": 0,
1254
+ "metric_list": [
1255
+ {
1256
+ "metric": "acc"
1257
+ }
1258
+ ],
1259
+ "output_type": "multiple_choice",
1260
+ "repeats": 1,
1261
+ "should_decontaminate": true,
1262
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1263
+ "metadata": {
1264
+ "version": 1.0
1265
+ }
1266
+ },
1267
+ "blimp_left_branch_island_echo_question": {
1268
+ "task": "blimp_left_branch_island_echo_question",
1269
+ "group": "blimp",
1270
+ "dataset_path": "blimp",
1271
+ "dataset_name": "left_branch_island_echo_question",
1272
+ "validation_split": "train",
1273
+ "doc_to_text": "",
1274
+ "doc_to_target": 0,
1275
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1276
+ "description": "",
1277
+ "target_delimiter": " ",
1278
+ "fewshot_delimiter": "\n\n",
1279
+ "num_fewshot": 0,
1280
+ "metric_list": [
1281
+ {
1282
+ "metric": "acc"
1283
+ }
1284
+ ],
1285
+ "output_type": "multiple_choice",
1286
+ "repeats": 1,
1287
+ "should_decontaminate": true,
1288
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1289
+ "metadata": {
1290
+ "version": 1.0
1291
+ }
1292
+ },
1293
+ "blimp_left_branch_island_simple_question": {
1294
+ "task": "blimp_left_branch_island_simple_question",
1295
+ "group": "blimp",
1296
+ "dataset_path": "blimp",
1297
+ "dataset_name": "left_branch_island_simple_question",
1298
+ "validation_split": "train",
1299
+ "doc_to_text": "",
1300
+ "doc_to_target": 0,
1301
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1302
+ "description": "",
1303
+ "target_delimiter": " ",
1304
+ "fewshot_delimiter": "\n\n",
1305
+ "num_fewshot": 0,
1306
+ "metric_list": [
1307
+ {
1308
+ "metric": "acc"
1309
+ }
1310
+ ],
1311
+ "output_type": "multiple_choice",
1312
+ "repeats": 1,
1313
+ "should_decontaminate": true,
1314
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1315
+ "metadata": {
1316
+ "version": 1.0
1317
+ }
1318
+ },
1319
+ "blimp_matrix_question_npi_licensor_present": {
1320
+ "task": "blimp_matrix_question_npi_licensor_present",
1321
+ "group": "blimp",
1322
+ "dataset_path": "blimp",
1323
+ "dataset_name": "matrix_question_npi_licensor_present",
1324
+ "validation_split": "train",
1325
+ "doc_to_text": "",
1326
+ "doc_to_target": 0,
1327
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1328
+ "description": "",
1329
+ "target_delimiter": " ",
1330
+ "fewshot_delimiter": "\n\n",
1331
+ "num_fewshot": 0,
1332
+ "metric_list": [
1333
+ {
1334
+ "metric": "acc"
1335
+ }
1336
+ ],
1337
+ "output_type": "multiple_choice",
1338
+ "repeats": 1,
1339
+ "should_decontaminate": true,
1340
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1341
+ "metadata": {
1342
+ "version": 1.0
1343
+ }
1344
+ },
1345
+ "blimp_npi_present_1": {
1346
+ "task": "blimp_npi_present_1",
1347
+ "group": "blimp",
1348
+ "dataset_path": "blimp",
1349
+ "dataset_name": "npi_present_1",
1350
+ "validation_split": "train",
1351
+ "doc_to_text": "",
1352
+ "doc_to_target": 0,
1353
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1354
+ "description": "",
1355
+ "target_delimiter": " ",
1356
+ "fewshot_delimiter": "\n\n",
1357
+ "num_fewshot": 0,
1358
+ "metric_list": [
1359
+ {
1360
+ "metric": "acc"
1361
+ }
1362
+ ],
1363
+ "output_type": "multiple_choice",
1364
+ "repeats": 1,
1365
+ "should_decontaminate": true,
1366
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1367
+ "metadata": {
1368
+ "version": 1.0
1369
+ }
1370
+ },
1371
+ "blimp_npi_present_2": {
1372
+ "task": "blimp_npi_present_2",
1373
+ "group": "blimp",
1374
+ "dataset_path": "blimp",
1375
+ "dataset_name": "npi_present_2",
1376
+ "validation_split": "train",
1377
+ "doc_to_text": "",
1378
+ "doc_to_target": 0,
1379
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1380
+ "description": "",
1381
+ "target_delimiter": " ",
1382
+ "fewshot_delimiter": "\n\n",
1383
+ "num_fewshot": 0,
1384
+ "metric_list": [
1385
+ {
1386
+ "metric": "acc"
1387
+ }
1388
+ ],
1389
+ "output_type": "multiple_choice",
1390
+ "repeats": 1,
1391
+ "should_decontaminate": true,
1392
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1393
+ "metadata": {
1394
+ "version": 1.0
1395
+ }
1396
+ },
1397
+ "blimp_only_npi_licensor_present": {
1398
+ "task": "blimp_only_npi_licensor_present",
1399
+ "group": "blimp",
1400
+ "dataset_path": "blimp",
1401
+ "dataset_name": "only_npi_licensor_present",
1402
+ "validation_split": "train",
1403
+ "doc_to_text": "",
1404
+ "doc_to_target": 0,
1405
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1406
+ "description": "",
1407
+ "target_delimiter": " ",
1408
+ "fewshot_delimiter": "\n\n",
1409
+ "num_fewshot": 0,
1410
+ "metric_list": [
1411
+ {
1412
+ "metric": "acc"
1413
+ }
1414
+ ],
1415
+ "output_type": "multiple_choice",
1416
+ "repeats": 1,
1417
+ "should_decontaminate": true,
1418
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1419
+ "metadata": {
1420
+ "version": 1.0
1421
+ }
1422
+ },
1423
+ "blimp_only_npi_scope": {
1424
+ "task": "blimp_only_npi_scope",
1425
+ "group": "blimp",
1426
+ "dataset_path": "blimp",
1427
+ "dataset_name": "only_npi_scope",
1428
+ "validation_split": "train",
1429
+ "doc_to_text": "",
1430
+ "doc_to_target": 0,
1431
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1432
+ "description": "",
1433
+ "target_delimiter": " ",
1434
+ "fewshot_delimiter": "\n\n",
1435
+ "num_fewshot": 0,
1436
+ "metric_list": [
1437
+ {
1438
+ "metric": "acc"
1439
+ }
1440
+ ],
1441
+ "output_type": "multiple_choice",
1442
+ "repeats": 1,
1443
+ "should_decontaminate": true,
1444
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1445
+ "metadata": {
1446
+ "version": 1.0
1447
+ }
1448
+ },
1449
+ "blimp_passive_1": {
1450
+ "task": "blimp_passive_1",
1451
+ "group": "blimp",
1452
+ "dataset_path": "blimp",
1453
+ "dataset_name": "passive_1",
1454
+ "validation_split": "train",
1455
+ "doc_to_text": "",
1456
+ "doc_to_target": 0,
1457
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1458
+ "description": "",
1459
+ "target_delimiter": " ",
1460
+ "fewshot_delimiter": "\n\n",
1461
+ "num_fewshot": 0,
1462
+ "metric_list": [
1463
+ {
1464
+ "metric": "acc"
1465
+ }
1466
+ ],
1467
+ "output_type": "multiple_choice",
1468
+ "repeats": 1,
1469
+ "should_decontaminate": true,
1470
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1471
+ "metadata": {
1472
+ "version": 1.0
1473
+ }
1474
+ },
1475
+ "blimp_passive_2": {
1476
+ "task": "blimp_passive_2",
1477
+ "group": "blimp",
1478
+ "dataset_path": "blimp",
1479
+ "dataset_name": "passive_2",
1480
+ "validation_split": "train",
1481
+ "doc_to_text": "",
1482
+ "doc_to_target": 0,
1483
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1484
+ "description": "",
1485
+ "target_delimiter": " ",
1486
+ "fewshot_delimiter": "\n\n",
1487
+ "num_fewshot": 0,
1488
+ "metric_list": [
1489
+ {
1490
+ "metric": "acc"
1491
+ }
1492
+ ],
1493
+ "output_type": "multiple_choice",
1494
+ "repeats": 1,
1495
+ "should_decontaminate": true,
1496
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1497
+ "metadata": {
1498
+ "version": 1.0
1499
+ }
1500
+ },
1501
+ "blimp_principle_A_c_command": {
1502
+ "task": "blimp_principle_A_c_command",
1503
+ "group": "blimp",
1504
+ "dataset_path": "blimp",
1505
+ "dataset_name": "principle_A_c_command",
1506
+ "validation_split": "train",
1507
+ "doc_to_text": "",
1508
+ "doc_to_target": 0,
1509
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1510
+ "description": "",
1511
+ "target_delimiter": " ",
1512
+ "fewshot_delimiter": "\n\n",
1513
+ "num_fewshot": 0,
1514
+ "metric_list": [
1515
+ {
1516
+ "metric": "acc"
1517
+ }
1518
+ ],
1519
+ "output_type": "multiple_choice",
1520
+ "repeats": 1,
1521
+ "should_decontaminate": true,
1522
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1523
+ "metadata": {
1524
+ "version": 1.0
1525
+ }
1526
+ },
1527
+ "blimp_principle_A_case_1": {
1528
+ "task": "blimp_principle_A_case_1",
1529
+ "group": "blimp",
1530
+ "dataset_path": "blimp",
1531
+ "dataset_name": "principle_A_case_1",
1532
+ "validation_split": "train",
1533
+ "doc_to_text": "",
1534
+ "doc_to_target": 0,
1535
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1536
+ "description": "",
1537
+ "target_delimiter": " ",
1538
+ "fewshot_delimiter": "\n\n",
1539
+ "num_fewshot": 0,
1540
+ "metric_list": [
1541
+ {
1542
+ "metric": "acc"
1543
+ }
1544
+ ],
1545
+ "output_type": "multiple_choice",
1546
+ "repeats": 1,
1547
+ "should_decontaminate": true,
1548
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1549
+ "metadata": {
1550
+ "version": 1.0
1551
+ }
1552
+ },
1553
+ "blimp_principle_A_case_2": {
1554
+ "task": "blimp_principle_A_case_2",
1555
+ "group": "blimp",
1556
+ "dataset_path": "blimp",
1557
+ "dataset_name": "principle_A_case_2",
1558
+ "validation_split": "train",
1559
+ "doc_to_text": "",
1560
+ "doc_to_target": 0,
1561
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1562
+ "description": "",
1563
+ "target_delimiter": " ",
1564
+ "fewshot_delimiter": "\n\n",
1565
+ "num_fewshot": 0,
1566
+ "metric_list": [
1567
+ {
1568
+ "metric": "acc"
1569
+ }
1570
+ ],
1571
+ "output_type": "multiple_choice",
1572
+ "repeats": 1,
1573
+ "should_decontaminate": true,
1574
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1575
+ "metadata": {
1576
+ "version": 1.0
1577
+ }
1578
+ },
1579
+ "blimp_principle_A_domain_1": {
1580
+ "task": "blimp_principle_A_domain_1",
1581
+ "group": "blimp",
1582
+ "dataset_path": "blimp",
1583
+ "dataset_name": "principle_A_domain_1",
1584
+ "validation_split": "train",
1585
+ "doc_to_text": "",
1586
+ "doc_to_target": 0,
1587
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1588
+ "description": "",
1589
+ "target_delimiter": " ",
1590
+ "fewshot_delimiter": "\n\n",
1591
+ "num_fewshot": 0,
1592
+ "metric_list": [
1593
+ {
1594
+ "metric": "acc"
1595
+ }
1596
+ ],
1597
+ "output_type": "multiple_choice",
1598
+ "repeats": 1,
1599
+ "should_decontaminate": true,
1600
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1601
+ "metadata": {
1602
+ "version": 1.0
1603
+ }
1604
+ },
1605
+ "blimp_principle_A_domain_2": {
1606
+ "task": "blimp_principle_A_domain_2",
1607
+ "group": "blimp",
1608
+ "dataset_path": "blimp",
1609
+ "dataset_name": "principle_A_domain_2",
1610
+ "validation_split": "train",
1611
+ "doc_to_text": "",
1612
+ "doc_to_target": 0,
1613
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1614
+ "description": "",
1615
+ "target_delimiter": " ",
1616
+ "fewshot_delimiter": "\n\n",
1617
+ "num_fewshot": 0,
1618
+ "metric_list": [
1619
+ {
1620
+ "metric": "acc"
1621
+ }
1622
+ ],
1623
+ "output_type": "multiple_choice",
1624
+ "repeats": 1,
1625
+ "should_decontaminate": true,
1626
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1627
+ "metadata": {
1628
+ "version": 1.0
1629
+ }
1630
+ },
1631
+ "blimp_principle_A_domain_3": {
1632
+ "task": "blimp_principle_A_domain_3",
1633
+ "group": "blimp",
1634
+ "dataset_path": "blimp",
1635
+ "dataset_name": "principle_A_domain_3",
1636
+ "validation_split": "train",
1637
+ "doc_to_text": "",
1638
+ "doc_to_target": 0,
1639
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1640
+ "description": "",
1641
+ "target_delimiter": " ",
1642
+ "fewshot_delimiter": "\n\n",
1643
+ "num_fewshot": 0,
1644
+ "metric_list": [
1645
+ {
1646
+ "metric": "acc"
1647
+ }
1648
+ ],
1649
+ "output_type": "multiple_choice",
1650
+ "repeats": 1,
1651
+ "should_decontaminate": true,
1652
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1653
+ "metadata": {
1654
+ "version": 1.0
1655
+ }
1656
+ },
1657
+ "blimp_principle_A_reconstruction": {
1658
+ "task": "blimp_principle_A_reconstruction",
1659
+ "group": "blimp",
1660
+ "dataset_path": "blimp",
1661
+ "dataset_name": "principle_A_reconstruction",
1662
+ "validation_split": "train",
1663
+ "doc_to_text": "",
1664
+ "doc_to_target": 0,
1665
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1666
+ "description": "",
1667
+ "target_delimiter": " ",
1668
+ "fewshot_delimiter": "\n\n",
1669
+ "num_fewshot": 0,
1670
+ "metric_list": [
1671
+ {
1672
+ "metric": "acc"
1673
+ }
1674
+ ],
1675
+ "output_type": "multiple_choice",
1676
+ "repeats": 1,
1677
+ "should_decontaminate": true,
1678
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1679
+ "metadata": {
1680
+ "version": 1.0
1681
+ }
1682
+ },
1683
+ "blimp_regular_plural_subject_verb_agreement_1": {
1684
+ "task": "blimp_regular_plural_subject_verb_agreement_1",
1685
+ "group": "blimp",
1686
+ "dataset_path": "blimp",
1687
+ "dataset_name": "regular_plural_subject_verb_agreement_1",
1688
+ "validation_split": "train",
1689
+ "doc_to_text": "",
1690
+ "doc_to_target": 0,
1691
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1692
+ "description": "",
1693
+ "target_delimiter": " ",
1694
+ "fewshot_delimiter": "\n\n",
1695
+ "num_fewshot": 0,
1696
+ "metric_list": [
1697
+ {
1698
+ "metric": "acc"
1699
+ }
1700
+ ],
1701
+ "output_type": "multiple_choice",
1702
+ "repeats": 1,
1703
+ "should_decontaminate": true,
1704
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1705
+ "metadata": {
1706
+ "version": 1.0
1707
+ }
1708
+ },
1709
+ "blimp_regular_plural_subject_verb_agreement_2": {
1710
+ "task": "blimp_regular_plural_subject_verb_agreement_2",
1711
+ "group": "blimp",
1712
+ "dataset_path": "blimp",
1713
+ "dataset_name": "regular_plural_subject_verb_agreement_2",
1714
+ "validation_split": "train",
1715
+ "doc_to_text": "",
1716
+ "doc_to_target": 0,
1717
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1718
+ "description": "",
1719
+ "target_delimiter": " ",
1720
+ "fewshot_delimiter": "\n\n",
1721
+ "num_fewshot": 0,
1722
+ "metric_list": [
1723
+ {
1724
+ "metric": "acc"
1725
+ }
1726
+ ],
1727
+ "output_type": "multiple_choice",
1728
+ "repeats": 1,
1729
+ "should_decontaminate": true,
1730
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1731
+ "metadata": {
1732
+ "version": 1.0
1733
+ }
1734
+ },
1735
+ "blimp_sentential_negation_npi_licensor_present": {
1736
+ "task": "blimp_sentential_negation_npi_licensor_present",
1737
+ "group": "blimp",
1738
+ "dataset_path": "blimp",
1739
+ "dataset_name": "sentential_negation_npi_licensor_present",
1740
+ "validation_split": "train",
1741
+ "doc_to_text": "",
1742
+ "doc_to_target": 0,
1743
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1744
+ "description": "",
1745
+ "target_delimiter": " ",
1746
+ "fewshot_delimiter": "\n\n",
1747
+ "num_fewshot": 0,
1748
+ "metric_list": [
1749
+ {
1750
+ "metric": "acc"
1751
+ }
1752
+ ],
1753
+ "output_type": "multiple_choice",
1754
+ "repeats": 1,
1755
+ "should_decontaminate": true,
1756
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1757
+ "metadata": {
1758
+ "version": 1.0
1759
+ }
1760
+ },
1761
+ "blimp_sentential_negation_npi_scope": {
1762
+ "task": "blimp_sentential_negation_npi_scope",
1763
+ "group": "blimp",
1764
+ "dataset_path": "blimp",
1765
+ "dataset_name": "sentential_negation_npi_scope",
1766
+ "validation_split": "train",
1767
+ "doc_to_text": "",
1768
+ "doc_to_target": 0,
1769
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1770
+ "description": "",
1771
+ "target_delimiter": " ",
1772
+ "fewshot_delimiter": "\n\n",
1773
+ "num_fewshot": 0,
1774
+ "metric_list": [
1775
+ {
1776
+ "metric": "acc"
1777
+ }
1778
+ ],
1779
+ "output_type": "multiple_choice",
1780
+ "repeats": 1,
1781
+ "should_decontaminate": true,
1782
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1783
+ "metadata": {
1784
+ "version": 1.0
1785
+ }
1786
+ },
1787
+ "blimp_sentential_subject_island": {
1788
+ "task": "blimp_sentential_subject_island",
1789
+ "group": "blimp",
1790
+ "dataset_path": "blimp",
1791
+ "dataset_name": "sentential_subject_island",
1792
+ "validation_split": "train",
1793
+ "doc_to_text": "",
1794
+ "doc_to_target": 0,
1795
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1796
+ "description": "",
1797
+ "target_delimiter": " ",
1798
+ "fewshot_delimiter": "\n\n",
1799
+ "num_fewshot": 0,
1800
+ "metric_list": [
1801
+ {
1802
+ "metric": "acc"
1803
+ }
1804
+ ],
1805
+ "output_type": "multiple_choice",
1806
+ "repeats": 1,
1807
+ "should_decontaminate": true,
1808
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1809
+ "metadata": {
1810
+ "version": 1.0
1811
+ }
1812
+ },
1813
+ "blimp_superlative_quantifiers_1": {
1814
+ "task": "blimp_superlative_quantifiers_1",
1815
+ "group": "blimp",
1816
+ "dataset_path": "blimp",
1817
+ "dataset_name": "superlative_quantifiers_1",
1818
+ "validation_split": "train",
1819
+ "doc_to_text": "",
1820
+ "doc_to_target": 0,
1821
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1822
+ "description": "",
1823
+ "target_delimiter": " ",
1824
+ "fewshot_delimiter": "\n\n",
1825
+ "num_fewshot": 0,
1826
+ "metric_list": [
1827
+ {
1828
+ "metric": "acc"
1829
+ }
1830
+ ],
1831
+ "output_type": "multiple_choice",
1832
+ "repeats": 1,
1833
+ "should_decontaminate": true,
1834
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1835
+ "metadata": {
1836
+ "version": 1.0
1837
+ }
1838
+ },
1839
+ "blimp_superlative_quantifiers_2": {
1840
+ "task": "blimp_superlative_quantifiers_2",
1841
+ "group": "blimp",
1842
+ "dataset_path": "blimp",
1843
+ "dataset_name": "superlative_quantifiers_2",
1844
+ "validation_split": "train",
1845
+ "doc_to_text": "",
1846
+ "doc_to_target": 0,
1847
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1848
+ "description": "",
1849
+ "target_delimiter": " ",
1850
+ "fewshot_delimiter": "\n\n",
1851
+ "num_fewshot": 0,
1852
+ "metric_list": [
1853
+ {
1854
+ "metric": "acc"
1855
+ }
1856
+ ],
1857
+ "output_type": "multiple_choice",
1858
+ "repeats": 1,
1859
+ "should_decontaminate": true,
1860
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1861
+ "metadata": {
1862
+ "version": 1.0
1863
+ }
1864
+ },
1865
+ "blimp_tough_vs_raising_1": {
1866
+ "task": "blimp_tough_vs_raising_1",
1867
+ "group": "blimp",
1868
+ "dataset_path": "blimp",
1869
+ "dataset_name": "tough_vs_raising_1",
1870
+ "validation_split": "train",
1871
+ "doc_to_text": "",
1872
+ "doc_to_target": 0,
1873
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1874
+ "description": "",
1875
+ "target_delimiter": " ",
1876
+ "fewshot_delimiter": "\n\n",
1877
+ "num_fewshot": 0,
1878
+ "metric_list": [
1879
+ {
1880
+ "metric": "acc"
1881
+ }
1882
+ ],
1883
+ "output_type": "multiple_choice",
1884
+ "repeats": 1,
1885
+ "should_decontaminate": true,
1886
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1887
+ "metadata": {
1888
+ "version": 1.0
1889
+ }
1890
+ },
1891
+ "blimp_tough_vs_raising_2": {
1892
+ "task": "blimp_tough_vs_raising_2",
1893
+ "group": "blimp",
1894
+ "dataset_path": "blimp",
1895
+ "dataset_name": "tough_vs_raising_2",
1896
+ "validation_split": "train",
1897
+ "doc_to_text": "",
1898
+ "doc_to_target": 0,
1899
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1900
+ "description": "",
1901
+ "target_delimiter": " ",
1902
+ "fewshot_delimiter": "\n\n",
1903
+ "num_fewshot": 0,
1904
+ "metric_list": [
1905
+ {
1906
+ "metric": "acc"
1907
+ }
1908
+ ],
1909
+ "output_type": "multiple_choice",
1910
+ "repeats": 1,
1911
+ "should_decontaminate": true,
1912
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1913
+ "metadata": {
1914
+ "version": 1.0
1915
+ }
1916
+ },
1917
+ "blimp_transitive": {
1918
+ "task": "blimp_transitive",
1919
+ "group": "blimp",
1920
+ "dataset_path": "blimp",
1921
+ "dataset_name": "transitive",
1922
+ "validation_split": "train",
1923
+ "doc_to_text": "",
1924
+ "doc_to_target": 0,
1925
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1926
+ "description": "",
1927
+ "target_delimiter": " ",
1928
+ "fewshot_delimiter": "\n\n",
1929
+ "num_fewshot": 0,
1930
+ "metric_list": [
1931
+ {
1932
+ "metric": "acc"
1933
+ }
1934
+ ],
1935
+ "output_type": "multiple_choice",
1936
+ "repeats": 1,
1937
+ "should_decontaminate": true,
1938
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1939
+ "metadata": {
1940
+ "version": 1.0
1941
+ }
1942
+ },
1943
+ "blimp_wh_island": {
1944
+ "task": "blimp_wh_island",
1945
+ "group": "blimp",
1946
+ "dataset_path": "blimp",
1947
+ "dataset_name": "wh_island",
1948
+ "validation_split": "train",
1949
+ "doc_to_text": "",
1950
+ "doc_to_target": 0,
1951
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1952
+ "description": "",
1953
+ "target_delimiter": " ",
1954
+ "fewshot_delimiter": "\n\n",
1955
+ "num_fewshot": 0,
1956
+ "metric_list": [
1957
+ {
1958
+ "metric": "acc"
1959
+ }
1960
+ ],
1961
+ "output_type": "multiple_choice",
1962
+ "repeats": 1,
1963
+ "should_decontaminate": true,
1964
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1965
+ "metadata": {
1966
+ "version": 1.0
1967
+ }
1968
+ },
1969
+ "blimp_wh_questions_object_gap": {
1970
+ "task": "blimp_wh_questions_object_gap",
1971
+ "group": "blimp",
1972
+ "dataset_path": "blimp",
1973
+ "dataset_name": "wh_questions_object_gap",
1974
+ "validation_split": "train",
1975
+ "doc_to_text": "",
1976
+ "doc_to_target": 0,
1977
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1978
+ "description": "",
1979
+ "target_delimiter": " ",
1980
+ "fewshot_delimiter": "\n\n",
1981
+ "num_fewshot": 0,
1982
+ "metric_list": [
1983
+ {
1984
+ "metric": "acc"
1985
+ }
1986
+ ],
1987
+ "output_type": "multiple_choice",
1988
+ "repeats": 1,
1989
+ "should_decontaminate": true,
1990
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1991
+ "metadata": {
1992
+ "version": 1.0
1993
+ }
1994
+ },
1995
+ "blimp_wh_questions_subject_gap": {
1996
+ "task": "blimp_wh_questions_subject_gap",
1997
+ "group": "blimp",
1998
+ "dataset_path": "blimp",
1999
+ "dataset_name": "wh_questions_subject_gap",
2000
+ "validation_split": "train",
2001
+ "doc_to_text": "",
2002
+ "doc_to_target": 0,
2003
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
2004
+ "description": "",
2005
+ "target_delimiter": " ",
2006
+ "fewshot_delimiter": "\n\n",
2007
+ "num_fewshot": 0,
2008
+ "metric_list": [
2009
+ {
2010
+ "metric": "acc"
2011
+ }
2012
+ ],
2013
+ "output_type": "multiple_choice",
2014
+ "repeats": 1,
2015
+ "should_decontaminate": true,
2016
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
2017
+ "metadata": {
2018
+ "version": 1.0
2019
+ }
2020
+ },
2021
+ "blimp_wh_questions_subject_gap_long_distance": {
2022
+ "task": "blimp_wh_questions_subject_gap_long_distance",
2023
+ "group": "blimp",
2024
+ "dataset_path": "blimp",
2025
+ "dataset_name": "wh_questions_subject_gap_long_distance",
2026
+ "validation_split": "train",
2027
+ "doc_to_text": "",
2028
+ "doc_to_target": 0,
2029
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
2030
+ "description": "",
2031
+ "target_delimiter": " ",
2032
+ "fewshot_delimiter": "\n\n",
2033
+ "num_fewshot": 0,
2034
+ "metric_list": [
2035
+ {
2036
+ "metric": "acc"
2037
+ }
2038
+ ],
2039
+ "output_type": "multiple_choice",
2040
+ "repeats": 1,
2041
+ "should_decontaminate": true,
2042
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
2043
+ "metadata": {
2044
+ "version": 1.0
2045
+ }
2046
+ },
2047
+ "blimp_wh_vs_that_no_gap": {
2048
+ "task": "blimp_wh_vs_that_no_gap",
2049
+ "group": "blimp",
2050
+ "dataset_path": "blimp",
2051
+ "dataset_name": "wh_vs_that_no_gap",
2052
+ "validation_split": "train",
2053
+ "doc_to_text": "",
2054
+ "doc_to_target": 0,
2055
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
2056
+ "description": "",
2057
+ "target_delimiter": " ",
2058
+ "fewshot_delimiter": "\n\n",
2059
+ "num_fewshot": 0,
2060
+ "metric_list": [
2061
+ {
2062
+ "metric": "acc"
2063
+ }
2064
+ ],
2065
+ "output_type": "multiple_choice",
2066
+ "repeats": 1,
2067
+ "should_decontaminate": true,
2068
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
2069
+ "metadata": {
2070
+ "version": 1.0
2071
+ }
2072
+ },
2073
+ "blimp_wh_vs_that_no_gap_long_distance": {
2074
+ "task": "blimp_wh_vs_that_no_gap_long_distance",
2075
+ "group": "blimp",
2076
+ "dataset_path": "blimp",
2077
+ "dataset_name": "wh_vs_that_no_gap_long_distance",
2078
+ "validation_split": "train",
2079
+ "doc_to_text": "",
2080
+ "doc_to_target": 0,
2081
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
2082
+ "description": "",
2083
+ "target_delimiter": " ",
2084
+ "fewshot_delimiter": "\n\n",
2085
+ "num_fewshot": 0,
2086
+ "metric_list": [
2087
+ {
2088
+ "metric": "acc"
2089
+ }
2090
+ ],
2091
+ "output_type": "multiple_choice",
2092
+ "repeats": 1,
2093
+ "should_decontaminate": true,
2094
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
2095
+ "metadata": {
2096
+ "version": 1.0
2097
+ }
2098
+ },
2099
+ "blimp_wh_vs_that_with_gap": {
2100
+ "task": "blimp_wh_vs_that_with_gap",
2101
+ "group": "blimp",
2102
+ "dataset_path": "blimp",
2103
+ "dataset_name": "wh_vs_that_with_gap",
2104
+ "validation_split": "train",
2105
+ "doc_to_text": "",
2106
+ "doc_to_target": 0,
2107
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
2108
+ "description": "",
2109
+ "target_delimiter": " ",
2110
+ "fewshot_delimiter": "\n\n",
2111
+ "num_fewshot": 0,
2112
+ "metric_list": [
2113
+ {
2114
+ "metric": "acc"
2115
+ }
2116
+ ],
2117
+ "output_type": "multiple_choice",
2118
+ "repeats": 1,
2119
+ "should_decontaminate": true,
2120
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
2121
+ "metadata": {
2122
+ "version": 1.0
2123
+ }
2124
+ },
2125
+ "blimp_wh_vs_that_with_gap_long_distance": {
2126
+ "task": "blimp_wh_vs_that_with_gap_long_distance",
2127
+ "group": "blimp",
2128
+ "dataset_path": "blimp",
2129
+ "dataset_name": "wh_vs_that_with_gap_long_distance",
2130
+ "validation_split": "train",
2131
+ "doc_to_text": "",
2132
+ "doc_to_target": 0,
2133
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
2134
+ "description": "",
2135
+ "target_delimiter": " ",
2136
+ "fewshot_delimiter": "\n\n",
2137
+ "num_fewshot": 0,
2138
+ "metric_list": [
2139
+ {
2140
+ "metric": "acc"
2141
+ }
2142
+ ],
2143
+ "output_type": "multiple_choice",
2144
+ "repeats": 1,
2145
+ "should_decontaminate": true,
2146
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
2147
+ "metadata": {
2148
+ "version": 1.0
2149
+ }
2150
+ }
2151
+ },
2152
+ "versions": {
2153
+ "blimp_adjunct_island": 1.0,
2154
+ "blimp_anaphor_gender_agreement": 1.0,
2155
+ "blimp_anaphor_number_agreement": 1.0,
2156
+ "blimp_animate_subject_passive": 1.0,
2157
+ "blimp_animate_subject_trans": 1.0,
2158
+ "blimp_causative": 1.0,
2159
+ "blimp_complex_NP_island": 1.0,
2160
+ "blimp_coordinate_structure_constraint_complex_left_branch": 1.0,
2161
+ "blimp_coordinate_structure_constraint_object_extraction": 1.0,
2162
+ "blimp_determiner_noun_agreement_1": 1.0,
2163
+ "blimp_determiner_noun_agreement_2": 1.0,
2164
+ "blimp_determiner_noun_agreement_irregular_1": 1.0,
2165
+ "blimp_determiner_noun_agreement_irregular_2": 1.0,
2166
+ "blimp_determiner_noun_agreement_with_adj_2": 1.0,
2167
+ "blimp_determiner_noun_agreement_with_adj_irregular_1": 1.0,
2168
+ "blimp_determiner_noun_agreement_with_adj_irregular_2": 1.0,
2169
+ "blimp_determiner_noun_agreement_with_adjective_1": 1.0,
2170
+ "blimp_distractor_agreement_relational_noun": 1.0,
2171
+ "blimp_distractor_agreement_relative_clause": 1.0,
2172
+ "blimp_drop_argument": 1.0,
2173
+ "blimp_ellipsis_n_bar_1": 1.0,
2174
+ "blimp_ellipsis_n_bar_2": 1.0,
2175
+ "blimp_existential_there_object_raising": 1.0,
2176
+ "blimp_existential_there_quantifiers_1": 1.0,
2177
+ "blimp_existential_there_quantifiers_2": 1.0,
2178
+ "blimp_existential_there_subject_raising": 1.0,
2179
+ "blimp_expletive_it_object_raising": 1.0,
2180
+ "blimp_inchoative": 1.0,
2181
+ "blimp_intransitive": 1.0,
2182
+ "blimp_irregular_past_participle_adjectives": 1.0,
2183
+ "blimp_irregular_past_participle_verbs": 1.0,
2184
+ "blimp_irregular_plural_subject_verb_agreement_1": 1.0,
2185
+ "blimp_irregular_plural_subject_verb_agreement_2": 1.0,
2186
+ "blimp_left_branch_island_echo_question": 1.0,
2187
+ "blimp_left_branch_island_simple_question": 1.0,
2188
+ "blimp_matrix_question_npi_licensor_present": 1.0,
2189
+ "blimp_npi_present_1": 1.0,
2190
+ "blimp_npi_present_2": 1.0,
2191
+ "blimp_only_npi_licensor_present": 1.0,
2192
+ "blimp_only_npi_scope": 1.0,
2193
+ "blimp_passive_1": 1.0,
2194
+ "blimp_passive_2": 1.0,
2195
+ "blimp_principle_A_c_command": 1.0,
2196
+ "blimp_principle_A_case_1": 1.0,
2197
+ "blimp_principle_A_case_2": 1.0,
2198
+ "blimp_principle_A_domain_1": 1.0,
2199
+ "blimp_principle_A_domain_2": 1.0,
2200
+ "blimp_principle_A_domain_3": 1.0,
2201
+ "blimp_principle_A_reconstruction": 1.0,
2202
+ "blimp_regular_plural_subject_verb_agreement_1": 1.0,
2203
+ "blimp_regular_plural_subject_verb_agreement_2": 1.0,
2204
+ "blimp_sentential_negation_npi_licensor_present": 1.0,
2205
+ "blimp_sentential_negation_npi_scope": 1.0,
2206
+ "blimp_sentential_subject_island": 1.0,
2207
+ "blimp_superlative_quantifiers_1": 1.0,
2208
+ "blimp_superlative_quantifiers_2": 1.0,
2209
+ "blimp_tough_vs_raising_1": 1.0,
2210
+ "blimp_tough_vs_raising_2": 1.0,
2211
+ "blimp_transitive": 1.0,
2212
+ "blimp_wh_island": 1.0,
2213
+ "blimp_wh_questions_object_gap": 1.0,
2214
+ "blimp_wh_questions_subject_gap": 1.0,
2215
+ "blimp_wh_questions_subject_gap_long_distance": 1.0,
2216
+ "blimp_wh_vs_that_no_gap": 1.0,
2217
+ "blimp_wh_vs_that_no_gap_long_distance": 1.0,
2218
+ "blimp_wh_vs_that_with_gap": 1.0,
2219
+ "blimp_wh_vs_that_with_gap_long_distance": 1.0
2220
+ },
2221
+ "n-shot": {
2222
+ "blimp_adjunct_island": 0,
2223
+ "blimp_anaphor_gender_agreement": 0,
2224
+ "blimp_anaphor_number_agreement": 0,
2225
+ "blimp_animate_subject_passive": 0,
2226
+ "blimp_animate_subject_trans": 0,
2227
+ "blimp_causative": 0,
2228
+ "blimp_complex_NP_island": 0,
2229
+ "blimp_coordinate_structure_constraint_complex_left_branch": 0,
2230
+ "blimp_coordinate_structure_constraint_object_extraction": 0,
2231
+ "blimp_determiner_noun_agreement_1": 0,
2232
+ "blimp_determiner_noun_agreement_2": 0,
2233
+ "blimp_determiner_noun_agreement_irregular_1": 0,
2234
+ "blimp_determiner_noun_agreement_irregular_2": 0,
2235
+ "blimp_determiner_noun_agreement_with_adj_2": 0,
2236
+ "blimp_determiner_noun_agreement_with_adj_irregular_1": 0,
2237
+ "blimp_determiner_noun_agreement_with_adj_irregular_2": 0,
2238
+ "blimp_determiner_noun_agreement_with_adjective_1": 0,
2239
+ "blimp_distractor_agreement_relational_noun": 0,
2240
+ "blimp_distractor_agreement_relative_clause": 0,
2241
+ "blimp_drop_argument": 0,
2242
+ "blimp_ellipsis_n_bar_1": 0,
2243
+ "blimp_ellipsis_n_bar_2": 0,
2244
+ "blimp_existential_there_object_raising": 0,
2245
+ "blimp_existential_there_quantifiers_1": 0,
2246
+ "blimp_existential_there_quantifiers_2": 0,
2247
+ "blimp_existential_there_subject_raising": 0,
2248
+ "blimp_expletive_it_object_raising": 0,
2249
+ "blimp_inchoative": 0,
2250
+ "blimp_intransitive": 0,
2251
+ "blimp_irregular_past_participle_adjectives": 0,
2252
+ "blimp_irregular_past_participle_verbs": 0,
2253
+ "blimp_irregular_plural_subject_verb_agreement_1": 0,
2254
+ "blimp_irregular_plural_subject_verb_agreement_2": 0,
2255
+ "blimp_left_branch_island_echo_question": 0,
2256
+ "blimp_left_branch_island_simple_question": 0,
2257
+ "blimp_matrix_question_npi_licensor_present": 0,
2258
+ "blimp_npi_present_1": 0,
2259
+ "blimp_npi_present_2": 0,
2260
+ "blimp_only_npi_licensor_present": 0,
2261
+ "blimp_only_npi_scope": 0,
2262
+ "blimp_passive_1": 0,
2263
+ "blimp_passive_2": 0,
2264
+ "blimp_principle_A_c_command": 0,
2265
+ "blimp_principle_A_case_1": 0,
2266
+ "blimp_principle_A_case_2": 0,
2267
+ "blimp_principle_A_domain_1": 0,
2268
+ "blimp_principle_A_domain_2": 0,
2269
+ "blimp_principle_A_domain_3": 0,
2270
+ "blimp_principle_A_reconstruction": 0,
2271
+ "blimp_regular_plural_subject_verb_agreement_1": 0,
2272
+ "blimp_regular_plural_subject_verb_agreement_2": 0,
2273
+ "blimp_sentential_negation_npi_licensor_present": 0,
2274
+ "blimp_sentential_negation_npi_scope": 0,
2275
+ "blimp_sentential_subject_island": 0,
2276
+ "blimp_superlative_quantifiers_1": 0,
2277
+ "blimp_superlative_quantifiers_2": 0,
2278
+ "blimp_tough_vs_raising_1": 0,
2279
+ "blimp_tough_vs_raising_2": 0,
2280
+ "blimp_transitive": 0,
2281
+ "blimp_wh_island": 0,
2282
+ "blimp_wh_questions_object_gap": 0,
2283
+ "blimp_wh_questions_subject_gap": 0,
2284
+ "blimp_wh_questions_subject_gap_long_distance": 0,
2285
+ "blimp_wh_vs_that_no_gap": 0,
2286
+ "blimp_wh_vs_that_no_gap_long_distance": 0,
2287
+ "blimp_wh_vs_that_with_gap": 0,
2288
+ "blimp_wh_vs_that_with_gap_long_distance": 0
2289
+ },
2290
+ "n-samples": {
2291
+ "blimp_wh_vs_that_with_gap_long_distance": {
2292
+ "original": 1000,
2293
+ "effective": 1000
2294
+ },
2295
+ "blimp_wh_vs_that_with_gap": {
2296
+ "original": 1000,
2297
+ "effective": 1000
2298
+ },
2299
+ "blimp_wh_vs_that_no_gap_long_distance": {
2300
+ "original": 1000,
2301
+ "effective": 1000
2302
+ },
2303
+ "blimp_wh_vs_that_no_gap": {
2304
+ "original": 1000,
2305
+ "effective": 1000
2306
+ },
2307
+ "blimp_wh_questions_subject_gap_long_distance": {
2308
+ "original": 1000,
2309
+ "effective": 1000
2310
+ },
2311
+ "blimp_wh_questions_subject_gap": {
2312
+ "original": 1000,
2313
+ "effective": 1000
2314
+ },
2315
+ "blimp_wh_questions_object_gap": {
2316
+ "original": 1000,
2317
+ "effective": 1000
2318
+ },
2319
+ "blimp_wh_island": {
2320
+ "original": 1000,
2321
+ "effective": 1000
2322
+ },
2323
+ "blimp_transitive": {
2324
+ "original": 1000,
2325
+ "effective": 1000
2326
+ },
2327
+ "blimp_tough_vs_raising_2": {
2328
+ "original": 1000,
2329
+ "effective": 1000
2330
+ },
2331
+ "blimp_tough_vs_raising_1": {
2332
+ "original": 1000,
2333
+ "effective": 1000
2334
+ },
2335
+ "blimp_superlative_quantifiers_2": {
2336
+ "original": 1000,
2337
+ "effective": 1000
2338
+ },
2339
+ "blimp_superlative_quantifiers_1": {
2340
+ "original": 1000,
2341
+ "effective": 1000
2342
+ },
2343
+ "blimp_sentential_subject_island": {
2344
+ "original": 1000,
2345
+ "effective": 1000
2346
+ },
2347
+ "blimp_sentential_negation_npi_scope": {
2348
+ "original": 1000,
2349
+ "effective": 1000
2350
+ },
2351
+ "blimp_sentential_negation_npi_licensor_present": {
2352
+ "original": 1000,
2353
+ "effective": 1000
2354
+ },
2355
+ "blimp_regular_plural_subject_verb_agreement_2": {
2356
+ "original": 1000,
2357
+ "effective": 1000
2358
+ },
2359
+ "blimp_regular_plural_subject_verb_agreement_1": {
2360
+ "original": 1000,
2361
+ "effective": 1000
2362
+ },
2363
+ "blimp_principle_A_reconstruction": {
2364
+ "original": 1000,
2365
+ "effective": 1000
2366
+ },
2367
+ "blimp_principle_A_domain_3": {
2368
+ "original": 1000,
2369
+ "effective": 1000
2370
+ },
2371
+ "blimp_principle_A_domain_2": {
2372
+ "original": 1000,
2373
+ "effective": 1000
2374
+ },
2375
+ "blimp_principle_A_domain_1": {
2376
+ "original": 1000,
2377
+ "effective": 1000
2378
+ },
2379
+ "blimp_principle_A_case_2": {
2380
+ "original": 1000,
2381
+ "effective": 1000
2382
+ },
2383
+ "blimp_principle_A_case_1": {
2384
+ "original": 1000,
2385
+ "effective": 1000
2386
+ },
2387
+ "blimp_principle_A_c_command": {
2388
+ "original": 1000,
2389
+ "effective": 1000
2390
+ },
2391
+ "blimp_passive_2": {
2392
+ "original": 1000,
2393
+ "effective": 1000
2394
+ },
2395
+ "blimp_passive_1": {
2396
+ "original": 1000,
2397
+ "effective": 1000
2398
+ },
2399
+ "blimp_only_npi_scope": {
2400
+ "original": 1000,
2401
+ "effective": 1000
2402
+ },
2403
+ "blimp_only_npi_licensor_present": {
2404
+ "original": 1000,
2405
+ "effective": 1000
2406
+ },
2407
+ "blimp_npi_present_2": {
2408
+ "original": 1000,
2409
+ "effective": 1000
2410
+ },
2411
+ "blimp_npi_present_1": {
2412
+ "original": 1000,
2413
+ "effective": 1000
2414
+ },
2415
+ "blimp_matrix_question_npi_licensor_present": {
2416
+ "original": 1000,
2417
+ "effective": 1000
2418
+ },
2419
+ "blimp_left_branch_island_simple_question": {
2420
+ "original": 1000,
2421
+ "effective": 1000
2422
+ },
2423
+ "blimp_left_branch_island_echo_question": {
2424
+ "original": 1000,
2425
+ "effective": 1000
2426
+ },
2427
+ "blimp_irregular_plural_subject_verb_agreement_2": {
2428
+ "original": 1000,
2429
+ "effective": 1000
2430
+ },
2431
+ "blimp_irregular_plural_subject_verb_agreement_1": {
2432
+ "original": 1000,
2433
+ "effective": 1000
2434
+ },
2435
+ "blimp_irregular_past_participle_verbs": {
2436
+ "original": 1000,
2437
+ "effective": 1000
2438
+ },
2439
+ "blimp_irregular_past_participle_adjectives": {
2440
+ "original": 1000,
2441
+ "effective": 1000
2442
+ },
2443
+ "blimp_intransitive": {
2444
+ "original": 1000,
2445
+ "effective": 1000
2446
+ },
2447
+ "blimp_inchoative": {
2448
+ "original": 1000,
2449
+ "effective": 1000
2450
+ },
2451
+ "blimp_expletive_it_object_raising": {
2452
+ "original": 1000,
2453
+ "effective": 1000
2454
+ },
2455
+ "blimp_existential_there_subject_raising": {
2456
+ "original": 1000,
2457
+ "effective": 1000
2458
+ },
2459
+ "blimp_existential_there_quantifiers_2": {
2460
+ "original": 1000,
2461
+ "effective": 1000
2462
+ },
2463
+ "blimp_existential_there_quantifiers_1": {
2464
+ "original": 1000,
2465
+ "effective": 1000
2466
+ },
2467
+ "blimp_existential_there_object_raising": {
2468
+ "original": 1000,
2469
+ "effective": 1000
2470
+ },
2471
+ "blimp_ellipsis_n_bar_2": {
2472
+ "original": 1000,
2473
+ "effective": 1000
2474
+ },
2475
+ "blimp_ellipsis_n_bar_1": {
2476
+ "original": 1000,
2477
+ "effective": 1000
2478
+ },
2479
+ "blimp_drop_argument": {
2480
+ "original": 1000,
2481
+ "effective": 1000
2482
+ },
2483
+ "blimp_distractor_agreement_relative_clause": {
2484
+ "original": 1000,
2485
+ "effective": 1000
2486
+ },
2487
+ "blimp_distractor_agreement_relational_noun": {
2488
+ "original": 1000,
2489
+ "effective": 1000
2490
+ },
2491
+ "blimp_determiner_noun_agreement_with_adjective_1": {
2492
+ "original": 1000,
2493
+ "effective": 1000
2494
+ },
2495
+ "blimp_determiner_noun_agreement_with_adj_irregular_2": {
2496
+ "original": 1000,
2497
+ "effective": 1000
2498
+ },
2499
+ "blimp_determiner_noun_agreement_with_adj_irregular_1": {
2500
+ "original": 1000,
2501
+ "effective": 1000
2502
+ },
2503
+ "blimp_determiner_noun_agreement_with_adj_2": {
2504
+ "original": 1000,
2505
+ "effective": 1000
2506
+ },
2507
+ "blimp_determiner_noun_agreement_irregular_2": {
2508
+ "original": 1000,
2509
+ "effective": 1000
2510
+ },
2511
+ "blimp_determiner_noun_agreement_irregular_1": {
2512
+ "original": 1000,
2513
+ "effective": 1000
2514
+ },
2515
+ "blimp_determiner_noun_agreement_2": {
2516
+ "original": 1000,
2517
+ "effective": 1000
2518
+ },
2519
+ "blimp_determiner_noun_agreement_1": {
2520
+ "original": 1000,
2521
+ "effective": 1000
2522
+ },
2523
+ "blimp_coordinate_structure_constraint_object_extraction": {
2524
+ "original": 1000,
2525
+ "effective": 1000
2526
+ },
2527
+ "blimp_coordinate_structure_constraint_complex_left_branch": {
2528
+ "original": 1000,
2529
+ "effective": 1000
2530
+ },
2531
+ "blimp_complex_NP_island": {
2532
+ "original": 1000,
2533
+ "effective": 1000
2534
+ },
2535
+ "blimp_causative": {
2536
+ "original": 1000,
2537
+ "effective": 1000
2538
+ },
2539
+ "blimp_animate_subject_trans": {
2540
+ "original": 1000,
2541
+ "effective": 1000
2542
+ },
2543
+ "blimp_animate_subject_passive": {
2544
+ "original": 1000,
2545
+ "effective": 1000
2546
+ },
2547
+ "blimp_anaphor_number_agreement": {
2548
+ "original": 1000,
2549
+ "effective": 1000
2550
+ },
2551
+ "blimp_anaphor_gender_agreement": {
2552
+ "original": 1000,
2553
+ "effective": 1000
2554
+ },
2555
+ "blimp_adjunct_island": {
2556
+ "original": 1000,
2557
+ "effective": 1000
2558
+ }
2559
+ },
2560
+ "config": {
2561
+ "model": "hf",
2562
+ "model_args": "pretrained=EleutherAI/pythia-14m-seed1,revision=step47000",
2563
+ "model_num_parameters": 14067712,
2564
+ "model_dtype": "torch.float16",
2565
+ "model_revision": "step47000",
2566
+ "model_sha": "ee16443ba4b8519d22f2155d9c5b828b937dfbb2",
2567
+ "batch_size": "1024",
2568
+ "batch_sizes": [],
2569
+ "device": "cuda",
2570
+ "use_cache": null,
2571
+ "limit": null,
2572
+ "bootstrap_iters": 100000,
2573
+ "gen_kwargs": null,
2574
+ "random_seed": 0,
2575
+ "numpy_seed": 1234,
2576
+ "torch_seed": 1234,
2577
+ "fewshot_seed": 1234
2578
+ },
2579
+ "git_hash": "51a7ca9",
2580
+ "date": 1724072529.8142166,
2581
+ "pretty_env_info": "PyTorch version: 2.3.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: CentOS Linux release 7.9.2009 (Core) (x86_64)\nGCC version: (GCC) 12.1.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.17\n\nPython version: 3.12.1 (main, Jan 12 2024, 16:49:08) [GCC 12.1.0] (64-bit runtime)\nPython platform: Linux-3.10.0-1160.119.1.el7.tuxcare.els2.x86_64-x86_64-with-glibc2.17\nIs CUDA available: True\nCUDA runtime version: 12.4.99\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: GPU 0: NVIDIA GeForce RTX 2080 Ti\nNvidia driver version: 550.90.07\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nByte Order: Little Endian\nCPU(s): 32\nOn-line CPU(s) list: 0-31\nThread(s) per core: 1\nCore(s) per socket: 32\nSocket(s): 1\nNUMA node(s): 2\nVendor ID: AuthenticAMD\nCPU family: 23\nModel: 49\nModel name: AMD EPYC 7502P 32-Core Processor\nStepping: 0\nCPU MHz: 1500.000\nCPU max MHz: 2500.0000\nCPU min MHz: 1500.0000\nBogoMIPS: 5000.08\nVirtualization: AMD-V\nL1d cache: 32K\nL1i cache: 32K\nL2 cache: 512K\nL3 cache: 16384K\nNUMA node0 CPU(s): 0-15\nNUMA node1 CPU(s): 16-31\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc art rep_good nopl nonstop_tsc extd_apicid aperfmperf eagerfpu pni pclmulqdq monitor ssse3 fma cx16 sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw ibs skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_l2 cpb cat_l3 cdp_l3 hw_pstate sme ssbd ibrs ibpb stibp vmmcall fsgsbase bmi1 avx2 smep bmi2 cqm rdt_a rdseed adx smap clflushopt clwb sha_ni xsaveopt xsavec xgetbv1 cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local clzero irperf xsaveerptr arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif umip overflow_recov succor smca\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.3.0\n[conda] Could not collect",
2582
+ "transformers_version": "4.40.2",
2583
+ "upper_git_hash": null,
2584
+ "task_hashes": {},
2585
+ "model_source": "hf",
2586
+ "model_name": "EleutherAI/pythia-14m-seed1",
2587
+ "model_name_sanitized": "EleutherAI__pythia-14m-seed1",
2588
+ "start_time": 1527240.048781698,
2589
+ "end_time": 1527564.899840044,
2590
+ "total_evaluation_time_seconds": "324.85105834598653"
2591
+ }
pythia-14m-seed1/step48000/EleutherAI__pythia-14m-seed1/results_2024-08-19T06-12-56.244535.json ADDED
@@ -0,0 +1,2591 @@
1
+ {
2
+ "results": {
3
+ "blimp_wh_vs_that_with_gap_long_distance": {
4
+ "acc,none": 0.111,
5
+ "acc_stderr,none": 0.009938701010583716,
6
+ "alias": "blimp_wh_vs_that_with_gap_long_distance"
7
+ },
8
+ "blimp_wh_vs_that_with_gap": {
9
+ "acc,none": 0.223,
10
+ "acc_stderr,none": 0.013169830843425608,
11
+ "alias": "blimp_wh_vs_that_with_gap"
12
+ },
13
+ "blimp_wh_vs_that_no_gap_long_distance": {
14
+ "acc,none": 0.956,
15
+ "acc_stderr,none": 0.006488921798427387,
16
+ "alias": "blimp_wh_vs_that_no_gap_long_distance"
17
+ },
18
+ "blimp_wh_vs_that_no_gap": {
19
+ "acc,none": 0.932,
20
+ "acc_stderr,none": 0.007964887911291624,
21
+ "alias": "blimp_wh_vs_that_no_gap"
22
+ },
23
+ "blimp_wh_questions_subject_gap_long_distance": {
24
+ "acc,none": 0.921,
25
+ "acc_stderr,none": 0.00853415677333337,
26
+ "alias": "blimp_wh_questions_subject_gap_long_distance"
27
+ },
28
+ "blimp_wh_questions_subject_gap": {
29
+ "acc,none": 0.873,
30
+ "acc_stderr,none": 0.010534798620855644,
31
+ "alias": "blimp_wh_questions_subject_gap"
32
+ },
33
+ "blimp_wh_questions_object_gap": {
34
+ "acc,none": 0.457,
35
+ "acc_stderr,none": 0.0157606915901365,
36
+ "alias": "blimp_wh_questions_object_gap"
37
+ },
38
+ "blimp_wh_island": {
39
+ "acc,none": 0.602,
40
+ "acc_stderr,none": 0.015486634102859016,
41
+ "alias": "blimp_wh_island"
42
+ },
43
+ "blimp_transitive": {
44
+ "acc,none": 0.826,
45
+ "acc_stderr,none": 0.011994493230973447,
46
+ "alias": "blimp_transitive"
47
+ },
48
+ "blimp_tough_vs_raising_2": {
49
+ "acc,none": 0.734,
50
+ "acc_stderr,none": 0.013979965645145179,
51
+ "alias": "blimp_tough_vs_raising_2"
52
+ },
53
+ "blimp_tough_vs_raising_1": {
54
+ "acc,none": 0.377,
55
+ "acc_stderr,none": 0.0153331701257798,
56
+ "alias": "blimp_tough_vs_raising_1"
57
+ },
58
+ "blimp_superlative_quantifiers_2": {
59
+ "acc,none": 0.247,
60
+ "acc_stderr,none": 0.013644675781314012,
61
+ "alias": "blimp_superlative_quantifiers_2"
62
+ },
63
+ "blimp_superlative_quantifiers_1": {
64
+ "acc,none": 0.165,
65
+ "acc_stderr,none": 0.011743632866916136,
66
+ "alias": "blimp_superlative_quantifiers_1"
67
+ },
68
+ "blimp_sentential_subject_island": {
69
+ "acc,none": 0.357,
70
+ "acc_stderr,none": 0.01515852172148659,
71
+ "alias": "blimp_sentential_subject_island"
72
+ },
73
+ "blimp_sentential_negation_npi_scope": {
74
+ "acc,none": 0.399,
75
+ "acc_stderr,none": 0.015493193313163012,
76
+ "alias": "blimp_sentential_negation_npi_scope"
77
+ },
78
+ "blimp_sentential_negation_npi_licensor_present": {
79
+ "acc,none": 0.956,
80
+ "acc_stderr,none": 0.006488921798427387,
81
+ "alias": "blimp_sentential_negation_npi_licensor_present"
82
+ },
83
+ "blimp_regular_plural_subject_verb_agreement_2": {
84
+ "acc,none": 0.82,
85
+ "acc_stderr,none": 0.012155153135512022,
86
+ "alias": "blimp_regular_plural_subject_verb_agreement_2"
87
+ },
88
+ "blimp_regular_plural_subject_verb_agreement_1": {
89
+ "acc,none": 0.863,
90
+ "acc_stderr,none": 0.01087884871433321,
91
+ "alias": "blimp_regular_plural_subject_verb_agreement_1"
92
+ },
93
+ "blimp_principle_A_reconstruction": {
94
+ "acc,none": 0.479,
95
+ "acc_stderr,none": 0.015805341148131185,
96
+ "alias": "blimp_principle_A_reconstruction"
97
+ },
98
+ "blimp_principle_A_domain_3": {
99
+ "acc,none": 0.618,
100
+ "acc_stderr,none": 0.015372453034968502,
101
+ "alias": "blimp_principle_A_domain_3"
102
+ },
103
+ "blimp_principle_A_domain_2": {
104
+ "acc,none": 0.626,
105
+ "acc_stderr,none": 0.015308767369006505,
106
+ "alias": "blimp_principle_A_domain_2"
107
+ },
108
+ "blimp_principle_A_domain_1": {
109
+ "acc,none": 0.88,
110
+ "acc_stderr,none": 0.010281328012747462,
111
+ "alias": "blimp_principle_A_domain_1"
112
+ },
113
+ "blimp_principle_A_case_2": {
114
+ "acc,none": 0.788,
115
+ "acc_stderr,none": 0.01293148186493811,
116
+ "alias": "blimp_principle_A_case_2"
117
+ },
118
+ "blimp_principle_A_case_1": {
119
+ "acc,none": 1.0,
120
+ "acc_stderr,none": 0.0,
121
+ "alias": "blimp_principle_A_case_1"
122
+ },
123
+ "blimp_principle_A_c_command": {
124
+ "acc,none": 0.586,
125
+ "acc_stderr,none": 0.01558354410417755,
126
+ "alias": "blimp_principle_A_c_command"
127
+ },
128
+ "blimp_passive_2": {
129
+ "acc,none": 0.882,
130
+ "acc_stderr,none": 0.010206869264381718,
131
+ "alias": "blimp_passive_2"
132
+ },
133
+ "blimp_passive_1": {
134
+ "acc,none": 0.906,
135
+ "acc_stderr,none": 0.009233052000787672,
136
+ "alias": "blimp_passive_1"
137
+ },
138
+ "blimp_only_npi_scope": {
139
+ "acc,none": 0.811,
140
+ "acc_stderr,none": 0.01238678458811779,
141
+ "alias": "blimp_only_npi_scope"
142
+ },
143
+ "blimp_only_npi_licensor_present": {
144
+ "acc,none": 0.973,
145
+ "acc_stderr,none": 0.005128089049275305,
146
+ "alias": "blimp_only_npi_licensor_present"
147
+ },
148
+ "blimp_npi_present_2": {
149
+ "acc,none": 0.392,
150
+ "acc_stderr,none": 0.015445859463771338,
151
+ "alias": "blimp_npi_present_2"
152
+ },
153
+ "blimp_npi_present_1": {
154
+ "acc,none": 0.359,
155
+ "acc_stderr,none": 0.01517726422479853,
156
+ "alias": "blimp_npi_present_1"
157
+ },
158
+ "blimp_matrix_question_npi_licensor_present": {
159
+ "acc,none": 0.19,
160
+ "acc_stderr,none": 0.012411851354816256,
161
+ "alias": "blimp_matrix_question_npi_licensor_present"
162
+ },
163
+ "blimp_left_branch_island_simple_question": {
164
+ "acc,none": 0.352,
165
+ "acc_stderr,none": 0.015110404505648562,
166
+ "alias": "blimp_left_branch_island_simple_question"
167
+ },
168
+ "blimp_left_branch_island_echo_question": {
169
+ "acc,none": 0.418,
170
+ "acc_stderr,none": 0.015605111967541904,
171
+ "alias": "blimp_left_branch_island_echo_question"
172
+ },
173
+ "blimp_irregular_plural_subject_verb_agreement_2": {
174
+ "acc,none": 0.847,
175
+ "acc_stderr,none": 0.011389500459665504,
176
+ "alias": "blimp_irregular_plural_subject_verb_agreement_2"
177
+ },
178
+ "blimp_irregular_plural_subject_verb_agreement_1": {
179
+ "acc,none": 0.77,
180
+ "acc_stderr,none": 0.01331455133593608,
181
+ "alias": "blimp_irregular_plural_subject_verb_agreement_1"
182
+ },
183
+ "blimp_irregular_past_participle_verbs": {
184
+ "acc,none": 0.908,
185
+ "acc_stderr,none": 0.009144376393151129,
186
+ "alias": "blimp_irregular_past_participle_verbs"
187
+ },
188
+ "blimp_irregular_past_participle_adjectives": {
189
+ "acc,none": 0.994,
190
+ "acc_stderr,none": 0.002443352199329841,
191
+ "alias": "blimp_irregular_past_participle_adjectives"
192
+ },
193
+ "blimp_intransitive": {
194
+ "acc,none": 0.601,
195
+ "acc_stderr,none": 0.015493193313163012,
196
+ "alias": "blimp_intransitive"
197
+ },
198
+ "blimp_inchoative": {
199
+ "acc,none": 0.48,
200
+ "acc_stderr,none": 0.015806639423035177,
201
+ "alias": "blimp_inchoative"
202
+ },
203
+ "blimp_expletive_it_object_raising": {
204
+ "acc,none": 0.734,
205
+ "acc_stderr,none": 0.013979965645145179,
206
+ "alias": "blimp_expletive_it_object_raising"
207
+ },
208
+ "blimp_existential_there_subject_raising": {
209
+ "acc,none": 0.779,
210
+ "acc_stderr,none": 0.01312750285969618,
211
+ "alias": "blimp_existential_there_subject_raising"
212
+ },
213
+ "blimp_existential_there_quantifiers_2": {
214
+ "acc,none": 0.308,
215
+ "acc_stderr,none": 0.01460648312734278,
216
+ "alias": "blimp_existential_there_quantifiers_2"
217
+ },
218
+ "blimp_existential_there_quantifiers_1": {
219
+ "acc,none": 0.956,
220
+ "acc_stderr,none": 0.006488921798427387,
221
+ "alias": "blimp_existential_there_quantifiers_1"
222
+ },
223
+ "blimp_existential_there_object_raising": {
224
+ "acc,none": 0.797,
225
+ "acc_stderr,none": 0.01272607374459825,
226
+ "alias": "blimp_existential_there_object_raising"
227
+ },
228
+ "blimp_ellipsis_n_bar_2": {
229
+ "acc,none": 0.794,
230
+ "acc_stderr,none": 0.012795613612786522,
231
+ "alias": "blimp_ellipsis_n_bar_2"
232
+ },
233
+ "blimp_ellipsis_n_bar_1": {
234
+ "acc,none": 0.594,
235
+ "acc_stderr,none": 0.01553722643863458,
236
+ "alias": "blimp_ellipsis_n_bar_1"
237
+ },
238
+ "blimp_drop_argument": {
239
+ "acc,none": 0.726,
240
+ "acc_stderr,none": 0.01411109928825969,
241
+ "alias": "blimp_drop_argument"
242
+ },
243
+ "blimp_distractor_agreement_relative_clause": {
244
+ "acc,none": 0.27,
245
+ "acc_stderr,none": 0.01404625563263382,
246
+ "alias": "blimp_distractor_agreement_relative_clause"
247
+ },
248
+ "blimp_distractor_agreement_relational_noun": {
249
+ "acc,none": 0.3,
250
+ "acc_stderr,none": 0.014498627873361335,
251
+ "alias": "blimp_distractor_agreement_relational_noun"
252
+ },
253
+ "blimp_determiner_noun_agreement_with_adjective_1": {
254
+ "acc,none": 0.857,
255
+ "acc_stderr,none": 0.011075814808567074,
256
+ "alias": "blimp_determiner_noun_agreement_with_adjective_1"
257
+ },
258
+ "blimp_determiner_noun_agreement_with_adj_irregular_2": {
259
+ "acc,none": 0.825,
260
+ "acc_stderr,none": 0.012021627157731998,
261
+ "alias": "blimp_determiner_noun_agreement_with_adj_irregular_2"
262
+ },
263
+ "blimp_determiner_noun_agreement_with_adj_irregular_1": {
264
+ "acc,none": 0.749,
265
+ "acc_stderr,none": 0.013718133516888775,
266
+ "alias": "blimp_determiner_noun_agreement_with_adj_irregular_1"
267
+ },
268
+ "blimp_determiner_noun_agreement_with_adj_2": {
269
+ "acc,none": 0.838,
270
+ "acc_stderr,none": 0.011657267771304384,
271
+ "alias": "blimp_determiner_noun_agreement_with_adj_2"
272
+ },
273
+ "blimp_determiner_noun_agreement_irregular_2": {
274
+ "acc,none": 0.834,
275
+ "acc_stderr,none": 0.011772110370812133,
276
+ "alias": "blimp_determiner_noun_agreement_irregular_2"
277
+ },
278
+ "blimp_determiner_noun_agreement_irregular_1": {
279
+ "acc,none": 0.769,
280
+ "acc_stderr,none": 0.013334797216936478,
281
+ "alias": "blimp_determiner_noun_agreement_irregular_1"
282
+ },
283
+ "blimp_determiner_noun_agreement_2": {
284
+ "acc,none": 0.942,
285
+ "acc_stderr,none": 0.0073953154557929906,
286
+ "alias": "blimp_determiner_noun_agreement_2"
287
+ },
288
+ "blimp_determiner_noun_agreement_1": {
289
+ "acc,none": 0.917,
290
+ "acc_stderr,none": 0.008728527206074756,
291
+ "alias": "blimp_determiner_noun_agreement_1"
292
+ },
293
+ "blimp_coordinate_structure_constraint_object_extraction": {
294
+ "acc,none": 0.529,
295
+ "acc_stderr,none": 0.015792669451628764,
296
+ "alias": "blimp_coordinate_structure_constraint_object_extraction"
297
+ },
298
+ "blimp_coordinate_structure_constraint_complex_left_branch": {
299
+ "acc,none": 0.422,
300
+ "acc_stderr,none": 0.015625625112620622,
301
+ "alias": "blimp_coordinate_structure_constraint_complex_left_branch"
302
+ },
303
+ "blimp_complex_NP_island": {
304
+ "acc,none": 0.381,
305
+ "acc_stderr,none": 0.015364734787007391,
306
+ "alias": "blimp_complex_NP_island"
307
+ },
308
+ "blimp_causative": {
309
+ "acc,none": 0.645,
310
+ "acc_stderr,none": 0.015139491543780598,
311
+ "alias": "blimp_causative"
312
+ },
313
+ "blimp_animate_subject_trans": {
314
+ "acc,none": 0.833,
315
+ "acc_stderr,none": 0.011800434324644593,
316
+ "alias": "blimp_animate_subject_trans"
317
+ },
318
+ "blimp_animate_subject_passive": {
319
+ "acc,none": 0.748,
320
+ "acc_stderr,none": 0.013736254390651213,
321
+ "alias": "blimp_animate_subject_passive"
322
+ },
323
+ "blimp_anaphor_number_agreement": {
324
+ "acc,none": 0.937,
325
+ "acc_stderr,none": 0.007687007876286461,
326
+ "alias": "blimp_anaphor_number_agreement"
327
+ },
328
+ "blimp_anaphor_gender_agreement": {
329
+ "acc,none": 0.759,
330
+ "acc_stderr,none": 0.01353152253451555,
331
+ "alias": "blimp_anaphor_gender_agreement"
332
+ },
333
+ "blimp_adjunct_island": {
334
+ "acc,none": 0.778,
335
+ "acc_stderr,none": 0.013148721948877349,
336
+ "alias": "blimp_adjunct_island"
337
+ }
338
+ },
339
+ "group_subtasks": {
340
+ "blimp_adjunct_island": [],
341
+ "blimp_anaphor_gender_agreement": [],
342
+ "blimp_anaphor_number_agreement": [],
343
+ "blimp_animate_subject_passive": [],
344
+ "blimp_animate_subject_trans": [],
345
+ "blimp_causative": [],
346
+ "blimp_complex_NP_island": [],
347
+ "blimp_coordinate_structure_constraint_complex_left_branch": [],
348
+ "blimp_coordinate_structure_constraint_object_extraction": [],
349
+ "blimp_determiner_noun_agreement_1": [],
350
+ "blimp_determiner_noun_agreement_2": [],
351
+ "blimp_determiner_noun_agreement_irregular_1": [],
352
+ "blimp_determiner_noun_agreement_irregular_2": [],
353
+ "blimp_determiner_noun_agreement_with_adj_2": [],
354
+ "blimp_determiner_noun_agreement_with_adj_irregular_1": [],
355
+ "blimp_determiner_noun_agreement_with_adj_irregular_2": [],
356
+ "blimp_determiner_noun_agreement_with_adjective_1": [],
357
+ "blimp_distractor_agreement_relational_noun": [],
358
+ "blimp_distractor_agreement_relative_clause": [],
359
+ "blimp_drop_argument": [],
360
+ "blimp_ellipsis_n_bar_1": [],
361
+ "blimp_ellipsis_n_bar_2": [],
362
+ "blimp_existential_there_object_raising": [],
363
+ "blimp_existential_there_quantifiers_1": [],
364
+ "blimp_existential_there_quantifiers_2": [],
365
+ "blimp_existential_there_subject_raising": [],
366
+ "blimp_expletive_it_object_raising": [],
367
+ "blimp_inchoative": [],
368
+ "blimp_intransitive": [],
369
+ "blimp_irregular_past_participle_adjectives": [],
370
+ "blimp_irregular_past_participle_verbs": [],
371
+ "blimp_irregular_plural_subject_verb_agreement_1": [],
372
+ "blimp_irregular_plural_subject_verb_agreement_2": [],
373
+ "blimp_left_branch_island_echo_question": [],
374
+ "blimp_left_branch_island_simple_question": [],
375
+ "blimp_matrix_question_npi_licensor_present": [],
376
+ "blimp_npi_present_1": [],
377
+ "blimp_npi_present_2": [],
378
+ "blimp_only_npi_licensor_present": [],
379
+ "blimp_only_npi_scope": [],
380
+ "blimp_passive_1": [],
381
+ "blimp_passive_2": [],
382
+ "blimp_principle_A_c_command": [],
383
+ "blimp_principle_A_case_1": [],
384
+ "blimp_principle_A_case_2": [],
385
+ "blimp_principle_A_domain_1": [],
386
+ "blimp_principle_A_domain_2": [],
387
+ "blimp_principle_A_domain_3": [],
388
+ "blimp_principle_A_reconstruction": [],
389
+ "blimp_regular_plural_subject_verb_agreement_1": [],
390
+ "blimp_regular_plural_subject_verb_agreement_2": [],
391
+ "blimp_sentential_negation_npi_licensor_present": [],
392
+ "blimp_sentential_negation_npi_scope": [],
393
+ "blimp_sentential_subject_island": [],
394
+ "blimp_superlative_quantifiers_1": [],
395
+ "blimp_superlative_quantifiers_2": [],
396
+ "blimp_tough_vs_raising_1": [],
397
+ "blimp_tough_vs_raising_2": [],
398
+ "blimp_transitive": [],
399
+ "blimp_wh_island": [],
400
+ "blimp_wh_questions_object_gap": [],
401
+ "blimp_wh_questions_subject_gap": [],
402
+ "blimp_wh_questions_subject_gap_long_distance": [],
403
+ "blimp_wh_vs_that_no_gap": [],
404
+ "blimp_wh_vs_that_no_gap_long_distance": [],
405
+ "blimp_wh_vs_that_with_gap": [],
406
+ "blimp_wh_vs_that_with_gap_long_distance": []
407
+ },
408
+ "configs": {
409
+ "blimp_adjunct_island": {
410
+ "task": "blimp_adjunct_island",
411
+ "group": "blimp",
412
+ "dataset_path": "blimp",
413
+ "dataset_name": "adjunct_island",
414
+ "validation_split": "train",
415
+ "doc_to_text": "",
416
+ "doc_to_target": 0,
417
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
418
+ "description": "",
419
+ "target_delimiter": " ",
420
+ "fewshot_delimiter": "\n\n",
421
+ "num_fewshot": 0,
422
+ "metric_list": [
423
+ {
424
+ "metric": "acc"
425
+ }
426
+ ],
427
+ "output_type": "multiple_choice",
428
+ "repeats": 1,
429
+ "should_decontaminate": true,
430
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
431
+ "metadata": {
432
+ "version": 1.0
433
+ }
434
+ },
435
+ "blimp_anaphor_gender_agreement": {
436
+ "task": "blimp_anaphor_gender_agreement",
437
+ "group": "blimp",
438
+ "dataset_path": "blimp",
439
+ "dataset_name": "anaphor_gender_agreement",
440
+ "validation_split": "train",
441
+ "doc_to_text": "",
442
+ "doc_to_target": 0,
443
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
444
+ "description": "",
445
+ "target_delimiter": " ",
446
+ "fewshot_delimiter": "\n\n",
447
+ "num_fewshot": 0,
448
+ "metric_list": [
449
+ {
450
+ "metric": "acc"
451
+ }
452
+ ],
453
+ "output_type": "multiple_choice",
454
+ "repeats": 1,
455
+ "should_decontaminate": true,
456
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
457
+ "metadata": {
458
+ "version": 1.0
459
+ }
460
+ },
461
+ "blimp_anaphor_number_agreement": {
462
+ "task": "blimp_anaphor_number_agreement",
463
+ "group": "blimp",
464
+ "dataset_path": "blimp",
465
+ "dataset_name": "anaphor_number_agreement",
466
+ "validation_split": "train",
467
+ "doc_to_text": "",
468
+ "doc_to_target": 0,
469
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
470
+ "description": "",
471
+ "target_delimiter": " ",
472
+ "fewshot_delimiter": "\n\n",
473
+ "num_fewshot": 0,
474
+ "metric_list": [
475
+ {
476
+ "metric": "acc"
477
+ }
478
+ ],
479
+ "output_type": "multiple_choice",
480
+ "repeats": 1,
481
+ "should_decontaminate": true,
482
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
483
+ "metadata": {
484
+ "version": 1.0
485
+ }
486
+ },
487
+ "blimp_animate_subject_passive": {
488
+ "task": "blimp_animate_subject_passive",
489
+ "group": "blimp",
490
+ "dataset_path": "blimp",
491
+ "dataset_name": "animate_subject_passive",
492
+ "validation_split": "train",
493
+ "doc_to_text": "",
494
+ "doc_to_target": 0,
495
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
496
+ "description": "",
497
+ "target_delimiter": " ",
498
+ "fewshot_delimiter": "\n\n",
499
+ "num_fewshot": 0,
500
+ "metric_list": [
501
+ {
502
+ "metric": "acc"
503
+ }
504
+ ],
505
+ "output_type": "multiple_choice",
506
+ "repeats": 1,
507
+ "should_decontaminate": true,
508
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
509
+ "metadata": {
510
+ "version": 1.0
511
+ }
512
+ },
513
+ "blimp_animate_subject_trans": {
514
+ "task": "blimp_animate_subject_trans",
515
+ "group": "blimp",
516
+ "dataset_path": "blimp",
517
+ "dataset_name": "animate_subject_trans",
518
+ "validation_split": "train",
519
+ "doc_to_text": "",
520
+ "doc_to_target": 0,
521
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
522
+ "description": "",
523
+ "target_delimiter": " ",
524
+ "fewshot_delimiter": "\n\n",
525
+ "num_fewshot": 0,
526
+ "metric_list": [
527
+ {
528
+ "metric": "acc"
529
+ }
530
+ ],
531
+ "output_type": "multiple_choice",
532
+ "repeats": 1,
533
+ "should_decontaminate": true,
534
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
535
+ "metadata": {
536
+ "version": 1.0
537
+ }
538
+ },
539
+ "blimp_causative": {
540
+ "task": "blimp_causative",
541
+ "group": "blimp",
542
+ "dataset_path": "blimp",
543
+ "dataset_name": "causative",
544
+ "validation_split": "train",
545
+ "doc_to_text": "",
546
+ "doc_to_target": 0,
547
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
548
+ "description": "",
549
+ "target_delimiter": " ",
550
+ "fewshot_delimiter": "\n\n",
551
+ "num_fewshot": 0,
552
+ "metric_list": [
553
+ {
554
+ "metric": "acc"
555
+ }
556
+ ],
557
+ "output_type": "multiple_choice",
558
+ "repeats": 1,
559
+ "should_decontaminate": true,
560
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
561
+ "metadata": {
562
+ "version": 1.0
563
+ }
564
+ },
565
+ "blimp_complex_NP_island": {
566
+ "task": "blimp_complex_NP_island",
567
+ "group": "blimp",
568
+ "dataset_path": "blimp",
569
+ "dataset_name": "complex_NP_island",
570
+ "validation_split": "train",
571
+ "doc_to_text": "",
572
+ "doc_to_target": 0,
573
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
574
+ "description": "",
575
+ "target_delimiter": " ",
576
+ "fewshot_delimiter": "\n\n",
577
+ "num_fewshot": 0,
578
+ "metric_list": [
579
+ {
580
+ "metric": "acc"
581
+ }
582
+ ],
583
+ "output_type": "multiple_choice",
584
+ "repeats": 1,
585
+ "should_decontaminate": true,
586
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
587
+ "metadata": {
588
+ "version": 1.0
589
+ }
590
+ },
591
+ "blimp_coordinate_structure_constraint_complex_left_branch": {
592
+ "task": "blimp_coordinate_structure_constraint_complex_left_branch",
593
+ "group": "blimp",
594
+ "dataset_path": "blimp",
595
+ "dataset_name": "coordinate_structure_constraint_complex_left_branch",
596
+ "validation_split": "train",
597
+ "doc_to_text": "",
598
+ "doc_to_target": 0,
599
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
600
+ "description": "",
601
+ "target_delimiter": " ",
602
+ "fewshot_delimiter": "\n\n",
603
+ "num_fewshot": 0,
604
+ "metric_list": [
605
+ {
606
+ "metric": "acc"
607
+ }
608
+ ],
609
+ "output_type": "multiple_choice",
610
+ "repeats": 1,
611
+ "should_decontaminate": true,
612
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
613
+ "metadata": {
614
+ "version": 1.0
615
+ }
616
+ },
617
+ "blimp_coordinate_structure_constraint_object_extraction": {
618
+ "task": "blimp_coordinate_structure_constraint_object_extraction",
619
+ "group": "blimp",
620
+ "dataset_path": "blimp",
621
+ "dataset_name": "coordinate_structure_constraint_object_extraction",
622
+ "validation_split": "train",
623
+ "doc_to_text": "",
624
+ "doc_to_target": 0,
625
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
626
+ "description": "",
627
+ "target_delimiter": " ",
628
+ "fewshot_delimiter": "\n\n",
629
+ "num_fewshot": 0,
630
+ "metric_list": [
631
+ {
632
+ "metric": "acc"
633
+ }
634
+ ],
635
+ "output_type": "multiple_choice",
636
+ "repeats": 1,
637
+ "should_decontaminate": true,
638
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
639
+ "metadata": {
640
+ "version": 1.0
641
+ }
642
+ },
643
+ "blimp_determiner_noun_agreement_1": {
644
+ "task": "blimp_determiner_noun_agreement_1",
645
+ "group": "blimp",
646
+ "dataset_path": "blimp",
647
+ "dataset_name": "determiner_noun_agreement_1",
648
+ "validation_split": "train",
649
+ "doc_to_text": "",
650
+ "doc_to_target": 0,
651
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
652
+ "description": "",
653
+ "target_delimiter": " ",
654
+ "fewshot_delimiter": "\n\n",
655
+ "num_fewshot": 0,
656
+ "metric_list": [
657
+ {
658
+ "metric": "acc"
659
+ }
660
+ ],
661
+ "output_type": "multiple_choice",
662
+ "repeats": 1,
663
+ "should_decontaminate": true,
664
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
665
+ "metadata": {
666
+ "version": 1.0
667
+ }
668
+ },
669
+ "blimp_determiner_noun_agreement_2": {
670
+ "task": "blimp_determiner_noun_agreement_2",
671
+ "group": "blimp",
672
+ "dataset_path": "blimp",
673
+ "dataset_name": "determiner_noun_agreement_2",
674
+ "validation_split": "train",
675
+ "doc_to_text": "",
676
+ "doc_to_target": 0,
677
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
678
+ "description": "",
679
+ "target_delimiter": " ",
680
+ "fewshot_delimiter": "\n\n",
681
+ "num_fewshot": 0,
682
+ "metric_list": [
683
+ {
684
+ "metric": "acc"
685
+ }
686
+ ],
687
+ "output_type": "multiple_choice",
688
+ "repeats": 1,
689
+ "should_decontaminate": true,
690
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
691
+ "metadata": {
692
+ "version": 1.0
693
+ }
694
+ },
695
+ "blimp_determiner_noun_agreement_irregular_1": {
696
+ "task": "blimp_determiner_noun_agreement_irregular_1",
697
+ "group": "blimp",
698
+ "dataset_path": "blimp",
699
+ "dataset_name": "determiner_noun_agreement_irregular_1",
700
+ "validation_split": "train",
701
+ "doc_to_text": "",
702
+ "doc_to_target": 0,
703
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
704
+ "description": "",
705
+ "target_delimiter": " ",
706
+ "fewshot_delimiter": "\n\n",
707
+ "num_fewshot": 0,
708
+ "metric_list": [
709
+ {
710
+ "metric": "acc"
711
+ }
712
+ ],
713
+ "output_type": "multiple_choice",
714
+ "repeats": 1,
715
+ "should_decontaminate": true,
716
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
717
+ "metadata": {
718
+ "version": 1.0
719
+ }
720
+ },
721
+ "blimp_determiner_noun_agreement_irregular_2": {
722
+ "task": "blimp_determiner_noun_agreement_irregular_2",
723
+ "group": "blimp",
724
+ "dataset_path": "blimp",
725
+ "dataset_name": "determiner_noun_agreement_irregular_2",
726
+ "validation_split": "train",
727
+ "doc_to_text": "",
728
+ "doc_to_target": 0,
729
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
730
+ "description": "",
731
+ "target_delimiter": " ",
732
+ "fewshot_delimiter": "\n\n",
733
+ "num_fewshot": 0,
734
+ "metric_list": [
735
+ {
736
+ "metric": "acc"
737
+ }
738
+ ],
739
+ "output_type": "multiple_choice",
740
+ "repeats": 1,
741
+ "should_decontaminate": true,
742
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
743
+ "metadata": {
744
+ "version": 1.0
745
+ }
746
+ },
747
+ "blimp_determiner_noun_agreement_with_adj_2": {
748
+ "task": "blimp_determiner_noun_agreement_with_adj_2",
749
+ "group": "blimp",
750
+ "dataset_path": "blimp",
751
+ "dataset_name": "determiner_noun_agreement_with_adj_2",
752
+ "validation_split": "train",
753
+ "doc_to_text": "",
754
+ "doc_to_target": 0,
755
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
756
+ "description": "",
757
+ "target_delimiter": " ",
758
+ "fewshot_delimiter": "\n\n",
759
+ "num_fewshot": 0,
760
+ "metric_list": [
761
+ {
762
+ "metric": "acc"
763
+ }
764
+ ],
765
+ "output_type": "multiple_choice",
766
+ "repeats": 1,
767
+ "should_decontaminate": true,
768
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
769
+ "metadata": {
770
+ "version": 1.0
771
+ }
772
+ },
773
+ "blimp_determiner_noun_agreement_with_adj_irregular_1": {
774
+ "task": "blimp_determiner_noun_agreement_with_adj_irregular_1",
775
+ "group": "blimp",
776
+ "dataset_path": "blimp",
777
+ "dataset_name": "determiner_noun_agreement_with_adj_irregular_1",
778
+ "validation_split": "train",
779
+ "doc_to_text": "",
780
+ "doc_to_target": 0,
781
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
782
+ "description": "",
783
+ "target_delimiter": " ",
784
+ "fewshot_delimiter": "\n\n",
785
+ "num_fewshot": 0,
786
+ "metric_list": [
787
+ {
788
+ "metric": "acc"
789
+ }
790
+ ],
791
+ "output_type": "multiple_choice",
792
+ "repeats": 1,
793
+ "should_decontaminate": true,
794
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
795
+ "metadata": {
796
+ "version": 1.0
797
+ }
798
+ },
799
+ "blimp_determiner_noun_agreement_with_adj_irregular_2": {
800
+ "task": "blimp_determiner_noun_agreement_with_adj_irregular_2",
801
+ "group": "blimp",
802
+ "dataset_path": "blimp",
803
+ "dataset_name": "determiner_noun_agreement_with_adj_irregular_2",
804
+ "validation_split": "train",
805
+ "doc_to_text": "",
806
+ "doc_to_target": 0,
807
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
808
+ "description": "",
809
+ "target_delimiter": " ",
810
+ "fewshot_delimiter": "\n\n",
811
+ "num_fewshot": 0,
812
+ "metric_list": [
813
+ {
814
+ "metric": "acc"
815
+ }
816
+ ],
817
+ "output_type": "multiple_choice",
818
+ "repeats": 1,
819
+ "should_decontaminate": true,
820
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
821
+ "metadata": {
822
+ "version": 1.0
823
+ }
824
+ },
825
+ "blimp_determiner_noun_agreement_with_adjective_1": {
826
+ "task": "blimp_determiner_noun_agreement_with_adjective_1",
827
+ "group": "blimp",
828
+ "dataset_path": "blimp",
829
+ "dataset_name": "determiner_noun_agreement_with_adjective_1",
830
+ "validation_split": "train",
831
+ "doc_to_text": "",
832
+ "doc_to_target": 0,
833
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
834
+ "description": "",
835
+ "target_delimiter": " ",
836
+ "fewshot_delimiter": "\n\n",
837
+ "num_fewshot": 0,
838
+ "metric_list": [
839
+ {
840
+ "metric": "acc"
841
+ }
842
+ ],
843
+ "output_type": "multiple_choice",
844
+ "repeats": 1,
845
+ "should_decontaminate": true,
846
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
847
+ "metadata": {
848
+ "version": 1.0
849
+ }
850
+ },
851
+ "blimp_distractor_agreement_relational_noun": {
852
+ "task": "blimp_distractor_agreement_relational_noun",
853
+ "group": "blimp",
854
+ "dataset_path": "blimp",
855
+ "dataset_name": "distractor_agreement_relational_noun",
856
+ "validation_split": "train",
857
+ "doc_to_text": "",
858
+ "doc_to_target": 0,
859
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
860
+ "description": "",
861
+ "target_delimiter": " ",
862
+ "fewshot_delimiter": "\n\n",
863
+ "num_fewshot": 0,
864
+ "metric_list": [
865
+ {
866
+ "metric": "acc"
867
+ }
868
+ ],
869
+ "output_type": "multiple_choice",
870
+ "repeats": 1,
871
+ "should_decontaminate": true,
872
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
873
+ "metadata": {
874
+ "version": 1.0
875
+ }
876
+ },
877
+ "blimp_distractor_agreement_relative_clause": {
878
+ "task": "blimp_distractor_agreement_relative_clause",
879
+ "group": "blimp",
880
+ "dataset_path": "blimp",
881
+ "dataset_name": "distractor_agreement_relative_clause",
882
+ "validation_split": "train",
883
+ "doc_to_text": "",
884
+ "doc_to_target": 0,
885
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
886
+ "description": "",
887
+ "target_delimiter": " ",
888
+ "fewshot_delimiter": "\n\n",
889
+ "num_fewshot": 0,
890
+ "metric_list": [
891
+ {
892
+ "metric": "acc"
893
+ }
894
+ ],
895
+ "output_type": "multiple_choice",
896
+ "repeats": 1,
897
+ "should_decontaminate": true,
898
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
899
+ "metadata": {
900
+ "version": 1.0
901
+ }
902
+ },
903
+ "blimp_drop_argument": {
904
+ "task": "blimp_drop_argument",
905
+ "group": "blimp",
906
+ "dataset_path": "blimp",
907
+ "dataset_name": "drop_argument",
908
+ "validation_split": "train",
909
+ "doc_to_text": "",
910
+ "doc_to_target": 0,
911
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
912
+ "description": "",
913
+ "target_delimiter": " ",
914
+ "fewshot_delimiter": "\n\n",
915
+ "num_fewshot": 0,
916
+ "metric_list": [
917
+ {
918
+ "metric": "acc"
919
+ }
920
+ ],
921
+ "output_type": "multiple_choice",
922
+ "repeats": 1,
923
+ "should_decontaminate": true,
924
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
925
+ "metadata": {
926
+ "version": 1.0
927
+ }
928
+ },
929
+ "blimp_ellipsis_n_bar_1": {
930
+ "task": "blimp_ellipsis_n_bar_1",
931
+ "group": "blimp",
932
+ "dataset_path": "blimp",
933
+ "dataset_name": "ellipsis_n_bar_1",
934
+ "validation_split": "train",
935
+ "doc_to_text": "",
936
+ "doc_to_target": 0,
937
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
938
+ "description": "",
939
+ "target_delimiter": " ",
940
+ "fewshot_delimiter": "\n\n",
941
+ "num_fewshot": 0,
942
+ "metric_list": [
943
+ {
944
+ "metric": "acc"
945
+ }
946
+ ],
947
+ "output_type": "multiple_choice",
948
+ "repeats": 1,
949
+ "should_decontaminate": true,
950
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
951
+ "metadata": {
952
+ "version": 1.0
953
+ }
954
+ },
955
+ "blimp_ellipsis_n_bar_2": {
956
+ "task": "blimp_ellipsis_n_bar_2",
957
+ "group": "blimp",
958
+ "dataset_path": "blimp",
959
+ "dataset_name": "ellipsis_n_bar_2",
960
+ "validation_split": "train",
961
+ "doc_to_text": "",
962
+ "doc_to_target": 0,
963
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
964
+ "description": "",
965
+ "target_delimiter": " ",
966
+ "fewshot_delimiter": "\n\n",
967
+ "num_fewshot": 0,
968
+ "metric_list": [
969
+ {
970
+ "metric": "acc"
971
+ }
972
+ ],
973
+ "output_type": "multiple_choice",
974
+ "repeats": 1,
975
+ "should_decontaminate": true,
976
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
977
+ "metadata": {
978
+ "version": 1.0
979
+ }
980
+ },
981
+ "blimp_existential_there_object_raising": {
982
+ "task": "blimp_existential_there_object_raising",
983
+ "group": "blimp",
984
+ "dataset_path": "blimp",
985
+ "dataset_name": "existential_there_object_raising",
986
+ "validation_split": "train",
987
+ "doc_to_text": "",
988
+ "doc_to_target": 0,
989
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
990
+ "description": "",
991
+ "target_delimiter": " ",
992
+ "fewshot_delimiter": "\n\n",
993
+ "num_fewshot": 0,
994
+ "metric_list": [
995
+ {
996
+ "metric": "acc"
997
+ }
998
+ ],
999
+ "output_type": "multiple_choice",
1000
+ "repeats": 1,
1001
+ "should_decontaminate": true,
1002
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1003
+ "metadata": {
1004
+ "version": 1.0
1005
+ }
1006
+ },
1007
+ "blimp_existential_there_quantifiers_1": {
1008
+ "task": "blimp_existential_there_quantifiers_1",
1009
+ "group": "blimp",
1010
+ "dataset_path": "blimp",
1011
+ "dataset_name": "existential_there_quantifiers_1",
1012
+ "validation_split": "train",
1013
+ "doc_to_text": "",
1014
+ "doc_to_target": 0,
1015
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1016
+ "description": "",
1017
+ "target_delimiter": " ",
1018
+ "fewshot_delimiter": "\n\n",
1019
+ "num_fewshot": 0,
1020
+ "metric_list": [
1021
+ {
1022
+ "metric": "acc"
1023
+ }
1024
+ ],
1025
+ "output_type": "multiple_choice",
1026
+ "repeats": 1,
1027
+ "should_decontaminate": true,
1028
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1029
+ "metadata": {
1030
+ "version": 1.0
1031
+ }
1032
+ },
1033
+ "blimp_existential_there_quantifiers_2": {
1034
+ "task": "blimp_existential_there_quantifiers_2",
1035
+ "group": "blimp",
1036
+ "dataset_path": "blimp",
1037
+ "dataset_name": "existential_there_quantifiers_2",
1038
+ "validation_split": "train",
1039
+ "doc_to_text": "",
1040
+ "doc_to_target": 0,
1041
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1042
+ "description": "",
1043
+ "target_delimiter": " ",
1044
+ "fewshot_delimiter": "\n\n",
1045
+ "num_fewshot": 0,
1046
+ "metric_list": [
1047
+ {
1048
+ "metric": "acc"
1049
+ }
1050
+ ],
1051
+ "output_type": "multiple_choice",
1052
+ "repeats": 1,
1053
+ "should_decontaminate": true,
1054
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1055
+ "metadata": {
1056
+ "version": 1.0
1057
+ }
1058
+ },
1059
+ "blimp_existential_there_subject_raising": {
1060
+ "task": "blimp_existential_there_subject_raising",
1061
+ "group": "blimp",
1062
+ "dataset_path": "blimp",
1063
+ "dataset_name": "existential_there_subject_raising",
1064
+ "validation_split": "train",
1065
+ "doc_to_text": "",
1066
+ "doc_to_target": 0,
1067
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1068
+ "description": "",
1069
+ "target_delimiter": " ",
1070
+ "fewshot_delimiter": "\n\n",
1071
+ "num_fewshot": 0,
1072
+ "metric_list": [
1073
+ {
1074
+ "metric": "acc"
1075
+ }
1076
+ ],
1077
+ "output_type": "multiple_choice",
1078
+ "repeats": 1,
1079
+ "should_decontaminate": true,
1080
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1081
+ "metadata": {
1082
+ "version": 1.0
1083
+ }
1084
+ },
1085
+ "blimp_expletive_it_object_raising": {
1086
+ "task": "blimp_expletive_it_object_raising",
1087
+ "group": "blimp",
1088
+ "dataset_path": "blimp",
1089
+ "dataset_name": "expletive_it_object_raising",
1090
+ "validation_split": "train",
1091
+ "doc_to_text": "",
1092
+ "doc_to_target": 0,
1093
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1094
+ "description": "",
1095
+ "target_delimiter": " ",
1096
+ "fewshot_delimiter": "\n\n",
1097
+ "num_fewshot": 0,
1098
+ "metric_list": [
1099
+ {
1100
+ "metric": "acc"
1101
+ }
1102
+ ],
1103
+ "output_type": "multiple_choice",
1104
+ "repeats": 1,
1105
+ "should_decontaminate": true,
1106
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1107
+ "metadata": {
1108
+ "version": 1.0
1109
+ }
1110
+ },
1111
+ "blimp_inchoative": {
1112
+ "task": "blimp_inchoative",
1113
+ "group": "blimp",
1114
+ "dataset_path": "blimp",
1115
+ "dataset_name": "inchoative",
1116
+ "validation_split": "train",
1117
+ "doc_to_text": "",
1118
+ "doc_to_target": 0,
1119
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1120
+ "description": "",
1121
+ "target_delimiter": " ",
1122
+ "fewshot_delimiter": "\n\n",
1123
+ "num_fewshot": 0,
1124
+ "metric_list": [
1125
+ {
1126
+ "metric": "acc"
1127
+ }
1128
+ ],
1129
+ "output_type": "multiple_choice",
1130
+ "repeats": 1,
1131
+ "should_decontaminate": true,
1132
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1133
+ "metadata": {
1134
+ "version": 1.0
1135
+ }
1136
+ },
1137
+ "blimp_intransitive": {
1138
+ "task": "blimp_intransitive",
1139
+ "group": "blimp",
1140
+ "dataset_path": "blimp",
1141
+ "dataset_name": "intransitive",
1142
+ "validation_split": "train",
1143
+ "doc_to_text": "",
1144
+ "doc_to_target": 0,
1145
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1146
+ "description": "",
1147
+ "target_delimiter": " ",
1148
+ "fewshot_delimiter": "\n\n",
1149
+ "num_fewshot": 0,
1150
+ "metric_list": [
1151
+ {
1152
+ "metric": "acc"
1153
+ }
1154
+ ],
1155
+ "output_type": "multiple_choice",
1156
+ "repeats": 1,
1157
+ "should_decontaminate": true,
1158
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1159
+ "metadata": {
1160
+ "version": 1.0
1161
+ }
1162
+ },
1163
+ "blimp_irregular_past_participle_adjectives": {
1164
+ "task": "blimp_irregular_past_participle_adjectives",
1165
+ "group": "blimp",
1166
+ "dataset_path": "blimp",
1167
+ "dataset_name": "irregular_past_participle_adjectives",
1168
+ "validation_split": "train",
1169
+ "doc_to_text": "",
1170
+ "doc_to_target": 0,
1171
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1172
+ "description": "",
1173
+ "target_delimiter": " ",
1174
+ "fewshot_delimiter": "\n\n",
1175
+ "num_fewshot": 0,
1176
+ "metric_list": [
1177
+ {
1178
+ "metric": "acc"
1179
+ }
1180
+ ],
1181
+ "output_type": "multiple_choice",
1182
+ "repeats": 1,
1183
+ "should_decontaminate": true,
1184
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1185
+ "metadata": {
1186
+ "version": 1.0
1187
+ }
1188
+ },
1189
+ "blimp_irregular_past_participle_verbs": {
1190
+ "task": "blimp_irregular_past_participle_verbs",
1191
+ "group": "blimp",
1192
+ "dataset_path": "blimp",
1193
+ "dataset_name": "irregular_past_participle_verbs",
1194
+ "validation_split": "train",
1195
+ "doc_to_text": "",
1196
+ "doc_to_target": 0,
1197
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1198
+ "description": "",
1199
+ "target_delimiter": " ",
1200
+ "fewshot_delimiter": "\n\n",
1201
+ "num_fewshot": 0,
1202
+ "metric_list": [
1203
+ {
1204
+ "metric": "acc"
1205
+ }
1206
+ ],
1207
+ "output_type": "multiple_choice",
1208
+ "repeats": 1,
1209
+ "should_decontaminate": true,
1210
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1211
+ "metadata": {
1212
+ "version": 1.0
1213
+ }
1214
+ },
1215
+ "blimp_irregular_plural_subject_verb_agreement_1": {
1216
+ "task": "blimp_irregular_plural_subject_verb_agreement_1",
1217
+ "group": "blimp",
1218
+ "dataset_path": "blimp",
1219
+ "dataset_name": "irregular_plural_subject_verb_agreement_1",
1220
+ "validation_split": "train",
1221
+ "doc_to_text": "",
1222
+ "doc_to_target": 0,
1223
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1224
+ "description": "",
1225
+ "target_delimiter": " ",
1226
+ "fewshot_delimiter": "\n\n",
1227
+ "num_fewshot": 0,
1228
+ "metric_list": [
1229
+ {
1230
+ "metric": "acc"
1231
+ }
1232
+ ],
1233
+ "output_type": "multiple_choice",
1234
+ "repeats": 1,
1235
+ "should_decontaminate": true,
1236
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1237
+ "metadata": {
1238
+ "version": 1.0
1239
+ }
1240
+ },
1241
+ "blimp_irregular_plural_subject_verb_agreement_2": {
1242
+ "task": "blimp_irregular_plural_subject_verb_agreement_2",
1243
+ "group": "blimp",
1244
+ "dataset_path": "blimp",
1245
+ "dataset_name": "irregular_plural_subject_verb_agreement_2",
1246
+ "validation_split": "train",
1247
+ "doc_to_text": "",
1248
+ "doc_to_target": 0,
1249
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1250
+ "description": "",
1251
+ "target_delimiter": " ",
1252
+ "fewshot_delimiter": "\n\n",
1253
+ "num_fewshot": 0,
1254
+ "metric_list": [
1255
+ {
1256
+ "metric": "acc"
1257
+ }
1258
+ ],
1259
+ "output_type": "multiple_choice",
1260
+ "repeats": 1,
1261
+ "should_decontaminate": true,
1262
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1263
+ "metadata": {
1264
+ "version": 1.0
1265
+ }
1266
+ },
1267
+ "blimp_left_branch_island_echo_question": {
1268
+ "task": "blimp_left_branch_island_echo_question",
1269
+ "group": "blimp",
1270
+ "dataset_path": "blimp",
1271
+ "dataset_name": "left_branch_island_echo_question",
1272
+ "validation_split": "train",
1273
+ "doc_to_text": "",
1274
+ "doc_to_target": 0,
1275
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1276
+ "description": "",
1277
+ "target_delimiter": " ",
1278
+ "fewshot_delimiter": "\n\n",
1279
+ "num_fewshot": 0,
1280
+ "metric_list": [
1281
+ {
1282
+ "metric": "acc"
1283
+ }
1284
+ ],
1285
+ "output_type": "multiple_choice",
1286
+ "repeats": 1,
1287
+ "should_decontaminate": true,
1288
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1289
+ "metadata": {
1290
+ "version": 1.0
1291
+ }
1292
+ },
1293
+ "blimp_left_branch_island_simple_question": {
1294
+ "task": "blimp_left_branch_island_simple_question",
1295
+ "group": "blimp",
1296
+ "dataset_path": "blimp",
1297
+ "dataset_name": "left_branch_island_simple_question",
1298
+ "validation_split": "train",
1299
+ "doc_to_text": "",
1300
+ "doc_to_target": 0,
1301
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1302
+ "description": "",
1303
+ "target_delimiter": " ",
1304
+ "fewshot_delimiter": "\n\n",
1305
+ "num_fewshot": 0,
1306
+ "metric_list": [
1307
+ {
1308
+ "metric": "acc"
1309
+ }
1310
+ ],
1311
+ "output_type": "multiple_choice",
1312
+ "repeats": 1,
1313
+ "should_decontaminate": true,
1314
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1315
+ "metadata": {
1316
+ "version": 1.0
1317
+ }
1318
+ },
1319
+ "blimp_matrix_question_npi_licensor_present": {
1320
+ "task": "blimp_matrix_question_npi_licensor_present",
1321
+ "group": "blimp",
1322
+ "dataset_path": "blimp",
1323
+ "dataset_name": "matrix_question_npi_licensor_present",
1324
+ "validation_split": "train",
1325
+ "doc_to_text": "",
1326
+ "doc_to_target": 0,
1327
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1328
+ "description": "",
1329
+ "target_delimiter": " ",
1330
+ "fewshot_delimiter": "\n\n",
1331
+ "num_fewshot": 0,
1332
+ "metric_list": [
1333
+ {
1334
+ "metric": "acc"
1335
+ }
1336
+ ],
1337
+ "output_type": "multiple_choice",
1338
+ "repeats": 1,
1339
+ "should_decontaminate": true,
1340
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1341
+ "metadata": {
1342
+ "version": 1.0
1343
+ }
1344
+ },
1345
+ "blimp_npi_present_1": {
1346
+ "task": "blimp_npi_present_1",
1347
+ "group": "blimp",
1348
+ "dataset_path": "blimp",
1349
+ "dataset_name": "npi_present_1",
1350
+ "validation_split": "train",
1351
+ "doc_to_text": "",
1352
+ "doc_to_target": 0,
1353
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1354
+ "description": "",
1355
+ "target_delimiter": " ",
1356
+ "fewshot_delimiter": "\n\n",
1357
+ "num_fewshot": 0,
1358
+ "metric_list": [
1359
+ {
1360
+ "metric": "acc"
1361
+ }
1362
+ ],
1363
+ "output_type": "multiple_choice",
1364
+ "repeats": 1,
1365
+ "should_decontaminate": true,
1366
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1367
+ "metadata": {
1368
+ "version": 1.0
1369
+ }
1370
+ },
1371
+ "blimp_npi_present_2": {
1372
+ "task": "blimp_npi_present_2",
1373
+ "group": "blimp",
1374
+ "dataset_path": "blimp",
1375
+ "dataset_name": "npi_present_2",
1376
+ "validation_split": "train",
1377
+ "doc_to_text": "",
1378
+ "doc_to_target": 0,
1379
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1380
+ "description": "",
1381
+ "target_delimiter": " ",
1382
+ "fewshot_delimiter": "\n\n",
1383
+ "num_fewshot": 0,
1384
+ "metric_list": [
1385
+ {
1386
+ "metric": "acc"
1387
+ }
1388
+ ],
1389
+ "output_type": "multiple_choice",
1390
+ "repeats": 1,
1391
+ "should_decontaminate": true,
1392
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1393
+ "metadata": {
1394
+ "version": 1.0
1395
+ }
1396
+ },
1397
+ "blimp_only_npi_licensor_present": {
1398
+ "task": "blimp_only_npi_licensor_present",
1399
+ "group": "blimp",
1400
+ "dataset_path": "blimp",
1401
+ "dataset_name": "only_npi_licensor_present",
1402
+ "validation_split": "train",
1403
+ "doc_to_text": "",
1404
+ "doc_to_target": 0,
1405
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1406
+ "description": "",
1407
+ "target_delimiter": " ",
1408
+ "fewshot_delimiter": "\n\n",
1409
+ "num_fewshot": 0,
1410
+ "metric_list": [
1411
+ {
1412
+ "metric": "acc"
1413
+ }
1414
+ ],
1415
+ "output_type": "multiple_choice",
1416
+ "repeats": 1,
1417
+ "should_decontaminate": true,
1418
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1419
+ "metadata": {
1420
+ "version": 1.0
1421
+ }
1422
+ },
1423
+ "blimp_only_npi_scope": {
1424
+ "task": "blimp_only_npi_scope",
1425
+ "group": "blimp",
1426
+ "dataset_path": "blimp",
1427
+ "dataset_name": "only_npi_scope",
1428
+ "validation_split": "train",
1429
+ "doc_to_text": "",
1430
+ "doc_to_target": 0,
1431
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1432
+ "description": "",
1433
+ "target_delimiter": " ",
1434
+ "fewshot_delimiter": "\n\n",
1435
+ "num_fewshot": 0,
1436
+ "metric_list": [
1437
+ {
1438
+ "metric": "acc"
1439
+ }
1440
+ ],
1441
+ "output_type": "multiple_choice",
1442
+ "repeats": 1,
1443
+ "should_decontaminate": true,
1444
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1445
+ "metadata": {
1446
+ "version": 1.0
1447
+ }
1448
+ },
1449
+ "blimp_passive_1": {
1450
+ "task": "blimp_passive_1",
1451
+ "group": "blimp",
1452
+ "dataset_path": "blimp",
1453
+ "dataset_name": "passive_1",
1454
+ "validation_split": "train",
1455
+ "doc_to_text": "",
1456
+ "doc_to_target": 0,
1457
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1458
+ "description": "",
1459
+ "target_delimiter": " ",
1460
+ "fewshot_delimiter": "\n\n",
1461
+ "num_fewshot": 0,
1462
+ "metric_list": [
1463
+ {
1464
+ "metric": "acc"
1465
+ }
1466
+ ],
1467
+ "output_type": "multiple_choice",
1468
+ "repeats": 1,
1469
+ "should_decontaminate": true,
1470
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1471
+ "metadata": {
1472
+ "version": 1.0
1473
+ }
1474
+ },
1475
+ "blimp_passive_2": {
1476
+ "task": "blimp_passive_2",
1477
+ "group": "blimp",
1478
+ "dataset_path": "blimp",
1479
+ "dataset_name": "passive_2",
1480
+ "validation_split": "train",
1481
+ "doc_to_text": "",
1482
+ "doc_to_target": 0,
1483
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1484
+ "description": "",
1485
+ "target_delimiter": " ",
1486
+ "fewshot_delimiter": "\n\n",
1487
+ "num_fewshot": 0,
1488
+ "metric_list": [
1489
+ {
1490
+ "metric": "acc"
1491
+ }
1492
+ ],
1493
+ "output_type": "multiple_choice",
1494
+ "repeats": 1,
1495
+ "should_decontaminate": true,
1496
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1497
+ "metadata": {
1498
+ "version": 1.0
1499
+ }
1500
+ },
1501
+ "blimp_principle_A_c_command": {
1502
+ "task": "blimp_principle_A_c_command",
1503
+ "group": "blimp",
1504
+ "dataset_path": "blimp",
1505
+ "dataset_name": "principle_A_c_command",
1506
+ "validation_split": "train",
1507
+ "doc_to_text": "",
1508
+ "doc_to_target": 0,
1509
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1510
+ "description": "",
1511
+ "target_delimiter": " ",
1512
+ "fewshot_delimiter": "\n\n",
1513
+ "num_fewshot": 0,
1514
+ "metric_list": [
1515
+ {
1516
+ "metric": "acc"
1517
+ }
1518
+ ],
1519
+ "output_type": "multiple_choice",
1520
+ "repeats": 1,
1521
+ "should_decontaminate": true,
1522
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1523
+ "metadata": {
1524
+ "version": 1.0
1525
+ }
1526
+ },
1527
+ "blimp_principle_A_case_1": {
1528
+ "task": "blimp_principle_A_case_1",
1529
+ "group": "blimp",
1530
+ "dataset_path": "blimp",
1531
+ "dataset_name": "principle_A_case_1",
1532
+ "validation_split": "train",
1533
+ "doc_to_text": "",
1534
+ "doc_to_target": 0,
1535
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1536
+ "description": "",
1537
+ "target_delimiter": " ",
1538
+ "fewshot_delimiter": "\n\n",
1539
+ "num_fewshot": 0,
1540
+ "metric_list": [
1541
+ {
1542
+ "metric": "acc"
1543
+ }
1544
+ ],
1545
+ "output_type": "multiple_choice",
1546
+ "repeats": 1,
1547
+ "should_decontaminate": true,
1548
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1549
+ "metadata": {
1550
+ "version": 1.0
1551
+ }
1552
+ },
1553
+ "blimp_principle_A_case_2": {
1554
+ "task": "blimp_principle_A_case_2",
1555
+ "group": "blimp",
1556
+ "dataset_path": "blimp",
1557
+ "dataset_name": "principle_A_case_2",
1558
+ "validation_split": "train",
1559
+ "doc_to_text": "",
1560
+ "doc_to_target": 0,
1561
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1562
+ "description": "",
1563
+ "target_delimiter": " ",
1564
+ "fewshot_delimiter": "\n\n",
1565
+ "num_fewshot": 0,
1566
+ "metric_list": [
1567
+ {
1568
+ "metric": "acc"
1569
+ }
1570
+ ],
1571
+ "output_type": "multiple_choice",
1572
+ "repeats": 1,
1573
+ "should_decontaminate": true,
1574
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1575
+ "metadata": {
1576
+ "version": 1.0
1577
+ }
1578
+ },
1579
+ "blimp_principle_A_domain_1": {
1580
+ "task": "blimp_principle_A_domain_1",
1581
+ "group": "blimp",
1582
+ "dataset_path": "blimp",
1583
+ "dataset_name": "principle_A_domain_1",
1584
+ "validation_split": "train",
1585
+ "doc_to_text": "",
1586
+ "doc_to_target": 0,
1587
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1588
+ "description": "",
1589
+ "target_delimiter": " ",
1590
+ "fewshot_delimiter": "\n\n",
1591
+ "num_fewshot": 0,
1592
+ "metric_list": [
1593
+ {
1594
+ "metric": "acc"
1595
+ }
1596
+ ],
1597
+ "output_type": "multiple_choice",
1598
+ "repeats": 1,
1599
+ "should_decontaminate": true,
1600
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1601
+ "metadata": {
1602
+ "version": 1.0
1603
+ }
1604
+ },
1605
+ "blimp_principle_A_domain_2": {
1606
+ "task": "blimp_principle_A_domain_2",
1607
+ "group": "blimp",
1608
+ "dataset_path": "blimp",
1609
+ "dataset_name": "principle_A_domain_2",
1610
+ "validation_split": "train",
1611
+ "doc_to_text": "",
1612
+ "doc_to_target": 0,
1613
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1614
+ "description": "",
1615
+ "target_delimiter": " ",
1616
+ "fewshot_delimiter": "\n\n",
1617
+ "num_fewshot": 0,
1618
+ "metric_list": [
1619
+ {
1620
+ "metric": "acc"
1621
+ }
1622
+ ],
1623
+ "output_type": "multiple_choice",
1624
+ "repeats": 1,
1625
+ "should_decontaminate": true,
1626
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1627
+ "metadata": {
1628
+ "version": 1.0
1629
+ }
1630
+ },
1631
+ "blimp_principle_A_domain_3": {
1632
+ "task": "blimp_principle_A_domain_3",
1633
+ "group": "blimp",
1634
+ "dataset_path": "blimp",
1635
+ "dataset_name": "principle_A_domain_3",
1636
+ "validation_split": "train",
1637
+ "doc_to_text": "",
1638
+ "doc_to_target": 0,
1639
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1640
+ "description": "",
1641
+ "target_delimiter": " ",
1642
+ "fewshot_delimiter": "\n\n",
1643
+ "num_fewshot": 0,
1644
+ "metric_list": [
1645
+ {
1646
+ "metric": "acc"
1647
+ }
1648
+ ],
1649
+ "output_type": "multiple_choice",
1650
+ "repeats": 1,
1651
+ "should_decontaminate": true,
1652
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1653
+ "metadata": {
1654
+ "version": 1.0
1655
+ }
1656
+ },
1657
+ "blimp_principle_A_reconstruction": {
1658
+ "task": "blimp_principle_A_reconstruction",
1659
+ "group": "blimp",
1660
+ "dataset_path": "blimp",
1661
+ "dataset_name": "principle_A_reconstruction",
1662
+ "validation_split": "train",
1663
+ "doc_to_text": "",
1664
+ "doc_to_target": 0,
1665
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1666
+ "description": "",
1667
+ "target_delimiter": " ",
1668
+ "fewshot_delimiter": "\n\n",
1669
+ "num_fewshot": 0,
1670
+ "metric_list": [
1671
+ {
1672
+ "metric": "acc"
1673
+ }
1674
+ ],
1675
+ "output_type": "multiple_choice",
1676
+ "repeats": 1,
1677
+ "should_decontaminate": true,
1678
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1679
+ "metadata": {
1680
+ "version": 1.0
1681
+ }
1682
+ },
1683
+ "blimp_regular_plural_subject_verb_agreement_1": {
1684
+ "task": "blimp_regular_plural_subject_verb_agreement_1",
1685
+ "group": "blimp",
1686
+ "dataset_path": "blimp",
1687
+ "dataset_name": "regular_plural_subject_verb_agreement_1",
1688
+ "validation_split": "train",
1689
+ "doc_to_text": "",
1690
+ "doc_to_target": 0,
1691
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1692
+ "description": "",
1693
+ "target_delimiter": " ",
1694
+ "fewshot_delimiter": "\n\n",
1695
+ "num_fewshot": 0,
1696
+ "metric_list": [
1697
+ {
1698
+ "metric": "acc"
1699
+ }
1700
+ ],
1701
+ "output_type": "multiple_choice",
1702
+ "repeats": 1,
1703
+ "should_decontaminate": true,
1704
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1705
+ "metadata": {
1706
+ "version": 1.0
1707
+ }
1708
+ },
1709
+ "blimp_regular_plural_subject_verb_agreement_2": {
1710
+ "task": "blimp_regular_plural_subject_verb_agreement_2",
1711
+ "group": "blimp",
1712
+ "dataset_path": "blimp",
1713
+ "dataset_name": "regular_plural_subject_verb_agreement_2",
1714
+ "validation_split": "train",
1715
+ "doc_to_text": "",
1716
+ "doc_to_target": 0,
1717
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1718
+ "description": "",
1719
+ "target_delimiter": " ",
1720
+ "fewshot_delimiter": "\n\n",
1721
+ "num_fewshot": 0,
1722
+ "metric_list": [
1723
+ {
1724
+ "metric": "acc"
1725
+ }
1726
+ ],
1727
+ "output_type": "multiple_choice",
1728
+ "repeats": 1,
1729
+ "should_decontaminate": true,
1730
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1731
+ "metadata": {
1732
+ "version": 1.0
1733
+ }
1734
+ },
1735
+ "blimp_sentential_negation_npi_licensor_present": {
1736
+ "task": "blimp_sentential_negation_npi_licensor_present",
1737
+ "group": "blimp",
1738
+ "dataset_path": "blimp",
1739
+ "dataset_name": "sentential_negation_npi_licensor_present",
1740
+ "validation_split": "train",
1741
+ "doc_to_text": "",
1742
+ "doc_to_target": 0,
1743
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1744
+ "description": "",
1745
+ "target_delimiter": " ",
1746
+ "fewshot_delimiter": "\n\n",
1747
+ "num_fewshot": 0,
1748
+ "metric_list": [
1749
+ {
1750
+ "metric": "acc"
1751
+ }
1752
+ ],
1753
+ "output_type": "multiple_choice",
1754
+ "repeats": 1,
1755
+ "should_decontaminate": true,
1756
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1757
+ "metadata": {
1758
+ "version": 1.0
1759
+ }
1760
+ },
1761
+ "blimp_sentential_negation_npi_scope": {
1762
+ "task": "blimp_sentential_negation_npi_scope",
1763
+ "group": "blimp",
1764
+ "dataset_path": "blimp",
1765
+ "dataset_name": "sentential_negation_npi_scope",
1766
+ "validation_split": "train",
1767
+ "doc_to_text": "",
1768
+ "doc_to_target": 0,
1769
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1770
+ "description": "",
1771
+ "target_delimiter": " ",
1772
+ "fewshot_delimiter": "\n\n",
1773
+ "num_fewshot": 0,
1774
+ "metric_list": [
1775
+ {
1776
+ "metric": "acc"
1777
+ }
1778
+ ],
1779
+ "output_type": "multiple_choice",
1780
+ "repeats": 1,
1781
+ "should_decontaminate": true,
1782
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1783
+ "metadata": {
1784
+ "version": 1.0
1785
+ }
1786
+ },
1787
+ "blimp_sentential_subject_island": {
1788
+ "task": "blimp_sentential_subject_island",
1789
+ "group": "blimp",
1790
+ "dataset_path": "blimp",
1791
+ "dataset_name": "sentential_subject_island",
1792
+ "validation_split": "train",
1793
+ "doc_to_text": "",
1794
+ "doc_to_target": 0,
1795
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1796
+ "description": "",
1797
+ "target_delimiter": " ",
1798
+ "fewshot_delimiter": "\n\n",
1799
+ "num_fewshot": 0,
1800
+ "metric_list": [
1801
+ {
1802
+ "metric": "acc"
1803
+ }
1804
+ ],
1805
+ "output_type": "multiple_choice",
1806
+ "repeats": 1,
1807
+ "should_decontaminate": true,
1808
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1809
+ "metadata": {
1810
+ "version": 1.0
1811
+ }
1812
+ },
1813
+ "blimp_superlative_quantifiers_1": {
1814
+ "task": "blimp_superlative_quantifiers_1",
1815
+ "group": "blimp",
1816
+ "dataset_path": "blimp",
1817
+ "dataset_name": "superlative_quantifiers_1",
1818
+ "validation_split": "train",
1819
+ "doc_to_text": "",
1820
+ "doc_to_target": 0,
1821
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1822
+ "description": "",
1823
+ "target_delimiter": " ",
1824
+ "fewshot_delimiter": "\n\n",
1825
+ "num_fewshot": 0,
1826
+ "metric_list": [
1827
+ {
1828
+ "metric": "acc"
1829
+ }
1830
+ ],
1831
+ "output_type": "multiple_choice",
1832
+ "repeats": 1,
1833
+ "should_decontaminate": true,
1834
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1835
+ "metadata": {
1836
+ "version": 1.0
1837
+ }
1838
+ },
1839
+ "blimp_superlative_quantifiers_2": {
1840
+ "task": "blimp_superlative_quantifiers_2",
1841
+ "group": "blimp",
1842
+ "dataset_path": "blimp",
1843
+ "dataset_name": "superlative_quantifiers_2",
1844
+ "validation_split": "train",
1845
+ "doc_to_text": "",
1846
+ "doc_to_target": 0,
1847
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1848
+ "description": "",
1849
+ "target_delimiter": " ",
1850
+ "fewshot_delimiter": "\n\n",
1851
+ "num_fewshot": 0,
1852
+ "metric_list": [
1853
+ {
1854
+ "metric": "acc"
1855
+ }
1856
+ ],
1857
+ "output_type": "multiple_choice",
1858
+ "repeats": 1,
1859
+ "should_decontaminate": true,
1860
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1861
+ "metadata": {
1862
+ "version": 1.0
1863
+ }
1864
+ },
1865
+ "blimp_tough_vs_raising_1": {
1866
+ "task": "blimp_tough_vs_raising_1",
1867
+ "group": "blimp",
1868
+ "dataset_path": "blimp",
1869
+ "dataset_name": "tough_vs_raising_1",
1870
+ "validation_split": "train",
1871
+ "doc_to_text": "",
1872
+ "doc_to_target": 0,
1873
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1874
+ "description": "",
1875
+ "target_delimiter": " ",
1876
+ "fewshot_delimiter": "\n\n",
1877
+ "num_fewshot": 0,
1878
+ "metric_list": [
1879
+ {
1880
+ "metric": "acc"
1881
+ }
1882
+ ],
1883
+ "output_type": "multiple_choice",
1884
+ "repeats": 1,
1885
+ "should_decontaminate": true,
1886
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1887
+ "metadata": {
1888
+ "version": 1.0
1889
+ }
1890
+ },
1891
+ "blimp_tough_vs_raising_2": {
1892
+ "task": "blimp_tough_vs_raising_2",
1893
+ "group": "blimp",
1894
+ "dataset_path": "blimp",
1895
+ "dataset_name": "tough_vs_raising_2",
1896
+ "validation_split": "train",
1897
+ "doc_to_text": "",
1898
+ "doc_to_target": 0,
1899
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1900
+ "description": "",
1901
+ "target_delimiter": " ",
1902
+ "fewshot_delimiter": "\n\n",
1903
+ "num_fewshot": 0,
1904
+ "metric_list": [
1905
+ {
1906
+ "metric": "acc"
1907
+ }
1908
+ ],
1909
+ "output_type": "multiple_choice",
1910
+ "repeats": 1,
1911
+ "should_decontaminate": true,
1912
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1913
+ "metadata": {
1914
+ "version": 1.0
1915
+ }
1916
+ },
1917
+ "blimp_transitive": {
1918
+ "task": "blimp_transitive",
1919
+ "group": "blimp",
1920
+ "dataset_path": "blimp",
1921
+ "dataset_name": "transitive",
1922
+ "validation_split": "train",
1923
+ "doc_to_text": "",
1924
+ "doc_to_target": 0,
1925
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1926
+ "description": "",
1927
+ "target_delimiter": " ",
1928
+ "fewshot_delimiter": "\n\n",
1929
+ "num_fewshot": 0,
1930
+ "metric_list": [
1931
+ {
1932
+ "metric": "acc"
1933
+ }
1934
+ ],
1935
+ "output_type": "multiple_choice",
1936
+ "repeats": 1,
1937
+ "should_decontaminate": true,
1938
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1939
+ "metadata": {
1940
+ "version": 1.0
1941
+ }
1942
+ },
1943
+ "blimp_wh_island": {
1944
+ "task": "blimp_wh_island",
1945
+ "group": "blimp",
1946
+ "dataset_path": "blimp",
1947
+ "dataset_name": "wh_island",
1948
+ "validation_split": "train",
1949
+ "doc_to_text": "",
1950
+ "doc_to_target": 0,
1951
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1952
+ "description": "",
1953
+ "target_delimiter": " ",
1954
+ "fewshot_delimiter": "\n\n",
1955
+ "num_fewshot": 0,
1956
+ "metric_list": [
1957
+ {
1958
+ "metric": "acc"
1959
+ }
1960
+ ],
1961
+ "output_type": "multiple_choice",
1962
+ "repeats": 1,
1963
+ "should_decontaminate": true,
1964
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1965
+ "metadata": {
1966
+ "version": 1.0
1967
+ }
1968
+ },
1969
+ "blimp_wh_questions_object_gap": {
1970
+ "task": "blimp_wh_questions_object_gap",
1971
+ "group": "blimp",
1972
+ "dataset_path": "blimp",
1973
+ "dataset_name": "wh_questions_object_gap",
1974
+ "validation_split": "train",
1975
+ "doc_to_text": "",
1976
+ "doc_to_target": 0,
1977
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1978
+ "description": "",
1979
+ "target_delimiter": " ",
1980
+ "fewshot_delimiter": "\n\n",
1981
+ "num_fewshot": 0,
1982
+ "metric_list": [
1983
+ {
1984
+ "metric": "acc"
1985
+ }
1986
+ ],
1987
+ "output_type": "multiple_choice",
1988
+ "repeats": 1,
1989
+ "should_decontaminate": true,
1990
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1991
+ "metadata": {
1992
+ "version": 1.0
1993
+ }
1994
+ },
1995
+ "blimp_wh_questions_subject_gap": {
1996
+ "task": "blimp_wh_questions_subject_gap",
1997
+ "group": "blimp",
1998
+ "dataset_path": "blimp",
1999
+ "dataset_name": "wh_questions_subject_gap",
2000
+ "validation_split": "train",
2001
+ "doc_to_text": "",
2002
+ "doc_to_target": 0,
2003
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
2004
+ "description": "",
2005
+ "target_delimiter": " ",
2006
+ "fewshot_delimiter": "\n\n",
2007
+ "num_fewshot": 0,
2008
+ "metric_list": [
2009
+ {
2010
+ "metric": "acc"
2011
+ }
2012
+ ],
2013
+ "output_type": "multiple_choice",
2014
+ "repeats": 1,
2015
+ "should_decontaminate": true,
2016
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
2017
+ "metadata": {
2018
+ "version": 1.0
2019
+ }
2020
+ },
2021
+ "blimp_wh_questions_subject_gap_long_distance": {
2022
+ "task": "blimp_wh_questions_subject_gap_long_distance",
2023
+ "group": "blimp",
2024
+ "dataset_path": "blimp",
2025
+ "dataset_name": "wh_questions_subject_gap_long_distance",
2026
+ "validation_split": "train",
2027
+ "doc_to_text": "",
2028
+ "doc_to_target": 0,
2029
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
2030
+ "description": "",
2031
+ "target_delimiter": " ",
2032
+ "fewshot_delimiter": "\n\n",
2033
+ "num_fewshot": 0,
2034
+ "metric_list": [
2035
+ {
2036
+ "metric": "acc"
2037
+ }
2038
+ ],
2039
+ "output_type": "multiple_choice",
2040
+ "repeats": 1,
2041
+ "should_decontaminate": true,
2042
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
2043
+ "metadata": {
2044
+ "version": 1.0
2045
+ }
2046
+ },
2047
+ "blimp_wh_vs_that_no_gap": {
2048
+ "task": "blimp_wh_vs_that_no_gap",
2049
+ "group": "blimp",
2050
+ "dataset_path": "blimp",
2051
+ "dataset_name": "wh_vs_that_no_gap",
2052
+ "validation_split": "train",
2053
+ "doc_to_text": "",
2054
+ "doc_to_target": 0,
2055
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
2056
+ "description": "",
2057
+ "target_delimiter": " ",
2058
+ "fewshot_delimiter": "\n\n",
2059
+ "num_fewshot": 0,
2060
+ "metric_list": [
2061
+ {
2062
+ "metric": "acc"
2063
+ }
2064
+ ],
2065
+ "output_type": "multiple_choice",
2066
+ "repeats": 1,
2067
+ "should_decontaminate": true,
2068
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
2069
+ "metadata": {
2070
+ "version": 1.0
2071
+ }
2072
+ },
2073
+ "blimp_wh_vs_that_no_gap_long_distance": {
2074
+ "task": "blimp_wh_vs_that_no_gap_long_distance",
2075
+ "group": "blimp",
2076
+ "dataset_path": "blimp",
2077
+ "dataset_name": "wh_vs_that_no_gap_long_distance",
2078
+ "validation_split": "train",
2079
+ "doc_to_text": "",
2080
+ "doc_to_target": 0,
2081
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
2082
+ "description": "",
2083
+ "target_delimiter": " ",
2084
+ "fewshot_delimiter": "\n\n",
2085
+ "num_fewshot": 0,
2086
+ "metric_list": [
2087
+ {
2088
+ "metric": "acc"
2089
+ }
2090
+ ],
2091
+ "output_type": "multiple_choice",
2092
+ "repeats": 1,
2093
+ "should_decontaminate": true,
2094
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
2095
+ "metadata": {
2096
+ "version": 1.0
2097
+ }
2098
+ },
2099
+ "blimp_wh_vs_that_with_gap": {
2100
+ "task": "blimp_wh_vs_that_with_gap",
2101
+ "group": "blimp",
2102
+ "dataset_path": "blimp",
2103
+ "dataset_name": "wh_vs_that_with_gap",
2104
+ "validation_split": "train",
2105
+ "doc_to_text": "",
2106
+ "doc_to_target": 0,
2107
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
2108
+ "description": "",
2109
+ "target_delimiter": " ",
2110
+ "fewshot_delimiter": "\n\n",
2111
+ "num_fewshot": 0,
2112
+ "metric_list": [
2113
+ {
2114
+ "metric": "acc"
2115
+ }
2116
+ ],
2117
+ "output_type": "multiple_choice",
2118
+ "repeats": 1,
2119
+ "should_decontaminate": true,
2120
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
2121
+ "metadata": {
2122
+ "version": 1.0
2123
+ }
2124
+ },
2125
+ "blimp_wh_vs_that_with_gap_long_distance": {
2126
+ "task": "blimp_wh_vs_that_with_gap_long_distance",
2127
+ "group": "blimp",
2128
+ "dataset_path": "blimp",
2129
+ "dataset_name": "wh_vs_that_with_gap_long_distance",
2130
+ "validation_split": "train",
2131
+ "doc_to_text": "",
2132
+ "doc_to_target": 0,
2133
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
2134
+ "description": "",
2135
+ "target_delimiter": " ",
2136
+ "fewshot_delimiter": "\n\n",
2137
+ "num_fewshot": 0,
2138
+ "metric_list": [
2139
+ {
2140
+ "metric": "acc"
2141
+ }
2142
+ ],
2143
+ "output_type": "multiple_choice",
2144
+ "repeats": 1,
2145
+ "should_decontaminate": true,
2146
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
2147
+ "metadata": {
2148
+ "version": 1.0
2149
+ }
2150
+ }
2151
+ },
2152
+ "versions": {
2153
+ "blimp_adjunct_island": 1.0,
2154
+ "blimp_anaphor_gender_agreement": 1.0,
2155
+ "blimp_anaphor_number_agreement": 1.0,
2156
+ "blimp_animate_subject_passive": 1.0,
2157
+ "blimp_animate_subject_trans": 1.0,
2158
+ "blimp_causative": 1.0,
2159
+ "blimp_complex_NP_island": 1.0,
2160
+ "blimp_coordinate_structure_constraint_complex_left_branch": 1.0,
2161
+ "blimp_coordinate_structure_constraint_object_extraction": 1.0,
2162
+ "blimp_determiner_noun_agreement_1": 1.0,
2163
+ "blimp_determiner_noun_agreement_2": 1.0,
2164
+ "blimp_determiner_noun_agreement_irregular_1": 1.0,
2165
+ "blimp_determiner_noun_agreement_irregular_2": 1.0,
2166
+ "blimp_determiner_noun_agreement_with_adj_2": 1.0,
2167
+ "blimp_determiner_noun_agreement_with_adj_irregular_1": 1.0,
2168
+ "blimp_determiner_noun_agreement_with_adj_irregular_2": 1.0,
2169
+ "blimp_determiner_noun_agreement_with_adjective_1": 1.0,
2170
+ "blimp_distractor_agreement_relational_noun": 1.0,
2171
+ "blimp_distractor_agreement_relative_clause": 1.0,
2172
+ "blimp_drop_argument": 1.0,
2173
+ "blimp_ellipsis_n_bar_1": 1.0,
2174
+ "blimp_ellipsis_n_bar_2": 1.0,
2175
+ "blimp_existential_there_object_raising": 1.0,
2176
+ "blimp_existential_there_quantifiers_1": 1.0,
2177
+ "blimp_existential_there_quantifiers_2": 1.0,
2178
+ "blimp_existential_there_subject_raising": 1.0,
2179
+ "blimp_expletive_it_object_raising": 1.0,
2180
+ "blimp_inchoative": 1.0,
2181
+ "blimp_intransitive": 1.0,
2182
+ "blimp_irregular_past_participle_adjectives": 1.0,
2183
+ "blimp_irregular_past_participle_verbs": 1.0,
2184
+ "blimp_irregular_plural_subject_verb_agreement_1": 1.0,
2185
+ "blimp_irregular_plural_subject_verb_agreement_2": 1.0,
2186
+ "blimp_left_branch_island_echo_question": 1.0,
2187
+ "blimp_left_branch_island_simple_question": 1.0,
2188
+ "blimp_matrix_question_npi_licensor_present": 1.0,
2189
+ "blimp_npi_present_1": 1.0,
2190
+ "blimp_npi_present_2": 1.0,
2191
+ "blimp_only_npi_licensor_present": 1.0,
2192
+ "blimp_only_npi_scope": 1.0,
2193
+ "blimp_passive_1": 1.0,
2194
+ "blimp_passive_2": 1.0,
2195
+ "blimp_principle_A_c_command": 1.0,
2196
+ "blimp_principle_A_case_1": 1.0,
2197
+ "blimp_principle_A_case_2": 1.0,
2198
+ "blimp_principle_A_domain_1": 1.0,
2199
+ "blimp_principle_A_domain_2": 1.0,
2200
+ "blimp_principle_A_domain_3": 1.0,
2201
+ "blimp_principle_A_reconstruction": 1.0,
2202
+ "blimp_regular_plural_subject_verb_agreement_1": 1.0,
2203
+ "blimp_regular_plural_subject_verb_agreement_2": 1.0,
2204
+ "blimp_sentential_negation_npi_licensor_present": 1.0,
2205
+ "blimp_sentential_negation_npi_scope": 1.0,
2206
+ "blimp_sentential_subject_island": 1.0,
2207
+ "blimp_superlative_quantifiers_1": 1.0,
2208
+ "blimp_superlative_quantifiers_2": 1.0,
2209
+ "blimp_tough_vs_raising_1": 1.0,
2210
+ "blimp_tough_vs_raising_2": 1.0,
2211
+ "blimp_transitive": 1.0,
2212
+ "blimp_wh_island": 1.0,
2213
+ "blimp_wh_questions_object_gap": 1.0,
2214
+ "blimp_wh_questions_subject_gap": 1.0,
2215
+ "blimp_wh_questions_subject_gap_long_distance": 1.0,
2216
+ "blimp_wh_vs_that_no_gap": 1.0,
2217
+ "blimp_wh_vs_that_no_gap_long_distance": 1.0,
2218
+ "blimp_wh_vs_that_with_gap": 1.0,
2219
+ "blimp_wh_vs_that_with_gap_long_distance": 1.0
2220
+ },
2221
+ "n-shot": {
2222
+ "blimp_adjunct_island": 0,
2223
+ "blimp_anaphor_gender_agreement": 0,
2224
+ "blimp_anaphor_number_agreement": 0,
2225
+ "blimp_animate_subject_passive": 0,
2226
+ "blimp_animate_subject_trans": 0,
2227
+ "blimp_causative": 0,
2228
+ "blimp_complex_NP_island": 0,
2229
+ "blimp_coordinate_structure_constraint_complex_left_branch": 0,
2230
+ "blimp_coordinate_structure_constraint_object_extraction": 0,
2231
+ "blimp_determiner_noun_agreement_1": 0,
2232
+ "blimp_determiner_noun_agreement_2": 0,
2233
+ "blimp_determiner_noun_agreement_irregular_1": 0,
2234
+ "blimp_determiner_noun_agreement_irregular_2": 0,
2235
+ "blimp_determiner_noun_agreement_with_adj_2": 0,
2236
+ "blimp_determiner_noun_agreement_with_adj_irregular_1": 0,
2237
+ "blimp_determiner_noun_agreement_with_adj_irregular_2": 0,
2238
+ "blimp_determiner_noun_agreement_with_adjective_1": 0,
2239
+ "blimp_distractor_agreement_relational_noun": 0,
2240
+ "blimp_distractor_agreement_relative_clause": 0,
2241
+ "blimp_drop_argument": 0,
2242
+ "blimp_ellipsis_n_bar_1": 0,
2243
+ "blimp_ellipsis_n_bar_2": 0,
2244
+ "blimp_existential_there_object_raising": 0,
2245
+ "blimp_existential_there_quantifiers_1": 0,
2246
+ "blimp_existential_there_quantifiers_2": 0,
2247
+ "blimp_existential_there_subject_raising": 0,
2248
+ "blimp_expletive_it_object_raising": 0,
2249
+ "blimp_inchoative": 0,
2250
+ "blimp_intransitive": 0,
2251
+ "blimp_irregular_past_participle_adjectives": 0,
2252
+ "blimp_irregular_past_participle_verbs": 0,
2253
+ "blimp_irregular_plural_subject_verb_agreement_1": 0,
2254
+ "blimp_irregular_plural_subject_verb_agreement_2": 0,
2255
+ "blimp_left_branch_island_echo_question": 0,
2256
+ "blimp_left_branch_island_simple_question": 0,
2257
+ "blimp_matrix_question_npi_licensor_present": 0,
2258
+ "blimp_npi_present_1": 0,
2259
+ "blimp_npi_present_2": 0,
2260
+ "blimp_only_npi_licensor_present": 0,
2261
+ "blimp_only_npi_scope": 0,
2262
+ "blimp_passive_1": 0,
2263
+ "blimp_passive_2": 0,
2264
+ "blimp_principle_A_c_command": 0,
2265
+ "blimp_principle_A_case_1": 0,
2266
+ "blimp_principle_A_case_2": 0,
2267
+ "blimp_principle_A_domain_1": 0,
2268
+ "blimp_principle_A_domain_2": 0,
2269
+ "blimp_principle_A_domain_3": 0,
2270
+ "blimp_principle_A_reconstruction": 0,
2271
+ "blimp_regular_plural_subject_verb_agreement_1": 0,
2272
+ "blimp_regular_plural_subject_verb_agreement_2": 0,
2273
+ "blimp_sentential_negation_npi_licensor_present": 0,
2274
+ "blimp_sentential_negation_npi_scope": 0,
2275
+ "blimp_sentential_subject_island": 0,
2276
+ "blimp_superlative_quantifiers_1": 0,
2277
+ "blimp_superlative_quantifiers_2": 0,
2278
+ "blimp_tough_vs_raising_1": 0,
2279
+ "blimp_tough_vs_raising_2": 0,
2280
+ "blimp_transitive": 0,
2281
+ "blimp_wh_island": 0,
2282
+ "blimp_wh_questions_object_gap": 0,
2283
+ "blimp_wh_questions_subject_gap": 0,
2284
+ "blimp_wh_questions_subject_gap_long_distance": 0,
2285
+ "blimp_wh_vs_that_no_gap": 0,
2286
+ "blimp_wh_vs_that_no_gap_long_distance": 0,
2287
+ "blimp_wh_vs_that_with_gap": 0,
2288
+ "blimp_wh_vs_that_with_gap_long_distance": 0
2289
+ },
2290
+ "n-samples": {
2291
+ "blimp_wh_vs_that_with_gap_long_distance": {
2292
+ "original": 1000,
2293
+ "effective": 1000
2294
+ },
2295
+ "blimp_wh_vs_that_with_gap": {
2296
+ "original": 1000,
2297
+ "effective": 1000
2298
+ },
2299
+ "blimp_wh_vs_that_no_gap_long_distance": {
2300
+ "original": 1000,
2301
+ "effective": 1000
2302
+ },
2303
+ "blimp_wh_vs_that_no_gap": {
2304
+ "original": 1000,
2305
+ "effective": 1000
2306
+ },
2307
+ "blimp_wh_questions_subject_gap_long_distance": {
2308
+ "original": 1000,
2309
+ "effective": 1000
2310
+ },
2311
+ "blimp_wh_questions_subject_gap": {
2312
+ "original": 1000,
2313
+ "effective": 1000
2314
+ },
2315
+ "blimp_wh_questions_object_gap": {
2316
+ "original": 1000,
2317
+ "effective": 1000
2318
+ },
2319
+ "blimp_wh_island": {
2320
+ "original": 1000,
2321
+ "effective": 1000
2322
+ },
2323
+ "blimp_transitive": {
2324
+ "original": 1000,
2325
+ "effective": 1000
2326
+ },
2327
+ "blimp_tough_vs_raising_2": {
2328
+ "original": 1000,
2329
+ "effective": 1000
2330
+ },
2331
+ "blimp_tough_vs_raising_1": {
2332
+ "original": 1000,
2333
+ "effective": 1000
2334
+ },
2335
+ "blimp_superlative_quantifiers_2": {
2336
+ "original": 1000,
2337
+ "effective": 1000
2338
+ },
2339
+ "blimp_superlative_quantifiers_1": {
2340
+ "original": 1000,
2341
+ "effective": 1000
2342
+ },
2343
+ "blimp_sentential_subject_island": {
2344
+ "original": 1000,
2345
+ "effective": 1000
2346
+ },
2347
+ "blimp_sentential_negation_npi_scope": {
2348
+ "original": 1000,
2349
+ "effective": 1000
2350
+ },
2351
+ "blimp_sentential_negation_npi_licensor_present": {
2352
+ "original": 1000,
2353
+ "effective": 1000
2354
+ },
2355
+ "blimp_regular_plural_subject_verb_agreement_2": {
2356
+ "original": 1000,
2357
+ "effective": 1000
2358
+ },
2359
+ "blimp_regular_plural_subject_verb_agreement_1": {
2360
+ "original": 1000,
2361
+ "effective": 1000
2362
+ },
2363
+ "blimp_principle_A_reconstruction": {
2364
+ "original": 1000,
2365
+ "effective": 1000
2366
+ },
2367
+ "blimp_principle_A_domain_3": {
2368
+ "original": 1000,
2369
+ "effective": 1000
2370
+ },
2371
+ "blimp_principle_A_domain_2": {
2372
+ "original": 1000,
2373
+ "effective": 1000
2374
+ },
2375
+ "blimp_principle_A_domain_1": {
2376
+ "original": 1000,
2377
+ "effective": 1000
2378
+ },
2379
+ "blimp_principle_A_case_2": {
2380
+ "original": 1000,
2381
+ "effective": 1000
2382
+ },
2383
+ "blimp_principle_A_case_1": {
2384
+ "original": 1000,
2385
+ "effective": 1000
2386
+ },
2387
+ "blimp_principle_A_c_command": {
2388
+ "original": 1000,
2389
+ "effective": 1000
2390
+ },
2391
+ "blimp_passive_2": {
2392
+ "original": 1000,
2393
+ "effective": 1000
2394
+ },
2395
+ "blimp_passive_1": {
2396
+ "original": 1000,
2397
+ "effective": 1000
2398
+ },
2399
+ "blimp_only_npi_scope": {
2400
+ "original": 1000,
2401
+ "effective": 1000
2402
+ },
2403
+ "blimp_only_npi_licensor_present": {
2404
+ "original": 1000,
2405
+ "effective": 1000
2406
+ },
2407
+ "blimp_npi_present_2": {
2408
+ "original": 1000,
2409
+ "effective": 1000
2410
+ },
2411
+ "blimp_npi_present_1": {
2412
+ "original": 1000,
2413
+ "effective": 1000
2414
+ },
2415
+ "blimp_matrix_question_npi_licensor_present": {
2416
+ "original": 1000,
2417
+ "effective": 1000
2418
+ },
2419
+ "blimp_left_branch_island_simple_question": {
2420
+ "original": 1000,
2421
+ "effective": 1000
2422
+ },
2423
+ "blimp_left_branch_island_echo_question": {
2424
+ "original": 1000,
2425
+ "effective": 1000
2426
+ },
2427
+ "blimp_irregular_plural_subject_verb_agreement_2": {
2428
+ "original": 1000,
2429
+ "effective": 1000
2430
+ },
2431
+ "blimp_irregular_plural_subject_verb_agreement_1": {
2432
+ "original": 1000,
2433
+ "effective": 1000
2434
+ },
2435
+ "blimp_irregular_past_participle_verbs": {
2436
+ "original": 1000,
2437
+ "effective": 1000
2438
+ },
2439
+ "blimp_irregular_past_participle_adjectives": {
2440
+ "original": 1000,
2441
+ "effective": 1000
2442
+ },
2443
+ "blimp_intransitive": {
2444
+ "original": 1000,
2445
+ "effective": 1000
2446
+ },
2447
+ "blimp_inchoative": {
2448
+ "original": 1000,
2449
+ "effective": 1000
2450
+ },
2451
+ "blimp_expletive_it_object_raising": {
2452
+ "original": 1000,
2453
+ "effective": 1000
2454
+ },
2455
+ "blimp_existential_there_subject_raising": {
2456
+ "original": 1000,
2457
+ "effective": 1000
2458
+ },
2459
+ "blimp_existential_there_quantifiers_2": {
2460
+ "original": 1000,
2461
+ "effective": 1000
2462
+ },
2463
+ "blimp_existential_there_quantifiers_1": {
2464
+ "original": 1000,
2465
+ "effective": 1000
2466
+ },
2467
+ "blimp_existential_there_object_raising": {
2468
+ "original": 1000,
2469
+ "effective": 1000
2470
+ },
2471
+ "blimp_ellipsis_n_bar_2": {
2472
+ "original": 1000,
2473
+ "effective": 1000
2474
+ },
2475
+ "blimp_ellipsis_n_bar_1": {
2476
+ "original": 1000,
2477
+ "effective": 1000
2478
+ },
2479
+ "blimp_drop_argument": {
2480
+ "original": 1000,
2481
+ "effective": 1000
2482
+ },
2483
+ "blimp_distractor_agreement_relative_clause": {
2484
+ "original": 1000,
2485
+ "effective": 1000
2486
+ },
2487
+ "blimp_distractor_agreement_relational_noun": {
2488
+ "original": 1000,
2489
+ "effective": 1000
2490
+ },
2491
+ "blimp_determiner_noun_agreement_with_adjective_1": {
2492
+ "original": 1000,
2493
+ "effective": 1000
2494
+ },
2495
+ "blimp_determiner_noun_agreement_with_adj_irregular_2": {
2496
+ "original": 1000,
2497
+ "effective": 1000
2498
+ },
2499
+ "blimp_determiner_noun_agreement_with_adj_irregular_1": {
2500
+ "original": 1000,
2501
+ "effective": 1000
2502
+ },
2503
+ "blimp_determiner_noun_agreement_with_adj_2": {
2504
+ "original": 1000,
2505
+ "effective": 1000
2506
+ },
2507
+ "blimp_determiner_noun_agreement_irregular_2": {
2508
+ "original": 1000,
2509
+ "effective": 1000
2510
+ },
2511
+ "blimp_determiner_noun_agreement_irregular_1": {
2512
+ "original": 1000,
2513
+ "effective": 1000
2514
+ },
2515
+ "blimp_determiner_noun_agreement_2": {
2516
+ "original": 1000,
2517
+ "effective": 1000
2518
+ },
2519
+ "blimp_determiner_noun_agreement_1": {
2520
+ "original": 1000,
2521
+ "effective": 1000
2522
+ },
2523
+ "blimp_coordinate_structure_constraint_object_extraction": {
2524
+ "original": 1000,
2525
+ "effective": 1000
2526
+ },
2527
+ "blimp_coordinate_structure_constraint_complex_left_branch": {
2528
+ "original": 1000,
2529
+ "effective": 1000
2530
+ },
2531
+ "blimp_complex_NP_island": {
2532
+ "original": 1000,
2533
+ "effective": 1000
2534
+ },
2535
+ "blimp_causative": {
2536
+ "original": 1000,
2537
+ "effective": 1000
2538
+ },
2539
+ "blimp_animate_subject_trans": {
2540
+ "original": 1000,
2541
+ "effective": 1000
2542
+ },
2543
+ "blimp_animate_subject_passive": {
2544
+ "original": 1000,
2545
+ "effective": 1000
2546
+ },
2547
+ "blimp_anaphor_number_agreement": {
2548
+ "original": 1000,
2549
+ "effective": 1000
2550
+ },
2551
+ "blimp_anaphor_gender_agreement": {
2552
+ "original": 1000,
2553
+ "effective": 1000
2554
+ },
2555
+ "blimp_adjunct_island": {
2556
+ "original": 1000,
2557
+ "effective": 1000
2558
+ }
2559
+ },
2560
+ "config": {
2561
+ "model": "hf",
2562
+ "model_args": "pretrained=EleutherAI/pythia-14m-seed1,revision=step48000",
2563
+ "model_num_parameters": 14067712,
2564
+ "model_dtype": "torch.float16",
2565
+ "model_revision": "step48000",
2566
+ "model_sha": "7d4581d6f59c2b200ddab71790d132486e3e55bd",
2567
+ "batch_size": "1024",
2568
+ "batch_sizes": [],
2569
+ "device": "cuda",
2570
+ "use_cache": null,
2571
+ "limit": null,
2572
+ "bootstrap_iters": 100000,
2573
+ "gen_kwargs": null,
2574
+ "random_seed": 0,
2575
+ "numpy_seed": 1234,
2576
+ "torch_seed": 1234,
2577
+ "fewshot_seed": 1234
2578
+ },
2579
+ "git_hash": "51a7ca9",
2580
+ "date": 1724072863.4251223,
2581
+ "pretty_env_info": "PyTorch version: 2.3.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: CentOS Linux release 7.9.2009 (Core) (x86_64)\nGCC version: (GCC) 12.1.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.17\n\nPython version: 3.12.1 (main, Jan 12 2024, 16:49:08) [GCC 12.1.0] (64-bit runtime)\nPython platform: Linux-3.10.0-1160.119.1.el7.tuxcare.els2.x86_64-x86_64-with-glibc2.17\nIs CUDA available: True\nCUDA runtime version: 12.4.99\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: GPU 0: NVIDIA GeForce RTX 2080 Ti\nNvidia driver version: 550.90.07\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nByte Order: Little Endian\nCPU(s): 32\nOn-line CPU(s) list: 0-31\nThread(s) per core: 1\nCore(s) per socket: 32\nSocket(s): 1\nNUMA node(s): 2\nVendor ID: AuthenticAMD\nCPU family: 23\nModel: 49\nModel name: AMD EPYC 7502P 32-Core Processor\nStepping: 0\nCPU MHz: 1500.000\nCPU max MHz: 2500.0000\nCPU min MHz: 1500.0000\nBogoMIPS: 5000.08\nVirtualization: AMD-V\nL1d cache: 32K\nL1i cache: 32K\nL2 cache: 512K\nL3 cache: 16384K\nNUMA node0 CPU(s): 0-15\nNUMA node1 CPU(s): 16-31\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc art rep_good nopl nonstop_tsc extd_apicid aperfmperf eagerfpu pni pclmulqdq monitor ssse3 fma cx16 sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw ibs skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_l2 cpb cat_l3 cdp_l3 hw_pstate sme ssbd ibrs ibpb stibp vmmcall fsgsbase bmi1 avx2 smep bmi2 cqm rdt_a rdseed adx smap clflushopt clwb sha_ni xsaveopt xsavec xgetbv1 cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local clzero irperf xsaveerptr arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif umip overflow_recov succor smca\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.3.0\n[conda] Could not collect",
2582
+ "transformers_version": "4.40.2",
2583
+ "upper_git_hash": null,
2584
+ "task_hashes": {},
2585
+ "model_source": "hf",
2586
+ "model_name": "EleutherAI/pythia-14m-seed1",
2587
+ "model_name_sanitized": "EleutherAI__pythia-14m-seed1",
2588
+ "start_time": 1527573.119877638,
2589
+ "end_time": 1527899.689462757,
2590
+ "total_evaluation_time_seconds": "326.56958511890844"
2591
+ }
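The "config" block closing above records how this run was launched (the hf model backend, the pretrained/revision pair in model_args, batch size, device, and the fixed random/numpy/torch/fewshot seeds). As a minimal stdlib-only sketch, the metadata can be read back out of one of these results files like this; the filename below is a placeholder, not a path taken from this commit:

import json
from pathlib import Path

# Placeholder path: substitute any of the results_*.json files added in this commit.
path = Path("results.json")
data = json.loads(path.read_text())

cfg = data["config"]
print(cfg["model_args"])                 # e.g. "pretrained=...,revision=step48000"
print(cfg["batch_size"], cfg["device"])  # batch size is stored as a string, device as "cuda"
print(data["git_hash"], data["transformers_version"])  # harness commit and transformers version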
pythia-410m-seed1/step0/EleutherAI__pythia-410m-seed1/results_2024-08-19T09-28-17.109635.json ADDED
@@ -0,0 +1,96 @@
1
+ {
2
+ "results": {
3
+ "lambada_openai": {
4
+ "perplexity,none": 3629862.729873048,
5
+ "perplexity_stderr,none": 354942.65646939597,
6
+ "acc,none": 0.0,
7
+ "acc_stderr,none": 0.0,
8
+ "alias": "lambada_openai"
9
+ }
10
+ },
11
+ "group_subtasks": {
12
+ "lambada_openai": []
13
+ },
14
+ "configs": {
15
+ "lambada_openai": {
16
+ "task": "lambada_openai",
17
+ "group": [
18
+ "lambada"
19
+ ],
20
+ "dataset_path": "EleutherAI/lambada_openai",
21
+ "dataset_name": "default",
22
+ "dataset_kwargs": {
23
+ "trust_remote_code": true
24
+ },
25
+ "test_split": "test",
26
+ "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
27
+ "doc_to_target": "{{' '+text.split(' ')[-1]}}",
28
+ "description": "",
29
+ "target_delimiter": " ",
30
+ "fewshot_delimiter": "\n\n",
31
+ "num_fewshot": 0,
32
+ "metric_list": [
33
+ {
34
+ "metric": "perplexity",
35
+ "aggregation": "perplexity",
36
+ "higher_is_better": false
37
+ },
38
+ {
39
+ "metric": "acc",
40
+ "aggregation": "mean",
41
+ "higher_is_better": true
42
+ }
43
+ ],
44
+ "output_type": "loglikelihood",
45
+ "repeats": 1,
46
+ "should_decontaminate": true,
47
+ "doc_to_decontamination_query": "{{text}}",
48
+ "metadata": {
49
+ "version": 1.0
50
+ }
51
+ }
52
+ },
53
+ "versions": {
54
+ "lambada_openai": 1.0
55
+ },
56
+ "n-shot": {
57
+ "lambada_openai": 0
58
+ },
59
+ "n-samples": {
60
+ "lambada_openai": {
61
+ "original": 5153,
62
+ "effective": 5153
63
+ }
64
+ },
65
+ "config": {
66
+ "model": "hf",
67
+ "model_args": "pretrained=EleutherAI/pythia-410m-seed1,revision=step0,",
68
+ "model_num_parameters": 405334016,
69
+ "model_dtype": "torch.float16",
70
+ "model_revision": "step0",
71
+ "model_sha": "4aa96ad7344b88d2a3e190d48499e37770c43556",
72
+ "batch_size": "128",
73
+ "batch_sizes": [],
74
+ "device": "cuda",
75
+ "use_cache": null,
76
+ "limit": null,
77
+ "bootstrap_iters": 100000,
78
+ "gen_kwargs": null,
79
+ "random_seed": 0,
80
+ "numpy_seed": 1234,
81
+ "torch_seed": 1234,
82
+ "fewshot_seed": 1234
83
+ },
84
+ "git_hash": "51a7ca9",
85
+ "date": 1724084782.0195007,
86
+ "pretty_env_info": "PyTorch version: 2.3.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: CentOS Linux release 7.9.2009 (Core) (x86_64)\nGCC version: (GCC) 12.1.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.17\n\nPython version: 3.12.1 (main, Jan 12 2024, 16:49:08) [GCC 12.1.0] (64-bit runtime)\nPython platform: Linux-3.10.0-1160.119.1.el7.tuxcare.els2.x86_64-x86_64-with-glibc2.17\nIs CUDA available: True\nCUDA runtime version: 12.4.99\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: GPU 0: Tesla V100-PCIE-32GB\nNvidia driver version: 550.90.07\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nByte Order: Little Endian\nCPU(s): 24\nOn-line CPU(s) list: 0-23\nThread(s) per core: 1\nCore(s) per socket: 12\nSocket(s): 2\nNUMA node(s): 2\nVendor ID: GenuineIntel\nCPU family: 6\nModel: 85\nModel name: Intel(R) Xeon(R) Gold 5118 CPU @ 2.30GHz\nStepping: 4\nCPU MHz: 2673.693\nCPU max MHz: 3200.0000\nCPU min MHz: 1000.0000\nBogoMIPS: 4600.00\nVirtualization: VT-x\nL1d cache: 32K\nL1i cache: 32K\nL2 cache: 1024K\nL3 cache: 16896K\nNUMA node0 CPU(s): 0,2,4,6,8,10,12,14,16,18,20,22\nNUMA node1 CPU(s): 1,3,5,7,9,11,13,15,17,19,21,23\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx pdpe1gb rdtscp lm constant_tsc art arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc aperfmperf eagerfpu pni pclmulqdq dtes64 monitor ds_cpl vmx smx est tm2 ssse3 sdbg fma cx16 xtpr pdcm pcid dca sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand lahf_lm abm 3dnowprefetch epb cat_l3 cdp_l3 invpcid_single intel_ppin ssbd mba ibrs ibpb stibp tpr_shadow vnmi flexpriority ept vpid fsgsbase tsc_adjust bmi1 hle avx2 smep bmi2 erms invpcid rtm cqm mpx rdt_a avx512f avx512dq rdseed adx smap clflushopt clwb intel_pt avx512cd avx512bw avx512vl xsaveopt xsavec xgetbv1 cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local dtherm ida arat pln pts pku ospke md_clear spec_ctrl intel_stibp flush_l1d arch_capabilities\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.3.0\n[conda] Could not collect",
87
+ "transformers_version": "4.40.2",
88
+ "upper_git_hash": null,
89
+ "task_hashes": {},
90
+ "model_source": "hf",
91
+ "model_name": "EleutherAI/pythia-410m-seed1",
92
+ "model_name_sanitized": "EleutherAI__pythia-410m-seed1",
93
+ "start_time": 1464250.079157139,
94
+ "end_time": 1464383.64014923,
95
+ "total_evaluation_time_seconds": "133.56099209096283"
96
+ }
pythia-410m-seed1/step0/EleutherAI__pythia-410m-seed1/results_2024-08-19T15-14-05.109765.json ADDED
@@ -0,0 +1,96 @@
1
+ {
2
+ "results": {
3
+ "lambada_openai": {
4
+ "perplexity,none": 3629862.729873048,
5
+ "perplexity_stderr,none": 354942.65646939597,
6
+ "acc,none": 0.0,
7
+ "acc_stderr,none": 0.0,
8
+ "alias": "lambada_openai"
9
+ }
10
+ },
11
+ "group_subtasks": {
12
+ "lambada_openai": []
13
+ },
14
+ "configs": {
15
+ "lambada_openai": {
16
+ "task": "lambada_openai",
17
+ "group": [
18
+ "lambada"
19
+ ],
20
+ "dataset_path": "EleutherAI/lambada_openai",
21
+ "dataset_name": "default",
22
+ "dataset_kwargs": {
23
+ "trust_remote_code": true
24
+ },
25
+ "test_split": "test",
26
+ "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
27
+ "doc_to_target": "{{' '+text.split(' ')[-1]}}",
28
+ "description": "",
29
+ "target_delimiter": " ",
30
+ "fewshot_delimiter": "\n\n",
31
+ "num_fewshot": 0,
32
+ "metric_list": [
33
+ {
34
+ "metric": "perplexity",
35
+ "aggregation": "perplexity",
36
+ "higher_is_better": false
37
+ },
38
+ {
39
+ "metric": "acc",
40
+ "aggregation": "mean",
41
+ "higher_is_better": true
42
+ }
43
+ ],
44
+ "output_type": "loglikelihood",
45
+ "repeats": 1,
46
+ "should_decontaminate": true,
47
+ "doc_to_decontamination_query": "{{text}}",
48
+ "metadata": {
49
+ "version": 1.0
50
+ }
51
+ }
52
+ },
53
+ "versions": {
54
+ "lambada_openai": 1.0
55
+ },
56
+ "n-shot": {
57
+ "lambada_openai": 0
58
+ },
59
+ "n-samples": {
60
+ "lambada_openai": {
61
+ "original": 5153,
62
+ "effective": 5153
63
+ }
64
+ },
65
+ "config": {
66
+ "model": "hf",
67
+ "model_args": "pretrained=EleutherAI/pythia-410m-seed1,revision=step0",
68
+ "model_num_parameters": 405334016,
69
+ "model_dtype": "torch.float16",
70
+ "model_revision": "step0",
71
+ "model_sha": "4aa96ad7344b88d2a3e190d48499e37770c43556",
72
+ "batch_size": "128",
73
+ "batch_sizes": [],
74
+ "device": "cuda",
75
+ "use_cache": null,
76
+ "limit": null,
77
+ "bootstrap_iters": 100000,
78
+ "gen_kwargs": null,
79
+ "random_seed": 0,
80
+ "numpy_seed": 1234,
81
+ "torch_seed": 1234,
82
+ "fewshot_seed": 1234
83
+ },
84
+ "git_hash": "51a7ca9",
85
+ "date": 1724105530.2372417,
86
+ "pretty_env_info": "PyTorch version: 2.3.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: CentOS Linux release 7.9.2009 (Core) (x86_64)\nGCC version: (GCC) 12.1.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.17\n\nPython version: 3.12.1 (main, Jan 12 2024, 16:49:08) [GCC 12.1.0] (64-bit runtime)\nPython platform: Linux-3.10.0-1160.118.1.el7.x86_64-x86_64-with-glibc2.17\nIs CUDA available: True\nCUDA runtime version: 12.4.99\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: GPU 0: Tesla V100-PCIE-32GB\nNvidia driver version: 550.54.15\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nByte Order: Little Endian\nCPU(s): 24\nOn-line CPU(s) list: 0-23\nThread(s) per core: 1\nCore(s) per socket: 12\nSocket(s): 2\nNUMA node(s): 2\nVendor ID: GenuineIntel\nCPU family: 6\nModel: 85\nModel name: Intel(R) Xeon(R) Gold 5118 CPU @ 2.30GHz\nStepping: 4\nCPU MHz: 1016.217\nCPU max MHz: 3200.0000\nCPU min MHz: 1000.0000\nBogoMIPS: 4600.00\nVirtualization: VT-x\nL1d cache: 32K\nL1i cache: 32K\nL2 cache: 1024K\nL3 cache: 16896K\nNUMA node0 CPU(s): 0,2,4,6,8,10,12,14,16,18,20,22\nNUMA node1 CPU(s): 1,3,5,7,9,11,13,15,17,19,21,23\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx pdpe1gb rdtscp lm constant_tsc art arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc aperfmperf eagerfpu pni pclmulqdq dtes64 monitor ds_cpl vmx smx est tm2 ssse3 sdbg fma cx16 xtpr pdcm pcid dca sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand lahf_lm abm 3dnowprefetch epb cat_l3 cdp_l3 invpcid_single intel_ppin ssbd mba ibrs ibpb stibp tpr_shadow vnmi flexpriority ept vpid fsgsbase tsc_adjust bmi1 hle avx2 smep bmi2 erms invpcid rtm cqm mpx rdt_a avx512f avx512dq rdseed adx smap clflushopt clwb intel_pt avx512cd avx512bw avx512vl xsaveopt xsavec xgetbv1 cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local dtherm ida arat pln pts pku ospke md_clear spec_ctrl intel_stibp flush_l1d arch_capabilities\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.3.0\n[conda] Could not collect",
87
+ "transformers_version": "4.40.2",
88
+ "upper_git_hash": null,
89
+ "task_hashes": {},
90
+ "model_source": "hf",
91
+ "model_name": "EleutherAI/pythia-410m-seed1",
92
+ "model_name_sanitized": "EleutherAI__pythia-410m-seed1",
93
+ "start_time": 4382898.268185724,
94
+ "end_time": 4383023.673871659,
95
+ "total_evaluation_time_seconds": "125.40568593516946"
96
+ }
pythia-410m-seed1/step1/EleutherAI__pythia-410m-seed1/results_2024-08-19T09-30-29.878303.json ADDED
@@ -0,0 +1,96 @@
1
+ {
2
+ "results": {
3
+ "lambada_openai": {
4
+ "perplexity,none": 3629862.729873048,
5
+ "perplexity_stderr,none": 354942.65646939597,
6
+ "acc,none": 0.0,
7
+ "acc_stderr,none": 0.0,
8
+ "alias": "lambada_openai"
9
+ }
10
+ },
11
+ "group_subtasks": {
12
+ "lambada_openai": []
13
+ },
14
+ "configs": {
15
+ "lambada_openai": {
16
+ "task": "lambada_openai",
17
+ "group": [
18
+ "lambada"
19
+ ],
20
+ "dataset_path": "EleutherAI/lambada_openai",
21
+ "dataset_name": "default",
22
+ "dataset_kwargs": {
23
+ "trust_remote_code": true
24
+ },
25
+ "test_split": "test",
26
+ "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
27
+ "doc_to_target": "{{' '+text.split(' ')[-1]}}",
28
+ "description": "",
29
+ "target_delimiter": " ",
30
+ "fewshot_delimiter": "\n\n",
31
+ "num_fewshot": 0,
32
+ "metric_list": [
33
+ {
34
+ "metric": "perplexity",
35
+ "aggregation": "perplexity",
36
+ "higher_is_better": false
37
+ },
38
+ {
39
+ "metric": "acc",
40
+ "aggregation": "mean",
41
+ "higher_is_better": true
42
+ }
43
+ ],
44
+ "output_type": "loglikelihood",
45
+ "repeats": 1,
46
+ "should_decontaminate": true,
47
+ "doc_to_decontamination_query": "{{text}}",
48
+ "metadata": {
49
+ "version": 1.0
50
+ }
51
+ }
52
+ },
53
+ "versions": {
54
+ "lambada_openai": 1.0
55
+ },
56
+ "n-shot": {
57
+ "lambada_openai": 0
58
+ },
59
+ "n-samples": {
60
+ "lambada_openai": {
61
+ "original": 5153,
62
+ "effective": 5153
63
+ }
64
+ },
65
+ "config": {
66
+ "model": "hf",
67
+ "model_args": "pretrained=EleutherAI/pythia-410m-seed1,revision=step1,",
68
+ "model_num_parameters": 405334016,
69
+ "model_dtype": "torch.float16",
70
+ "model_revision": "step1",
71
+ "model_sha": "1bae1d6b538312de3555a8d21566da2925d019e5",
72
+ "batch_size": "128",
73
+ "batch_sizes": [],
74
+ "device": "cuda",
75
+ "use_cache": null,
76
+ "limit": null,
77
+ "bootstrap_iters": 100000,
78
+ "gen_kwargs": null,
79
+ "random_seed": 0,
80
+ "numpy_seed": 1234,
81
+ "torch_seed": 1234,
82
+ "fewshot_seed": 1234
83
+ },
84
+ "git_hash": "51a7ca9",
85
+ "date": 1724084921.2877703,
86
+ "pretty_env_info": "PyTorch version: 2.3.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: CentOS Linux release 7.9.2009 (Core) (x86_64)\nGCC version: (GCC) 12.1.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.17\n\nPython version: 3.12.1 (main, Jan 12 2024, 16:49:08) [GCC 12.1.0] (64-bit runtime)\nPython platform: Linux-3.10.0-1160.119.1.el7.tuxcare.els2.x86_64-x86_64-with-glibc2.17\nIs CUDA available: True\nCUDA runtime version: 12.4.99\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: GPU 0: Tesla V100-PCIE-32GB\nNvidia driver version: 550.90.07\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nByte Order: Little Endian\nCPU(s): 24\nOn-line CPU(s) list: 0-23\nThread(s) per core: 1\nCore(s) per socket: 12\nSocket(s): 2\nNUMA node(s): 2\nVendor ID: GenuineIntel\nCPU family: 6\nModel: 85\nModel name: Intel(R) Xeon(R) Gold 5118 CPU @ 2.30GHz\nStepping: 4\nCPU MHz: 2989.550\nCPU max MHz: 3200.0000\nCPU min MHz: 1000.0000\nBogoMIPS: 4600.00\nVirtualization: VT-x\nL1d cache: 32K\nL1i cache: 32K\nL2 cache: 1024K\nL3 cache: 16896K\nNUMA node0 CPU(s): 0,2,4,6,8,10,12,14,16,18,20,22\nNUMA node1 CPU(s): 1,3,5,7,9,11,13,15,17,19,21,23\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx pdpe1gb rdtscp lm constant_tsc art arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc aperfmperf eagerfpu pni pclmulqdq dtes64 monitor ds_cpl vmx smx est tm2 ssse3 sdbg fma cx16 xtpr pdcm pcid dca sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand lahf_lm abm 3dnowprefetch epb cat_l3 cdp_l3 invpcid_single intel_ppin ssbd mba ibrs ibpb stibp tpr_shadow vnmi flexpriority ept vpid fsgsbase tsc_adjust bmi1 hle avx2 smep bmi2 erms invpcid rtm cqm mpx rdt_a avx512f avx512dq rdseed adx smap clflushopt clwb intel_pt avx512cd avx512bw avx512vl xsaveopt xsavec xgetbv1 cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local dtherm ida arat pln pts pku ospke md_clear spec_ctrl intel_stibp flush_l1d arch_capabilities\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.3.0\n[conda] Could not collect",
87
+ "transformers_version": "4.40.2",
88
+ "upper_git_hash": null,
89
+ "task_hashes": {},
90
+ "model_source": "hf",
91
+ "model_name": "EleutherAI/pythia-410m-seed1",
92
+ "model_name_sanitized": "EleutherAI__pythia-410m-seed1",
93
+ "start_time": 1464393.247957212,
94
+ "end_time": 1464516.410960305,
95
+ "total_evaluation_time_seconds": "123.16300309309736"
96
+ }
pythia-410m-seed1/step1/EleutherAI__pythia-410m-seed1/results_2024-08-19T15-16-13.141132.json ADDED
@@ -0,0 +1,96 @@
1
+ {
2
+ "results": {
3
+ "lambada_openai": {
4
+ "perplexity,none": 3629862.729873048,
5
+ "perplexity_stderr,none": 354942.65646939597,
6
+ "acc,none": 0.0,
7
+ "acc_stderr,none": 0.0,
8
+ "alias": "lambada_openai"
9
+ }
10
+ },
11
+ "group_subtasks": {
12
+ "lambada_openai": []
13
+ },
14
+ "configs": {
15
+ "lambada_openai": {
16
+ "task": "lambada_openai",
17
+ "group": [
18
+ "lambada"
19
+ ],
20
+ "dataset_path": "EleutherAI/lambada_openai",
21
+ "dataset_name": "default",
22
+ "dataset_kwargs": {
23
+ "trust_remote_code": true
24
+ },
25
+ "test_split": "test",
26
+ "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
27
+ "doc_to_target": "{{' '+text.split(' ')[-1]}}",
28
+ "description": "",
29
+ "target_delimiter": " ",
30
+ "fewshot_delimiter": "\n\n",
31
+ "num_fewshot": 0,
32
+ "metric_list": [
33
+ {
34
+ "metric": "perplexity",
35
+ "aggregation": "perplexity",
36
+ "higher_is_better": false
37
+ },
38
+ {
39
+ "metric": "acc",
40
+ "aggregation": "mean",
41
+ "higher_is_better": true
42
+ }
43
+ ],
44
+ "output_type": "loglikelihood",
45
+ "repeats": 1,
46
+ "should_decontaminate": true,
47
+ "doc_to_decontamination_query": "{{text}}",
48
+ "metadata": {
49
+ "version": 1.0
50
+ }
51
+ }
52
+ },
53
+ "versions": {
54
+ "lambada_openai": 1.0
55
+ },
56
+ "n-shot": {
57
+ "lambada_openai": 0
58
+ },
59
+ "n-samples": {
60
+ "lambada_openai": {
61
+ "original": 5153,
62
+ "effective": 5153
63
+ }
64
+ },
65
+ "config": {
66
+ "model": "hf",
67
+ "model_args": "pretrained=EleutherAI/pythia-410m-seed1,revision=step1",
68
+ "model_num_parameters": 405334016,
69
+ "model_dtype": "torch.float16",
70
+ "model_revision": "step1",
71
+ "model_sha": "1bae1d6b538312de3555a8d21566da2925d019e5",
72
+ "batch_size": "128",
73
+ "batch_sizes": [],
74
+ "device": "cuda",
75
+ "use_cache": null,
76
+ "limit": null,
77
+ "bootstrap_iters": 100000,
78
+ "gen_kwargs": null,
79
+ "random_seed": 0,
80
+ "numpy_seed": 1234,
81
+ "torch_seed": 1234,
82
+ "fewshot_seed": 1234
83
+ },
84
+ "git_hash": "51a7ca9",
85
+ "date": 1724105662.608963,
86
+ "pretty_env_info": "PyTorch version: 2.3.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: CentOS Linux release 7.9.2009 (Core) (x86_64)\nGCC version: (GCC) 12.1.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.17\n\nPython version: 3.12.1 (main, Jan 12 2024, 16:49:08) [GCC 12.1.0] (64-bit runtime)\nPython platform: Linux-3.10.0-1160.118.1.el7.x86_64-x86_64-with-glibc2.17\nIs CUDA available: True\nCUDA runtime version: 12.4.99\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: GPU 0: Tesla V100-PCIE-32GB\nNvidia driver version: 550.54.15\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nByte Order: Little Endian\nCPU(s): 24\nOn-line CPU(s) list: 0-23\nThread(s) per core: 1\nCore(s) per socket: 12\nSocket(s): 2\nNUMA node(s): 2\nVendor ID: GenuineIntel\nCPU family: 6\nModel: 85\nModel name: Intel(R) Xeon(R) Gold 5118 CPU @ 2.30GHz\nStepping: 4\nCPU MHz: 1093.426\nCPU max MHz: 3200.0000\nCPU min MHz: 1000.0000\nBogoMIPS: 4600.00\nVirtualization: VT-x\nL1d cache: 32K\nL1i cache: 32K\nL2 cache: 1024K\nL3 cache: 16896K\nNUMA node0 CPU(s): 0,2,4,6,8,10,12,14,16,18,20,22\nNUMA node1 CPU(s): 1,3,5,7,9,11,13,15,17,19,21,23\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx pdpe1gb rdtscp lm constant_tsc art arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc aperfmperf eagerfpu pni pclmulqdq dtes64 monitor ds_cpl vmx smx est tm2 ssse3 sdbg fma cx16 xtpr pdcm pcid dca sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand lahf_lm abm 3dnowprefetch epb cat_l3 cdp_l3 invpcid_single intel_ppin ssbd mba ibrs ibpb stibp tpr_shadow vnmi flexpriority ept vpid fsgsbase tsc_adjust bmi1 hle avx2 smep bmi2 erms invpcid rtm cqm mpx rdt_a avx512f avx512dq rdseed adx smap clflushopt clwb intel_pt avx512cd avx512bw avx512vl xsaveopt xsavec xgetbv1 cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local dtherm ida arat pln pts pku ospke md_clear spec_ctrl intel_stibp flush_l1d arch_capabilities\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.3.0\n[conda] Could not collect",
87
+ "transformers_version": "4.40.2",
88
+ "upper_git_hash": null,
89
+ "task_hashes": {},
90
+ "model_source": "hf",
91
+ "model_name": "EleutherAI/pythia-410m-seed1",
92
+ "model_name_sanitized": "EleutherAI__pythia-410m-seed1",
93
+ "start_time": 4383031.63382091,
94
+ "end_time": 4383151.707558739,
95
+ "total_evaluation_time_seconds": "120.07373782899231"
96
+ }
pythia-410m-seed1/step1000/EleutherAI__pythia-410m-seed1/results_2024-08-19T09-52-40.338990.json ADDED
@@ -0,0 +1,96 @@
1
+ {
2
+ "results": {
3
+ "lambada_openai": {
4
+ "perplexity,none": 5080.824365419011,
5
+ "perplexity_stderr,none": 195.43070132414059,
6
+ "acc,none": 0.020570541432175432,
7
+ "acc_stderr,none": 0.0019775228725465105,
8
+ "alias": "lambada_openai"
9
+ }
10
+ },
11
+ "group_subtasks": {
12
+ "lambada_openai": []
13
+ },
14
+ "configs": {
15
+ "lambada_openai": {
16
+ "task": "lambada_openai",
17
+ "group": [
18
+ "lambada"
19
+ ],
20
+ "dataset_path": "EleutherAI/lambada_openai",
21
+ "dataset_name": "default",
22
+ "dataset_kwargs": {
23
+ "trust_remote_code": true
24
+ },
25
+ "test_split": "test",
26
+ "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
27
+ "doc_to_target": "{{' '+text.split(' ')[-1]}}",
28
+ "description": "",
29
+ "target_delimiter": " ",
30
+ "fewshot_delimiter": "\n\n",
31
+ "num_fewshot": 0,
32
+ "metric_list": [
33
+ {
34
+ "metric": "perplexity",
35
+ "aggregation": "perplexity",
36
+ "higher_is_better": false
37
+ },
38
+ {
39
+ "metric": "acc",
40
+ "aggregation": "mean",
41
+ "higher_is_better": true
42
+ }
43
+ ],
44
+ "output_type": "loglikelihood",
45
+ "repeats": 1,
46
+ "should_decontaminate": true,
47
+ "doc_to_decontamination_query": "{{text}}",
48
+ "metadata": {
49
+ "version": 1.0
50
+ }
51
+ }
52
+ },
53
+ "versions": {
54
+ "lambada_openai": 1.0
55
+ },
56
+ "n-shot": {
57
+ "lambada_openai": 0
58
+ },
59
+ "n-samples": {
60
+ "lambada_openai": {
61
+ "original": 5153,
62
+ "effective": 5153
63
+ }
64
+ },
65
+ "config": {
66
+ "model": "hf",
67
+ "model_args": "pretrained=EleutherAI/pythia-410m-seed1,revision=step1000,",
68
+ "model_num_parameters": 405334016,
69
+ "model_dtype": "torch.float16",
70
+ "model_revision": "step1000",
71
+ "model_sha": "7e02c7e50950e28b99125696eaa1fe08078251c5",
72
+ "batch_size": "128",
73
+ "batch_sizes": [],
74
+ "device": "cuda",
75
+ "use_cache": null,
76
+ "limit": null,
77
+ "bootstrap_iters": 100000,
78
+ "gen_kwargs": null,
79
+ "random_seed": 0,
80
+ "numpy_seed": 1234,
81
+ "torch_seed": 1234,
82
+ "fewshot_seed": 1234
83
+ },
84
+ "git_hash": "51a7ca9",
85
+ "date": 1724086255.0255961,
86
+ "pretty_env_info": "PyTorch version: 2.3.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: CentOS Linux release 7.9.2009 (Core) (x86_64)\nGCC version: (GCC) 12.1.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.17\n\nPython version: 3.12.1 (main, Jan 12 2024, 16:49:08) [GCC 12.1.0] (64-bit runtime)\nPython platform: Linux-3.10.0-1160.119.1.el7.tuxcare.els2.x86_64-x86_64-with-glibc2.17\nIs CUDA available: True\nCUDA runtime version: 12.4.99\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: GPU 0: Tesla V100-PCIE-32GB\nNvidia driver version: 550.90.07\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nByte Order: Little Endian\nCPU(s): 24\nOn-line CPU(s) list: 0-23\nThread(s) per core: 1\nCore(s) per socket: 12\nSocket(s): 2\nNUMA node(s): 2\nVendor ID: GenuineIntel\nCPU family: 6\nModel: 85\nModel name: Intel(R) Xeon(R) Gold 5118 CPU @ 2.30GHz\nStepping: 4\nCPU MHz: 2829.095\nCPU max MHz: 3200.0000\nCPU min MHz: 1000.0000\nBogoMIPS: 4600.00\nVirtualization: VT-x\nL1d cache: 32K\nL1i cache: 32K\nL2 cache: 1024K\nL3 cache: 16896K\nNUMA node0 CPU(s): 0,2,4,6,8,10,12,14,16,18,20,22\nNUMA node1 CPU(s): 1,3,5,7,9,11,13,15,17,19,21,23\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx pdpe1gb rdtscp lm constant_tsc art arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc aperfmperf eagerfpu pni pclmulqdq dtes64 monitor ds_cpl vmx smx est tm2 ssse3 sdbg fma cx16 xtpr pdcm pcid dca sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand lahf_lm abm 3dnowprefetch epb cat_l3 cdp_l3 invpcid_single intel_ppin ssbd mba ibrs ibpb stibp tpr_shadow vnmi flexpriority ept vpid fsgsbase tsc_adjust bmi1 hle avx2 smep bmi2 erms invpcid rtm cqm mpx rdt_a avx512f avx512dq rdseed adx smap clflushopt clwb intel_pt avx512cd avx512bw avx512vl xsaveopt xsavec xgetbv1 cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local dtherm ida arat pln pts pku ospke md_clear spec_ctrl intel_stibp flush_l1d arch_capabilities\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.3.0\n[conda] Could not collect",
87
+ "transformers_version": "4.40.2",
88
+ "upper_git_hash": null,
89
+ "task_hashes": {},
90
+ "model_source": "hf",
91
+ "model_name": "EleutherAI/pythia-410m-seed1",
92
+ "model_name_sanitized": "EleutherAI__pythia-410m-seed1",
93
+ "start_time": 1465725.398089807,
94
+ "end_time": 1465846.871523291,
95
+ "total_evaluation_time_seconds": "121.47343348409049"
96
+ }
pythia-410m-seed1/step1000/EleutherAI__pythia-410m-seed1/results_2024-08-19T15-35-06.794346.json ADDED
@@ -0,0 +1,96 @@
1
+ {
2
+ "results": {
3
+ "lambada_openai": {
4
+ "perplexity,none": 5080.824365419011,
5
+ "perplexity_stderr,none": 195.43070132414059,
6
+ "acc,none": 0.020570541432175432,
7
+ "acc_stderr,none": 0.0019775228725465105,
8
+ "alias": "lambada_openai"
9
+ }
10
+ },
11
+ "group_subtasks": {
12
+ "lambada_openai": []
13
+ },
14
+ "configs": {
15
+ "lambada_openai": {
16
+ "task": "lambada_openai",
17
+ "group": [
18
+ "lambada"
19
+ ],
20
+ "dataset_path": "EleutherAI/lambada_openai",
21
+ "dataset_name": "default",
22
+ "dataset_kwargs": {
23
+ "trust_remote_code": true
24
+ },
25
+ "test_split": "test",
26
+ "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
27
+ "doc_to_target": "{{' '+text.split(' ')[-1]}}",
28
+ "description": "",
29
+ "target_delimiter": " ",
30
+ "fewshot_delimiter": "\n\n",
31
+ "num_fewshot": 0,
32
+ "metric_list": [
33
+ {
34
+ "metric": "perplexity",
35
+ "aggregation": "perplexity",
36
+ "higher_is_better": false
37
+ },
38
+ {
39
+ "metric": "acc",
40
+ "aggregation": "mean",
41
+ "higher_is_better": true
42
+ }
43
+ ],
44
+ "output_type": "loglikelihood",
45
+ "repeats": 1,
46
+ "should_decontaminate": true,
47
+ "doc_to_decontamination_query": "{{text}}",
48
+ "metadata": {
49
+ "version": 1.0
50
+ }
51
+ }
52
+ },
53
+ "versions": {
54
+ "lambada_openai": 1.0
55
+ },
56
+ "n-shot": {
57
+ "lambada_openai": 0
58
+ },
59
+ "n-samples": {
60
+ "lambada_openai": {
61
+ "original": 5153,
62
+ "effective": 5153
63
+ }
64
+ },
65
+ "config": {
66
+ "model": "hf",
67
+ "model_args": "pretrained=EleutherAI/pythia-410m-seed1,revision=step1000",
68
+ "model_num_parameters": 405334016,
69
+ "model_dtype": "torch.float16",
70
+ "model_revision": "step1000",
71
+ "model_sha": "7e02c7e50950e28b99125696eaa1fe08078251c5",
72
+ "batch_size": "128",
73
+ "batch_sizes": [],
74
+ "device": "cuda",
75
+ "use_cache": null,
76
+ "limit": null,
77
+ "bootstrap_iters": 100000,
78
+ "gen_kwargs": null,
79
+ "random_seed": 0,
80
+ "numpy_seed": 1234,
81
+ "torch_seed": 1234,
82
+ "fewshot_seed": 1234
83
+ },
84
+ "git_hash": "51a7ca9",
85
+ "date": 1724106797.818584,
86
+ "pretty_env_info": "PyTorch version: 2.3.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: CentOS Linux release 7.9.2009 (Core) (x86_64)\nGCC version: (GCC) 12.1.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.17\n\nPython version: 3.12.1 (main, Jan 12 2024, 16:49:08) [GCC 12.1.0] (64-bit runtime)\nPython platform: Linux-3.10.0-1160.118.1.el7.x86_64-x86_64-with-glibc2.17\nIs CUDA available: True\nCUDA runtime version: 12.4.99\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: GPU 0: Tesla V100-PCIE-32GB\nNvidia driver version: 550.54.15\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nByte Order: Little Endian\nCPU(s): 24\nOn-line CPU(s) list: 0-23\nThread(s) per core: 1\nCore(s) per socket: 12\nSocket(s): 2\nNUMA node(s): 2\nVendor ID: GenuineIntel\nCPU family: 6\nModel: 85\nModel name: Intel(R) Xeon(R) Gold 5118 CPU @ 2.30GHz\nStepping: 4\nCPU MHz: 1891.912\nCPU max MHz: 3200.0000\nCPU min MHz: 1000.0000\nBogoMIPS: 4600.00\nVirtualization: VT-x\nL1d cache: 32K\nL1i cache: 32K\nL2 cache: 1024K\nL3 cache: 16896K\nNUMA node0 CPU(s): 0,2,4,6,8,10,12,14,16,18,20,22\nNUMA node1 CPU(s): 1,3,5,7,9,11,13,15,17,19,21,23\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx pdpe1gb rdtscp lm constant_tsc art arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc aperfmperf eagerfpu pni pclmulqdq dtes64 monitor ds_cpl vmx smx est tm2 ssse3 sdbg fma cx16 xtpr pdcm pcid dca sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand lahf_lm abm 3dnowprefetch epb cat_l3 cdp_l3 invpcid_single intel_ppin ssbd mba ibrs ibpb stibp tpr_shadow vnmi flexpriority ept vpid fsgsbase tsc_adjust bmi1 hle avx2 smep bmi2 erms invpcid rtm cqm mpx rdt_a avx512f avx512dq rdseed adx smap clflushopt clwb intel_pt avx512cd avx512bw avx512vl xsaveopt xsavec xgetbv1 cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local dtherm ida arat pln pts pku ospke md_clear spec_ctrl intel_stibp flush_l1d arch_capabilities\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.3.0\n[conda] Could not collect",
87
+ "transformers_version": "4.40.2",
88
+ "upper_git_hash": null,
89
+ "task_hashes": {},
90
+ "model_source": "hf",
91
+ "model_name": "EleutherAI/pythia-410m-seed1",
92
+ "model_name_sanitized": "EleutherAI__pythia-410m-seed1",
93
+ "start_time": 4384166.95736031,
94
+ "end_time": 4384285.360651387,
95
+ "total_evaluation_time_seconds": "118.40329107735306"
96
+ }
pythia-410m-seed1/step10000/EleutherAI__pythia-410m-seed1/results_2024-08-19T10-12-27.949043.json ADDED
@@ -0,0 +1,96 @@
1
+ {
2
+ "results": {
3
+ "lambada_openai": {
4
+ "perplexity,none": 24.584044336439028,
5
+ "perplexity_stderr,none": 0.8631365482636594,
6
+ "acc,none": 0.38133126334174267,
7
+ "acc_stderr,none": 0.006766940596952872,
8
+ "alias": "lambada_openai"
9
+ }
10
+ },
11
+ "group_subtasks": {
12
+ "lambada_openai": []
13
+ },
14
+ "configs": {
15
+ "lambada_openai": {
16
+ "task": "lambada_openai",
17
+ "group": [
18
+ "lambada"
19
+ ],
20
+ "dataset_path": "EleutherAI/lambada_openai",
21
+ "dataset_name": "default",
22
+ "dataset_kwargs": {
23
+ "trust_remote_code": true
24
+ },
25
+ "test_split": "test",
26
+ "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
27
+ "doc_to_target": "{{' '+text.split(' ')[-1]}}",
28
+ "description": "",
29
+ "target_delimiter": " ",
30
+ "fewshot_delimiter": "\n\n",
31
+ "num_fewshot": 0,
32
+ "metric_list": [
33
+ {
34
+ "metric": "perplexity",
35
+ "aggregation": "perplexity",
36
+ "higher_is_better": false
37
+ },
38
+ {
39
+ "metric": "acc",
40
+ "aggregation": "mean",
41
+ "higher_is_better": true
42
+ }
43
+ ],
44
+ "output_type": "loglikelihood",
45
+ "repeats": 1,
46
+ "should_decontaminate": true,
47
+ "doc_to_decontamination_query": "{{text}}",
48
+ "metadata": {
49
+ "version": 1.0
50
+ }
51
+ }
52
+ },
53
+ "versions": {
54
+ "lambada_openai": 1.0
55
+ },
56
+ "n-shot": {
57
+ "lambada_openai": 0
58
+ },
59
+ "n-samples": {
60
+ "lambada_openai": {
61
+ "original": 5153,
62
+ "effective": 5153
63
+ }
64
+ },
65
+ "config": {
66
+ "model": "hf",
67
+ "model_args": "pretrained=EleutherAI/pythia-410m-seed1,revision=step10000,",
68
+ "model_num_parameters": 405334016,
69
+ "model_dtype": "torch.float16",
70
+ "model_revision": "step10000",
71
+ "model_sha": "5d6d3e2dd3f3f1403a3810af8d8dea85fe2c5b33",
72
+ "batch_size": "128",
73
+ "batch_sizes": [],
74
+ "device": "cuda",
75
+ "use_cache": null,
76
+ "limit": null,
77
+ "bootstrap_iters": 100000,
78
+ "gen_kwargs": null,
79
+ "random_seed": 0,
80
+ "numpy_seed": 1234,
81
+ "torch_seed": 1234,
82
+ "fewshot_seed": 1234
83
+ },
84
+ "git_hash": "51a7ca9",
85
+ "date": 1724087442.278141,
86
+ "pretty_env_info": "PyTorch version: 2.3.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: CentOS Linux release 7.9.2009 (Core) (x86_64)\nGCC version: (GCC) 12.1.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.17\n\nPython version: 3.12.1 (main, Jan 12 2024, 16:49:08) [GCC 12.1.0] (64-bit runtime)\nPython platform: Linux-3.10.0-1160.119.1.el7.tuxcare.els2.x86_64-x86_64-with-glibc2.17\nIs CUDA available: True\nCUDA runtime version: 12.4.99\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: GPU 0: Tesla V100-PCIE-32GB\nNvidia driver version: 550.90.07\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nByte Order: Little Endian\nCPU(s): 24\nOn-line CPU(s) list: 0-23\nThread(s) per core: 1\nCore(s) per socket: 12\nSocket(s): 2\nNUMA node(s): 2\nVendor ID: GenuineIntel\nCPU family: 6\nModel: 85\nModel name: Intel(R) Xeon(R) Gold 5118 CPU @ 2.30GHz\nStepping: 4\nCPU MHz: 3031.805\nCPU max MHz: 3200.0000\nCPU min MHz: 1000.0000\nBogoMIPS: 4600.00\nVirtualization: VT-x\nL1d cache: 32K\nL1i cache: 32K\nL2 cache: 1024K\nL3 cache: 16896K\nNUMA node0 CPU(s): 0,2,4,6,8,10,12,14,16,18,20,22\nNUMA node1 CPU(s): 1,3,5,7,9,11,13,15,17,19,21,23\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx pdpe1gb rdtscp lm constant_tsc art arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc aperfmperf eagerfpu pni pclmulqdq dtes64 monitor ds_cpl vmx smx est tm2 ssse3 sdbg fma cx16 xtpr pdcm pcid dca sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand lahf_lm abm 3dnowprefetch epb cat_l3 cdp_l3 invpcid_single intel_ppin ssbd mba ibrs ibpb stibp tpr_shadow vnmi flexpriority ept vpid fsgsbase tsc_adjust bmi1 hle avx2 smep bmi2 erms invpcid rtm cqm mpx rdt_a avx512f avx512dq rdseed adx smap clflushopt clwb intel_pt avx512cd avx512bw avx512vl xsaveopt xsavec xgetbv1 cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local dtherm ida arat pln pts pku ospke md_clear spec_ctrl intel_stibp flush_l1d arch_capabilities\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.3.0\n[conda] Could not collect",
87
+ "transformers_version": "4.40.2",
88
+ "upper_git_hash": null,
89
+ "task_hashes": {},
90
+ "model_source": "hf",
91
+ "model_name": "EleutherAI/pythia-410m-seed1",
92
+ "model_name_sanitized": "EleutherAI__pythia-410m-seed1",
93
+ "start_time": 1466912.599790743,
94
+ "end_time": 1467034.481633835,
95
+ "total_evaluation_time_seconds": "121.88184309192002"
96
+ }
pythia-410m-seed1/step10000/EleutherAI__pythia-410m-seed1/results_2024-08-19T15-53-57.877190.json ADDED
@@ -0,0 +1,96 @@
1
+ {
2
+ "results": {
3
+ "lambada_openai": {
4
+ "perplexity,none": 24.584044336439028,
5
+ "perplexity_stderr,none": 0.8631365482636594,
6
+ "acc,none": 0.38133126334174267,
7
+ "acc_stderr,none": 0.006766940596952872,
8
+ "alias": "lambada_openai"
9
+ }
10
+ },
11
+ "group_subtasks": {
12
+ "lambada_openai": []
13
+ },
14
+ "configs": {
15
+ "lambada_openai": {
16
+ "task": "lambada_openai",
17
+ "group": [
18
+ "lambada"
19
+ ],
20
+ "dataset_path": "EleutherAI/lambada_openai",
21
+ "dataset_name": "default",
22
+ "dataset_kwargs": {
23
+ "trust_remote_code": true
24
+ },
25
+ "test_split": "test",
26
+ "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
27
+ "doc_to_target": "{{' '+text.split(' ')[-1]}}",
28
+ "description": "",
29
+ "target_delimiter": " ",
30
+ "fewshot_delimiter": "\n\n",
31
+ "num_fewshot": 0,
32
+ "metric_list": [
33
+ {
34
+ "metric": "perplexity",
35
+ "aggregation": "perplexity",
36
+ "higher_is_better": false
37
+ },
38
+ {
39
+ "metric": "acc",
40
+ "aggregation": "mean",
41
+ "higher_is_better": true
42
+ }
43
+ ],
44
+ "output_type": "loglikelihood",
45
+ "repeats": 1,
46
+ "should_decontaminate": true,
47
+ "doc_to_decontamination_query": "{{text}}",
48
+ "metadata": {
49
+ "version": 1.0
50
+ }
51
+ }
52
+ },
53
+ "versions": {
54
+ "lambada_openai": 1.0
55
+ },
56
+ "n-shot": {
57
+ "lambada_openai": 0
58
+ },
59
+ "n-samples": {
60
+ "lambada_openai": {
61
+ "original": 5153,
62
+ "effective": 5153
63
+ }
64
+ },
65
+ "config": {
66
+ "model": "hf",
67
+ "model_args": "pretrained=EleutherAI/pythia-410m-seed1,revision=step10000",
68
+ "model_num_parameters": 405334016,
69
+ "model_dtype": "torch.float16",
70
+ "model_revision": "step10000",
71
+ "model_sha": "5d6d3e2dd3f3f1403a3810af8d8dea85fe2c5b33",
72
+ "batch_size": "128",
73
+ "batch_sizes": [],
74
+ "device": "cuda",
75
+ "use_cache": null,
76
+ "limit": null,
77
+ "bootstrap_iters": 100000,
78
+ "gen_kwargs": null,
79
+ "random_seed": 0,
80
+ "numpy_seed": 1234,
81
+ "torch_seed": 1234,
82
+ "fewshot_seed": 1234
83
+ },
84
+ "git_hash": "51a7ca9",
85
+ "date": 1724107929.7750034,
86
+ "pretty_env_info": "PyTorch version: 2.3.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: CentOS Linux release 7.9.2009 (Core) (x86_64)\nGCC version: (GCC) 12.1.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.17\n\nPython version: 3.12.1 (main, Jan 12 2024, 16:49:08) [GCC 12.1.0] (64-bit runtime)\nPython platform: Linux-3.10.0-1160.118.1.el7.x86_64-x86_64-with-glibc2.17\nIs CUDA available: True\nCUDA runtime version: 12.4.99\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: GPU 0: Tesla V100-PCIE-32GB\nNvidia driver version: 550.54.15\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nByte Order: Little Endian\nCPU(s): 24\nOn-line CPU(s) list: 0-23\nThread(s) per core: 1\nCore(s) per socket: 12\nSocket(s): 2\nNUMA node(s): 2\nVendor ID: GenuineIntel\nCPU family: 6\nModel: 85\nModel name: Intel(R) Xeon(R) Gold 5118 CPU @ 2.30GHz\nStepping: 4\nCPU MHz: 1391.735\nCPU max MHz: 3200.0000\nCPU min MHz: 1000.0000\nBogoMIPS: 4600.00\nVirtualization: VT-x\nL1d cache: 32K\nL1i cache: 32K\nL2 cache: 1024K\nL3 cache: 16896K\nNUMA node0 CPU(s): 0,2,4,6,8,10,12,14,16,18,20,22\nNUMA node1 CPU(s): 1,3,5,7,9,11,13,15,17,19,21,23\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx pdpe1gb rdtscp lm constant_tsc art arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc aperfmperf eagerfpu pni pclmulqdq dtes64 monitor ds_cpl vmx smx est tm2 ssse3 sdbg fma cx16 xtpr pdcm pcid dca sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand lahf_lm abm 3dnowprefetch epb cat_l3 cdp_l3 invpcid_single intel_ppin ssbd mba ibrs ibpb stibp tpr_shadow vnmi flexpriority ept vpid fsgsbase tsc_adjust bmi1 hle avx2 smep bmi2 erms invpcid rtm cqm mpx rdt_a avx512f avx512dq rdseed adx smap clflushopt clwb intel_pt avx512cd avx512bw avx512vl xsaveopt xsavec xgetbv1 cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local dtherm ida arat pln pts pku ospke md_clear spec_ctrl intel_stibp flush_l1d arch_capabilities\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.3.0\n[conda] Could not collect",
87
+ "transformers_version": "4.40.2",
88
+ "upper_git_hash": null,
89
+ "task_hashes": {},
90
+ "model_source": "hf",
91
+ "model_name": "EleutherAI/pythia-410m-seed1",
92
+ "model_name_sanitized": "EleutherAI__pythia-410m-seed1",
93
+ "start_time": 4385298.895028719,
94
+ "end_time": 4385416.44356251,
95
+ "total_evaluation_time_seconds": "117.54853379167616"
96
+ }
pythia-410m-seed1/step100000/EleutherAI__pythia-410m-seed1/results_2024-08-19T13-36-43.663669.json ADDED
@@ -0,0 +1,96 @@
1
+ {
2
+ "results": {
3
+ "lambada_openai": {
4
+ "perplexity,none": 11.591952878111991,
5
+ "perplexity_stderr,none": 0.35042546745390324,
6
+ "acc,none": 0.5010673394139337,
7
+ "acc_stderr,none": 0.006965961785703306,
8
+ "alias": "lambada_openai"
9
+ }
10
+ },
11
+ "group_subtasks": {
12
+ "lambada_openai": []
13
+ },
14
+ "configs": {
15
+ "lambada_openai": {
16
+ "task": "lambada_openai",
17
+ "group": [
18
+ "lambada"
19
+ ],
20
+ "dataset_path": "EleutherAI/lambada_openai",
21
+ "dataset_name": "default",
22
+ "dataset_kwargs": {
23
+ "trust_remote_code": true
24
+ },
25
+ "test_split": "test",
26
+ "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
27
+ "doc_to_target": "{{' '+text.split(' ')[-1]}}",
28
+ "description": "",
29
+ "target_delimiter": " ",
30
+ "fewshot_delimiter": "\n\n",
31
+ "num_fewshot": 0,
32
+ "metric_list": [
33
+ {
34
+ "metric": "perplexity",
35
+ "aggregation": "perplexity",
36
+ "higher_is_better": false
37
+ },
38
+ {
39
+ "metric": "acc",
40
+ "aggregation": "mean",
41
+ "higher_is_better": true
42
+ }
43
+ ],
44
+ "output_type": "loglikelihood",
45
+ "repeats": 1,
46
+ "should_decontaminate": true,
47
+ "doc_to_decontamination_query": "{{text}}",
48
+ "metadata": {
49
+ "version": 1.0
50
+ }
51
+ }
52
+ },
53
+ "versions": {
54
+ "lambada_openai": 1.0
55
+ },
56
+ "n-shot": {
57
+ "lambada_openai": 0
58
+ },
59
+ "n-samples": {
60
+ "lambada_openai": {
61
+ "original": 5153,
62
+ "effective": 5153
63
+ }
64
+ },
65
+ "config": {
66
+ "model": "hf",
67
+ "model_args": "pretrained=EleutherAI/pythia-410m-seed1,revision=step100000,",
68
+ "model_num_parameters": 405334016,
69
+ "model_dtype": "torch.float16",
70
+ "model_revision": "step100000",
71
+ "model_sha": "4795a3fd91978f98a963f930c9ed90ba8dc432b3",
72
+ "batch_size": "128",
73
+ "batch_sizes": [],
74
+ "device": "cuda",
75
+ "use_cache": null,
76
+ "limit": null,
77
+ "bootstrap_iters": 100000,
78
+ "gen_kwargs": null,
79
+ "random_seed": 0,
80
+ "numpy_seed": 1234,
81
+ "torch_seed": 1234,
82
+ "fewshot_seed": 1234
83
+ },
84
+ "git_hash": "51a7ca9",
85
+ "date": 1724099681.157819,
86
+ "pretty_env_info": "PyTorch version: 2.3.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: CentOS Linux release 7.9.2009 (Core) (x86_64)\nGCC version: (GCC) 12.1.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.17\n\nPython version: 3.12.1 (main, Jan 12 2024, 16:49:08) [GCC 12.1.0] (64-bit runtime)\nPython platform: Linux-3.10.0-1160.119.1.el7.tuxcare.els2.x86_64-x86_64-with-glibc2.17\nIs CUDA available: True\nCUDA runtime version: 12.4.99\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: GPU 0: Tesla V100-PCIE-32GB\nNvidia driver version: 550.90.07\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nByte Order: Little Endian\nCPU(s): 24\nOn-line CPU(s) list: 0-23\nThread(s) per core: 1\nCore(s) per socket: 12\nSocket(s): 2\nNUMA node(s): 2\nVendor ID: GenuineIntel\nCPU family: 6\nModel: 85\nModel name: Intel(R) Xeon(R) Gold 5118 CPU @ 2.30GHz\nStepping: 4\nCPU MHz: 2893.811\nCPU max MHz: 3200.0000\nCPU min MHz: 1000.0000\nBogoMIPS: 4600.00\nVirtualization: VT-x\nL1d cache: 32K\nL1i cache: 32K\nL2 cache: 1024K\nL3 cache: 16896K\nNUMA node0 CPU(s): 0,2,4,6,8,10,12,14,16,18,20,22\nNUMA node1 CPU(s): 1,3,5,7,9,11,13,15,17,19,21,23\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx pdpe1gb rdtscp lm constant_tsc art arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc aperfmperf eagerfpu pni pclmulqdq dtes64 monitor ds_cpl vmx smx est tm2 ssse3 sdbg fma cx16 xtpr pdcm pcid dca sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand lahf_lm abm 3dnowprefetch epb cat_l3 cdp_l3 invpcid_single intel_ppin ssbd mba ibrs ibpb stibp tpr_shadow vnmi flexpriority ept vpid fsgsbase tsc_adjust bmi1 hle avx2 smep bmi2 erms invpcid rtm cqm mpx rdt_a avx512f avx512dq rdseed adx smap clflushopt clwb intel_pt avx512cd avx512bw avx512vl xsaveopt xsavec xgetbv1 cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local dtherm ida arat pln pts pku ospke md_clear spec_ctrl intel_stibp flush_l1d arch_capabilities\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.3.0\n[conda] Could not collect",
87
+ "transformers_version": "4.40.2",
88
+ "upper_git_hash": null,
89
+ "task_hashes": {},
90
+ "model_source": "hf",
91
+ "model_name": "EleutherAI/pythia-410m-seed1",
92
+ "model_name_sanitized": "EleutherAI__pythia-410m-seed1",
93
+ "start_time": 1479151.882794458,
94
+ "end_time": 1479290.195910123,
95
+ "total_evaluation_time_seconds": "138.31311566499062"
96
+ }
pythia-410m-seed1/step100000/EleutherAI__pythia-410m-seed1/results_2024-08-19T16-12-51.708415.json ADDED
@@ -0,0 +1,96 @@
1
+ {
2
+ "results": {
3
+ "lambada_openai": {
4
+ "perplexity,none": 11.591952878111991,
5
+ "perplexity_stderr,none": 0.35042546745390324,
6
+ "acc,none": 0.5010673394139337,
7
+ "acc_stderr,none": 0.006965961785703306,
8
+ "alias": "lambada_openai"
9
+ }
10
+ },
11
+ "group_subtasks": {
12
+ "lambada_openai": []
13
+ },
14
+ "configs": {
15
+ "lambada_openai": {
16
+ "task": "lambada_openai",
17
+ "group": [
18
+ "lambada"
19
+ ],
20
+ "dataset_path": "EleutherAI/lambada_openai",
21
+ "dataset_name": "default",
22
+ "dataset_kwargs": {
23
+ "trust_remote_code": true
24
+ },
25
+ "test_split": "test",
26
+ "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
27
+ "doc_to_target": "{{' '+text.split(' ')[-1]}}",
28
+ "description": "",
29
+ "target_delimiter": " ",
30
+ "fewshot_delimiter": "\n\n",
31
+ "num_fewshot": 0,
32
+ "metric_list": [
33
+ {
34
+ "metric": "perplexity",
35
+ "aggregation": "perplexity",
36
+ "higher_is_better": false
37
+ },
38
+ {
39
+ "metric": "acc",
40
+ "aggregation": "mean",
41
+ "higher_is_better": true
42
+ }
43
+ ],
44
+ "output_type": "loglikelihood",
45
+ "repeats": 1,
46
+ "should_decontaminate": true,
47
+ "doc_to_decontamination_query": "{{text}}",
48
+ "metadata": {
49
+ "version": 1.0
50
+ }
51
+ }
52
+ },
53
+ "versions": {
54
+ "lambada_openai": 1.0
55
+ },
56
+ "n-shot": {
57
+ "lambada_openai": 0
58
+ },
59
+ "n-samples": {
60
+ "lambada_openai": {
61
+ "original": 5153,
62
+ "effective": 5153
63
+ }
64
+ },
65
+ "config": {
66
+ "model": "hf",
67
+ "model_args": "pretrained=EleutherAI/pythia-410m-seed1,revision=step100000",
68
+ "model_num_parameters": 405334016,
69
+ "model_dtype": "torch.float16",
70
+ "model_revision": "step100000",
71
+ "model_sha": "4795a3fd91978f98a963f930c9ed90ba8dc432b3",
72
+ "batch_size": "128",
73
+ "batch_sizes": [],
74
+ "device": "cuda",
75
+ "use_cache": null,
76
+ "limit": null,
77
+ "bootstrap_iters": 100000,
78
+ "gen_kwargs": null,
79
+ "random_seed": 0,
80
+ "numpy_seed": 1234,
81
+ "torch_seed": 1234,
82
+ "fewshot_seed": 1234
83
+ },
84
+ "git_hash": "51a7ca9",
85
+ "date": 1724109061.965506,
86
+ "pretty_env_info": "PyTorch version: 2.3.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: CentOS Linux release 7.9.2009 (Core) (x86_64)\nGCC version: (GCC) 12.1.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.17\n\nPython version: 3.12.1 (main, Jan 12 2024, 16:49:08) [GCC 12.1.0] (64-bit runtime)\nPython platform: Linux-3.10.0-1160.118.1.el7.x86_64-x86_64-with-glibc2.17\nIs CUDA available: True\nCUDA runtime version: 12.4.99\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: GPU 0: Tesla V100-PCIE-32GB\nNvidia driver version: 550.54.15\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nByte Order: Little Endian\nCPU(s): 24\nOn-line CPU(s) list: 0-23\nThread(s) per core: 1\nCore(s) per socket: 12\nSocket(s): 2\nNUMA node(s): 2\nVendor ID: GenuineIntel\nCPU family: 6\nModel: 85\nModel name: Intel(R) Xeon(R) Gold 5118 CPU @ 2.30GHz\nStepping: 4\nCPU MHz: 1792.242\nCPU max MHz: 3200.0000\nCPU min MHz: 1000.0000\nBogoMIPS: 4600.00\nVirtualization: VT-x\nL1d cache: 32K\nL1i cache: 32K\nL2 cache: 1024K\nL3 cache: 16896K\nNUMA node0 CPU(s): 0,2,4,6,8,10,12,14,16,18,20,22\nNUMA node1 CPU(s): 1,3,5,7,9,11,13,15,17,19,21,23\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx pdpe1gb rdtscp lm constant_tsc art arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc aperfmperf eagerfpu pni pclmulqdq dtes64 monitor ds_cpl vmx smx est tm2 ssse3 sdbg fma cx16 xtpr pdcm pcid dca sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand lahf_lm abm 3dnowprefetch epb cat_l3 cdp_l3 invpcid_single intel_ppin ssbd mba ibrs ibpb stibp tpr_shadow vnmi flexpriority ept vpid fsgsbase tsc_adjust bmi1 hle avx2 smep bmi2 erms invpcid rtm cqm mpx rdt_a avx512f avx512dq rdseed adx smap clflushopt clwb intel_pt avx512cd avx512bw avx512vl xsaveopt xsavec xgetbv1 cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local dtherm ida arat pln pts pku ospke md_clear spec_ctrl intel_stibp flush_l1d arch_capabilities\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.3.0\n[conda] Could not collect",
87
+ "transformers_version": "4.40.2",
88
+ "upper_git_hash": null,
89
+ "task_hashes": {},
90
+ "model_source": "hf",
91
+ "model_name": "EleutherAI/pythia-410m-seed1",
92
+ "model_name_sanitized": "EleutherAI__pythia-410m-seed1",
93
+ "start_time": 4386431.075623083,
94
+ "end_time": 4386550.275282716,
95
+ "total_evaluation_time_seconds": "119.19965963345021"
96
+ }
pythia-410m-seed1/step101000/EleutherAI__pythia-410m-seed1/results_2024-08-19T13-39-02.636733.json ADDED
@@ -0,0 +1,96 @@
1
+ {
2
+ "results": {
3
+ "lambada_openai": {
4
+ "perplexity,none": 11.515270194664298,
5
+ "perplexity_stderr,none": 0.345188609736811,
6
+ "acc,none": 0.5010673394139337,
7
+ "acc_stderr,none": 0.006965961785703306,
8
+ "alias": "lambada_openai"
9
+ }
10
+ },
11
+ "group_subtasks": {
12
+ "lambada_openai": []
13
+ },
14
+ "configs": {
15
+ "lambada_openai": {
16
+ "task": "lambada_openai",
17
+ "group": [
18
+ "lambada"
19
+ ],
20
+ "dataset_path": "EleutherAI/lambada_openai",
21
+ "dataset_name": "default",
22
+ "dataset_kwargs": {
23
+ "trust_remote_code": true
24
+ },
25
+ "test_split": "test",
26
+ "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
27
+ "doc_to_target": "{{' '+text.split(' ')[-1]}}",
28
+ "description": "",
29
+ "target_delimiter": " ",
30
+ "fewshot_delimiter": "\n\n",
31
+ "num_fewshot": 0,
32
+ "metric_list": [
33
+ {
34
+ "metric": "perplexity",
35
+ "aggregation": "perplexity",
36
+ "higher_is_better": false
37
+ },
38
+ {
39
+ "metric": "acc",
40
+ "aggregation": "mean",
41
+ "higher_is_better": true
42
+ }
43
+ ],
44
+ "output_type": "loglikelihood",
45
+ "repeats": 1,
46
+ "should_decontaminate": true,
47
+ "doc_to_decontamination_query": "{{text}}",
48
+ "metadata": {
49
+ "version": 1.0
50
+ }
51
+ }
52
+ },
53
+ "versions": {
54
+ "lambada_openai": 1.0
55
+ },
56
+ "n-shot": {
57
+ "lambada_openai": 0
58
+ },
59
+ "n-samples": {
60
+ "lambada_openai": {
61
+ "original": 5153,
62
+ "effective": 5153
63
+ }
64
+ },
65
+ "config": {
66
+ "model": "hf",
67
+ "model_args": "pretrained=EleutherAI/pythia-410m-seed1,revision=step101000,",
68
+ "model_num_parameters": 405334016,
69
+ "model_dtype": "torch.float16",
70
+ "model_revision": "step101000",
71
+ "model_sha": "f860dd5f8da51fa408ccd2fa476b0d86cb5c5173",
72
+ "batch_size": "128",
73
+ "batch_sizes": [],
74
+ "device": "cuda",
75
+ "use_cache": null,
76
+ "limit": null,
77
+ "bootstrap_iters": 100000,
78
+ "gen_kwargs": null,
79
+ "random_seed": 0,
80
+ "numpy_seed": 1234,
81
+ "torch_seed": 1234,
82
+ "fewshot_seed": 1234
83
+ },
84
+ "git_hash": "51a7ca9",
85
+ "date": 1724099827.1200736,
86
+ "pretty_env_info": "PyTorch version: 2.3.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: CentOS Linux release 7.9.2009 (Core) (x86_64)\nGCC version: (GCC) 12.1.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.17\n\nPython version: 3.12.1 (main, Jan 12 2024, 16:49:08) [GCC 12.1.0] (64-bit runtime)\nPython platform: Linux-3.10.0-1160.119.1.el7.tuxcare.els2.x86_64-x86_64-with-glibc2.17\nIs CUDA available: True\nCUDA runtime version: 12.4.99\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: GPU 0: Tesla V100-PCIE-32GB\nNvidia driver version: 550.90.07\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nByte Order: Little Endian\nCPU(s): 24\nOn-line CPU(s) list: 0-23\nThread(s) per core: 1\nCore(s) per socket: 12\nSocket(s): 2\nNUMA node(s): 2\nVendor ID: GenuineIntel\nCPU family: 6\nModel: 85\nModel name: Intel(R) Xeon(R) Gold 5118 CPU @ 2.30GHz\nStepping: 4\nCPU MHz: 2879.772\nCPU max MHz: 3200.0000\nCPU min MHz: 1000.0000\nBogoMIPS: 4600.00\nVirtualization: VT-x\nL1d cache: 32K\nL1i cache: 32K\nL2 cache: 1024K\nL3 cache: 16896K\nNUMA node0 CPU(s): 0,2,4,6,8,10,12,14,16,18,20,22\nNUMA node1 CPU(s): 1,3,5,7,9,11,13,15,17,19,21,23\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx pdpe1gb rdtscp lm constant_tsc art arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc aperfmperf eagerfpu pni pclmulqdq dtes64 monitor ds_cpl vmx smx est tm2 ssse3 sdbg fma cx16 xtpr pdcm pcid dca sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand lahf_lm abm 3dnowprefetch epb cat_l3 cdp_l3 invpcid_single intel_ppin ssbd mba ibrs ibpb stibp tpr_shadow vnmi flexpriority ept vpid fsgsbase tsc_adjust bmi1 hle avx2 smep bmi2 erms invpcid rtm cqm mpx rdt_a avx512f avx512dq rdseed adx smap clflushopt clwb intel_pt avx512cd avx512bw avx512vl xsaveopt xsavec xgetbv1 cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local dtherm ida arat pln pts pku ospke md_clear spec_ctrl intel_stibp flush_l1d arch_capabilities\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.3.0\n[conda] Could not collect",
87
+ "transformers_version": "4.40.2",
88
+ "upper_git_hash": null,
89
+ "task_hashes": {},
90
+ "model_source": "hf",
91
+ "model_name": "EleutherAI/pythia-410m-seed1",
92
+ "model_name_sanitized": "EleutherAI__pythia-410m-seed1",
93
+ "start_time": 1479299.298601289,
94
+ "end_time": 1479429.169075634,
95
+ "total_evaluation_time_seconds": "129.87047434505075"
96
+ }
pythia-410m-seed1/step102000/EleutherAI__pythia-410m-seed1/results_2024-08-19T13-41-16.692404.json ADDED
@@ -0,0 +1,96 @@
1
+ {
2
+ "results": {
3
+ "lambada_openai": {
4
+ "perplexity,none": 11.545952746208481,
5
+ "perplexity_stderr,none": 0.3509745112281527,
6
+ "acc,none": 0.5080535610324083,
7
+ "acc_stderr,none": 0.006965073974108908,
8
+ "alias": "lambada_openai"
9
+ }
10
+ },
11
+ "group_subtasks": {
12
+ "lambada_openai": []
13
+ },
14
+ "configs": {
15
+ "lambada_openai": {
16
+ "task": "lambada_openai",
17
+ "group": [
18
+ "lambada"
19
+ ],
20
+ "dataset_path": "EleutherAI/lambada_openai",
21
+ "dataset_name": "default",
22
+ "dataset_kwargs": {
23
+ "trust_remote_code": true
24
+ },
25
+ "test_split": "test",
26
+ "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
27
+ "doc_to_target": "{{' '+text.split(' ')[-1]}}",
28
+ "description": "",
29
+ "target_delimiter": " ",
30
+ "fewshot_delimiter": "\n\n",
31
+ "num_fewshot": 0,
32
+ "metric_list": [
33
+ {
34
+ "metric": "perplexity",
35
+ "aggregation": "perplexity",
36
+ "higher_is_better": false
37
+ },
38
+ {
39
+ "metric": "acc",
40
+ "aggregation": "mean",
41
+ "higher_is_better": true
42
+ }
43
+ ],
44
+ "output_type": "loglikelihood",
45
+ "repeats": 1,
46
+ "should_decontaminate": true,
47
+ "doc_to_decontamination_query": "{{text}}",
48
+ "metadata": {
49
+ "version": 1.0
50
+ }
51
+ }
52
+ },
53
+ "versions": {
54
+ "lambada_openai": 1.0
55
+ },
56
+ "n-shot": {
57
+ "lambada_openai": 0
58
+ },
59
+ "n-samples": {
60
+ "lambada_openai": {
61
+ "original": 5153,
62
+ "effective": 5153
63
+ }
64
+ },
65
+ "config": {
66
+ "model": "hf",
67
+ "model_args": "pretrained=EleutherAI/pythia-410m-seed1,revision=step102000,",
68
+ "model_num_parameters": 405334016,
69
+ "model_dtype": "torch.float16",
70
+ "model_revision": "step102000",
71
+ "model_sha": "8aea38af723da29f59316c5761aa8bdcecdc8404",
72
+ "batch_size": "128",
73
+ "batch_sizes": [],
74
+ "device": "cuda",
75
+ "use_cache": null,
76
+ "limit": null,
77
+ "bootstrap_iters": 100000,
78
+ "gen_kwargs": null,
79
+ "random_seed": 0,
80
+ "numpy_seed": 1234,
81
+ "torch_seed": 1234,
82
+ "fewshot_seed": 1234
83
+ },
84
+ "git_hash": "51a7ca9",
85
+ "date": 1724099968.297545,
86
+ "pretty_env_info": "PyTorch version: 2.3.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: CentOS Linux release 7.9.2009 (Core) (x86_64)\nGCC version: (GCC) 12.1.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.17\n\nPython version: 3.12.1 (main, Jan 12 2024, 16:49:08) [GCC 12.1.0] (64-bit runtime)\nPython platform: Linux-3.10.0-1160.119.1.el7.tuxcare.els2.x86_64-x86_64-with-glibc2.17\nIs CUDA available: True\nCUDA runtime version: 12.4.99\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: GPU 0: Tesla V100-PCIE-32GB\nNvidia driver version: 550.90.07\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nByte Order: Little Endian\nCPU(s): 24\nOn-line CPU(s) list: 0-23\nThread(s) per core: 1\nCore(s) per socket: 12\nSocket(s): 2\nNUMA node(s): 2\nVendor ID: GenuineIntel\nCPU family: 6\nModel: 85\nModel name: Intel(R) Xeon(R) Gold 5118 CPU @ 2.30GHz\nStepping: 4\nCPU MHz: 2872.753\nCPU max MHz: 3200.0000\nCPU min MHz: 1000.0000\nBogoMIPS: 4600.00\nVirtualization: VT-x\nL1d cache: 32K\nL1i cache: 32K\nL2 cache: 1024K\nL3 cache: 16896K\nNUMA node0 CPU(s): 0,2,4,6,8,10,12,14,16,18,20,22\nNUMA node1 CPU(s): 1,3,5,7,9,11,13,15,17,19,21,23\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx pdpe1gb rdtscp lm constant_tsc art arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc aperfmperf eagerfpu pni pclmulqdq dtes64 monitor ds_cpl vmx smx est tm2 ssse3 sdbg fma cx16 xtpr pdcm pcid dca sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand lahf_lm abm 3dnowprefetch epb cat_l3 cdp_l3 invpcid_single intel_ppin ssbd mba ibrs ibpb stibp tpr_shadow vnmi flexpriority ept vpid fsgsbase tsc_adjust bmi1 hle avx2 smep bmi2 erms invpcid rtm cqm mpx rdt_a avx512f avx512dq rdseed adx smap clflushopt clwb intel_pt avx512cd avx512bw avx512vl xsaveopt xsavec xgetbv1 cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local dtherm ida arat pln pts pku ospke md_clear spec_ctrl intel_stibp flush_l1d arch_capabilities\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.3.0\n[conda] Could not collect",
87
+ "transformers_version": "4.40.2",
88
+ "upper_git_hash": null,
89
+ "task_hashes": {},
90
+ "model_source": "hf",
91
+ "model_name": "EleutherAI/pythia-410m-seed1",
92
+ "model_name_sanitized": "EleutherAI__pythia-410m-seed1",
93
+ "start_time": 1479438.711829315,
94
+ "end_time": 1479563.224492608,
95
+ "total_evaluation_time_seconds": "124.51266329316422"
96
+ }
pythia-410m-seed1/step103000/EleutherAI__pythia-410m-seed1/results_2024-08-19T13-43-31.736206.json ADDED
@@ -0,0 +1,96 @@
1
+ {
2
+ "results": {
3
+ "lambada_openai": {
4
+ "perplexity,none": 11.713346007380485,
5
+ "perplexity_stderr,none": 0.3557582219742647,
6
+ "acc,none": 0.5018435862604308,
7
+ "acc_stderr,none": 0.006965930304957752,
8
+ "alias": "lambada_openai"
9
+ }
10
+ },
11
+ "group_subtasks": {
12
+ "lambada_openai": []
13
+ },
14
+ "configs": {
15
+ "lambada_openai": {
16
+ "task": "lambada_openai",
17
+ "group": [
18
+ "lambada"
19
+ ],
20
+ "dataset_path": "EleutherAI/lambada_openai",
21
+ "dataset_name": "default",
22
+ "dataset_kwargs": {
23
+ "trust_remote_code": true
24
+ },
25
+ "test_split": "test",
26
+ "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
27
+ "doc_to_target": "{{' '+text.split(' ')[-1]}}",
28
+ "description": "",
29
+ "target_delimiter": " ",
30
+ "fewshot_delimiter": "\n\n",
31
+ "num_fewshot": 0,
32
+ "metric_list": [
33
+ {
34
+ "metric": "perplexity",
35
+ "aggregation": "perplexity",
36
+ "higher_is_better": false
37
+ },
38
+ {
39
+ "metric": "acc",
40
+ "aggregation": "mean",
41
+ "higher_is_better": true
42
+ }
43
+ ],
44
+ "output_type": "loglikelihood",
45
+ "repeats": 1,
46
+ "should_decontaminate": true,
47
+ "doc_to_decontamination_query": "{{text}}",
48
+ "metadata": {
49
+ "version": 1.0
50
+ }
51
+ }
52
+ },
53
+ "versions": {
54
+ "lambada_openai": 1.0
55
+ },
56
+ "n-shot": {
57
+ "lambada_openai": 0
58
+ },
59
+ "n-samples": {
60
+ "lambada_openai": {
61
+ "original": 5153,
62
+ "effective": 5153
63
+ }
64
+ },
65
+ "config": {
66
+ "model": "hf",
67
+ "model_args": "pretrained=EleutherAI/pythia-410m-seed1,revision=step103000,",
68
+ "model_num_parameters": 405334016,
69
+ "model_dtype": "torch.float16",
70
+ "model_revision": "step103000",
71
+ "model_sha": "10aa5c1f97ac60757bd64a478bee92be16923962",
72
+ "batch_size": "128",
73
+ "batch_sizes": [],
74
+ "device": "cuda",
75
+ "use_cache": null,
76
+ "limit": null,
77
+ "bootstrap_iters": 100000,
78
+ "gen_kwargs": null,
79
+ "random_seed": 0,
80
+ "numpy_seed": 1234,
81
+ "torch_seed": 1234,
82
+ "fewshot_seed": 1234
83
+ },
84
+ "git_hash": "51a7ca9",
85
+ "date": 1724100100.6171248,
86
+ "pretty_env_info": "PyTorch version: 2.3.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: CentOS Linux release 7.9.2009 (Core) (x86_64)\nGCC version: (GCC) 12.1.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.17\n\nPython version: 3.12.1 (main, Jan 12 2024, 16:49:08) [GCC 12.1.0] (64-bit runtime)\nPython platform: Linux-3.10.0-1160.119.1.el7.tuxcare.els2.x86_64-x86_64-with-glibc2.17\nIs CUDA available: True\nCUDA runtime version: 12.4.99\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: GPU 0: Tesla V100-PCIE-32GB\nNvidia driver version: 550.90.07\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nByte Order: Little Endian\nCPU(s): 24\nOn-line CPU(s) list: 0-23\nThread(s) per core: 1\nCore(s) per socket: 12\nSocket(s): 2\nNUMA node(s): 2\nVendor ID: GenuineIntel\nCPU family: 6\nModel: 85\nModel name: Intel(R) Xeon(R) Gold 5118 CPU @ 2.30GHz\nStepping: 4\nCPU MHz: 2891.003\nCPU max MHz: 3200.0000\nCPU min MHz: 1000.0000\nBogoMIPS: 4600.00\nVirtualization: VT-x\nL1d cache: 32K\nL1i cache: 32K\nL2 cache: 1024K\nL3 cache: 16896K\nNUMA node0 CPU(s): 0,2,4,6,8,10,12,14,16,18,20,22\nNUMA node1 CPU(s): 1,3,5,7,9,11,13,15,17,19,21,23\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx pdpe1gb rdtscp lm constant_tsc art arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc aperfmperf eagerfpu pni pclmulqdq dtes64 monitor ds_cpl vmx smx est tm2 ssse3 sdbg fma cx16 xtpr pdcm pcid dca sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand lahf_lm abm 3dnowprefetch epb cat_l3 cdp_l3 invpcid_single intel_ppin ssbd mba ibrs ibpb stibp tpr_shadow vnmi flexpriority ept vpid fsgsbase tsc_adjust bmi1 hle avx2 smep bmi2 erms invpcid rtm cqm mpx rdt_a avx512f avx512dq rdseed adx smap clflushopt clwb intel_pt avx512cd avx512bw avx512vl xsaveopt xsavec xgetbv1 cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local dtherm ida arat pln pts pku ospke md_clear spec_ctrl intel_stibp flush_l1d arch_capabilities\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.3.0\n[conda] Could not collect",
87
+ "transformers_version": "4.40.2",
88
+ "upper_git_hash": null,
89
+ "task_hashes": {},
90
+ "model_source": "hf",
91
+ "model_name": "EleutherAI/pythia-410m-seed1",
92
+ "model_name_sanitized": "EleutherAI__pythia-410m-seed1",
93
+ "start_time": 1479571.92818624,
94
+ "end_time": 1479698.26875645,
95
+ "total_evaluation_time_seconds": "126.34057021001354"
96
+ }
pythia-410m-seed1/step104000/EleutherAI__pythia-410m-seed1/results_2024-08-19T13-45-53.228039.json ADDED
@@ -0,0 +1,96 @@
1
+ {
2
+ "results": {
3
+ "lambada_openai": {
4
+ "perplexity,none": 11.401408790573289,
5
+ "perplexity_stderr,none": 0.34637584576111213,
6
+ "acc,none": 0.5082476227440326,
7
+ "acc_stderr,none": 0.006965029895407432,
8
+ "alias": "lambada_openai"
9
+ }
10
+ },
11
+ "group_subtasks": {
12
+ "lambada_openai": []
13
+ },
14
+ "configs": {
15
+ "lambada_openai": {
16
+ "task": "lambada_openai",
17
+ "group": [
18
+ "lambada"
19
+ ],
20
+ "dataset_path": "EleutherAI/lambada_openai",
21
+ "dataset_name": "default",
22
+ "dataset_kwargs": {
23
+ "trust_remote_code": true
24
+ },
25
+ "test_split": "test",
26
+ "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
27
+ "doc_to_target": "{{' '+text.split(' ')[-1]}}",
28
+ "description": "",
29
+ "target_delimiter": " ",
30
+ "fewshot_delimiter": "\n\n",
31
+ "num_fewshot": 0,
32
+ "metric_list": [
33
+ {
34
+ "metric": "perplexity",
35
+ "aggregation": "perplexity",
36
+ "higher_is_better": false
37
+ },
38
+ {
39
+ "metric": "acc",
40
+ "aggregation": "mean",
41
+ "higher_is_better": true
42
+ }
43
+ ],
44
+ "output_type": "loglikelihood",
45
+ "repeats": 1,
46
+ "should_decontaminate": true,
47
+ "doc_to_decontamination_query": "{{text}}",
48
+ "metadata": {
49
+ "version": 1.0
50
+ }
51
+ }
52
+ },
53
+ "versions": {
54
+ "lambada_openai": 1.0
55
+ },
56
+ "n-shot": {
57
+ "lambada_openai": 0
58
+ },
59
+ "n-samples": {
60
+ "lambada_openai": {
61
+ "original": 5153,
62
+ "effective": 5153
63
+ }
64
+ },
65
+ "config": {
66
+ "model": "hf",
67
+ "model_args": "pretrained=EleutherAI/pythia-410m-seed1,revision=step104000,",
68
+ "model_num_parameters": 405334016,
69
+ "model_dtype": "torch.float16",
70
+ "model_revision": "step104000",
71
+ "model_sha": "a9f93e4fd67cb323d45215b257893b686ad5124b",
72
+ "batch_size": "128",
73
+ "batch_sizes": [],
74
+ "device": "cuda",
75
+ "use_cache": null,
76
+ "limit": null,
77
+ "bootstrap_iters": 100000,
78
+ "gen_kwargs": null,
79
+ "random_seed": 0,
80
+ "numpy_seed": 1234,
81
+ "torch_seed": 1234,
82
+ "fewshot_seed": 1234
83
+ },
84
+ "git_hash": "51a7ca9",
85
+ "date": 1724100236.9367678,
86
+ "pretty_env_info": "PyTorch version: 2.3.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: CentOS Linux release 7.9.2009 (Core) (x86_64)\nGCC version: (GCC) 12.1.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.17\n\nPython version: 3.12.1 (main, Jan 12 2024, 16:49:08) [GCC 12.1.0] (64-bit runtime)\nPython platform: Linux-3.10.0-1160.119.1.el7.tuxcare.els2.x86_64-x86_64-with-glibc2.17\nIs CUDA available: True\nCUDA runtime version: 12.4.99\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: GPU 0: Tesla V100-PCIE-32GB\nNvidia driver version: 550.90.07\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nByte Order: Little Endian\nCPU(s): 24\nOn-line CPU(s) list: 0-23\nThread(s) per core: 1\nCore(s) per socket: 12\nSocket(s): 2\nNUMA node(s): 2\nVendor ID: GenuineIntel\nCPU family: 6\nModel: 85\nModel name: Intel(R) Xeon(R) Gold 5118 CPU @ 2.30GHz\nStepping: 4\nCPU MHz: 2658.392\nCPU max MHz: 3200.0000\nCPU min MHz: 1000.0000\nBogoMIPS: 4600.00\nVirtualization: VT-x\nL1d cache: 32K\nL1i cache: 32K\nL2 cache: 1024K\nL3 cache: 16896K\nNUMA node0 CPU(s): 0,2,4,6,8,10,12,14,16,18,20,22\nNUMA node1 CPU(s): 1,3,5,7,9,11,13,15,17,19,21,23\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx pdpe1gb rdtscp lm constant_tsc art arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc aperfmperf eagerfpu pni pclmulqdq dtes64 monitor ds_cpl vmx smx est tm2 ssse3 sdbg fma cx16 xtpr pdcm pcid dca sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand lahf_lm abm 3dnowprefetch epb cat_l3 cdp_l3 invpcid_single intel_ppin ssbd mba ibrs ibpb stibp tpr_shadow vnmi flexpriority ept vpid fsgsbase tsc_adjust bmi1 hle avx2 smep bmi2 erms invpcid rtm cqm mpx rdt_a avx512f avx512dq rdseed adx smap clflushopt clwb intel_pt avx512cd avx512bw avx512vl xsaveopt xsavec xgetbv1 cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local dtherm ida arat pln pts pku ospke md_clear spec_ctrl intel_stibp flush_l1d arch_capabilities\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.3.0\n[conda] Could not collect",
87
+ "transformers_version": "4.40.2",
88
+ "upper_git_hash": null,
89
+ "task_hashes": {},
90
+ "model_source": "hf",
91
+ "model_name": "EleutherAI/pythia-410m-seed1",
92
+ "model_name_sanitized": "EleutherAI__pythia-410m-seed1",
93
+ "start_time": 1479707.682436252,
94
+ "end_time": 1479839.759791051,
95
+ "total_evaluation_time_seconds": "132.07735479902476"
96
+ }
pythia-410m-seed1/step105000/EleutherAI__pythia-410m-seed1/results_2024-08-19T13-48-12.742336.json ADDED
@@ -0,0 +1,96 @@
1
+ {
2
+ "results": {
3
+ "lambada_openai": {
4
+ "perplexity,none": 12.139155061589388,
5
+ "perplexity_stderr,none": 0.36465865661163127,
6
+ "acc,none": 0.4865127110421114,
7
+ "acc_stderr,none": 0.006963442876327781,
8
+ "alias": "lambada_openai"
9
+ }
10
+ },
11
+ "group_subtasks": {
12
+ "lambada_openai": []
13
+ },
14
+ "configs": {
15
+ "lambada_openai": {
16
+ "task": "lambada_openai",
17
+ "group": [
18
+ "lambada"
19
+ ],
20
+ "dataset_path": "EleutherAI/lambada_openai",
21
+ "dataset_name": "default",
22
+ "dataset_kwargs": {
23
+ "trust_remote_code": true
24
+ },
25
+ "test_split": "test",
26
+ "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
27
+ "doc_to_target": "{{' '+text.split(' ')[-1]}}",
28
+ "description": "",
29
+ "target_delimiter": " ",
30
+ "fewshot_delimiter": "\n\n",
31
+ "num_fewshot": 0,
32
+ "metric_list": [
33
+ {
34
+ "metric": "perplexity",
35
+ "aggregation": "perplexity",
36
+ "higher_is_better": false
37
+ },
38
+ {
39
+ "metric": "acc",
40
+ "aggregation": "mean",
41
+ "higher_is_better": true
42
+ }
43
+ ],
44
+ "output_type": "loglikelihood",
45
+ "repeats": 1,
46
+ "should_decontaminate": true,
47
+ "doc_to_decontamination_query": "{{text}}",
48
+ "metadata": {
49
+ "version": 1.0
50
+ }
51
+ }
52
+ },
53
+ "versions": {
54
+ "lambada_openai": 1.0
55
+ },
56
+ "n-shot": {
57
+ "lambada_openai": 0
58
+ },
59
+ "n-samples": {
60
+ "lambada_openai": {
61
+ "original": 5153,
62
+ "effective": 5153
63
+ }
64
+ },
65
+ "config": {
66
+ "model": "hf",
67
+ "model_args": "pretrained=EleutherAI/pythia-410m-seed1,revision=step105000,",
68
+ "model_num_parameters": 405334016,
69
+ "model_dtype": "torch.float16",
70
+ "model_revision": "step105000",
71
+ "model_sha": "5adcc5b5b423fe7031aa0757510892f4d1c9c850",
72
+ "batch_size": "128",
73
+ "batch_sizes": [],
74
+ "device": "cuda",
75
+ "use_cache": null,
76
+ "limit": null,
77
+ "bootstrap_iters": 100000,
78
+ "gen_kwargs": null,
79
+ "random_seed": 0,
80
+ "numpy_seed": 1234,
81
+ "torch_seed": 1234,
82
+ "fewshot_seed": 1234
83
+ },
84
+ "git_hash": "51a7ca9",
85
+ "date": 1724100378.5586107,
86
+ "pretty_env_info": "PyTorch version: 2.3.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: CentOS Linux release 7.9.2009 (Core) (x86_64)\nGCC version: (GCC) 12.1.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.17\n\nPython version: 3.12.1 (main, Jan 12 2024, 16:49:08) [GCC 12.1.0] (64-bit runtime)\nPython platform: Linux-3.10.0-1160.119.1.el7.tuxcare.els2.x86_64-x86_64-with-glibc2.17\nIs CUDA available: True\nCUDA runtime version: 12.4.99\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: GPU 0: Tesla V100-PCIE-32GB\nNvidia driver version: 550.90.07\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nByte Order: Little Endian\nCPU(s): 24\nOn-line CPU(s) list: 0-23\nThread(s) per core: 1\nCore(s) per socket: 12\nSocket(s): 2\nNUMA node(s): 2\nVendor ID: GenuineIntel\nCPU family: 6\nModel: 85\nModel name: Intel(R) Xeon(R) Gold 5118 CPU @ 2.30GHz\nStepping: 4\nCPU MHz: 2874.157\nCPU max MHz: 3200.0000\nCPU min MHz: 1000.0000\nBogoMIPS: 4600.00\nVirtualization: VT-x\nL1d cache: 32K\nL1i cache: 32K\nL2 cache: 1024K\nL3 cache: 16896K\nNUMA node0 CPU(s): 0,2,4,6,8,10,12,14,16,18,20,22\nNUMA node1 CPU(s): 1,3,5,7,9,11,13,15,17,19,21,23\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx pdpe1gb rdtscp lm constant_tsc art arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc aperfmperf eagerfpu pni pclmulqdq dtes64 monitor ds_cpl vmx smx est tm2 ssse3 sdbg fma cx16 xtpr pdcm pcid dca sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand lahf_lm abm 3dnowprefetch epb cat_l3 cdp_l3 invpcid_single intel_ppin ssbd mba ibrs ibpb stibp tpr_shadow vnmi flexpriority ept vpid fsgsbase tsc_adjust bmi1 hle avx2 smep bmi2 erms invpcid rtm cqm mpx rdt_a avx512f avx512dq rdseed adx smap clflushopt clwb intel_pt avx512cd avx512bw avx512vl xsaveopt xsavec xgetbv1 cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local dtherm ida arat pln pts pku ospke md_clear spec_ctrl intel_stibp flush_l1d arch_capabilities\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.3.0\n[conda] Could not collect",
87
+ "transformers_version": "4.40.2",
88
+ "upper_git_hash": null,
89
+ "task_hashes": {},
90
+ "model_source": "hf",
91
+ "model_name": "EleutherAI/pythia-410m-seed1",
92
+ "model_name_sanitized": "EleutherAI__pythia-410m-seed1",
93
+ "start_time": 1479848.991179207,
94
+ "end_time": 1479979.273938509,
95
+ "total_evaluation_time_seconds": "130.28275930183008"
96
+ }
pythia-410m-seed1/step106000/EleutherAI__pythia-410m-seed1/results_2024-08-19T13-50-27.254318.json ADDED
@@ -0,0 +1,96 @@
1
+ {
2
+ "results": {
3
+ "lambada_openai": {
4
+ "perplexity,none": 11.524858282420901,
5
+ "perplexity_stderr,none": 0.34647295919597537,
6
+ "acc,none": 0.5053366970696681,
7
+ "acc_stderr,none": 0.006965580859127542,
8
+ "alias": "lambada_openai"
9
+ }
10
+ },
11
+ "group_subtasks": {
12
+ "lambada_openai": []
13
+ },
14
+ "configs": {
15
+ "lambada_openai": {
16
+ "task": "lambada_openai",
17
+ "group": [
18
+ "lambada"
19
+ ],
20
+ "dataset_path": "EleutherAI/lambada_openai",
21
+ "dataset_name": "default",
22
+ "dataset_kwargs": {
23
+ "trust_remote_code": true
24
+ },
25
+ "test_split": "test",
26
+ "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
27
+ "doc_to_target": "{{' '+text.split(' ')[-1]}}",
28
+ "description": "",
29
+ "target_delimiter": " ",
30
+ "fewshot_delimiter": "\n\n",
31
+ "num_fewshot": 0,
32
+ "metric_list": [
33
+ {
34
+ "metric": "perplexity",
35
+ "aggregation": "perplexity",
36
+ "higher_is_better": false
37
+ },
38
+ {
39
+ "metric": "acc",
40
+ "aggregation": "mean",
41
+ "higher_is_better": true
42
+ }
43
+ ],
44
+ "output_type": "loglikelihood",
45
+ "repeats": 1,
46
+ "should_decontaminate": true,
47
+ "doc_to_decontamination_query": "{{text}}",
48
+ "metadata": {
49
+ "version": 1.0
50
+ }
51
+ }
52
+ },
53
+ "versions": {
54
+ "lambada_openai": 1.0
55
+ },
56
+ "n-shot": {
57
+ "lambada_openai": 0
58
+ },
59
+ "n-samples": {
60
+ "lambada_openai": {
61
+ "original": 5153,
62
+ "effective": 5153
63
+ }
64
+ },
65
+ "config": {
66
+ "model": "hf",
67
+ "model_args": "pretrained=EleutherAI/pythia-410m-seed1,revision=step106000,",
68
+ "model_num_parameters": 405334016,
69
+ "model_dtype": "torch.float16",
70
+ "model_revision": "step106000",
71
+ "model_sha": "93568ced9ffdd579d1219844c898131f881b06fb",
72
+ "batch_size": "128",
73
+ "batch_sizes": [],
74
+ "device": "cuda",
75
+ "use_cache": null,
76
+ "limit": null,
77
+ "bootstrap_iters": 100000,
78
+ "gen_kwargs": null,
79
+ "random_seed": 0,
80
+ "numpy_seed": 1234,
81
+ "torch_seed": 1234,
82
+ "fewshot_seed": 1234
83
+ },
84
+ "git_hash": "51a7ca9",
85
+ "date": 1724100517.9064639,
86
+ "pretty_env_info": "PyTorch version: 2.3.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: CentOS Linux release 7.9.2009 (Core) (x86_64)\nGCC version: (GCC) 12.1.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.17\n\nPython version: 3.12.1 (main, Jan 12 2024, 16:49:08) [GCC 12.1.0] (64-bit runtime)\nPython platform: Linux-3.10.0-1160.119.1.el7.tuxcare.els2.x86_64-x86_64-with-glibc2.17\nIs CUDA available: True\nCUDA runtime version: 12.4.99\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: GPU 0: Tesla V100-PCIE-32GB\nNvidia driver version: 550.90.07\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nByte Order: Little Endian\nCPU(s): 24\nOn-line CPU(s) list: 0-23\nThread(s) per core: 1\nCore(s) per socket: 12\nSocket(s): 2\nNUMA node(s): 2\nVendor ID: GenuineIntel\nCPU family: 6\nModel: 85\nModel name: Intel(R) Xeon(R) Gold 5118 CPU @ 2.30GHz\nStepping: 4\nCPU MHz: 2849.871\nCPU max MHz: 3200.0000\nCPU min MHz: 1000.0000\nBogoMIPS: 4600.00\nVirtualization: VT-x\nL1d cache: 32K\nL1i cache: 32K\nL2 cache: 1024K\nL3 cache: 16896K\nNUMA node0 CPU(s): 0,2,4,6,8,10,12,14,16,18,20,22\nNUMA node1 CPU(s): 1,3,5,7,9,11,13,15,17,19,21,23\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx pdpe1gb rdtscp lm constant_tsc art arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc aperfmperf eagerfpu pni pclmulqdq dtes64 monitor ds_cpl vmx smx est tm2 ssse3 sdbg fma cx16 xtpr pdcm pcid dca sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand lahf_lm abm 3dnowprefetch epb cat_l3 cdp_l3 invpcid_single intel_ppin ssbd mba ibrs ibpb stibp tpr_shadow vnmi flexpriority ept vpid fsgsbase tsc_adjust bmi1 hle avx2 smep bmi2 erms invpcid rtm cqm mpx rdt_a avx512f avx512dq rdseed adx smap clflushopt clwb intel_pt avx512cd avx512bw avx512vl xsaveopt xsavec xgetbv1 cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local dtherm ida arat pln pts pku ospke md_clear spec_ctrl intel_stibp flush_l1d arch_capabilities\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.3.0\n[conda] Could not collect",
87
+ "transformers_version": "4.40.2",
88
+ "upper_git_hash": null,
89
+ "task_hashes": {},
90
+ "model_source": "hf",
91
+ "model_name": "EleutherAI/pythia-410m-seed1",
92
+ "model_name_sanitized": "EleutherAI__pythia-410m-seed1",
93
+ "start_time": 1479988.411062216,
94
+ "end_time": 1480113.786204029,
95
+ "total_evaluation_time_seconds": "125.37514181318693"
96
+ }
pythia-410m-seed1/step107000/EleutherAI__pythia-410m-seed1/results_2024-08-19T13-52-46.612009.json ADDED
@@ -0,0 +1,96 @@
1
+ {
2
+ "results": {
3
+ "lambada_openai": {
4
+ "perplexity,none": 11.101804756574467,
5
+ "perplexity_stderr,none": 0.3342161826382452,
6
+ "acc,none": 0.5220260042693576,
7
+ "acc_stderr,none": 0.006959215358336915,
8
+ "alias": "lambada_openai"
9
+ }
10
+ },
11
+ "group_subtasks": {
12
+ "lambada_openai": []
13
+ },
14
+ "configs": {
15
+ "lambada_openai": {
16
+ "task": "lambada_openai",
17
+ "group": [
18
+ "lambada"
19
+ ],
20
+ "dataset_path": "EleutherAI/lambada_openai",
21
+ "dataset_name": "default",
22
+ "dataset_kwargs": {
23
+ "trust_remote_code": true
24
+ },
25
+ "test_split": "test",
26
+ "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
27
+ "doc_to_target": "{{' '+text.split(' ')[-1]}}",
28
+ "description": "",
29
+ "target_delimiter": " ",
30
+ "fewshot_delimiter": "\n\n",
31
+ "num_fewshot": 0,
32
+ "metric_list": [
33
+ {
34
+ "metric": "perplexity",
35
+ "aggregation": "perplexity",
36
+ "higher_is_better": false
37
+ },
38
+ {
39
+ "metric": "acc",
40
+ "aggregation": "mean",
41
+ "higher_is_better": true
42
+ }
43
+ ],
44
+ "output_type": "loglikelihood",
45
+ "repeats": 1,
46
+ "should_decontaminate": true,
47
+ "doc_to_decontamination_query": "{{text}}",
48
+ "metadata": {
49
+ "version": 1.0
50
+ }
51
+ }
52
+ },
53
+ "versions": {
54
+ "lambada_openai": 1.0
55
+ },
56
+ "n-shot": {
57
+ "lambada_openai": 0
58
+ },
59
+ "n-samples": {
60
+ "lambada_openai": {
61
+ "original": 5153,
62
+ "effective": 5153
63
+ }
64
+ },
65
+ "config": {
66
+ "model": "hf",
67
+ "model_args": "pretrained=EleutherAI/pythia-410m-seed1,revision=step107000,",
68
+ "model_num_parameters": 405334016,
69
+ "model_dtype": "torch.float16",
70
+ "model_revision": "step107000",
71
+ "model_sha": "151d1c55af22cae2789acc5696272231738c6e13",
72
+ "batch_size": "128",
73
+ "batch_sizes": [],
74
+ "device": "cuda",
75
+ "use_cache": null,
76
+ "limit": null,
77
+ "bootstrap_iters": 100000,
78
+ "gen_kwargs": null,
79
+ "random_seed": 0,
80
+ "numpy_seed": 1234,
81
+ "torch_seed": 1234,
82
+ "fewshot_seed": 1234
83
+ },
84
+ "git_hash": "51a7ca9",
85
+ "date": 1724100652.5770772,
86
+ "pretty_env_info": "PyTorch version: 2.3.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: CentOS Linux release 7.9.2009 (Core) (x86_64)\nGCC version: (GCC) 12.1.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.17\n\nPython version: 3.12.1 (main, Jan 12 2024, 16:49:08) [GCC 12.1.0] (64-bit runtime)\nPython platform: Linux-3.10.0-1160.119.1.el7.tuxcare.els2.x86_64-x86_64-with-glibc2.17\nIs CUDA available: True\nCUDA runtime version: 12.4.99\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: GPU 0: Tesla V100-PCIE-32GB\nNvidia driver version: 550.90.07\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nByte Order: Little Endian\nCPU(s): 24\nOn-line CPU(s) list: 0-23\nThread(s) per core: 1\nCore(s) per socket: 12\nSocket(s): 2\nNUMA node(s): 2\nVendor ID: GenuineIntel\nCPU family: 6\nModel: 85\nModel name: Intel(R) Xeon(R) Gold 5118 CPU @ 2.30GHz\nStepping: 4\nCPU MHz: 2887.634\nCPU max MHz: 3200.0000\nCPU min MHz: 1000.0000\nBogoMIPS: 4600.00\nVirtualization: VT-x\nL1d cache: 32K\nL1i cache: 32K\nL2 cache: 1024K\nL3 cache: 16896K\nNUMA node0 CPU(s): 0,2,4,6,8,10,12,14,16,18,20,22\nNUMA node1 CPU(s): 1,3,5,7,9,11,13,15,17,19,21,23\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx pdpe1gb rdtscp lm constant_tsc art arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc aperfmperf eagerfpu pni pclmulqdq dtes64 monitor ds_cpl vmx smx est tm2 ssse3 sdbg fma cx16 xtpr pdcm pcid dca sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand lahf_lm abm 3dnowprefetch epb cat_l3 cdp_l3 invpcid_single intel_ppin ssbd mba ibrs ibpb stibp tpr_shadow vnmi flexpriority ept vpid fsgsbase tsc_adjust bmi1 hle avx2 smep bmi2 erms invpcid rtm cqm mpx rdt_a avx512f avx512dq rdseed adx smap clflushopt clwb intel_pt avx512cd avx512bw avx512vl xsaveopt xsavec xgetbv1 cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local dtherm ida arat pln pts pku ospke md_clear spec_ctrl intel_stibp flush_l1d arch_capabilities\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.3.0\n[conda] Could not collect",
87
+ "transformers_version": "4.40.2",
88
+ "upper_git_hash": null,
89
+ "task_hashes": {},
90
+ "model_source": "hf",
91
+ "model_name": "EleutherAI/pythia-410m-seed1",
92
+ "model_name_sanitized": "EleutherAI__pythia-410m-seed1",
93
+ "start_time": 1480123.014362277,
94
+ "end_time": 1480253.143458506,
95
+ "total_evaluation_time_seconds": "130.129096229095"
96
+ }
pythia-410m-seed1/step108000/EleutherAI__pythia-410m-seed1/results_2024-08-19T13-54-56.733656.json ADDED
@@ -0,0 +1,96 @@
1
+ {
2
+ "results": {
3
+ "lambada_openai": {
4
+ "perplexity,none": 11.321058300476361,
5
+ "perplexity_stderr,none": 0.3408530812837078,
6
+ "acc,none": 0.5096060547254027,
7
+ "acc_stderr,none": 0.006964691949428163,
8
+ "alias": "lambada_openai"
9
+ }
10
+ },
11
+ "group_subtasks": {
12
+ "lambada_openai": []
13
+ },
14
+ "configs": {
15
+ "lambada_openai": {
16
+ "task": "lambada_openai",
17
+ "group": [
18
+ "lambada"
19
+ ],
20
+ "dataset_path": "EleutherAI/lambada_openai",
21
+ "dataset_name": "default",
22
+ "dataset_kwargs": {
23
+ "trust_remote_code": true
24
+ },
25
+ "test_split": "test",
26
+ "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
27
+ "doc_to_target": "{{' '+text.split(' ')[-1]}}",
28
+ "description": "",
29
+ "target_delimiter": " ",
30
+ "fewshot_delimiter": "\n\n",
31
+ "num_fewshot": 0,
32
+ "metric_list": [
33
+ {
34
+ "metric": "perplexity",
35
+ "aggregation": "perplexity",
36
+ "higher_is_better": false
37
+ },
38
+ {
39
+ "metric": "acc",
40
+ "aggregation": "mean",
41
+ "higher_is_better": true
42
+ }
43
+ ],
44
+ "output_type": "loglikelihood",
45
+ "repeats": 1,
46
+ "should_decontaminate": true,
47
+ "doc_to_decontamination_query": "{{text}}",
48
+ "metadata": {
49
+ "version": 1.0
50
+ }
51
+ }
52
+ },
53
+ "versions": {
54
+ "lambada_openai": 1.0
55
+ },
56
+ "n-shot": {
57
+ "lambada_openai": 0
58
+ },
59
+ "n-samples": {
60
+ "lambada_openai": {
61
+ "original": 5153,
62
+ "effective": 5153
63
+ }
64
+ },
65
+ "config": {
66
+ "model": "hf",
67
+ "model_args": "pretrained=EleutherAI/pythia-410m-seed1,revision=step108000,",
68
+ "model_num_parameters": 405334016,
69
+ "model_dtype": "torch.float16",
70
+ "model_revision": "step108000",
71
+ "model_sha": "d9022b33b3b0f804733e08fedf0e3b43a4c34d9c",
72
+ "batch_size": "128",
73
+ "batch_sizes": [],
74
+ "device": "cuda",
75
+ "use_cache": null,
76
+ "limit": null,
77
+ "bootstrap_iters": 100000,
78
+ "gen_kwargs": null,
79
+ "random_seed": 0,
80
+ "numpy_seed": 1234,
81
+ "torch_seed": 1234,
82
+ "fewshot_seed": 1234
83
+ },
84
+ "git_hash": "51a7ca9",
85
+ "date": 1724100788.910648,
86
+ "pretty_env_info": "PyTorch version: 2.3.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: CentOS Linux release 7.9.2009 (Core) (x86_64)\nGCC version: (GCC) 12.1.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.17\n\nPython version: 3.12.1 (main, Jan 12 2024, 16:49:08) [GCC 12.1.0] (64-bit runtime)\nPython platform: Linux-3.10.0-1160.119.1.el7.tuxcare.els2.x86_64-x86_64-with-glibc2.17\nIs CUDA available: True\nCUDA runtime version: 12.4.99\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: GPU 0: Tesla V100-PCIE-32GB\nNvidia driver version: 550.90.07\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nByte Order: Little Endian\nCPU(s): 24\nOn-line CPU(s) list: 0-23\nThread(s) per core: 1\nCore(s) per socket: 12\nSocket(s): 2\nNUMA node(s): 2\nVendor ID: GenuineIntel\nCPU family: 6\nModel: 85\nModel name: Intel(R) Xeon(R) Gold 5118 CPU @ 2.30GHz\nStepping: 4\nCPU MHz: 2981.829\nCPU max MHz: 3200.0000\nCPU min MHz: 1000.0000\nBogoMIPS: 4600.00\nVirtualization: VT-x\nL1d cache: 32K\nL1i cache: 32K\nL2 cache: 1024K\nL3 cache: 16896K\nNUMA node0 CPU(s): 0,2,4,6,8,10,12,14,16,18,20,22\nNUMA node1 CPU(s): 1,3,5,7,9,11,13,15,17,19,21,23\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx pdpe1gb rdtscp lm constant_tsc art arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc aperfmperf eagerfpu pni pclmulqdq dtes64 monitor ds_cpl vmx smx est tm2 ssse3 sdbg fma cx16 xtpr pdcm pcid dca sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand lahf_lm abm 3dnowprefetch epb cat_l3 cdp_l3 invpcid_single intel_ppin ssbd mba ibrs ibpb stibp tpr_shadow vnmi flexpriority ept vpid fsgsbase tsc_adjust bmi1 hle avx2 smep bmi2 erms invpcid rtm cqm mpx rdt_a avx512f avx512dq rdseed adx smap clflushopt clwb intel_pt avx512cd avx512bw avx512vl xsaveopt xsavec xgetbv1 cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local dtherm ida arat pln pts pku ospke md_clear spec_ctrl intel_stibp flush_l1d arch_capabilities\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.3.0\n[conda] Could not collect",
87
+ "transformers_version": "4.40.2",
88
+ "upper_git_hash": null,
89
+ "task_hashes": {},
90
+ "model_source": "hf",
91
+ "model_name": "EleutherAI/pythia-410m-seed1",
92
+ "model_name_sanitized": "EleutherAI__pythia-410m-seed1",
93
+ "start_time": 1480261.194176749,
94
+ "end_time": 1480383.265162954,
95
+ "total_evaluation_time_seconds": "122.07098620501347"
96
+ }
pythia-410m-seed1/step109000/EleutherAI__pythia-410m-seed1/results_2024-08-19T13-57-07.517021.json ADDED
@@ -0,0 +1,96 @@
1
+ {
2
+ "results": {
3
+ "lambada_openai": {
4
+ "perplexity,none": 12.963747848087031,
5
+ "perplexity_stderr,none": 0.39044224511348835,
6
+ "acc,none": 0.4723462060935377,
7
+ "acc_stderr,none": 0.006955315280609102,
8
+ "alias": "lambada_openai"
9
+ }
10
+ },
11
+ "group_subtasks": {
12
+ "lambada_openai": []
13
+ },
14
+ "configs": {
15
+ "lambada_openai": {
16
+ "task": "lambada_openai",
17
+ "group": [
18
+ "lambada"
19
+ ],
20
+ "dataset_path": "EleutherAI/lambada_openai",
21
+ "dataset_name": "default",
22
+ "dataset_kwargs": {
23
+ "trust_remote_code": true
24
+ },
25
+ "test_split": "test",
26
+ "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
27
+ "doc_to_target": "{{' '+text.split(' ')[-1]}}",
28
+ "description": "",
29
+ "target_delimiter": " ",
30
+ "fewshot_delimiter": "\n\n",
31
+ "num_fewshot": 0,
32
+ "metric_list": [
33
+ {
34
+ "metric": "perplexity",
35
+ "aggregation": "perplexity",
36
+ "higher_is_better": false
37
+ },
38
+ {
39
+ "metric": "acc",
40
+ "aggregation": "mean",
41
+ "higher_is_better": true
42
+ }
43
+ ],
44
+ "output_type": "loglikelihood",
45
+ "repeats": 1,
46
+ "should_decontaminate": true,
47
+ "doc_to_decontamination_query": "{{text}}",
48
+ "metadata": {
49
+ "version": 1.0
50
+ }
51
+ }
52
+ },
53
+ "versions": {
54
+ "lambada_openai": 1.0
55
+ },
56
+ "n-shot": {
57
+ "lambada_openai": 0
58
+ },
59
+ "n-samples": {
60
+ "lambada_openai": {
61
+ "original": 5153,
62
+ "effective": 5153
63
+ }
64
+ },
65
+ "config": {
66
+ "model": "hf",
67
+ "model_args": "pretrained=EleutherAI/pythia-410m-seed1,revision=step109000,",
68
+ "model_num_parameters": 405334016,
69
+ "model_dtype": "torch.float16",
70
+ "model_revision": "step109000",
71
+ "model_sha": "438f4102541d0e544b3b6d7c31fff8cded135524",
72
+ "batch_size": "128",
73
+ "batch_sizes": [],
74
+ "device": "cuda",
75
+ "use_cache": null,
76
+ "limit": null,
77
+ "bootstrap_iters": 100000,
78
+ "gen_kwargs": null,
79
+ "random_seed": 0,
80
+ "numpy_seed": 1234,
81
+ "torch_seed": 1234,
82
+ "fewshot_seed": 1234
83
+ },
84
+ "git_hash": "51a7ca9",
85
+ "date": 1724100920.7004702,
86
+ "pretty_env_info": "PyTorch version: 2.3.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: CentOS Linux release 7.9.2009 (Core) (x86_64)\nGCC version: (GCC) 12.1.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.17\n\nPython version: 3.12.1 (main, Jan 12 2024, 16:49:08) [GCC 12.1.0] (64-bit runtime)\nPython platform: Linux-3.10.0-1160.119.1.el7.tuxcare.els2.x86_64-x86_64-with-glibc2.17\nIs CUDA available: True\nCUDA runtime version: 12.4.99\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: GPU 0: Tesla V100-PCIE-32GB\nNvidia driver version: 550.90.07\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nByte Order: Little Endian\nCPU(s): 24\nOn-line CPU(s) list: 0-23\nThread(s) per core: 1\nCore(s) per socket: 12\nSocket(s): 2\nNUMA node(s): 2\nVendor ID: GenuineIntel\nCPU family: 6\nModel: 85\nModel name: Intel(R) Xeon(R) Gold 5118 CPU @ 2.30GHz\nStepping: 4\nCPU MHz: 2967.510\nCPU max MHz: 3200.0000\nCPU min MHz: 1000.0000\nBogoMIPS: 4600.00\nVirtualization: VT-x\nL1d cache: 32K\nL1i cache: 32K\nL2 cache: 1024K\nL3 cache: 16896K\nNUMA node0 CPU(s): 0,2,4,6,8,10,12,14,16,18,20,22\nNUMA node1 CPU(s): 1,3,5,7,9,11,13,15,17,19,21,23\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx pdpe1gb rdtscp lm constant_tsc art arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc aperfmperf eagerfpu pni pclmulqdq dtes64 monitor ds_cpl vmx smx est tm2 ssse3 sdbg fma cx16 xtpr pdcm pcid dca sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand lahf_lm abm 3dnowprefetch epb cat_l3 cdp_l3 invpcid_single intel_ppin ssbd mba ibrs ibpb stibp tpr_shadow vnmi flexpriority ept vpid fsgsbase tsc_adjust bmi1 hle avx2 smep bmi2 erms invpcid rtm cqm mpx rdt_a avx512f avx512dq rdseed adx smap clflushopt clwb intel_pt avx512cd avx512bw avx512vl xsaveopt xsavec xgetbv1 cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local dtherm ida arat pln pts pku ospke md_clear spec_ctrl intel_stibp flush_l1d arch_capabilities\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.3.0\n[conda] Could not collect",
87
+ "transformers_version": "4.40.2",
88
+ "upper_git_hash": null,
89
+ "task_hashes": {},
90
+ "model_source": "hf",
91
+ "model_name": "EleutherAI/pythia-410m-seed1",
92
+ "model_name_sanitized": "EleutherAI__pythia-410m-seed1",
93
+ "start_time": 1480392.361651623,
94
+ "end_time": 1480514.049509619,
95
+ "total_evaluation_time_seconds": "121.6878579959739"
96
+ }
pythia-410m-seed1/step11000/EleutherAI__pythia-410m-seed1/results_2024-08-19T10-14-35.252974.json ADDED
@@ -0,0 +1,96 @@
1
+ {
2
+ "results": {
3
+ "lambada_openai": {
4
+ "perplexity,none": 28.224578752506694,
5
+ "perplexity_stderr,none": 0.9694011784984539,
6
+ "acc,none": 0.36173103046768873,
7
+ "acc_stderr,none": 0.006694325434645113,
8
+ "alias": "lambada_openai"
9
+ }
10
+ },
11
+ "group_subtasks": {
12
+ "lambada_openai": []
13
+ },
14
+ "configs": {
15
+ "lambada_openai": {
16
+ "task": "lambada_openai",
17
+ "group": [
18
+ "lambada"
19
+ ],
20
+ "dataset_path": "EleutherAI/lambada_openai",
21
+ "dataset_name": "default",
22
+ "dataset_kwargs": {
23
+ "trust_remote_code": true
24
+ },
25
+ "test_split": "test",
26
+ "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
27
+ "doc_to_target": "{{' '+text.split(' ')[-1]}}",
28
+ "description": "",
29
+ "target_delimiter": " ",
30
+ "fewshot_delimiter": "\n\n",
31
+ "num_fewshot": 0,
32
+ "metric_list": [
33
+ {
34
+ "metric": "perplexity",
35
+ "aggregation": "perplexity",
36
+ "higher_is_better": false
37
+ },
38
+ {
39
+ "metric": "acc",
40
+ "aggregation": "mean",
41
+ "higher_is_better": true
42
+ }
43
+ ],
44
+ "output_type": "loglikelihood",
45
+ "repeats": 1,
46
+ "should_decontaminate": true,
47
+ "doc_to_decontamination_query": "{{text}}",
48
+ "metadata": {
49
+ "version": 1.0
50
+ }
51
+ }
52
+ },
53
+ "versions": {
54
+ "lambada_openai": 1.0
55
+ },
56
+ "n-shot": {
57
+ "lambada_openai": 0
58
+ },
59
+ "n-samples": {
60
+ "lambada_openai": {
61
+ "original": 5153,
62
+ "effective": 5153
63
+ }
64
+ },
65
+ "config": {
66
+ "model": "hf",
67
+ "model_args": "pretrained=EleutherAI/pythia-410m-seed1,revision=step11000,",
68
+ "model_num_parameters": 405334016,
69
+ "model_dtype": "torch.float16",
70
+ "model_revision": "step11000",
71
+ "model_sha": "c356db702242547fa7f00f0be9db269eaf6feeda",
72
+ "batch_size": "128",
73
+ "batch_sizes": [],
74
+ "device": "cuda",
75
+ "use_cache": null,
76
+ "limit": null,
77
+ "bootstrap_iters": 100000,
78
+ "gen_kwargs": null,
79
+ "random_seed": 0,
80
+ "numpy_seed": 1234,
81
+ "torch_seed": 1234,
82
+ "fewshot_seed": 1234
83
+ },
84
+ "git_hash": "51a7ca9",
85
+ "date": 1724087569.753189,
86
+ "pretty_env_info": "PyTorch version: 2.3.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: CentOS Linux release 7.9.2009 (Core) (x86_64)\nGCC version: (GCC) 12.1.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.17\n\nPython version: 3.12.1 (main, Jan 12 2024, 16:49:08) [GCC 12.1.0] (64-bit runtime)\nPython platform: Linux-3.10.0-1160.119.1.el7.tuxcare.els2.x86_64-x86_64-with-glibc2.17\nIs CUDA available: True\nCUDA runtime version: 12.4.99\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: GPU 0: Tesla V100-PCIE-32GB\nNvidia driver version: 550.90.07\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nByte Order: Little Endian\nCPU(s): 24\nOn-line CPU(s) list: 0-23\nThread(s) per core: 1\nCore(s) per socket: 12\nSocket(s): 2\nNUMA node(s): 2\nVendor ID: GenuineIntel\nCPU family: 6\nModel: 85\nModel name: Intel(R) Xeon(R) Gold 5118 CPU @ 2.30GHz\nStepping: 4\nCPU MHz: 2838.360\nCPU max MHz: 3200.0000\nCPU min MHz: 1000.0000\nBogoMIPS: 4600.00\nVirtualization: VT-x\nL1d cache: 32K\nL1i cache: 32K\nL2 cache: 1024K\nL3 cache: 16896K\nNUMA node0 CPU(s): 0,2,4,6,8,10,12,14,16,18,20,22\nNUMA node1 CPU(s): 1,3,5,7,9,11,13,15,17,19,21,23\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx pdpe1gb rdtscp lm constant_tsc art arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc aperfmperf eagerfpu pni pclmulqdq dtes64 monitor ds_cpl vmx smx est tm2 ssse3 sdbg fma cx16 xtpr pdcm pcid dca sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand lahf_lm abm 3dnowprefetch epb cat_l3 cdp_l3 invpcid_single intel_ppin ssbd mba ibrs ibpb stibp tpr_shadow vnmi flexpriority ept vpid fsgsbase tsc_adjust bmi1 hle avx2 smep bmi2 erms invpcid rtm cqm mpx rdt_a avx512f avx512dq rdseed adx smap clflushopt clwb intel_pt avx512cd avx512bw avx512vl xsaveopt xsavec xgetbv1 cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local dtherm ida arat pln pts pku ospke md_clear spec_ctrl intel_stibp flush_l1d arch_capabilities\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.3.0\n[conda] Could not collect",
87
+ "transformers_version": "4.40.2",
88
+ "upper_git_hash": null,
89
+ "task_hashes": {},
90
+ "model_source": "hf",
91
+ "model_name": "EleutherAI/pythia-410m-seed1",
92
+ "model_name_sanitized": "EleutherAI__pythia-410m-seed1",
93
+ "start_time": 1467043.38081713,
94
+ "end_time": 1467161.785193007,
95
+ "total_evaluation_time_seconds": "118.40437587699853"
96
+ }
pythia-410m-seed1/step110000/EleutherAI__pythia-410m-seed1/results_2024-08-19T13-59-16.856195.json ADDED
@@ -0,0 +1,96 @@
1
+ {
2
+ "results": {
3
+ "lambada_openai": {
4
+ "perplexity,none": 11.925663706441672,
5
+ "perplexity_stderr,none": 0.35890193181504154,
6
+ "acc,none": 0.49505142635358046,
7
+ "acc_stderr,none": 0.006965636477805093,
8
+ "alias": "lambada_openai"
9
+ }
10
+ },
11
+ "group_subtasks": {
12
+ "lambada_openai": []
13
+ },
14
+ "configs": {
15
+ "lambada_openai": {
16
+ "task": "lambada_openai",
17
+ "group": [
18
+ "lambada"
19
+ ],
20
+ "dataset_path": "EleutherAI/lambada_openai",
21
+ "dataset_name": "default",
22
+ "dataset_kwargs": {
23
+ "trust_remote_code": true
24
+ },
25
+ "test_split": "test",
26
+ "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
27
+ "doc_to_target": "{{' '+text.split(' ')[-1]}}",
28
+ "description": "",
29
+ "target_delimiter": " ",
30
+ "fewshot_delimiter": "\n\n",
31
+ "num_fewshot": 0,
32
+ "metric_list": [
33
+ {
34
+ "metric": "perplexity",
35
+ "aggregation": "perplexity",
36
+ "higher_is_better": false
37
+ },
38
+ {
39
+ "metric": "acc",
40
+ "aggregation": "mean",
41
+ "higher_is_better": true
42
+ }
43
+ ],
44
+ "output_type": "loglikelihood",
45
+ "repeats": 1,
46
+ "should_decontaminate": true,
47
+ "doc_to_decontamination_query": "{{text}}",
48
+ "metadata": {
49
+ "version": 1.0
50
+ }
51
+ }
52
+ },
53
+ "versions": {
54
+ "lambada_openai": 1.0
55
+ },
56
+ "n-shot": {
57
+ "lambada_openai": 0
58
+ },
59
+ "n-samples": {
60
+ "lambada_openai": {
61
+ "original": 5153,
62
+ "effective": 5153
63
+ }
64
+ },
65
+ "config": {
66
+ "model": "hf",
67
+ "model_args": "pretrained=EleutherAI/pythia-410m-seed1,revision=step110000,",
68
+ "model_num_parameters": 405334016,
69
+ "model_dtype": "torch.float16",
70
+ "model_revision": "step110000",
71
+ "model_sha": "9391bd1401c486ab6834c4112eabb910b26e4a5e",
72
+ "batch_size": "128",
73
+ "batch_sizes": [],
74
+ "device": "cuda",
75
+ "use_cache": null,
76
+ "limit": null,
77
+ "bootstrap_iters": 100000,
78
+ "gen_kwargs": null,
79
+ "random_seed": 0,
80
+ "numpy_seed": 1234,
81
+ "torch_seed": 1234,
82
+ "fewshot_seed": 1234
83
+ },
84
+ "git_hash": "51a7ca9",
85
+ "date": 1724101050.8016098,
86
+ "pretty_env_info": "PyTorch version: 2.3.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: CentOS Linux release 7.9.2009 (Core) (x86_64)\nGCC version: (GCC) 12.1.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.17\n\nPython version: 3.12.1 (main, Jan 12 2024, 16:49:08) [GCC 12.1.0] (64-bit runtime)\nPython platform: Linux-3.10.0-1160.119.1.el7.tuxcare.els2.x86_64-x86_64-with-glibc2.17\nIs CUDA available: True\nCUDA runtime version: 12.4.99\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: GPU 0: Tesla V100-PCIE-32GB\nNvidia driver version: 550.90.07\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nByte Order: Little Endian\nCPU(s): 24\nOn-line CPU(s) list: 0-23\nThread(s) per core: 1\nCore(s) per socket: 12\nSocket(s): 2\nNUMA node(s): 2\nVendor ID: GenuineIntel\nCPU family: 6\nModel: 85\nModel name: Intel(R) Xeon(R) Gold 5118 CPU @ 2.30GHz\nStepping: 4\nCPU MHz: 3040.649\nCPU max MHz: 3200.0000\nCPU min MHz: 1000.0000\nBogoMIPS: 4600.00\nVirtualization: VT-x\nL1d cache: 32K\nL1i cache: 32K\nL2 cache: 1024K\nL3 cache: 16896K\nNUMA node0 CPU(s): 0,2,4,6,8,10,12,14,16,18,20,22\nNUMA node1 CPU(s): 1,3,5,7,9,11,13,15,17,19,21,23\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx pdpe1gb rdtscp lm constant_tsc art arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc aperfmperf eagerfpu pni pclmulqdq dtes64 monitor ds_cpl vmx smx est tm2 ssse3 sdbg fma cx16 xtpr pdcm pcid dca sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand lahf_lm abm 3dnowprefetch epb cat_l3 cdp_l3 invpcid_single intel_ppin ssbd mba ibrs ibpb stibp tpr_shadow vnmi flexpriority ept vpid fsgsbase tsc_adjust bmi1 hle avx2 smep bmi2 erms invpcid rtm cqm mpx rdt_a avx512f avx512dq rdseed adx smap clflushopt clwb intel_pt avx512cd avx512bw avx512vl xsaveopt xsavec xgetbv1 cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local dtherm ida arat pln pts pku ospke md_clear spec_ctrl intel_stibp flush_l1d arch_capabilities\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.3.0\n[conda] Could not collect",
87
+ "transformers_version": "4.40.2",
88
+ "upper_git_hash": null,
89
+ "task_hashes": {},
90
+ "model_source": "hf",
91
+ "model_name": "EleutherAI/pythia-410m-seed1",
92
+ "model_name_sanitized": "EleutherAI__pythia-410m-seed1",
93
+ "start_time": 1480523.481970895,
94
+ "end_time": 1480643.389048046,
95
+ "total_evaluation_time_seconds": "119.90707715088502"
96
+ }
pythia-410m-seed1/step110000/EleutherAI__pythia-410m-seed1/results_2024-08-19T16-14-56.300208.json ADDED
@@ -0,0 +1,96 @@
1
+ {
2
+ "results": {
3
+ "lambada_openai": {
4
+ "perplexity,none": 11.925663706441672,
5
+ "perplexity_stderr,none": 0.35890193181504154,
6
+ "acc,none": 0.49505142635358046,
7
+ "acc_stderr,none": 0.006965636477805093,
8
+ "alias": "lambada_openai"
9
+ }
10
+ },
11
+ "group_subtasks": {
12
+ "lambada_openai": []
13
+ },
14
+ "configs": {
15
+ "lambada_openai": {
16
+ "task": "lambada_openai",
17
+ "group": [
18
+ "lambada"
19
+ ],
20
+ "dataset_path": "EleutherAI/lambada_openai",
21
+ "dataset_name": "default",
22
+ "dataset_kwargs": {
23
+ "trust_remote_code": true
24
+ },
25
+ "test_split": "test",
26
+ "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
27
+ "doc_to_target": "{{' '+text.split(' ')[-1]}}",
28
+ "description": "",
29
+ "target_delimiter": " ",
30
+ "fewshot_delimiter": "\n\n",
31
+ "num_fewshot": 0,
32
+ "metric_list": [
33
+ {
34
+ "metric": "perplexity",
35
+ "aggregation": "perplexity",
36
+ "higher_is_better": false
37
+ },
38
+ {
39
+ "metric": "acc",
40
+ "aggregation": "mean",
41
+ "higher_is_better": true
42
+ }
43
+ ],
44
+ "output_type": "loglikelihood",
45
+ "repeats": 1,
46
+ "should_decontaminate": true,
47
+ "doc_to_decontamination_query": "{{text}}",
48
+ "metadata": {
49
+ "version": 1.0
50
+ }
51
+ }
52
+ },
53
+ "versions": {
54
+ "lambada_openai": 1.0
55
+ },
56
+ "n-shot": {
57
+ "lambada_openai": 0
58
+ },
59
+ "n-samples": {
60
+ "lambada_openai": {
61
+ "original": 5153,
62
+ "effective": 5153
63
+ }
64
+ },
65
+ "config": {
66
+ "model": "hf",
67
+ "model_args": "pretrained=EleutherAI/pythia-410m-seed1,revision=step110000",
68
+ "model_num_parameters": 405334016,
69
+ "model_dtype": "torch.float16",
70
+ "model_revision": "step110000",
71
+ "model_sha": "9391bd1401c486ab6834c4112eabb910b26e4a5e",
72
+ "batch_size": "128",
73
+ "batch_sizes": [],
74
+ "device": "cuda",
75
+ "use_cache": null,
76
+ "limit": null,
77
+ "bootstrap_iters": 100000,
78
+ "gen_kwargs": null,
79
+ "random_seed": 0,
80
+ "numpy_seed": 1234,
81
+ "torch_seed": 1234,
82
+ "fewshot_seed": 1234
83
+ },
84
+ "git_hash": "51a7ca9",
85
+ "date": 1724109188.517649,
86
+ "pretty_env_info": "PyTorch version: 2.3.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: CentOS Linux release 7.9.2009 (Core) (x86_64)\nGCC version: (GCC) 12.1.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.17\n\nPython version: 3.12.1 (main, Jan 12 2024, 16:49:08) [GCC 12.1.0] (64-bit runtime)\nPython platform: Linux-3.10.0-1160.118.1.el7.x86_64-x86_64-with-glibc2.17\nIs CUDA available: True\nCUDA runtime version: 12.4.99\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: GPU 0: Tesla V100-PCIE-32GB\nNvidia driver version: 550.54.15\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nByte Order: Little Endian\nCPU(s): 24\nOn-line CPU(s) list: 0-23\nThread(s) per core: 1\nCore(s) per socket: 12\nSocket(s): 2\nNUMA node(s): 2\nVendor ID: GenuineIntel\nCPU family: 6\nModel: 85\nModel name: Intel(R) Xeon(R) Gold 5118 CPU @ 2.30GHz\nStepping: 4\nCPU MHz: 1195.202\nCPU max MHz: 3200.0000\nCPU min MHz: 1000.0000\nBogoMIPS: 4600.00\nVirtualization: VT-x\nL1d cache: 32K\nL1i cache: 32K\nL2 cache: 1024K\nL3 cache: 16896K\nNUMA node0 CPU(s): 0,2,4,6,8,10,12,14,16,18,20,22\nNUMA node1 CPU(s): 1,3,5,7,9,11,13,15,17,19,21,23\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx pdpe1gb rdtscp lm constant_tsc art arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc aperfmperf eagerfpu pni pclmulqdq dtes64 monitor ds_cpl vmx smx est tm2 ssse3 sdbg fma cx16 xtpr pdcm pcid dca sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand lahf_lm abm 3dnowprefetch epb cat_l3 cdp_l3 invpcid_single intel_ppin ssbd mba ibrs ibpb stibp tpr_shadow vnmi flexpriority ept vpid fsgsbase tsc_adjust bmi1 hle avx2 smep bmi2 erms invpcid rtm cqm mpx rdt_a avx512f avx512dq rdseed adx smap clflushopt clwb intel_pt avx512cd avx512bw avx512vl xsaveopt xsavec xgetbv1 cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local dtherm ida arat pln pts pku ospke md_clear spec_ctrl intel_stibp flush_l1d arch_capabilities\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.3.0\n[conda] Could not collect",
87
+ "transformers_version": "4.40.2",
88
+ "upper_git_hash": null,
89
+ "task_hashes": {},
90
+ "model_source": "hf",
91
+ "model_name": "EleutherAI/pythia-410m-seed1",
92
+ "model_name_sanitized": "EleutherAI__pythia-410m-seed1",
93
+ "start_time": 4386557.713760981,
94
+ "end_time": 4386674.866760632,
95
+ "total_evaluation_time_seconds": "117.15299965068698"
96
+ }
pythia-410m-seed1/step111000/EleutherAI__pythia-410m-seed1/results_2024-08-19T14-01-28.609515.json ADDED
@@ -0,0 +1,96 @@
1
+ {
2
+ "results": {
3
+ "lambada_openai": {
4
+ "perplexity,none": 11.461871869288272,
5
+ "perplexity_stderr,none": 0.3491823384972304,
6
+ "acc,none": 0.5033960799534252,
7
+ "acc_stderr,none": 0.006965816973095752,
8
+ "alias": "lambada_openai"
9
+ }
10
+ },
11
+ "group_subtasks": {
12
+ "lambada_openai": []
13
+ },
14
+ "configs": {
15
+ "lambada_openai": {
16
+ "task": "lambada_openai",
17
+ "group": [
18
+ "lambada"
19
+ ],
20
+ "dataset_path": "EleutherAI/lambada_openai",
21
+ "dataset_name": "default",
22
+ "dataset_kwargs": {
23
+ "trust_remote_code": true
24
+ },
25
+ "test_split": "test",
26
+ "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
27
+ "doc_to_target": "{{' '+text.split(' ')[-1]}}",
28
+ "description": "",
29
+ "target_delimiter": " ",
30
+ "fewshot_delimiter": "\n\n",
31
+ "num_fewshot": 0,
32
+ "metric_list": [
33
+ {
34
+ "metric": "perplexity",
35
+ "aggregation": "perplexity",
36
+ "higher_is_better": false
37
+ },
38
+ {
39
+ "metric": "acc",
40
+ "aggregation": "mean",
41
+ "higher_is_better": true
42
+ }
43
+ ],
44
+ "output_type": "loglikelihood",
45
+ "repeats": 1,
46
+ "should_decontaminate": true,
47
+ "doc_to_decontamination_query": "{{text}}",
48
+ "metadata": {
49
+ "version": 1.0
50
+ }
51
+ }
52
+ },
53
+ "versions": {
54
+ "lambada_openai": 1.0
55
+ },
56
+ "n-shot": {
57
+ "lambada_openai": 0
58
+ },
59
+ "n-samples": {
60
+ "lambada_openai": {
61
+ "original": 5153,
62
+ "effective": 5153
63
+ }
64
+ },
65
+ "config": {
66
+ "model": "hf",
67
+ "model_args": "pretrained=EleutherAI/pythia-410m-seed1,revision=step111000,",
68
+ "model_num_parameters": 405334016,
69
+ "model_dtype": "torch.float16",
70
+ "model_revision": "step111000",
71
+ "model_sha": "671c144faf77504dbe225c21d9c48cf35d06d611",
72
+ "batch_size": "128",
73
+ "batch_sizes": [],
74
+ "device": "cuda",
75
+ "use_cache": null,
76
+ "limit": null,
77
+ "bootstrap_iters": 100000,
78
+ "gen_kwargs": null,
79
+ "random_seed": 0,
80
+ "numpy_seed": 1234,
81
+ "torch_seed": 1234,
82
+ "fewshot_seed": 1234
83
+ },
84
+ "git_hash": "51a7ca9",
85
+ "date": 1724101179.5987117,
86
+ "pretty_env_info": "PyTorch version: 2.3.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: CentOS Linux release 7.9.2009 (Core) (x86_64)\nGCC version: (GCC) 12.1.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.17\n\nPython version: 3.12.1 (main, Jan 12 2024, 16:49:08) [GCC 12.1.0] (64-bit runtime)\nPython platform: Linux-3.10.0-1160.119.1.el7.tuxcare.els2.x86_64-x86_64-with-glibc2.17\nIs CUDA available: True\nCUDA runtime version: 12.4.99\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: GPU 0: Tesla V100-PCIE-32GB\nNvidia driver version: 550.90.07\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nByte Order: Little Endian\nCPU(s): 24\nOn-line CPU(s) list: 0-23\nThread(s) per core: 1\nCore(s) per socket: 12\nSocket(s): 2\nNUMA node(s): 2\nVendor ID: GenuineIntel\nCPU family: 6\nModel: 85\nModel name: Intel(R) Xeon(R) Gold 5118 CPU @ 2.30GHz\nStepping: 4\nCPU MHz: 2820.111\nCPU max MHz: 3200.0000\nCPU min MHz: 1000.0000\nBogoMIPS: 4600.00\nVirtualization: VT-x\nL1d cache: 32K\nL1i cache: 32K\nL2 cache: 1024K\nL3 cache: 16896K\nNUMA node0 CPU(s): 0,2,4,6,8,10,12,14,16,18,20,22\nNUMA node1 CPU(s): 1,3,5,7,9,11,13,15,17,19,21,23\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx pdpe1gb rdtscp lm constant_tsc art arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc aperfmperf eagerfpu pni pclmulqdq dtes64 monitor ds_cpl vmx smx est tm2 ssse3 sdbg fma cx16 xtpr pdcm pcid dca sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand lahf_lm abm 3dnowprefetch epb cat_l3 cdp_l3 invpcid_single intel_ppin ssbd mba ibrs ibpb stibp tpr_shadow vnmi flexpriority ept vpid fsgsbase tsc_adjust bmi1 hle avx2 smep bmi2 erms invpcid rtm cqm mpx rdt_a avx512f avx512dq rdseed adx smap clflushopt clwb intel_pt avx512cd avx512bw avx512vl xsaveopt xsavec xgetbv1 cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local dtherm ida arat pln pts pku ospke md_clear spec_ctrl intel_stibp flush_l1d arch_capabilities\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.3.0\n[conda] Could not collect",
87
+ "transformers_version": "4.40.2",
88
+ "upper_git_hash": null,
89
+ "task_hashes": {},
90
+ "model_source": "hf",
91
+ "model_name": "EleutherAI/pythia-410m-seed1",
92
+ "model_name_sanitized": "EleutherAI__pythia-410m-seed1",
93
+ "start_time": 1480652.672111797,
94
+ "end_time": 1480775.141290772,
95
+ "total_evaluation_time_seconds": "122.46917897509411"
96
+ }
pythia-410m-seed1/step112000/EleutherAI__pythia-410m-seed1/results_2024-08-19T14-03-42.866090.json ADDED
@@ -0,0 +1,96 @@
1
+ {
2
+ "results": {
3
+ "lambada_openai": {
4
+ "perplexity,none": 11.712944822265866,
5
+ "perplexity_stderr,none": 0.3532333757288862,
6
+ "acc,none": 0.5037842033766737,
7
+ "acc_stderr,none": 0.0069657781460152664,
8
+ "alias": "lambada_openai"
9
+ }
10
+ },
11
+ "group_subtasks": {
12
+ "lambada_openai": []
13
+ },
14
+ "configs": {
15
+ "lambada_openai": {
16
+ "task": "lambada_openai",
17
+ "group": [
18
+ "lambada"
19
+ ],
20
+ "dataset_path": "EleutherAI/lambada_openai",
21
+ "dataset_name": "default",
22
+ "dataset_kwargs": {
23
+ "trust_remote_code": true
24
+ },
25
+ "test_split": "test",
26
+ "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
27
+ "doc_to_target": "{{' '+text.split(' ')[-1]}}",
28
+ "description": "",
29
+ "target_delimiter": " ",
30
+ "fewshot_delimiter": "\n\n",
31
+ "num_fewshot": 0,
32
+ "metric_list": [
33
+ {
34
+ "metric": "perplexity",
35
+ "aggregation": "perplexity",
36
+ "higher_is_better": false
37
+ },
38
+ {
39
+ "metric": "acc",
40
+ "aggregation": "mean",
41
+ "higher_is_better": true
42
+ }
43
+ ],
44
+ "output_type": "loglikelihood",
45
+ "repeats": 1,
46
+ "should_decontaminate": true,
47
+ "doc_to_decontamination_query": "{{text}}",
48
+ "metadata": {
49
+ "version": 1.0
50
+ }
51
+ }
52
+ },
53
+ "versions": {
54
+ "lambada_openai": 1.0
55
+ },
56
+ "n-shot": {
57
+ "lambada_openai": 0
58
+ },
59
+ "n-samples": {
60
+ "lambada_openai": {
61
+ "original": 5153,
62
+ "effective": 5153
63
+ }
64
+ },
65
+ "config": {
66
+ "model": "hf",
67
+ "model_args": "pretrained=EleutherAI/pythia-410m-seed1,revision=step112000,",
68
+ "model_num_parameters": 405334016,
69
+ "model_dtype": "torch.float16",
70
+ "model_revision": "step112000",
71
+ "model_sha": "bc9539706ec5543d475ccbc041e4626c25d24b84",
72
+ "batch_size": "128",
73
+ "batch_sizes": [],
74
+ "device": "cuda",
75
+ "use_cache": null,
76
+ "limit": null,
77
+ "bootstrap_iters": 100000,
78
+ "gen_kwargs": null,
79
+ "random_seed": 0,
80
+ "numpy_seed": 1234,
81
+ "torch_seed": 1234,
82
+ "fewshot_seed": 1234
83
+ },
84
+ "git_hash": "51a7ca9",
85
+ "date": 1724101313.986077,
86
+ "pretty_env_info": "PyTorch version: 2.3.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: CentOS Linux release 7.9.2009 (Core) (x86_64)\nGCC version: (GCC) 12.1.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.17\n\nPython version: 3.12.1 (main, Jan 12 2024, 16:49:08) [GCC 12.1.0] (64-bit runtime)\nPython platform: Linux-3.10.0-1160.119.1.el7.tuxcare.els2.x86_64-x86_64-with-glibc2.17\nIs CUDA available: True\nCUDA runtime version: 12.4.99\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: GPU 0: Tesla V100-PCIE-32GB\nNvidia driver version: 550.90.07\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nByte Order: Little Endian\nCPU(s): 24\nOn-line CPU(s) list: 0-23\nThread(s) per core: 1\nCore(s) per socket: 12\nSocket(s): 2\nNUMA node(s): 2\nVendor ID: GenuineIntel\nCPU family: 6\nModel: 85\nModel name: Intel(R) Xeon(R) Gold 5118 CPU @ 2.30GHz\nStepping: 4\nCPU MHz: 2979.162\nCPU max MHz: 3200.0000\nCPU min MHz: 1000.0000\nBogoMIPS: 4600.00\nVirtualization: VT-x\nL1d cache: 32K\nL1i cache: 32K\nL2 cache: 1024K\nL3 cache: 16896K\nNUMA node0 CPU(s): 0,2,4,6,8,10,12,14,16,18,20,22\nNUMA node1 CPU(s): 1,3,5,7,9,11,13,15,17,19,21,23\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx pdpe1gb rdtscp lm constant_tsc art arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc aperfmperf eagerfpu pni pclmulqdq dtes64 monitor ds_cpl vmx smx est tm2 ssse3 sdbg fma cx16 xtpr pdcm pcid dca sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand lahf_lm abm 3dnowprefetch epb cat_l3 cdp_l3 invpcid_single intel_ppin ssbd mba ibrs ibpb stibp tpr_shadow vnmi flexpriority ept vpid fsgsbase tsc_adjust bmi1 hle avx2 smep bmi2 erms invpcid rtm cqm mpx rdt_a avx512f avx512dq rdseed adx smap clflushopt clwb intel_pt avx512cd avx512bw avx512vl xsaveopt xsavec xgetbv1 cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local dtherm ida arat pln pts pku ospke md_clear spec_ctrl intel_stibp flush_l1d arch_capabilities\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.3.0\n[conda] Could not collect",
87
+ "transformers_version": "4.40.2",
88
+ "upper_git_hash": null,
89
+ "task_hashes": {},
90
+ "model_source": "hf",
91
+ "model_name": "EleutherAI/pythia-410m-seed1",
92
+ "model_name_sanitized": "EleutherAI__pythia-410m-seed1",
93
+ "start_time": 1480784.59665149,
94
+ "end_time": 1480909.3982658,
95
+ "total_evaluation_time_seconds": "124.80161430989392"
96
+ }
pythia-410m-seed1/step113000/EleutherAI__pythia-410m-seed1/results_2024-08-19T14-05-53.516831.json ADDED
@@ -0,0 +1,96 @@
1
+ {
2
+ "results": {
3
+ "lambada_openai": {
4
+ "perplexity,none": 10.869634471512853,
5
+ "perplexity_stderr,none": 0.3254716107281577,
6
+ "acc,none": 0.524160683097225,
7
+ "acc_stderr,none": 0.006957840284118925,
8
+ "alias": "lambada_openai"
9
+ }
10
+ },
11
+ "group_subtasks": {
12
+ "lambada_openai": []
13
+ },
14
+ "configs": {
15
+ "lambada_openai": {
16
+ "task": "lambada_openai",
17
+ "group": [
18
+ "lambada"
19
+ ],
20
+ "dataset_path": "EleutherAI/lambada_openai",
21
+ "dataset_name": "default",
22
+ "dataset_kwargs": {
23
+ "trust_remote_code": true
24
+ },
25
+ "test_split": "test",
26
+ "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
27
+ "doc_to_target": "{{' '+text.split(' ')[-1]}}",
28
+ "description": "",
29
+ "target_delimiter": " ",
30
+ "fewshot_delimiter": "\n\n",
31
+ "num_fewshot": 0,
32
+ "metric_list": [
33
+ {
34
+ "metric": "perplexity",
35
+ "aggregation": "perplexity",
36
+ "higher_is_better": false
37
+ },
38
+ {
39
+ "metric": "acc",
40
+ "aggregation": "mean",
41
+ "higher_is_better": true
42
+ }
43
+ ],
44
+ "output_type": "loglikelihood",
45
+ "repeats": 1,
46
+ "should_decontaminate": true,
47
+ "doc_to_decontamination_query": "{{text}}",
48
+ "metadata": {
49
+ "version": 1.0
50
+ }
51
+ }
52
+ },
53
+ "versions": {
54
+ "lambada_openai": 1.0
55
+ },
56
+ "n-shot": {
57
+ "lambada_openai": 0
58
+ },
59
+ "n-samples": {
60
+ "lambada_openai": {
61
+ "original": 5153,
62
+ "effective": 5153
63
+ }
64
+ },
65
+ "config": {
66
+ "model": "hf",
67
+ "model_args": "pretrained=EleutherAI/pythia-410m-seed1,revision=step113000,",
68
+ "model_num_parameters": 405334016,
69
+ "model_dtype": "torch.float16",
70
+ "model_revision": "step113000",
71
+ "model_sha": "bff03de69e3fddd007795cfc9bcb4f8d36f6eb76",
72
+ "batch_size": "128",
73
+ "batch_sizes": [],
74
+ "device": "cuda",
75
+ "use_cache": null,
76
+ "limit": null,
77
+ "bootstrap_iters": 100000,
78
+ "gen_kwargs": null,
79
+ "random_seed": 0,
80
+ "numpy_seed": 1234,
81
+ "torch_seed": 1234,
82
+ "fewshot_seed": 1234
83
+ },
84
+ "git_hash": "51a7ca9",
85
+ "date": 1724101447.5780346,
86
+ "pretty_env_info": "PyTorch version: 2.3.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: CentOS Linux release 7.9.2009 (Core) (x86_64)\nGCC version: (GCC) 12.1.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.17\n\nPython version: 3.12.1 (main, Jan 12 2024, 16:49:08) [GCC 12.1.0] (64-bit runtime)\nPython platform: Linux-3.10.0-1160.119.1.el7.tuxcare.els2.x86_64-x86_64-with-glibc2.17\nIs CUDA available: True\nCUDA runtime version: 12.4.99\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: GPU 0: Tesla V100-PCIE-32GB\nNvidia driver version: 550.90.07\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nByte Order: Little Endian\nCPU(s): 24\nOn-line CPU(s) list: 0-23\nThread(s) per core: 1\nCore(s) per socket: 12\nSocket(s): 2\nNUMA node(s): 2\nVendor ID: GenuineIntel\nCPU family: 6\nModel: 85\nModel name: Intel(R) Xeon(R) Gold 5118 CPU @ 2.30GHz\nStepping: 4\nCPU MHz: 3017.486\nCPU max MHz: 3200.0000\nCPU min MHz: 1000.0000\nBogoMIPS: 4600.00\nVirtualization: VT-x\nL1d cache: 32K\nL1i cache: 32K\nL2 cache: 1024K\nL3 cache: 16896K\nNUMA node0 CPU(s): 0,2,4,6,8,10,12,14,16,18,20,22\nNUMA node1 CPU(s): 1,3,5,7,9,11,13,15,17,19,21,23\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx pdpe1gb rdtscp lm constant_tsc art arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc aperfmperf eagerfpu pni pclmulqdq dtes64 monitor ds_cpl vmx smx est tm2 ssse3 sdbg fma cx16 xtpr pdcm pcid dca sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand lahf_lm abm 3dnowprefetch epb cat_l3 cdp_l3 invpcid_single intel_ppin ssbd mba ibrs ibpb stibp tpr_shadow vnmi flexpriority ept vpid fsgsbase tsc_adjust bmi1 hle avx2 smep bmi2 erms invpcid rtm cqm mpx rdt_a avx512f avx512dq rdseed adx smap clflushopt clwb intel_pt avx512cd avx512bw avx512vl xsaveopt xsavec xgetbv1 cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local dtherm ida arat pln pts pku ospke md_clear spec_ctrl intel_stibp flush_l1d arch_capabilities\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.3.0\n[conda] Could not collect",
87
+ "transformers_version": "4.40.2",
88
+ "upper_git_hash": null,
89
+ "task_hashes": {},
90
+ "model_source": "hf",
91
+ "model_name": "EleutherAI/pythia-410m-seed1",
92
+ "model_name_sanitized": "EleutherAI__pythia-410m-seed1",
93
+ "start_time": 1480918.203105447,
94
+ "end_time": 1481040.048962766,
95
+ "total_evaluation_time_seconds": "121.8458573189564"
96
+ }
pythia-410m-seed1/step114000/EleutherAI__pythia-410m-seed1/results_2024-08-19T14-08-07.802761.json ADDED
@@ -0,0 +1,96 @@
1
+ {
2
+ "results": {
3
+ "lambada_openai": {
4
+ "perplexity,none": 12.50402044976452,
5
+ "perplexity_stderr,none": 0.37808045830045844,
6
+ "acc,none": 0.4917523772559674,
7
+ "acc_stderr,none": 0.006965029895407433,
8
+ "alias": "lambada_openai"
9
+ }
10
+ },
11
+ "group_subtasks": {
12
+ "lambada_openai": []
13
+ },
14
+ "configs": {
15
+ "lambada_openai": {
16
+ "task": "lambada_openai",
17
+ "group": [
18
+ "lambada"
19
+ ],
20
+ "dataset_path": "EleutherAI/lambada_openai",
21
+ "dataset_name": "default",
22
+ "dataset_kwargs": {
23
+ "trust_remote_code": true
24
+ },
25
+ "test_split": "test",
26
+ "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
27
+ "doc_to_target": "{{' '+text.split(' ')[-1]}}",
28
+ "description": "",
29
+ "target_delimiter": " ",
30
+ "fewshot_delimiter": "\n\n",
31
+ "num_fewshot": 0,
32
+ "metric_list": [
33
+ {
34
+ "metric": "perplexity",
35
+ "aggregation": "perplexity",
36
+ "higher_is_better": false
37
+ },
38
+ {
39
+ "metric": "acc",
40
+ "aggregation": "mean",
41
+ "higher_is_better": true
42
+ }
43
+ ],
44
+ "output_type": "loglikelihood",
45
+ "repeats": 1,
46
+ "should_decontaminate": true,
47
+ "doc_to_decontamination_query": "{{text}}",
48
+ "metadata": {
49
+ "version": 1.0
50
+ }
51
+ }
52
+ },
53
+ "versions": {
54
+ "lambada_openai": 1.0
55
+ },
56
+ "n-shot": {
57
+ "lambada_openai": 0
58
+ },
59
+ "n-samples": {
60
+ "lambada_openai": {
61
+ "original": 5153,
62
+ "effective": 5153
63
+ }
64
+ },
65
+ "config": {
66
+ "model": "hf",
67
+ "model_args": "pretrained=EleutherAI/pythia-410m-seed1,revision=step114000,",
68
+ "model_num_parameters": 405334016,
69
+ "model_dtype": "torch.float16",
70
+ "model_revision": "step114000",
71
+ "model_sha": "e3f197e747f179a121e894ff446f2b2a7d247b17",
72
+ "batch_size": "128",
73
+ "batch_sizes": [],
74
+ "device": "cuda",
75
+ "use_cache": null,
76
+ "limit": null,
77
+ "bootstrap_iters": 100000,
78
+ "gen_kwargs": null,
79
+ "random_seed": 0,
80
+ "numpy_seed": 1234,
81
+ "torch_seed": 1234,
82
+ "fewshot_seed": 1234
83
+ },
84
+ "git_hash": "51a7ca9",
85
+ "date": 1724101578.8129156,
86
+ "pretty_env_info": "PyTorch version: 2.3.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: CentOS Linux release 7.9.2009 (Core) (x86_64)\nGCC version: (GCC) 12.1.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.17\n\nPython version: 3.12.1 (main, Jan 12 2024, 16:49:08) [GCC 12.1.0] (64-bit runtime)\nPython platform: Linux-3.10.0-1160.119.1.el7.tuxcare.els2.x86_64-x86_64-with-glibc2.17\nIs CUDA available: True\nCUDA runtime version: 12.4.99\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: GPU 0: Tesla V100-PCIE-32GB\nNvidia driver version: 550.90.07\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nByte Order: Little Endian\nCPU(s): 24\nOn-line CPU(s) list: 0-23\nThread(s) per core: 1\nCore(s) per socket: 12\nSocket(s): 2\nNUMA node(s): 2\nVendor ID: GenuineIntel\nCPU family: 6\nModel: 85\nModel name: Intel(R) Xeon(R) Gold 5118 CPU @ 2.30GHz\nStepping: 4\nCPU MHz: 2908.270\nCPU max MHz: 3200.0000\nCPU min MHz: 1000.0000\nBogoMIPS: 4600.00\nVirtualization: VT-x\nL1d cache: 32K\nL1i cache: 32K\nL2 cache: 1024K\nL3 cache: 16896K\nNUMA node0 CPU(s): 0,2,4,6,8,10,12,14,16,18,20,22\nNUMA node1 CPU(s): 1,3,5,7,9,11,13,15,17,19,21,23\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx pdpe1gb rdtscp lm constant_tsc art arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc aperfmperf eagerfpu pni pclmulqdq dtes64 monitor ds_cpl vmx smx est tm2 ssse3 sdbg fma cx16 xtpr pdcm pcid dca sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand lahf_lm abm 3dnowprefetch epb cat_l3 cdp_l3 invpcid_single intel_ppin ssbd mba ibrs ibpb stibp tpr_shadow vnmi flexpriority ept vpid fsgsbase tsc_adjust bmi1 hle avx2 smep bmi2 erms invpcid rtm cqm mpx rdt_a avx512f avx512dq rdseed adx smap clflushopt clwb intel_pt avx512cd avx512bw avx512vl xsaveopt xsavec xgetbv1 cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local dtherm ida arat pln pts pku ospke md_clear spec_ctrl intel_stibp flush_l1d arch_capabilities\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.3.0\n[conda] Could not collect",
87
+ "transformers_version": "4.40.2",
88
+ "upper_git_hash": null,
89
+ "task_hashes": {},
90
+ "model_source": "hf",
91
+ "model_name": "EleutherAI/pythia-410m-seed1",
92
+ "model_name_sanitized": "EleutherAI__pythia-410m-seed1",
93
+ "start_time": 1481049.142743861,
94
+ "end_time": 1481174.335194622,
95
+ "total_evaluation_time_seconds": "125.19245076086372"
96
+ }
pythia-410m-seed1/step115000/EleutherAI__pythia-410m-seed1/results_2024-08-19T14-10-18.845461.json ADDED
@@ -0,0 +1,96 @@
1
+ {
2
+ "results": {
3
+ "lambada_openai": {
4
+ "perplexity,none": 12.132550206217747,
5
+ "perplexity_stderr,none": 0.369324864443418,
6
+ "acc,none": 0.49951484572093924,
7
+ "acc_stderr,none": 0.00696597437796169,
8
+ "alias": "lambada_openai"
9
+ }
10
+ },
11
+ "group_subtasks": {
12
+ "lambada_openai": []
13
+ },
14
+ "configs": {
15
+ "lambada_openai": {
16
+ "task": "lambada_openai",
17
+ "group": [
18
+ "lambada"
19
+ ],
20
+ "dataset_path": "EleutherAI/lambada_openai",
21
+ "dataset_name": "default",
22
+ "dataset_kwargs": {
23
+ "trust_remote_code": true
24
+ },
25
+ "test_split": "test",
26
+ "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
27
+ "doc_to_target": "{{' '+text.split(' ')[-1]}}",
28
+ "description": "",
29
+ "target_delimiter": " ",
30
+ "fewshot_delimiter": "\n\n",
31
+ "num_fewshot": 0,
32
+ "metric_list": [
33
+ {
34
+ "metric": "perplexity",
35
+ "aggregation": "perplexity",
36
+ "higher_is_better": false
37
+ },
38
+ {
39
+ "metric": "acc",
40
+ "aggregation": "mean",
41
+ "higher_is_better": true
42
+ }
43
+ ],
44
+ "output_type": "loglikelihood",
45
+ "repeats": 1,
46
+ "should_decontaminate": true,
47
+ "doc_to_decontamination_query": "{{text}}",
48
+ "metadata": {
49
+ "version": 1.0
50
+ }
51
+ }
52
+ },
53
+ "versions": {
54
+ "lambada_openai": 1.0
55
+ },
56
+ "n-shot": {
57
+ "lambada_openai": 0
58
+ },
59
+ "n-samples": {
60
+ "lambada_openai": {
61
+ "original": 5153,
62
+ "effective": 5153
63
+ }
64
+ },
65
+ "config": {
66
+ "model": "hf",
67
+ "model_args": "pretrained=EleutherAI/pythia-410m-seed1,revision=step115000,",
68
+ "model_num_parameters": 405334016,
69
+ "model_dtype": "torch.float16",
70
+ "model_revision": "step115000",
71
+ "model_sha": "792720a905b9ad86863f0b288026148ee11822bb",
72
+ "batch_size": "128",
73
+ "batch_sizes": [],
74
+ "device": "cuda",
75
+ "use_cache": null,
76
+ "limit": null,
77
+ "bootstrap_iters": 100000,
78
+ "gen_kwargs": null,
79
+ "random_seed": 0,
80
+ "numpy_seed": 1234,
81
+ "torch_seed": 1234,
82
+ "fewshot_seed": 1234
83
+ },
84
+ "git_hash": "51a7ca9",
85
+ "date": 1724101712.9387066,
86
+ "pretty_env_info": "PyTorch version: 2.3.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: CentOS Linux release 7.9.2009 (Core) (x86_64)\nGCC version: (GCC) 12.1.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.17\n\nPython version: 3.12.1 (main, Jan 12 2024, 16:49:08) [GCC 12.1.0] (64-bit runtime)\nPython platform: Linux-3.10.0-1160.119.1.el7.tuxcare.els2.x86_64-x86_64-with-glibc2.17\nIs CUDA available: True\nCUDA runtime version: 12.4.99\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: GPU 0: Tesla V100-PCIE-32GB\nNvidia driver version: 550.90.07\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nByte Order: Little Endian\nCPU(s): 24\nOn-line CPU(s) list: 0-23\nThread(s) per core: 1\nCore(s) per socket: 12\nSocket(s): 2\nNUMA node(s): 2\nVendor ID: GenuineIntel\nCPU family: 6\nModel: 85\nModel name: Intel(R) Xeon(R) Gold 5118 CPU @ 2.30GHz\nStepping: 4\nCPU MHz: 3199.981\nCPU max MHz: 3200.0000\nCPU min MHz: 1000.0000\nBogoMIPS: 4600.00\nVirtualization: VT-x\nL1d cache: 32K\nL1i cache: 32K\nL2 cache: 1024K\nL3 cache: 16896K\nNUMA node0 CPU(s): 0,2,4,6,8,10,12,14,16,18,20,22\nNUMA node1 CPU(s): 1,3,5,7,9,11,13,15,17,19,21,23\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx pdpe1gb rdtscp lm constant_tsc art arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc aperfmperf eagerfpu pni pclmulqdq dtes64 monitor ds_cpl vmx smx est tm2 ssse3 sdbg fma cx16 xtpr pdcm pcid dca sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand lahf_lm abm 3dnowprefetch epb cat_l3 cdp_l3 invpcid_single intel_ppin ssbd mba ibrs ibpb stibp tpr_shadow vnmi flexpriority ept vpid fsgsbase tsc_adjust bmi1 hle avx2 smep bmi2 erms invpcid rtm cqm mpx rdt_a avx512f avx512dq rdseed adx smap clflushopt clwb intel_pt avx512cd avx512bw avx512vl xsaveopt xsavec xgetbv1 cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local dtherm ida arat pln pts pku ospke md_clear spec_ctrl intel_stibp flush_l1d arch_capabilities\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.3.0\n[conda] Could not collect",
87
+ "transformers_version": "4.40.2",
88
+ "upper_git_hash": null,
89
+ "task_hashes": {},
90
+ "model_source": "hf",
91
+ "model_name": "EleutherAI/pythia-410m-seed1",
92
+ "model_name_sanitized": "EleutherAI__pythia-410m-seed1",
93
+ "start_time": 1481183.619114827,
94
+ "end_time": 1481305.377479916,
95
+ "total_evaluation_time_seconds": "121.75836508907378"
96
+ }
pythia-410m-seed1/step116000/EleutherAI__pythia-410m-seed1/results_2024-08-19T14-12-39.685880.json ADDED
@@ -0,0 +1,96 @@
1
+ {
2
+ "results": {
3
+ "lambada_openai": {
4
+ "perplexity,none": 11.02176016565918,
5
+ "perplexity_stderr,none": 0.33186723520316236,
6
+ "acc,none": 0.5195032020182418,
7
+ "acc_stderr,none": 0.00696067627395547,
8
+ "alias": "lambada_openai"
9
+ }
10
+ },
11
+ "group_subtasks": {
12
+ "lambada_openai": []
13
+ },
14
+ "configs": {
15
+ "lambada_openai": {
16
+ "task": "lambada_openai",
17
+ "group": [
18
+ "lambada"
19
+ ],
20
+ "dataset_path": "EleutherAI/lambada_openai",
21
+ "dataset_name": "default",
22
+ "dataset_kwargs": {
23
+ "trust_remote_code": true
24
+ },
25
+ "test_split": "test",
26
+ "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
27
+ "doc_to_target": "{{' '+text.split(' ')[-1]}}",
28
+ "description": "",
29
+ "target_delimiter": " ",
30
+ "fewshot_delimiter": "\n\n",
31
+ "num_fewshot": 0,
32
+ "metric_list": [
33
+ {
34
+ "metric": "perplexity",
35
+ "aggregation": "perplexity",
36
+ "higher_is_better": false
37
+ },
38
+ {
39
+ "metric": "acc",
40
+ "aggregation": "mean",
41
+ "higher_is_better": true
42
+ }
43
+ ],
44
+ "output_type": "loglikelihood",
45
+ "repeats": 1,
46
+ "should_decontaminate": true,
47
+ "doc_to_decontamination_query": "{{text}}",
48
+ "metadata": {
49
+ "version": 1.0
50
+ }
51
+ }
52
+ },
53
+ "versions": {
54
+ "lambada_openai": 1.0
55
+ },
56
+ "n-shot": {
57
+ "lambada_openai": 0
58
+ },
59
+ "n-samples": {
60
+ "lambada_openai": {
61
+ "original": 5153,
62
+ "effective": 5153
63
+ }
64
+ },
65
+ "config": {
66
+ "model": "hf",
67
+ "model_args": "pretrained=EleutherAI/pythia-410m-seed1,revision=step116000,",
68
+ "model_num_parameters": 405334016,
69
+ "model_dtype": "torch.float16",
70
+ "model_revision": "step116000",
71
+ "model_sha": "3c5b1aea6f7ae24651c41191d9b04b286c84b317",
72
+ "batch_size": "128",
73
+ "batch_sizes": [],
74
+ "device": "cuda",
75
+ "use_cache": null,
76
+ "limit": null,
77
+ "bootstrap_iters": 100000,
78
+ "gen_kwargs": null,
79
+ "random_seed": 0,
80
+ "numpy_seed": 1234,
81
+ "torch_seed": 1234,
82
+ "fewshot_seed": 1234
83
+ },
84
+ "git_hash": "51a7ca9",
85
+ "date": 1724101844.4310477,
86
+ "pretty_env_info": "PyTorch version: 2.3.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: CentOS Linux release 7.9.2009 (Core) (x86_64)\nGCC version: (GCC) 12.1.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.17\n\nPython version: 3.12.1 (main, Jan 12 2024, 16:49:08) [GCC 12.1.0] (64-bit runtime)\nPython platform: Linux-3.10.0-1160.119.1.el7.tuxcare.els2.x86_64-x86_64-with-glibc2.17\nIs CUDA available: True\nCUDA runtime version: 12.4.99\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: GPU 0: Tesla V100-PCIE-32GB\nNvidia driver version: 550.90.07\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nByte Order: Little Endian\nCPU(s): 24\nOn-line CPU(s) list: 0-23\nThread(s) per core: 1\nCore(s) per socket: 12\nSocket(s): 2\nNUMA node(s): 2\nVendor ID: GenuineIntel\nCPU family: 6\nModel: 85\nModel name: Intel(R) Xeon(R) Gold 5118 CPU @ 2.30GHz\nStepping: 4\nCPU MHz: 2940.838\nCPU max MHz: 3200.0000\nCPU min MHz: 1000.0000\nBogoMIPS: 4600.00\nVirtualization: VT-x\nL1d cache: 32K\nL1i cache: 32K\nL2 cache: 1024K\nL3 cache: 16896K\nNUMA node0 CPU(s): 0,2,4,6,8,10,12,14,16,18,20,22\nNUMA node1 CPU(s): 1,3,5,7,9,11,13,15,17,19,21,23\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx pdpe1gb rdtscp lm constant_tsc art arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc aperfmperf eagerfpu pni pclmulqdq dtes64 monitor ds_cpl vmx smx est tm2 ssse3 sdbg fma cx16 xtpr pdcm pcid dca sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand lahf_lm abm 3dnowprefetch epb cat_l3 cdp_l3 invpcid_single intel_ppin ssbd mba ibrs ibpb stibp tpr_shadow vnmi flexpriority ept vpid fsgsbase tsc_adjust bmi1 hle avx2 smep bmi2 erms invpcid rtm cqm mpx rdt_a avx512f avx512dq rdseed adx smap clflushopt clwb intel_pt avx512cd avx512bw avx512vl xsaveopt xsavec xgetbv1 cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local dtherm ida arat pln pts pku ospke md_clear spec_ctrl intel_stibp flush_l1d arch_capabilities\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.3.0\n[conda] Could not collect",
87
+ "transformers_version": "4.40.2",
88
+ "upper_git_hash": null,
89
+ "task_hashes": {},
90
+ "model_source": "hf",
91
+ "model_name": "EleutherAI/pythia-410m-seed1",
92
+ "model_name_sanitized": "EleutherAI__pythia-410m-seed1",
93
+ "start_time": 1481314.687587146,
94
+ "end_time": 1481446.217943971,
95
+ "total_evaluation_time_seconds": "131.53035682509653"
96
+ }
pythia-410m-seed1/step117000/EleutherAI__pythia-410m-seed1/results_2024-08-19T14-14-54.387325.json ADDED
@@ -0,0 +1,96 @@
1
+ {
2
+ "results": {
3
+ "lambada_openai": {
4
+ "perplexity,none": 12.054169560898933,
5
+ "perplexity_stderr,none": 0.3629933251229978,
6
+ "acc,none": 0.49252862410246456,
7
+ "acc_stderr,none": 0.0069651999117189166,
8
+ "alias": "lambada_openai"
9
+ }
10
+ },
11
+ "group_subtasks": {
12
+ "lambada_openai": []
13
+ },
14
+ "configs": {
15
+ "lambada_openai": {
16
+ "task": "lambada_openai",
17
+ "group": [
18
+ "lambada"
19
+ ],
20
+ "dataset_path": "EleutherAI/lambada_openai",
21
+ "dataset_name": "default",
22
+ "dataset_kwargs": {
23
+ "trust_remote_code": true
24
+ },
25
+ "test_split": "test",
26
+ "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
27
+ "doc_to_target": "{{' '+text.split(' ')[-1]}}",
28
+ "description": "",
29
+ "target_delimiter": " ",
30
+ "fewshot_delimiter": "\n\n",
31
+ "num_fewshot": 0,
32
+ "metric_list": [
33
+ {
34
+ "metric": "perplexity",
35
+ "aggregation": "perplexity",
36
+ "higher_is_better": false
37
+ },
38
+ {
39
+ "metric": "acc",
40
+ "aggregation": "mean",
41
+ "higher_is_better": true
42
+ }
43
+ ],
44
+ "output_type": "loglikelihood",
45
+ "repeats": 1,
46
+ "should_decontaminate": true,
47
+ "doc_to_decontamination_query": "{{text}}",
48
+ "metadata": {
49
+ "version": 1.0
50
+ }
51
+ }
52
+ },
53
+ "versions": {
54
+ "lambada_openai": 1.0
55
+ },
56
+ "n-shot": {
57
+ "lambada_openai": 0
58
+ },
59
+ "n-samples": {
60
+ "lambada_openai": {
61
+ "original": 5153,
62
+ "effective": 5153
63
+ }
64
+ },
65
+ "config": {
66
+ "model": "hf",
67
+ "model_args": "pretrained=EleutherAI/pythia-410m-seed1,revision=step117000,",
68
+ "model_num_parameters": 405334016,
69
+ "model_dtype": "torch.float16",
70
+ "model_revision": "step117000",
71
+ "model_sha": "9c6d117baf40f5e906c24b949b9877c1daa432f9",
72
+ "batch_size": "128",
73
+ "batch_sizes": [],
74
+ "device": "cuda",
75
+ "use_cache": null,
76
+ "limit": null,
77
+ "bootstrap_iters": 100000,
78
+ "gen_kwargs": null,
79
+ "random_seed": 0,
80
+ "numpy_seed": 1234,
81
+ "torch_seed": 1234,
82
+ "fewshot_seed": 1234
83
+ },
84
+ "git_hash": "51a7ca9",
85
+ "date": 1724101984.3682263,
86
+ "pretty_env_info": "PyTorch version: 2.3.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: CentOS Linux release 7.9.2009 (Core) (x86_64)\nGCC version: (GCC) 12.1.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.17\n\nPython version: 3.12.1 (main, Jan 12 2024, 16:49:08) [GCC 12.1.0] (64-bit runtime)\nPython platform: Linux-3.10.0-1160.119.1.el7.tuxcare.els2.x86_64-x86_64-with-glibc2.17\nIs CUDA available: True\nCUDA runtime version: 12.4.99\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: GPU 0: Tesla V100-PCIE-32GB\nNvidia driver version: 550.90.07\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nByte Order: Little Endian\nCPU(s): 24\nOn-line CPU(s) list: 0-23\nThread(s) per core: 1\nCore(s) per socket: 12\nSocket(s): 2\nNUMA node(s): 2\nVendor ID: GenuineIntel\nCPU family: 6\nModel: 85\nModel name: Intel(R) Xeon(R) Gold 5118 CPU @ 2.30GHz\nStepping: 4\nCPU MHz: 2866.998\nCPU max MHz: 3200.0000\nCPU min MHz: 1000.0000\nBogoMIPS: 4600.00\nVirtualization: VT-x\nL1d cache: 32K\nL1i cache: 32K\nL2 cache: 1024K\nL3 cache: 16896K\nNUMA node0 CPU(s): 0,2,4,6,8,10,12,14,16,18,20,22\nNUMA node1 CPU(s): 1,3,5,7,9,11,13,15,17,19,21,23\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx pdpe1gb rdtscp lm constant_tsc art arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc aperfmperf eagerfpu pni pclmulqdq dtes64 monitor ds_cpl vmx smx est tm2 ssse3 sdbg fma cx16 xtpr pdcm pcid dca sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand lahf_lm abm 3dnowprefetch epb cat_l3 cdp_l3 invpcid_single intel_ppin ssbd mba ibrs ibpb stibp tpr_shadow vnmi flexpriority ept vpid fsgsbase tsc_adjust bmi1 hle avx2 smep bmi2 erms invpcid rtm cqm mpx rdt_a avx512f avx512dq rdseed adx smap clflushopt clwb intel_pt avx512cd avx512bw avx512vl xsaveopt xsavec xgetbv1 cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local dtherm ida arat pln pts pku ospke md_clear spec_ctrl intel_stibp flush_l1d arch_capabilities\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.3.0\n[conda] Could not collect",
87
+ "transformers_version": "4.40.2",
88
+ "upper_git_hash": null,
89
+ "task_hashes": {},
90
+ "model_source": "hf",
91
+ "model_name": "EleutherAI/pythia-410m-seed1",
92
+ "model_name_sanitized": "EleutherAI__pythia-410m-seed1",
93
+ "start_time": 1481455.464290228,
94
+ "end_time": 1481580.918879481,
95
+ "total_evaluation_time_seconds": "125.45458925305866"
96
+ }
pythia-410m-seed1/step118000/EleutherAI__pythia-410m-seed1/results_2024-08-19T14-17-09.761598.json ADDED
@@ -0,0 +1,96 @@
1
+ {
2
+ "results": {
3
+ "lambada_openai": {
4
+ "perplexity,none": 11.034510046050771,
5
+ "perplexity_stderr,none": 0.3331036809255414,
6
+ "acc,none": 0.5129051038230157,
7
+ "acc_stderr,none": 0.0069636570190568705,
8
+ "alias": "lambada_openai"
9
+ }
10
+ },
11
+ "group_subtasks": {
12
+ "lambada_openai": []
13
+ },
14
+ "configs": {
15
+ "lambada_openai": {
16
+ "task": "lambada_openai",
17
+ "group": [
18
+ "lambada"
19
+ ],
20
+ "dataset_path": "EleutherAI/lambada_openai",
21
+ "dataset_name": "default",
22
+ "dataset_kwargs": {
23
+ "trust_remote_code": true
24
+ },
25
+ "test_split": "test",
26
+ "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
27
+ "doc_to_target": "{{' '+text.split(' ')[-1]}}",
28
+ "description": "",
29
+ "target_delimiter": " ",
30
+ "fewshot_delimiter": "\n\n",
31
+ "num_fewshot": 0,
32
+ "metric_list": [
33
+ {
34
+ "metric": "perplexity",
35
+ "aggregation": "perplexity",
36
+ "higher_is_better": false
37
+ },
38
+ {
39
+ "metric": "acc",
40
+ "aggregation": "mean",
41
+ "higher_is_better": true
42
+ }
43
+ ],
44
+ "output_type": "loglikelihood",
45
+ "repeats": 1,
46
+ "should_decontaminate": true,
47
+ "doc_to_decontamination_query": "{{text}}",
48
+ "metadata": {
49
+ "version": 1.0
50
+ }
51
+ }
52
+ },
53
+ "versions": {
54
+ "lambada_openai": 1.0
55
+ },
56
+ "n-shot": {
57
+ "lambada_openai": 0
58
+ },
59
+ "n-samples": {
60
+ "lambada_openai": {
61
+ "original": 5153,
62
+ "effective": 5153
63
+ }
64
+ },
65
+ "config": {
66
+ "model": "hf",
67
+ "model_args": "pretrained=EleutherAI/pythia-410m-seed1,revision=step118000,",
68
+ "model_num_parameters": 405334016,
69
+ "model_dtype": "torch.float16",
70
+ "model_revision": "step118000",
71
+ "model_sha": "1cf4628c418db1a8bea3188ed9ffd15e27c3874f",
72
+ "batch_size": "128",
73
+ "batch_sizes": [],
74
+ "device": "cuda",
75
+ "use_cache": null,
76
+ "limit": null,
77
+ "bootstrap_iters": 100000,
78
+ "gen_kwargs": null,
79
+ "random_seed": 0,
80
+ "numpy_seed": 1234,
81
+ "torch_seed": 1234,
82
+ "fewshot_seed": 1234
83
+ },
84
+ "git_hash": "51a7ca9",
85
+ "date": 1724102120.0975928,
86
+ "pretty_env_info": "PyTorch version: 2.3.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: CentOS Linux release 7.9.2009 (Core) (x86_64)\nGCC version: (GCC) 12.1.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.17\n\nPython version: 3.12.1 (main, Jan 12 2024, 16:49:08) [GCC 12.1.0] (64-bit runtime)\nPython platform: Linux-3.10.0-1160.119.1.el7.tuxcare.els2.x86_64-x86_64-with-glibc2.17\nIs CUDA available: True\nCUDA runtime version: 12.4.99\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: GPU 0: Tesla V100-PCIE-32GB\nNvidia driver version: 550.90.07\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nByte Order: Little Endian\nCPU(s): 24\nOn-line CPU(s) list: 0-23\nThread(s) per core: 1\nCore(s) per socket: 12\nSocket(s): 2\nNUMA node(s): 2\nVendor ID: GenuineIntel\nCPU family: 6\nModel: 85\nModel name: Intel(R) Xeon(R) Gold 5118 CPU @ 2.30GHz\nStepping: 4\nCPU MHz: 2898.303\nCPU max MHz: 3200.0000\nCPU min MHz: 1000.0000\nBogoMIPS: 4600.00\nVirtualization: VT-x\nL1d cache: 32K\nL1i cache: 32K\nL2 cache: 1024K\nL3 cache: 16896K\nNUMA node0 CPU(s): 0,2,4,6,8,10,12,14,16,18,20,22\nNUMA node1 CPU(s): 1,3,5,7,9,11,13,15,17,19,21,23\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx pdpe1gb rdtscp lm constant_tsc art arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc aperfmperf eagerfpu pni pclmulqdq dtes64 monitor ds_cpl vmx smx est tm2 ssse3 sdbg fma cx16 xtpr pdcm pcid dca sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand lahf_lm abm 3dnowprefetch epb cat_l3 cdp_l3 invpcid_single intel_ppin ssbd mba ibrs ibpb stibp tpr_shadow vnmi flexpriority ept vpid fsgsbase tsc_adjust bmi1 hle avx2 smep bmi2 erms invpcid rtm cqm mpx rdt_a avx512f avx512dq rdseed adx smap clflushopt clwb intel_pt avx512cd avx512bw avx512vl xsaveopt xsavec xgetbv1 cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local dtherm ida arat pln pts pku ospke md_clear spec_ctrl intel_stibp flush_l1d arch_capabilities\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.3.0\n[conda] Could not collect",
87
+ "transformers_version": "4.40.2",
88
+ "upper_git_hash": null,
89
+ "task_hashes": {},
90
+ "model_source": "hf",
91
+ "model_name": "EleutherAI/pythia-410m-seed1",
92
+ "model_name_sanitized": "EleutherAI__pythia-410m-seed1",
93
+ "start_time": 1481590.285118381,
94
+ "end_time": 1481716.29290737,
95
+ "total_evaluation_time_seconds": "126.00778898899443"
96
+ }
pythia-410m-seed1/step119000/EleutherAI__pythia-410m-seed1/results_2024-08-19T14-19-24.999819.json ADDED
@@ -0,0 +1,96 @@
1
+ {
2
+ "results": {
3
+ "lambada_openai": {
4
+ "perplexity,none": 11.65651054653041,
5
+ "perplexity_stderr,none": 0.3534508341672034,
6
+ "acc,none": 0.5063070056277896,
7
+ "acc_stderr,none": 0.006965423445368851,
8
+ "alias": "lambada_openai"
9
+ }
10
+ },
11
+ "group_subtasks": {
12
+ "lambada_openai": []
13
+ },
14
+ "configs": {
15
+ "lambada_openai": {
16
+ "task": "lambada_openai",
17
+ "group": [
18
+ "lambada"
19
+ ],
20
+ "dataset_path": "EleutherAI/lambada_openai",
21
+ "dataset_name": "default",
22
+ "dataset_kwargs": {
23
+ "trust_remote_code": true
24
+ },
25
+ "test_split": "test",
26
+ "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
27
+ "doc_to_target": "{{' '+text.split(' ')[-1]}}",
28
+ "description": "",
29
+ "target_delimiter": " ",
30
+ "fewshot_delimiter": "\n\n",
31
+ "num_fewshot": 0,
32
+ "metric_list": [
33
+ {
34
+ "metric": "perplexity",
35
+ "aggregation": "perplexity",
36
+ "higher_is_better": false
37
+ },
38
+ {
39
+ "metric": "acc",
40
+ "aggregation": "mean",
41
+ "higher_is_better": true
42
+ }
43
+ ],
44
+ "output_type": "loglikelihood",
45
+ "repeats": 1,
46
+ "should_decontaminate": true,
47
+ "doc_to_decontamination_query": "{{text}}",
48
+ "metadata": {
49
+ "version": 1.0
50
+ }
51
+ }
52
+ },
53
+ "versions": {
54
+ "lambada_openai": 1.0
55
+ },
56
+ "n-shot": {
57
+ "lambada_openai": 0
58
+ },
59
+ "n-samples": {
60
+ "lambada_openai": {
61
+ "original": 5153,
62
+ "effective": 5153
63
+ }
64
+ },
65
+ "config": {
66
+ "model": "hf",
67
+ "model_args": "pretrained=EleutherAI/pythia-410m-seed1,revision=step119000,",
68
+ "model_num_parameters": 405334016,
69
+ "model_dtype": "torch.float16",
70
+ "model_revision": "step119000",
71
+ "model_sha": "9fa38d4637b3e78d9ecc4edcef5f476229c7ee4b",
72
+ "batch_size": "128",
73
+ "batch_sizes": [],
74
+ "device": "cuda",
75
+ "use_cache": null,
76
+ "limit": null,
77
+ "bootstrap_iters": 100000,
78
+ "gen_kwargs": null,
79
+ "random_seed": 0,
80
+ "numpy_seed": 1234,
81
+ "torch_seed": 1234,
82
+ "fewshot_seed": 1234
83
+ },
84
+ "git_hash": "51a7ca9",
85
+ "date": 1724102255.012125,
86
+ "pretty_env_info": "PyTorch version: 2.3.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: CentOS Linux release 7.9.2009 (Core) (x86_64)\nGCC version: (GCC) 12.1.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.17\n\nPython version: 3.12.1 (main, Jan 12 2024, 16:49:08) [GCC 12.1.0] (64-bit runtime)\nPython platform: Linux-3.10.0-1160.119.1.el7.tuxcare.els2.x86_64-x86_64-with-glibc2.17\nIs CUDA available: True\nCUDA runtime version: 12.4.99\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: GPU 0: Tesla V100-PCIE-32GB\nNvidia driver version: 550.90.07\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nByte Order: Little Endian\nCPU(s): 24\nOn-line CPU(s) list: 0-23\nThread(s) per core: 1\nCore(s) per socket: 12\nSocket(s): 2\nNUMA node(s): 2\nVendor ID: GenuineIntel\nCPU family: 6\nModel: 85\nModel name: Intel(R) Xeon(R) Gold 5118 CPU @ 2.30GHz\nStepping: 4\nCPU MHz: 2804.669\nCPU max MHz: 3200.0000\nCPU min MHz: 1000.0000\nBogoMIPS: 4600.00\nVirtualization: VT-x\nL1d cache: 32K\nL1i cache: 32K\nL2 cache: 1024K\nL3 cache: 16896K\nNUMA node0 CPU(s): 0,2,4,6,8,10,12,14,16,18,20,22\nNUMA node1 CPU(s): 1,3,5,7,9,11,13,15,17,19,21,23\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx pdpe1gb rdtscp lm constant_tsc art arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc aperfmperf eagerfpu pni pclmulqdq dtes64 monitor ds_cpl vmx smx est tm2 ssse3 sdbg fma cx16 xtpr pdcm pcid dca sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand lahf_lm abm 3dnowprefetch epb cat_l3 cdp_l3 invpcid_single intel_ppin ssbd mba ibrs ibpb stibp tpr_shadow vnmi flexpriority ept vpid fsgsbase tsc_adjust bmi1 hle avx2 smep bmi2 erms invpcid rtm cqm mpx rdt_a avx512f avx512dq rdseed adx smap clflushopt clwb intel_pt avx512cd avx512bw avx512vl xsaveopt xsavec xgetbv1 cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local dtherm ida arat pln pts pku ospke md_clear spec_ctrl intel_stibp flush_l1d arch_capabilities\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.3.0\n[conda] Could not collect",
87
+ "transformers_version": "4.40.2",
88
+ "upper_git_hash": null,
89
+ "task_hashes": {},
90
+ "model_source": "hf",
91
+ "model_name": "EleutherAI/pythia-410m-seed1",
92
+ "model_name_sanitized": "EleutherAI__pythia-410m-seed1",
93
+ "start_time": 1481725.914227089,
94
+ "end_time": 1481851.531700417,
95
+ "total_evaluation_time_seconds": "125.61747332802042"
96
+ }
pythia-410m-seed1/step12000/EleutherAI__pythia-410m-seed1/results_2024-08-19T10-16-47.118030.json ADDED
@@ -0,0 +1,96 @@
1
+ {
2
+ "results": {
3
+ "lambada_openai": {
4
+ "perplexity,none": 23.889312493294376,
5
+ "perplexity_stderr,none": 0.8278282573304948,
6
+ "acc,none": 0.38191344847661557,
7
+ "acc_stderr,none": 0.006768917107635302,
8
+ "alias": "lambada_openai"
9
+ }
10
+ },
11
+ "group_subtasks": {
12
+ "lambada_openai": []
13
+ },
14
+ "configs": {
15
+ "lambada_openai": {
16
+ "task": "lambada_openai",
17
+ "group": [
18
+ "lambada"
19
+ ],
20
+ "dataset_path": "EleutherAI/lambada_openai",
21
+ "dataset_name": "default",
22
+ "dataset_kwargs": {
23
+ "trust_remote_code": true
24
+ },
25
+ "test_split": "test",
26
+ "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
27
+ "doc_to_target": "{{' '+text.split(' ')[-1]}}",
28
+ "description": "",
29
+ "target_delimiter": " ",
30
+ "fewshot_delimiter": "\n\n",
31
+ "num_fewshot": 0,
32
+ "metric_list": [
33
+ {
34
+ "metric": "perplexity",
35
+ "aggregation": "perplexity",
36
+ "higher_is_better": false
37
+ },
38
+ {
39
+ "metric": "acc",
40
+ "aggregation": "mean",
41
+ "higher_is_better": true
42
+ }
43
+ ],
44
+ "output_type": "loglikelihood",
45
+ "repeats": 1,
46
+ "should_decontaminate": true,
47
+ "doc_to_decontamination_query": "{{text}}",
48
+ "metadata": {
49
+ "version": 1.0
50
+ }
51
+ }
52
+ },
53
+ "versions": {
54
+ "lambada_openai": 1.0
55
+ },
56
+ "n-shot": {
57
+ "lambada_openai": 0
58
+ },
59
+ "n-samples": {
60
+ "lambada_openai": {
61
+ "original": 5153,
62
+ "effective": 5153
63
+ }
64
+ },
65
+ "config": {
66
+ "model": "hf",
67
+ "model_args": "pretrained=EleutherAI/pythia-410m-seed1,revision=step12000,",
68
+ "model_num_parameters": 405334016,
69
+ "model_dtype": "torch.float16",
70
+ "model_revision": "step12000",
71
+ "model_sha": "dba1d98dc09e83bc1a804650d6b5d9bd8a9ad2dc",
72
+ "batch_size": "128",
73
+ "batch_sizes": [],
74
+ "device": "cuda",
75
+ "use_cache": null,
76
+ "limit": null,
77
+ "bootstrap_iters": 100000,
78
+ "gen_kwargs": null,
79
+ "random_seed": 0,
80
+ "numpy_seed": 1234,
81
+ "torch_seed": 1234,
82
+ "fewshot_seed": 1234
83
+ },
84
+ "git_hash": "51a7ca9",
85
+ "date": 1724087701.508467,
86
+ "pretty_env_info": "PyTorch version: 2.3.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: CentOS Linux release 7.9.2009 (Core) (x86_64)\nGCC version: (GCC) 12.1.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.17\n\nPython version: 3.12.1 (main, Jan 12 2024, 16:49:08) [GCC 12.1.0] (64-bit runtime)\nPython platform: Linux-3.10.0-1160.119.1.el7.tuxcare.els2.x86_64-x86_64-with-glibc2.17\nIs CUDA available: True\nCUDA runtime version: 12.4.99\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: GPU 0: Tesla V100-PCIE-32GB\nNvidia driver version: 550.90.07\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nByte Order: Little Endian\nCPU(s): 24\nOn-line CPU(s) list: 0-23\nThread(s) per core: 1\nCore(s) per socket: 12\nSocket(s): 2\nNUMA node(s): 2\nVendor ID: GenuineIntel\nCPU family: 6\nModel: 85\nModel name: Intel(R) Xeon(R) Gold 5118 CPU @ 2.30GHz\nStepping: 4\nCPU MHz: 2862.786\nCPU max MHz: 3200.0000\nCPU min MHz: 1000.0000\nBogoMIPS: 4600.00\nVirtualization: VT-x\nL1d cache: 32K\nL1i cache: 32K\nL2 cache: 1024K\nL3 cache: 16896K\nNUMA node0 CPU(s): 0,2,4,6,8,10,12,14,16,18,20,22\nNUMA node1 CPU(s): 1,3,5,7,9,11,13,15,17,19,21,23\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx pdpe1gb rdtscp lm constant_tsc art arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc aperfmperf eagerfpu pni pclmulqdq dtes64 monitor ds_cpl vmx smx est tm2 ssse3 sdbg fma cx16 xtpr pdcm pcid dca sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand lahf_lm abm 3dnowprefetch epb cat_l3 cdp_l3 invpcid_single intel_ppin ssbd mba ibrs ibpb stibp tpr_shadow vnmi flexpriority ept vpid fsgsbase tsc_adjust bmi1 hle avx2 smep bmi2 erms invpcid rtm cqm mpx rdt_a avx512f avx512dq rdseed adx smap clflushopt clwb intel_pt avx512cd avx512bw avx512vl xsaveopt xsavec xgetbv1 cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local dtherm ida arat pln pts pku ospke md_clear spec_ctrl intel_stibp flush_l1d arch_capabilities\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.3.0\n[conda] Could not collect",
87
+ "transformers_version": "4.40.2",
88
+ "upper_git_hash": null,
89
+ "task_hashes": {},
90
+ "model_source": "hf",
91
+ "model_name": "EleutherAI/pythia-410m-seed1",
92
+ "model_name_sanitized": "EleutherAI__pythia-410m-seed1",
93
+ "start_time": 1467171.383259823,
94
+ "end_time": 1467293.649841673,
95
+ "total_evaluation_time_seconds": "122.26658184989356"
96
+ }
pythia-410m-seed1/step120000/EleutherAI__pythia-410m-seed1/results_2024-08-19T14-21-35.912998.json ADDED
@@ -0,0 +1,96 @@
1
+ {
2
+ "results": {
3
+ "lambada_openai": {
4
+ "perplexity,none": 11.343565187247625,
5
+ "perplexity_stderr,none": 0.34214182245226377,
6
+ "acc,none": 0.5086357461672812,
7
+ "acc_stderr,none": 0.006964938588638405,
8
+ "alias": "lambada_openai"
9
+ }
10
+ },
11
+ "group_subtasks": {
12
+ "lambada_openai": []
13
+ },
14
+ "configs": {
15
+ "lambada_openai": {
16
+ "task": "lambada_openai",
17
+ "group": [
18
+ "lambada"
19
+ ],
20
+ "dataset_path": "EleutherAI/lambada_openai",
21
+ "dataset_name": "default",
22
+ "dataset_kwargs": {
23
+ "trust_remote_code": true
24
+ },
25
+ "test_split": "test",
26
+ "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
27
+ "doc_to_target": "{{' '+text.split(' ')[-1]}}",
28
+ "description": "",
29
+ "target_delimiter": " ",
30
+ "fewshot_delimiter": "\n\n",
31
+ "num_fewshot": 0,
32
+ "metric_list": [
33
+ {
34
+ "metric": "perplexity",
35
+ "aggregation": "perplexity",
36
+ "higher_is_better": false
37
+ },
38
+ {
39
+ "metric": "acc",
40
+ "aggregation": "mean",
41
+ "higher_is_better": true
42
+ }
43
+ ],
44
+ "output_type": "loglikelihood",
45
+ "repeats": 1,
46
+ "should_decontaminate": true,
47
+ "doc_to_decontamination_query": "{{text}}",
48
+ "metadata": {
49
+ "version": 1.0
50
+ }
51
+ }
52
+ },
53
+ "versions": {
54
+ "lambada_openai": 1.0
55
+ },
56
+ "n-shot": {
57
+ "lambada_openai": 0
58
+ },
59
+ "n-samples": {
60
+ "lambada_openai": {
61
+ "original": 5153,
62
+ "effective": 5153
63
+ }
64
+ },
65
+ "config": {
66
+ "model": "hf",
67
+ "model_args": "pretrained=EleutherAI/pythia-410m-seed1,revision=step120000,",
68
+ "model_num_parameters": 405334016,
69
+ "model_dtype": "torch.float16",
70
+ "model_revision": "step120000",
71
+ "model_sha": "3eddae838caf9686af0e8fd78f0216455e01717b",
72
+ "batch_size": "128",
73
+ "batch_sizes": [],
74
+ "device": "cuda",
75
+ "use_cache": null,
76
+ "limit": null,
77
+ "bootstrap_iters": 100000,
78
+ "gen_kwargs": null,
79
+ "random_seed": 0,
80
+ "numpy_seed": 1234,
81
+ "torch_seed": 1234,
82
+ "fewshot_seed": 1234
83
+ },
84
+ "git_hash": "51a7ca9",
85
+ "date": 1724102390.3643336,
86
+ "pretty_env_info": "PyTorch version: 2.3.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: CentOS Linux release 7.9.2009 (Core) (x86_64)\nGCC version: (GCC) 12.1.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.17\n\nPython version: 3.12.1 (main, Jan 12 2024, 16:49:08) [GCC 12.1.0] (64-bit runtime)\nPython platform: Linux-3.10.0-1160.119.1.el7.tuxcare.els2.x86_64-x86_64-with-glibc2.17\nIs CUDA available: True\nCUDA runtime version: 12.4.99\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: GPU 0: Tesla V100-PCIE-32GB\nNvidia driver version: 550.90.07\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nByte Order: Little Endian\nCPU(s): 24\nOn-line CPU(s) list: 0-23\nThread(s) per core: 1\nCore(s) per socket: 12\nSocket(s): 2\nNUMA node(s): 2\nVendor ID: GenuineIntel\nCPU family: 6\nModel: 85\nModel name: Intel(R) Xeon(R) Gold 5118 CPU @ 2.30GHz\nStepping: 4\nCPU MHz: 3030.963\nCPU max MHz: 3200.0000\nCPU min MHz: 1000.0000\nBogoMIPS: 4600.00\nVirtualization: VT-x\nL1d cache: 32K\nL1i cache: 32K\nL2 cache: 1024K\nL3 cache: 16896K\nNUMA node0 CPU(s): 0,2,4,6,8,10,12,14,16,18,20,22\nNUMA node1 CPU(s): 1,3,5,7,9,11,13,15,17,19,21,23\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx pdpe1gb rdtscp lm constant_tsc art arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc aperfmperf eagerfpu pni pclmulqdq dtes64 monitor ds_cpl vmx smx est tm2 ssse3 sdbg fma cx16 xtpr pdcm pcid dca sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand lahf_lm abm 3dnowprefetch epb cat_l3 cdp_l3 invpcid_single intel_ppin ssbd mba ibrs ibpb stibp tpr_shadow vnmi flexpriority ept vpid fsgsbase tsc_adjust bmi1 hle avx2 smep bmi2 erms invpcid rtm cqm mpx rdt_a avx512f avx512dq rdseed adx smap clflushopt clwb intel_pt avx512cd avx512bw avx512vl xsaveopt xsavec xgetbv1 cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local dtherm ida arat pln pts pku ospke md_clear spec_ctrl intel_stibp flush_l1d arch_capabilities\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.3.0\n[conda] Could not collect",
87
+ "transformers_version": "4.40.2",
88
+ "upper_git_hash": null,
89
+ "task_hashes": {},
90
+ "model_source": "hf",
91
+ "model_name": "EleutherAI/pythia-410m-seed1",
92
+ "model_name_sanitized": "EleutherAI__pythia-410m-seed1",
93
+ "start_time": 1481860.820726602,
94
+ "end_time": 1481982.445990254,
95
+ "total_evaluation_time_seconds": "121.62526365183294"
96
+ }
pythia-410m-seed1/step120000/EleutherAI__pythia-410m-seed1/results_2024-08-19T16-17-01.420402.json ADDED
@@ -0,0 +1,96 @@
1
+ {
2
+ "results": {
3
+ "lambada_openai": {
4
+ "perplexity,none": 11.343565187247625,
5
+ "perplexity_stderr,none": 0.34214182245226377,
6
+ "acc,none": 0.5086357461672812,
7
+ "acc_stderr,none": 0.006964938588638405,
8
+ "alias": "lambada_openai"
9
+ }
10
+ },
11
+ "group_subtasks": {
12
+ "lambada_openai": []
13
+ },
14
+ "configs": {
15
+ "lambada_openai": {
16
+ "task": "lambada_openai",
17
+ "group": [
18
+ "lambada"
19
+ ],
20
+ "dataset_path": "EleutherAI/lambada_openai",
21
+ "dataset_name": "default",
22
+ "dataset_kwargs": {
23
+ "trust_remote_code": true
24
+ },
25
+ "test_split": "test",
26
+ "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
27
+ "doc_to_target": "{{' '+text.split(' ')[-1]}}",
28
+ "description": "",
29
+ "target_delimiter": " ",
30
+ "fewshot_delimiter": "\n\n",
31
+ "num_fewshot": 0,
32
+ "metric_list": [
33
+ {
34
+ "metric": "perplexity",
35
+ "aggregation": "perplexity",
36
+ "higher_is_better": false
37
+ },
38
+ {
39
+ "metric": "acc",
40
+ "aggregation": "mean",
41
+ "higher_is_better": true
42
+ }
43
+ ],
44
+ "output_type": "loglikelihood",
45
+ "repeats": 1,
46
+ "should_decontaminate": true,
47
+ "doc_to_decontamination_query": "{{text}}",
48
+ "metadata": {
49
+ "version": 1.0
50
+ }
51
+ }
52
+ },
53
+ "versions": {
54
+ "lambada_openai": 1.0
55
+ },
56
+ "n-shot": {
57
+ "lambada_openai": 0
58
+ },
59
+ "n-samples": {
60
+ "lambada_openai": {
61
+ "original": 5153,
62
+ "effective": 5153
63
+ }
64
+ },
65
+ "config": {
66
+ "model": "hf",
67
+ "model_args": "pretrained=EleutherAI/pythia-410m-seed1,revision=step120000",
68
+ "model_num_parameters": 405334016,
69
+ "model_dtype": "torch.float16",
70
+ "model_revision": "step120000",
71
+ "model_sha": "3eddae838caf9686af0e8fd78f0216455e01717b",
72
+ "batch_size": "128",
73
+ "batch_sizes": [],
74
+ "device": "cuda",
75
+ "use_cache": null,
76
+ "limit": null,
77
+ "bootstrap_iters": 100000,
78
+ "gen_kwargs": null,
79
+ "random_seed": 0,
80
+ "numpy_seed": 1234,
81
+ "torch_seed": 1234,
82
+ "fewshot_seed": 1234
83
+ },
84
+ "git_hash": "51a7ca9",
85
+ "date": 1724109313.019431,
86
+ "pretty_env_info": "PyTorch version: 2.3.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: CentOS Linux release 7.9.2009 (Core) (x86_64)\nGCC version: (GCC) 12.1.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.17\n\nPython version: 3.12.1 (main, Jan 12 2024, 16:49:08) [GCC 12.1.0] (64-bit runtime)\nPython platform: Linux-3.10.0-1160.118.1.el7.x86_64-x86_64-with-glibc2.17\nIs CUDA available: True\nCUDA runtime version: 12.4.99\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: GPU 0: Tesla V100-PCIE-32GB\nNvidia driver version: 550.54.15\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nByte Order: Little Endian\nCPU(s): 24\nOn-line CPU(s) list: 0-23\nThread(s) per core: 1\nCore(s) per socket: 12\nSocket(s): 2\nNUMA node(s): 2\nVendor ID: GenuineIntel\nCPU family: 6\nModel: 85\nModel name: Intel(R) Xeon(R) Gold 5118 CPU @ 2.30GHz\nStepping: 4\nCPU MHz: 1396.228\nCPU max MHz: 3200.0000\nCPU min MHz: 1000.0000\nBogoMIPS: 4600.00\nVirtualization: VT-x\nL1d cache: 32K\nL1i cache: 32K\nL2 cache: 1024K\nL3 cache: 16896K\nNUMA node0 CPU(s): 0,2,4,6,8,10,12,14,16,18,20,22\nNUMA node1 CPU(s): 1,3,5,7,9,11,13,15,17,19,21,23\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx pdpe1gb rdtscp lm constant_tsc art arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc aperfmperf eagerfpu pni pclmulqdq dtes64 monitor ds_cpl vmx smx est tm2 ssse3 sdbg fma cx16 xtpr pdcm pcid dca sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand lahf_lm abm 3dnowprefetch epb cat_l3 cdp_l3 invpcid_single intel_ppin ssbd mba ibrs ibpb stibp tpr_shadow vnmi flexpriority ept vpid fsgsbase tsc_adjust bmi1 hle avx2 smep bmi2 erms invpcid rtm cqm mpx rdt_a avx512f avx512dq rdseed adx smap clflushopt clwb intel_pt avx512cd avx512bw avx512vl xsaveopt xsavec xgetbv1 cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local dtherm ida arat pln pts pku ospke md_clear spec_ctrl intel_stibp flush_l1d arch_capabilities\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.3.0\n[conda] Could not collect",
+ "transformers_version": "4.40.2",
+ "upper_git_hash": null,
+ "task_hashes": {},
+ "model_source": "hf",
+ "model_name": "EleutherAI/pythia-410m-seed1",
+ "model_name_sanitized": "EleutherAI__pythia-410m-seed1",
+ "start_time": 4386682.20772072,
+ "end_time": 4386799.986466108,
+ "total_evaluation_time_seconds": "117.77874538768083"
+ }
pythia-410m-seed1/step121000/EleutherAI__pythia-410m-seed1/results_2024-08-19T14-23-50.851329.json ADDED
@@ -0,0 +1,96 @@
+ {
+ "results": {
+ "lambada_openai": {
+ "perplexity,none": 11.753343153074336,
+ "perplexity_stderr,none": 0.3537980343118889,
+ "acc,none": 0.49951484572093924,
+ "acc_stderr,none": 0.00696597437796169,
+ "alias": "lambada_openai"
+ }
+ },
+ "group_subtasks": {
+ "lambada_openai": []
+ },
+ "configs": {
+ "lambada_openai": {
+ "task": "lambada_openai",
+ "group": [
+ "lambada"
+ ],
+ "dataset_path": "EleutherAI/lambada_openai",
+ "dataset_name": "default",
+ "dataset_kwargs": {
+ "trust_remote_code": true
+ },
+ "test_split": "test",
+ "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
+ "doc_to_target": "{{' '+text.split(' ')[-1]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "num_fewshot": 0,
+ "metric_list": [
+ {
+ "metric": "perplexity",
+ "aggregation": "perplexity",
+ "higher_is_better": false
+ },
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "loglikelihood",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{text}}",
+ "metadata": {
+ "version": 1.0
+ }
+ }
+ },
+ "versions": {
+ "lambada_openai": 1.0
+ },
+ "n-shot": {
+ "lambada_openai": 0
+ },
+ "n-samples": {
+ "lambada_openai": {
+ "original": 5153,
+ "effective": 5153
+ }
+ },
+ "config": {
+ "model": "hf",
+ "model_args": "pretrained=EleutherAI/pythia-410m-seed1,revision=step121000,",
+ "model_num_parameters": 405334016,
+ "model_dtype": "torch.float16",
+ "model_revision": "step121000",
+ "model_sha": "04fe941637cb5df42c6ebc46f4cd4d75bff80b1e",
+ "batch_size": "128",
+ "batch_sizes": [],
+ "device": "cuda",
+ "use_cache": null,
+ "limit": null,
+ "bootstrap_iters": 100000,
+ "gen_kwargs": null,
+ "random_seed": 0,
+ "numpy_seed": 1234,
+ "torch_seed": 1234,
+ "fewshot_seed": 1234
+ },
+ "git_hash": "51a7ca9",
+ "date": 1724102520.8883436,
+ "pretty_env_info": "PyTorch version: 2.3.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: CentOS Linux release 7.9.2009 (Core) (x86_64)\nGCC version: (GCC) 12.1.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.17\n\nPython version: 3.12.1 (main, Jan 12 2024, 16:49:08) [GCC 12.1.0] (64-bit runtime)\nPython platform: Linux-3.10.0-1160.119.1.el7.tuxcare.els2.x86_64-x86_64-with-glibc2.17\nIs CUDA available: True\nCUDA runtime version: 12.4.99\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: GPU 0: Tesla V100-PCIE-32GB\nNvidia driver version: 550.90.07\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nByte Order: Little Endian\nCPU(s): 24\nOn-line CPU(s) list: 0-23\nThread(s) per core: 1\nCore(s) per socket: 12\nSocket(s): 2\nNUMA node(s): 2\nVendor ID: GenuineIntel\nCPU family: 6\nModel: 85\nModel name: Intel(R) Xeon(R) Gold 5118 CPU @ 2.30GHz\nStepping: 4\nCPU MHz: 2960.070\nCPU max MHz: 3200.0000\nCPU min MHz: 1000.0000\nBogoMIPS: 4600.00\nVirtualization: VT-x\nL1d cache: 32K\nL1i cache: 32K\nL2 cache: 1024K\nL3 cache: 16896K\nNUMA node0 CPU(s): 0,2,4,6,8,10,12,14,16,18,20,22\nNUMA node1 CPU(s): 1,3,5,7,9,11,13,15,17,19,21,23\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx pdpe1gb rdtscp lm constant_tsc art arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc aperfmperf eagerfpu pni pclmulqdq dtes64 monitor ds_cpl vmx smx est tm2 ssse3 sdbg fma cx16 xtpr pdcm pcid dca sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand lahf_lm abm 3dnowprefetch epb cat_l3 cdp_l3 invpcid_single intel_ppin ssbd mba ibrs ibpb stibp tpr_shadow vnmi flexpriority ept vpid fsgsbase tsc_adjust bmi1 hle avx2 smep bmi2 erms invpcid rtm cqm mpx rdt_a avx512f avx512dq rdseed adx smap clflushopt clwb intel_pt avx512cd avx512bw avx512vl xsaveopt xsavec xgetbv1 cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local dtherm ida arat pln pts pku ospke md_clear spec_ctrl intel_stibp flush_l1d arch_capabilities\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.3.0\n[conda] Could not collect",
+ "transformers_version": "4.40.2",
+ "upper_git_hash": null,
+ "task_hashes": {},
+ "model_source": "hf",
+ "model_name": "EleutherAI/pythia-410m-seed1",
+ "model_name_sanitized": "EleutherAI__pythia-410m-seed1",
+ "start_time": 1481991.166587916,
+ "end_time": 1482117.383176908,
+ "total_evaluation_time_seconds": "126.21658899192698"
+ }
pythia-410m-seed1/step122000/EleutherAI__pythia-410m-seed1/results_2024-08-19T14-26-01.715237.json ADDED
@@ -0,0 +1,96 @@
+ {
+ "results": {
+ "lambada_openai": {
+ "perplexity,none": 11.969401961821955,
+ "perplexity_stderr,none": 0.3616857920747243,
+ "acc,none": 0.49524548806520474,
+ "acc_stderr,none": 0.0069656627128762095,
+ "alias": "lambada_openai"
+ }
+ },
+ "group_subtasks": {
+ "lambada_openai": []
+ },
+ "configs": {
+ "lambada_openai": {
+ "task": "lambada_openai",
+ "group": [
+ "lambada"
+ ],
+ "dataset_path": "EleutherAI/lambada_openai",
+ "dataset_name": "default",
+ "dataset_kwargs": {
+ "trust_remote_code": true
+ },
+ "test_split": "test",
+ "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
+ "doc_to_target": "{{' '+text.split(' ')[-1]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "num_fewshot": 0,
+ "metric_list": [
+ {
+ "metric": "perplexity",
+ "aggregation": "perplexity",
+ "higher_is_better": false
+ },
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "loglikelihood",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{text}}",
+ "metadata": {
+ "version": 1.0
+ }
+ }
+ },
+ "versions": {
+ "lambada_openai": 1.0
+ },
+ "n-shot": {
+ "lambada_openai": 0
+ },
+ "n-samples": {
+ "lambada_openai": {
+ "original": 5153,
+ "effective": 5153
+ }
+ },
+ "config": {
+ "model": "hf",
+ "model_args": "pretrained=EleutherAI/pythia-410m-seed1,revision=step122000,",
+ "model_num_parameters": 405334016,
+ "model_dtype": "torch.float16",
+ "model_revision": "step122000",
+ "model_sha": "e53e137ace91c517d02abc57f89af658c6e87091",
+ "batch_size": "128",
+ "batch_sizes": [],
+ "device": "cuda",
+ "use_cache": null,
+ "limit": null,
+ "bootstrap_iters": 100000,
+ "gen_kwargs": null,
+ "random_seed": 0,
+ "numpy_seed": 1234,
+ "torch_seed": 1234,
+ "fewshot_seed": 1234
+ },
+ "git_hash": "51a7ca9",
+ "date": 1724102655.8568647,
+ "pretty_env_info": "PyTorch version: 2.3.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: CentOS Linux release 7.9.2009 (Core) (x86_64)\nGCC version: (GCC) 12.1.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.17\n\nPython version: 3.12.1 (main, Jan 12 2024, 16:49:08) [GCC 12.1.0] (64-bit runtime)\nPython platform: Linux-3.10.0-1160.119.1.el7.tuxcare.els2.x86_64-x86_64-with-glibc2.17\nIs CUDA available: True\nCUDA runtime version: 12.4.99\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: GPU 0: Tesla V100-PCIE-32GB\nNvidia driver version: 550.90.07\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nByte Order: Little Endian\nCPU(s): 24\nOn-line CPU(s) list: 0-23\nThread(s) per core: 1\nCore(s) per socket: 12\nSocket(s): 2\nNUMA node(s): 2\nVendor ID: GenuineIntel\nCPU family: 6\nModel: 85\nModel name: Intel(R) Xeon(R) Gold 5118 CPU @ 2.30GHz\nStepping: 4\nCPU MHz: 3026.049\nCPU max MHz: 3200.0000\nCPU min MHz: 1000.0000\nBogoMIPS: 4600.00\nVirtualization: VT-x\nL1d cache: 32K\nL1i cache: 32K\nL2 cache: 1024K\nL3 cache: 16896K\nNUMA node0 CPU(s): 0,2,4,6,8,10,12,14,16,18,20,22\nNUMA node1 CPU(s): 1,3,5,7,9,11,13,15,17,19,21,23\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx pdpe1gb rdtscp lm constant_tsc art arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc aperfmperf eagerfpu pni pclmulqdq dtes64 monitor ds_cpl vmx smx est tm2 ssse3 sdbg fma cx16 xtpr pdcm pcid dca sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand lahf_lm abm 3dnowprefetch epb cat_l3 cdp_l3 invpcid_single intel_ppin ssbd mba ibrs ibpb stibp tpr_shadow vnmi flexpriority ept vpid fsgsbase tsc_adjust bmi1 hle avx2 smep bmi2 erms invpcid rtm cqm mpx rdt_a avx512f avx512dq rdseed adx smap clflushopt clwb intel_pt avx512cd avx512bw avx512vl xsaveopt xsavec xgetbv1 cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local dtherm ida arat pln pts pku ospke md_clear spec_ctrl intel_stibp flush_l1d arch_capabilities\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.3.0\n[conda] Could not collect",
+ "transformers_version": "4.40.2",
+ "upper_git_hash": null,
+ "task_hashes": {},
+ "model_source": "hf",
+ "model_name": "EleutherAI/pythia-410m-seed1",
+ "model_name_sanitized": "EleutherAI__pythia-410m-seed1",
+ "start_time": 1482126.304534307,
+ "end_time": 1482248.246724469,
+ "total_evaluation_time_seconds": "121.94219016190618"
+ }
pythia-410m-seed1/step123000/EleutherAI__pythia-410m-seed1/results_2024-08-19T14-28-16.468536.json ADDED
@@ -0,0 +1,96 @@
+ {
+ "results": {
+ "lambada_openai": {
+ "perplexity,none": 11.252002567197422,
+ "perplexity_stderr,none": 0.3406888092251402,
+ "acc,none": 0.5090238695905298,
+ "acc_stderr,none": 0.006964843082576188,
+ "alias": "lambada_openai"
+ }
+ },
+ "group_subtasks": {
+ "lambada_openai": []
+ },
+ "configs": {
+ "lambada_openai": {
+ "task": "lambada_openai",
+ "group": [
+ "lambada"
+ ],
+ "dataset_path": "EleutherAI/lambada_openai",
+ "dataset_name": "default",
+ "dataset_kwargs": {
+ "trust_remote_code": true
+ },
+ "test_split": "test",
+ "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
+ "doc_to_target": "{{' '+text.split(' ')[-1]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "num_fewshot": 0,
+ "metric_list": [
+ {
+ "metric": "perplexity",
+ "aggregation": "perplexity",
+ "higher_is_better": false
+ },
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "loglikelihood",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{text}}",
+ "metadata": {
+ "version": 1.0
+ }
+ }
+ },
+ "versions": {
+ "lambada_openai": 1.0
+ },
+ "n-shot": {
+ "lambada_openai": 0
+ },
+ "n-samples": {
+ "lambada_openai": {
+ "original": 5153,
+ "effective": 5153
+ }
+ },
+ "config": {
+ "model": "hf",
+ "model_args": "pretrained=EleutherAI/pythia-410m-seed1,revision=step123000,",
+ "model_num_parameters": 405334016,
+ "model_dtype": "torch.float16",
+ "model_revision": "step123000",
+ "model_sha": "3a55097f9efd407b7e7f780c1860d7514ec7bd86",
+ "batch_size": "128",
+ "batch_sizes": [],
+ "device": "cuda",
+ "use_cache": null,
+ "limit": null,
+ "bootstrap_iters": 100000,
+ "gen_kwargs": null,
+ "random_seed": 0,
+ "numpy_seed": 1234,
+ "torch_seed": 1234,
+ "fewshot_seed": 1234
+ },
+ "git_hash": "51a7ca9",
+ "date": 1724102785.857144,
+ "pretty_env_info": "PyTorch version: 2.3.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: CentOS Linux release 7.9.2009 (Core) (x86_64)\nGCC version: (GCC) 12.1.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.17\n\nPython version: 3.12.1 (main, Jan 12 2024, 16:49:08) [GCC 12.1.0] (64-bit runtime)\nPython platform: Linux-3.10.0-1160.119.1.el7.tuxcare.els2.x86_64-x86_64-with-glibc2.17\nIs CUDA available: True\nCUDA runtime version: 12.4.99\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: GPU 0: Tesla V100-PCIE-32GB\nNvidia driver version: 550.90.07\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nByte Order: Little Endian\nCPU(s): 24\nOn-line CPU(s) list: 0-23\nThread(s) per core: 1\nCore(s) per socket: 12\nSocket(s): 2\nNUMA node(s): 2\nVendor ID: GenuineIntel\nCPU family: 6\nModel: 85\nModel name: Intel(R) Xeon(R) Gold 5118 CPU @ 2.30GHz\nStepping: 4\nCPU MHz: 2878.649\nCPU max MHz: 3200.0000\nCPU min MHz: 1000.0000\nBogoMIPS: 4600.00\nVirtualization: VT-x\nL1d cache: 32K\nL1i cache: 32K\nL2 cache: 1024K\nL3 cache: 16896K\nNUMA node0 CPU(s): 0,2,4,6,8,10,12,14,16,18,20,22\nNUMA node1 CPU(s): 1,3,5,7,9,11,13,15,17,19,21,23\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx pdpe1gb rdtscp lm constant_tsc art arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc aperfmperf eagerfpu pni pclmulqdq dtes64 monitor ds_cpl vmx smx est tm2 ssse3 sdbg fma cx16 xtpr pdcm pcid dca sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand lahf_lm abm 3dnowprefetch epb cat_l3 cdp_l3 invpcid_single intel_ppin ssbd mba ibrs ibpb stibp tpr_shadow vnmi flexpriority ept vpid fsgsbase tsc_adjust bmi1 hle avx2 smep bmi2 erms invpcid rtm cqm mpx rdt_a avx512f avx512dq rdseed adx smap clflushopt clwb intel_pt avx512cd avx512bw avx512vl xsaveopt xsavec xgetbv1 cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local dtherm ida arat pln pts pku ospke md_clear spec_ctrl intel_stibp flush_l1d arch_capabilities\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.3.0\n[conda] Could not collect",
+ "transformers_version": "4.40.2",
+ "upper_git_hash": null,
+ "task_hashes": {},
+ "model_source": "hf",
+ "model_name": "EleutherAI/pythia-410m-seed1",
+ "model_name_sanitized": "EleutherAI__pythia-410m-seed1",
+ "start_time": 1482257.335281981,
+ "end_time": 1482382.99999707,
+ "total_evaluation_time_seconds": "125.66471508913673"
+ }
pythia-410m-seed1/step124000/EleutherAI__pythia-410m-seed1/results_2024-08-19T14-30-31.303548.json ADDED
@@ -0,0 +1,96 @@
+ {
+ "results": {
+ "lambada_openai": {
+ "perplexity,none": 11.913052868233192,
+ "perplexity_stderr,none": 0.3585290192531499,
+ "acc,none": 0.4969920434698234,
+ "acc_stderr,none": 0.006965851602471434,
+ "alias": "lambada_openai"
+ }
+ },
+ "group_subtasks": {
+ "lambada_openai": []
+ },
+ "configs": {
+ "lambada_openai": {
+ "task": "lambada_openai",
+ "group": [
+ "lambada"
+ ],
+ "dataset_path": "EleutherAI/lambada_openai",
+ "dataset_name": "default",
+ "dataset_kwargs": {
+ "trust_remote_code": true
+ },
+ "test_split": "test",
+ "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
+ "doc_to_target": "{{' '+text.split(' ')[-1]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "num_fewshot": 0,
+ "metric_list": [
+ {
+ "metric": "perplexity",
+ "aggregation": "perplexity",
+ "higher_is_better": false
+ },
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "loglikelihood",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{text}}",
+ "metadata": {
+ "version": 1.0
+ }
+ }
+ },
+ "versions": {
+ "lambada_openai": 1.0
+ },
+ "n-shot": {
+ "lambada_openai": 0
+ },
+ "n-samples": {
+ "lambada_openai": {
+ "original": 5153,
+ "effective": 5153
+ }
+ },
+ "config": {
+ "model": "hf",
+ "model_args": "pretrained=EleutherAI/pythia-410m-seed1,revision=step124000,",
+ "model_num_parameters": 405334016,
+ "model_dtype": "torch.float16",
+ "model_revision": "step124000",
+ "model_sha": "b02078aad1b9d7c2d3452932dc56666d1559d5de",
+ "batch_size": "128",
+ "batch_sizes": [],
+ "device": "cuda",
+ "use_cache": null,
+ "limit": null,
+ "bootstrap_iters": 100000,
+ "gen_kwargs": null,
+ "random_seed": 0,
+ "numpy_seed": 1234,
+ "torch_seed": 1234,
+ "fewshot_seed": 1234
+ },
+ "git_hash": "51a7ca9",
+ "date": 1724102921.5303605,
+ "pretty_env_info": "PyTorch version: 2.3.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: CentOS Linux release 7.9.2009 (Core) (x86_64)\nGCC version: (GCC) 12.1.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.17\n\nPython version: 3.12.1 (main, Jan 12 2024, 16:49:08) [GCC 12.1.0] (64-bit runtime)\nPython platform: Linux-3.10.0-1160.119.1.el7.tuxcare.els2.x86_64-x86_64-with-glibc2.17\nIs CUDA available: True\nCUDA runtime version: 12.4.99\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: GPU 0: Tesla V100-PCIE-32GB\nNvidia driver version: 550.90.07\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nByte Order: Little Endian\nCPU(s): 24\nOn-line CPU(s) list: 0-23\nThread(s) per core: 1\nCore(s) per socket: 12\nSocket(s): 2\nNUMA node(s): 2\nVendor ID: GenuineIntel\nCPU family: 6\nModel: 85\nModel name: Intel(R) Xeon(R) Gold 5118 CPU @ 2.30GHz\nStepping: 4\nCPU MHz: 3055.950\nCPU max MHz: 3200.0000\nCPU min MHz: 1000.0000\nBogoMIPS: 4600.00\nVirtualization: VT-x\nL1d cache: 32K\nL1i cache: 32K\nL2 cache: 1024K\nL3 cache: 16896K\nNUMA node0 CPU(s): 0,2,4,6,8,10,12,14,16,18,20,22\nNUMA node1 CPU(s): 1,3,5,7,9,11,13,15,17,19,21,23\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx pdpe1gb rdtscp lm constant_tsc art arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc aperfmperf eagerfpu pni pclmulqdq dtes64 monitor ds_cpl vmx smx est tm2 ssse3 sdbg fma cx16 xtpr pdcm pcid dca sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand lahf_lm abm 3dnowprefetch epb cat_l3 cdp_l3 invpcid_single intel_ppin ssbd mba ibrs ibpb stibp tpr_shadow vnmi flexpriority ept vpid fsgsbase tsc_adjust bmi1 hle avx2 smep bmi2 erms invpcid rtm cqm mpx rdt_a avx512f avx512dq rdseed adx smap clflushopt clwb intel_pt avx512cd avx512bw avx512vl xsaveopt xsavec xgetbv1 cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local dtherm ida arat pln pts pku ospke md_clear spec_ctrl intel_stibp flush_l1d arch_capabilities\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.3.0\n[conda] Could not collect",
+ "transformers_version": "4.40.2",
+ "upper_git_hash": null,
+ "task_hashes": {},
+ "model_source": "hf",
+ "model_name": "EleutherAI/pythia-410m-seed1",
+ "model_name_sanitized": "EleutherAI__pythia-410m-seed1",
+ "start_time": 1482392.057127412,
+ "end_time": 1482517.835658131,
+ "total_evaluation_time_seconds": "125.77853071899153"
+ }
pythia-410m-seed1/step125000/EleutherAI__pythia-410m-seed1/results_2024-08-19T14-32-41.102456.json ADDED
@@ -0,0 +1,96 @@
+ {
+ "results": {
+ "lambada_openai": {
+ "perplexity,none": 11.75160891146739,
+ "perplexity_stderr,none": 0.35389506370701995,
+ "acc,none": 0.5035901416650495,
+ "acc_stderr,none": 0.006965798084272818,
+ "alias": "lambada_openai"
+ }
+ },
+ "group_subtasks": {
+ "lambada_openai": []
+ },
+ "configs": {
+ "lambada_openai": {
+ "task": "lambada_openai",
+ "group": [
+ "lambada"
+ ],
+ "dataset_path": "EleutherAI/lambada_openai",
+ "dataset_name": "default",
+ "dataset_kwargs": {
+ "trust_remote_code": true
+ },
+ "test_split": "test",
+ "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
+ "doc_to_target": "{{' '+text.split(' ')[-1]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "num_fewshot": 0,
+ "metric_list": [
+ {
+ "metric": "perplexity",
+ "aggregation": "perplexity",
+ "higher_is_better": false
+ },
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "loglikelihood",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{text}}",
+ "metadata": {
+ "version": 1.0
+ }
+ }
+ },
+ "versions": {
+ "lambada_openai": 1.0
+ },
+ "n-shot": {
+ "lambada_openai": 0
+ },
+ "n-samples": {
+ "lambada_openai": {
+ "original": 5153,
+ "effective": 5153
+ }
+ },
+ "config": {
+ "model": "hf",
+ "model_args": "pretrained=EleutherAI/pythia-410m-seed1,revision=step125000,",
+ "model_num_parameters": 405334016,
+ "model_dtype": "torch.float16",
+ "model_revision": "step125000",
+ "model_sha": "8bd5430d74046942188f4e11990627495b212a0c",
+ "batch_size": "128",
+ "batch_sizes": [],
+ "device": "cuda",
+ "use_cache": null,
+ "limit": null,
+ "bootstrap_iters": 100000,
+ "gen_kwargs": null,
+ "random_seed": 0,
+ "numpy_seed": 1234,
+ "torch_seed": 1234,
+ "fewshot_seed": 1234
+ },
+ "git_hash": "51a7ca9",
+ "date": 1724103055.9677248,
+ "pretty_env_info": "PyTorch version: 2.3.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: CentOS Linux release 7.9.2009 (Core) (x86_64)\nGCC version: (GCC) 12.1.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.17\n\nPython version: 3.12.1 (main, Jan 12 2024, 16:49:08) [GCC 12.1.0] (64-bit runtime)\nPython platform: Linux-3.10.0-1160.119.1.el7.tuxcare.els2.x86_64-x86_64-with-glibc2.17\nIs CUDA available: True\nCUDA runtime version: 12.4.99\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: GPU 0: Tesla V100-PCIE-32GB\nNvidia driver version: 550.90.07\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nByte Order: Little Endian\nCPU(s): 24\nOn-line CPU(s) list: 0-23\nThread(s) per core: 1\nCore(s) per socket: 12\nSocket(s): 2\nNUMA node(s): 2\nVendor ID: GenuineIntel\nCPU family: 6\nModel: 85\nModel name: Intel(R) Xeon(R) Gold 5118 CPU @ 2.30GHz\nStepping: 4\nCPU MHz: 2971.441\nCPU max MHz: 3200.0000\nCPU min MHz: 1000.0000\nBogoMIPS: 4600.00\nVirtualization: VT-x\nL1d cache: 32K\nL1i cache: 32K\nL2 cache: 1024K\nL3 cache: 16896K\nNUMA node0 CPU(s): 0,2,4,6,8,10,12,14,16,18,20,22\nNUMA node1 CPU(s): 1,3,5,7,9,11,13,15,17,19,21,23\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx pdpe1gb rdtscp lm constant_tsc art arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc aperfmperf eagerfpu pni pclmulqdq dtes64 monitor ds_cpl vmx smx est tm2 ssse3 sdbg fma cx16 xtpr pdcm pcid dca sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand lahf_lm abm 3dnowprefetch epb cat_l3 cdp_l3 invpcid_single intel_ppin ssbd mba ibrs ibpb stibp tpr_shadow vnmi flexpriority ept vpid fsgsbase tsc_adjust bmi1 hle avx2 smep bmi2 erms invpcid rtm cqm mpx rdt_a avx512f avx512dq rdseed adx smap clflushopt clwb intel_pt avx512cd avx512bw avx512vl xsaveopt xsavec xgetbv1 cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local dtherm ida arat pln pts pku ospke md_clear spec_ctrl intel_stibp flush_l1d arch_capabilities\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.3.0\n[conda] Could not collect",
+ "transformers_version": "4.40.2",
+ "upper_git_hash": null,
+ "task_hashes": {},
+ "model_source": "hf",
+ "model_name": "EleutherAI/pythia-410m-seed1",
+ "model_name_sanitized": "EleutherAI__pythia-410m-seed1",
+ "start_time": 1482526.434447031,
+ "end_time": 1482647.634812656,
+ "total_evaluation_time_seconds": "121.20036562508903"
+ }
pythia-410m-seed1/step126000/EleutherAI__pythia-410m-seed1/results_2024-08-19T14-34-47.156887.json ADDED
@@ -0,0 +1,96 @@
+ {
+ "results": {
+ "lambada_openai": {
+ "perplexity,none": 11.183764101961948,
+ "perplexity_stderr,none": 0.3358807158350267,
+ "acc,none": 0.507859499320784,
+ "acc_stderr,none": 0.0069651170030484225,
+ "alias": "lambada_openai"
+ }
+ },
+ "group_subtasks": {
+ "lambada_openai": []
+ },
+ "configs": {
+ "lambada_openai": {
+ "task": "lambada_openai",
+ "group": [
+ "lambada"
+ ],
+ "dataset_path": "EleutherAI/lambada_openai",
+ "dataset_name": "default",
+ "dataset_kwargs": {
+ "trust_remote_code": true
+ },
+ "test_split": "test",
+ "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
+ "doc_to_target": "{{' '+text.split(' ')[-1]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "num_fewshot": 0,
+ "metric_list": [
+ {
+ "metric": "perplexity",
+ "aggregation": "perplexity",
+ "higher_is_better": false
+ },
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "loglikelihood",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{text}}",
+ "metadata": {
+ "version": 1.0
+ }
+ }
+ },
+ "versions": {
+ "lambada_openai": 1.0
+ },
+ "n-shot": {
+ "lambada_openai": 0
+ },
+ "n-samples": {
+ "lambada_openai": {
+ "original": 5153,
+ "effective": 5153
+ }
+ },
+ "config": {
+ "model": "hf",
+ "model_args": "pretrained=EleutherAI/pythia-410m-seed1,revision=step126000,",
+ "model_num_parameters": 405334016,
+ "model_dtype": "torch.float16",
+ "model_revision": "step126000",
+ "model_sha": "bee5d8ad9ed96ebd8e9ae9585894c0347f60a1f3",
+ "batch_size": "128",
+ "batch_sizes": [],
+ "device": "cuda",
+ "use_cache": null,
+ "limit": null,
+ "bootstrap_iters": 100000,
+ "gen_kwargs": null,
+ "random_seed": 0,
+ "numpy_seed": 1234,
+ "torch_seed": 1234,
+ "fewshot_seed": 1234
+ },
+ "git_hash": "51a7ca9",
+ "date": 1724103180.7710948,
+ "pretty_env_info": "PyTorch version: 2.3.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: CentOS Linux release 7.9.2009 (Core) (x86_64)\nGCC version: (GCC) 12.1.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.17\n\nPython version: 3.12.1 (main, Jan 12 2024, 16:49:08) [GCC 12.1.0] (64-bit runtime)\nPython platform: Linux-3.10.0-1160.119.1.el7.tuxcare.els2.x86_64-x86_64-with-glibc2.17\nIs CUDA available: True\nCUDA runtime version: 12.4.99\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: GPU 0: Tesla V100-PCIE-32GB\nNvidia driver version: 550.90.07\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nByte Order: Little Endian\nCPU(s): 24\nOn-line CPU(s) list: 0-23\nThread(s) per core: 1\nCore(s) per socket: 12\nSocket(s): 2\nNUMA node(s): 2\nVendor ID: GenuineIntel\nCPU family: 6\nModel: 85\nModel name: Intel(R) Xeon(R) Gold 5118 CPU @ 2.30GHz\nStepping: 4\nCPU MHz: 2920.343\nCPU max MHz: 3200.0000\nCPU min MHz: 1000.0000\nBogoMIPS: 4600.00\nVirtualization: VT-x\nL1d cache: 32K\nL1i cache: 32K\nL2 cache: 1024K\nL3 cache: 16896K\nNUMA node0 CPU(s): 0,2,4,6,8,10,12,14,16,18,20,22\nNUMA node1 CPU(s): 1,3,5,7,9,11,13,15,17,19,21,23\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx pdpe1gb rdtscp lm constant_tsc art arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc aperfmperf eagerfpu pni pclmulqdq dtes64 monitor ds_cpl vmx smx est tm2 ssse3 sdbg fma cx16 xtpr pdcm pcid dca sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand lahf_lm abm 3dnowprefetch epb cat_l3 cdp_l3 invpcid_single intel_ppin ssbd mba ibrs ibpb stibp tpr_shadow vnmi flexpriority ept vpid fsgsbase tsc_adjust bmi1 hle avx2 smep bmi2 erms invpcid rtm cqm mpx rdt_a avx512f avx512dq rdseed adx smap clflushopt clwb intel_pt avx512cd avx512bw avx512vl xsaveopt xsavec xgetbv1 cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local dtherm ida arat pln pts pku ospke md_clear spec_ctrl intel_stibp flush_l1d arch_capabilities\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.3.0\n[conda] Could not collect",
+ "transformers_version": "4.40.2",
+ "upper_git_hash": null,
+ "task_hashes": {},
+ "model_source": "hf",
+ "model_name": "EleutherAI/pythia-410m-seed1",
+ "model_name_sanitized": "EleutherAI__pythia-410m-seed1",
+ "start_time": 1482655.956897265,
+ "end_time": 1482773.688116065,
+ "total_evaluation_time_seconds": "117.73121879994869"
+ }
pythia-410m-seed1/step127000/EleutherAI__pythia-410m-seed1/results_2024-08-19T14-37-02.387203.json ADDED
@@ -0,0 +1,96 @@
+ {
+ "results": {
+ "lambada_openai": {
+ "perplexity,none": 11.489965158454744,
+ "perplexity_stderr,none": 0.3455782674668887,
+ "acc,none": 0.5103823015718999,
+ "acc_stderr,none": 0.006964475739361836,
+ "alias": "lambada_openai"
+ }
+ },
+ "group_subtasks": {
+ "lambada_openai": []
+ },
+ "configs": {
+ "lambada_openai": {
+ "task": "lambada_openai",
+ "group": [
+ "lambada"
+ ],
+ "dataset_path": "EleutherAI/lambada_openai",
+ "dataset_name": "default",
+ "dataset_kwargs": {
+ "trust_remote_code": true
+ },
+ "test_split": "test",
+ "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
+ "doc_to_target": "{{' '+text.split(' ')[-1]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "num_fewshot": 0,
+ "metric_list": [
+ {
+ "metric": "perplexity",
+ "aggregation": "perplexity",
+ "higher_is_better": false
+ },
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "loglikelihood",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{text}}",
+ "metadata": {
+ "version": 1.0
+ }
+ }
+ },
+ "versions": {
+ "lambada_openai": 1.0
+ },
+ "n-shot": {
+ "lambada_openai": 0
+ },
+ "n-samples": {
+ "lambada_openai": {
+ "original": 5153,
+ "effective": 5153
+ }
+ },
+ "config": {
+ "model": "hf",
+ "model_args": "pretrained=EleutherAI/pythia-410m-seed1,revision=step127000,",
+ "model_num_parameters": 405334016,
+ "model_dtype": "torch.float16",
+ "model_revision": "step127000",
+ "model_sha": "2998931a1d949a953e47352117560b9af3e98281",
+ "batch_size": "128",
+ "batch_sizes": [],
+ "device": "cuda",
+ "use_cache": null,
+ "limit": null,
+ "bootstrap_iters": 100000,
+ "gen_kwargs": null,
+ "random_seed": 0,
+ "numpy_seed": 1234,
+ "torch_seed": 1234,
+ "fewshot_seed": 1234
+ },
+ "git_hash": "51a7ca9",
+ "date": 1724103311.7086706,
+ "pretty_env_info": "PyTorch version: 2.3.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: CentOS Linux release 7.9.2009 (Core) (x86_64)\nGCC version: (GCC) 12.1.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.17\n\nPython version: 3.12.1 (main, Jan 12 2024, 16:49:08) [GCC 12.1.0] (64-bit runtime)\nPython platform: Linux-3.10.0-1160.119.1.el7.tuxcare.els2.x86_64-x86_64-with-glibc2.17\nIs CUDA available: True\nCUDA runtime version: 12.4.99\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: GPU 0: Tesla V100-PCIE-32GB\nNvidia driver version: 550.90.07\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nByte Order: Little Endian\nCPU(s): 24\nOn-line CPU(s) list: 0-23\nThread(s) per core: 1\nCore(s) per socket: 12\nSocket(s): 2\nNUMA node(s): 2\nVendor ID: GenuineIntel\nCPU family: 6\nModel: 85\nModel name: Intel(R) Xeon(R) Gold 5118 CPU @ 2.30GHz\nStepping: 4\nCPU MHz: 2927.362\nCPU max MHz: 3200.0000\nCPU min MHz: 1000.0000\nBogoMIPS: 4600.00\nVirtualization: VT-x\nL1d cache: 32K\nL1i cache: 32K\nL2 cache: 1024K\nL3 cache: 16896K\nNUMA node0 CPU(s): 0,2,4,6,8,10,12,14,16,18,20,22\nNUMA node1 CPU(s): 1,3,5,7,9,11,13,15,17,19,21,23\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx pdpe1gb rdtscp lm constant_tsc art arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc aperfmperf eagerfpu pni pclmulqdq dtes64 monitor ds_cpl vmx smx est tm2 ssse3 sdbg fma cx16 xtpr pdcm pcid dca sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand lahf_lm abm 3dnowprefetch epb cat_l3 cdp_l3 invpcid_single intel_ppin ssbd mba ibrs ibpb stibp tpr_shadow vnmi flexpriority ept vpid fsgsbase tsc_adjust bmi1 hle avx2 smep bmi2 erms invpcid rtm cqm mpx rdt_a avx512f avx512dq rdseed adx smap clflushopt clwb intel_pt avx512cd avx512bw avx512vl xsaveopt xsavec xgetbv1 cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local dtherm ida arat pln pts pku ospke md_clear spec_ctrl intel_stibp flush_l1d arch_capabilities\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.3.0\n[conda] Could not collect",
+ "transformers_version": "4.40.2",
+ "upper_git_hash": null,
+ "task_hashes": {},
+ "model_source": "hf",
+ "model_name": "EleutherAI/pythia-410m-seed1",
+ "model_name_sanitized": "EleutherAI__pythia-410m-seed1",
+ "start_time": 1482783.284408231,
+ "end_time": 1482908.919046783,
+ "total_evaluation_time_seconds": "125.63463855185546"
+ }
pythia-410m-seed1/step128/EleutherAI__pythia-410m-seed1/results_2024-08-19T09-46-09.637000.json ADDED
@@ -0,0 +1,96 @@
+ {
+ "results": {
+ "lambada_openai": {
+ "perplexity,none": 1737003.2151938989,
+ "perplexity_stderr,none": 145407.90166406872,
+ "acc,none": 0.0,
+ "acc_stderr,none": 0.0,
+ "alias": "lambada_openai"
+ }
+ },
+ "group_subtasks": {
+ "lambada_openai": []
+ },
+ "configs": {
+ "lambada_openai": {
+ "task": "lambada_openai",
+ "group": [
+ "lambada"
+ ],
+ "dataset_path": "EleutherAI/lambada_openai",
+ "dataset_name": "default",
+ "dataset_kwargs": {
+ "trust_remote_code": true
+ },
+ "test_split": "test",
+ "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
+ "doc_to_target": "{{' '+text.split(' ')[-1]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "num_fewshot": 0,
+ "metric_list": [
+ {
+ "metric": "perplexity",
+ "aggregation": "perplexity",
+ "higher_is_better": false
+ },
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "loglikelihood",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{text}}",
+ "metadata": {
+ "version": 1.0
+ }
+ }
+ },
+ "versions": {
+ "lambada_openai": 1.0
+ },
+ "n-shot": {
+ "lambada_openai": 0
+ },
+ "n-samples": {
+ "lambada_openai": {
+ "original": 5153,
+ "effective": 5153
+ }
+ },
+ "config": {
+ "model": "hf",
+ "model_args": "pretrained=EleutherAI/pythia-410m-seed1,revision=step128,",
+ "model_num_parameters": 405334016,
+ "model_dtype": "torch.float16",
+ "model_revision": "step128",
+ "model_sha": "f923368e0f9e5f794e15b0a9a97cb4979f292b00",
+ "batch_size": "128",
+ "batch_sizes": [],
+ "device": "cuda",
+ "use_cache": null,
+ "limit": null,
+ "bootstrap_iters": 100000,
+ "gen_kwargs": null,
+ "random_seed": 0,
+ "numpy_seed": 1234,
+ "torch_seed": 1234,
+ "fewshot_seed": 1234
+ },
+ "git_hash": "51a7ca9",
+ "date": 1724085860.5683973,
+ "pretty_env_info": "PyTorch version: 2.3.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: CentOS Linux release 7.9.2009 (Core) (x86_64)\nGCC version: (GCC) 12.1.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.17\n\nPython version: 3.12.1 (main, Jan 12 2024, 16:49:08) [GCC 12.1.0] (64-bit runtime)\nPython platform: Linux-3.10.0-1160.119.1.el7.tuxcare.els2.x86_64-x86_64-with-glibc2.17\nIs CUDA available: True\nCUDA runtime version: 12.4.99\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: GPU 0: Tesla V100-PCIE-32GB\nNvidia driver version: 550.90.07\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nByte Order: Little Endian\nCPU(s): 24\nOn-line CPU(s) list: 0-23\nThread(s) per core: 1\nCore(s) per socket: 12\nSocket(s): 2\nNUMA node(s): 2\nVendor ID: GenuineIntel\nCPU family: 6\nModel: 85\nModel name: Intel(R) Xeon(R) Gold 5118 CPU @ 2.30GHz\nStepping: 4\nCPU MHz: 2939.434\nCPU max MHz: 3200.0000\nCPU min MHz: 1000.0000\nBogoMIPS: 4600.00\nVirtualization: VT-x\nL1d cache: 32K\nL1i cache: 32K\nL2 cache: 1024K\nL3 cache: 16896K\nNUMA node0 CPU(s): 0,2,4,6,8,10,12,14,16,18,20,22\nNUMA node1 CPU(s): 1,3,5,7,9,11,13,15,17,19,21,23\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx pdpe1gb rdtscp lm constant_tsc art arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc aperfmperf eagerfpu pni pclmulqdq dtes64 monitor ds_cpl vmx smx est tm2 ssse3 sdbg fma cx16 xtpr pdcm pcid dca sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand lahf_lm abm 3dnowprefetch epb cat_l3 cdp_l3 invpcid_single intel_ppin ssbd mba ibrs ibpb stibp tpr_shadow vnmi flexpriority ept vpid fsgsbase tsc_adjust bmi1 hle avx2 smep bmi2 erms invpcid rtm cqm mpx rdt_a avx512f avx512dq rdseed adx smap clflushopt clwb intel_pt avx512cd avx512bw avx512vl xsaveopt xsavec xgetbv1 cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local dtherm ida arat pln pts pku ospke md_clear spec_ctrl intel_stibp flush_l1d arch_capabilities\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.3.0\n[conda] Could not collect",
+ "transformers_version": "4.40.2",
+ "upper_git_hash": null,
+ "task_hashes": {},
+ "model_source": "hf",
+ "model_name": "EleutherAI/pythia-410m-seed1",
+ "model_name_sanitized": "EleutherAI__pythia-410m-seed1",
+ "start_time": 1465331.995489992,
+ "end_time": 1465456.16257172,
+ "total_evaluation_time_seconds": "124.16708172811195"
+ }
pythia-410m-seed1/step128/EleutherAI__pythia-410m-seed1/results_2024-08-19T15-30-54.888518.json ADDED
@@ -0,0 +1,96 @@
+ {
+ "results": {
+ "lambada_openai": {
+ "perplexity,none": 1737003.2151938989,
+ "perplexity_stderr,none": 145407.90166406872,
+ "acc,none": 0.0,
+ "acc_stderr,none": 0.0,
+ "alias": "lambada_openai"
+ }
+ },
+ "group_subtasks": {
+ "lambada_openai": []
+ },
+ "configs": {
+ "lambada_openai": {
+ "task": "lambada_openai",
+ "group": [
+ "lambada"
+ ],
+ "dataset_path": "EleutherAI/lambada_openai",
+ "dataset_name": "default",
+ "dataset_kwargs": {
+ "trust_remote_code": true
+ },
+ "test_split": "test",
+ "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
+ "doc_to_target": "{{' '+text.split(' ')[-1]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "num_fewshot": 0,
+ "metric_list": [
+ {
+ "metric": "perplexity",
+ "aggregation": "perplexity",
+ "higher_is_better": false
+ },
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "loglikelihood",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{text}}",
+ "metadata": {
+ "version": 1.0
+ }
+ }
+ },
+ "versions": {
+ "lambada_openai": 1.0
+ },
+ "n-shot": {
+ "lambada_openai": 0
+ },
+ "n-samples": {
+ "lambada_openai": {
+ "original": 5153,
+ "effective": 5153
+ }
+ },
+ "config": {
+ "model": "hf",
+ "model_args": "pretrained=EleutherAI/pythia-410m-seed1,revision=step128",
+ "model_num_parameters": 405334016,
+ "model_dtype": "torch.float16",
+ "model_revision": "step128",
+ "model_sha": "f923368e0f9e5f794e15b0a9a97cb4979f292b00",
+ "batch_size": "128",
+ "batch_sizes": [],
+ "device": "cuda",
+ "use_cache": null,
+ "limit": null,
+ "bootstrap_iters": 100000,
+ "gen_kwargs": null,
+ "random_seed": 0,
+ "numpy_seed": 1234,
+ "torch_seed": 1234,
+ "fewshot_seed": 1234
+ },
+ "git_hash": "51a7ca9",
+ "date": 1724106546.6568365,
+ "pretty_env_info": "PyTorch version: 2.3.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: CentOS Linux release 7.9.2009 (Core) (x86_64)\nGCC version: (GCC) 12.1.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.17\n\nPython version: 3.12.1 (main, Jan 12 2024, 16:49:08) [GCC 12.1.0] (64-bit runtime)\nPython platform: Linux-3.10.0-1160.118.1.el7.x86_64-x86_64-with-glibc2.17\nIs CUDA available: True\nCUDA runtime version: 12.4.99\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: GPU 0: Tesla V100-PCIE-32GB\nNvidia driver version: 550.54.15\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nByte Order: Little Endian\nCPU(s): 24\nOn-line CPU(s) list: 0-23\nThread(s) per core: 1\nCore(s) per socket: 12\nSocket(s): 2\nNUMA node(s): 2\nVendor ID: GenuineIntel\nCPU family: 6\nModel: 85\nModel name: Intel(R) Xeon(R) Gold 5118 CPU @ 2.30GHz\nStepping: 4\nCPU MHz: 2199.346\nCPU max MHz: 3200.0000\nCPU min MHz: 1000.0000\nBogoMIPS: 4600.00\nVirtualization: VT-x\nL1d cache: 32K\nL1i cache: 32K\nL2 cache: 1024K\nL3 cache: 16896K\nNUMA node0 CPU(s): 0,2,4,6,8,10,12,14,16,18,20,22\nNUMA node1 CPU(s): 1,3,5,7,9,11,13,15,17,19,21,23\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx pdpe1gb rdtscp lm constant_tsc art arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc aperfmperf eagerfpu pni pclmulqdq dtes64 monitor ds_cpl vmx smx est tm2 ssse3 sdbg fma cx16 xtpr pdcm pcid dca sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand lahf_lm abm 3dnowprefetch epb cat_l3 cdp_l3 invpcid_single intel_ppin ssbd mba ibrs ibpb stibp tpr_shadow vnmi flexpriority ept vpid fsgsbase tsc_adjust bmi1 hle avx2 smep bmi2 erms invpcid rtm cqm mpx rdt_a avx512f avx512dq rdseed adx smap clflushopt clwb intel_pt avx512cd avx512bw avx512vl xsaveopt xsavec xgetbv1 cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local dtherm ida arat pln pts pku ospke md_clear spec_ctrl intel_stibp flush_l1d arch_capabilities\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.3.0\n[conda] Could not collect",
+ "transformers_version": "4.40.2",
+ "upper_git_hash": null,
+ "task_hashes": {},
+ "model_source": "hf",
+ "model_name": "EleutherAI/pythia-410m-seed1",
+ "model_name_sanitized": "EleutherAI__pythia-410m-seed1",
+ "start_time": 4383915.785462759,
+ "end_time": 4384033.455065158,
+ "total_evaluation_time_seconds": "117.66960239876062"
+ }
pythia-410m-seed1/step128000/EleutherAI__pythia-410m-seed1/results_2024-08-19T14-39-13.635461.json ADDED
@@ -0,0 +1,96 @@
+ {
+ "results": {
+ "lambada_openai": {
+ "perplexity,none": 11.905508079738697,
+ "perplexity_stderr,none": 0.3588151373455982,
+ "acc,none": 0.4989326605860664,
+ "acc_stderr,none": 0.006965961785703306,
+ "alias": "lambada_openai"
+ }
+ },
+ "group_subtasks": {
+ "lambada_openai": []
+ },
+ "configs": {
+ "lambada_openai": {
+ "task": "lambada_openai",
+ "group": [
+ "lambada"
+ ],
+ "dataset_path": "EleutherAI/lambada_openai",
+ "dataset_name": "default",
+ "dataset_kwargs": {
+ "trust_remote_code": true
+ },
+ "test_split": "test",
+ "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
+ "doc_to_target": "{{' '+text.split(' ')[-1]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "num_fewshot": 0,
+ "metric_list": [
+ {
+ "metric": "perplexity",
+ "aggregation": "perplexity",
+ "higher_is_better": false
+ },
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "loglikelihood",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{text}}",
+ "metadata": {
+ "version": 1.0
+ }
+ }
+ },
+ "versions": {
+ "lambada_openai": 1.0
+ },
+ "n-shot": {
+ "lambada_openai": 0
+ },
+ "n-samples": {
+ "lambada_openai": {
+ "original": 5153,
+ "effective": 5153
+ }
+ },
+ "config": {
+ "model": "hf",
+ "model_args": "pretrained=EleutherAI/pythia-410m-seed1,revision=step128000,",
+ "model_num_parameters": 405334016,
+ "model_dtype": "torch.float16",
+ "model_revision": "step128000",
+ "model_sha": "1ca449b1cee5b6ba990dd7c8a013aa3f9f3d74e9",
+ "batch_size": "128",
+ "batch_sizes": [],
+ "device": "cuda",
+ "use_cache": null,
+ "limit": null,
+ "bootstrap_iters": 100000,
+ "gen_kwargs": null,
+ "random_seed": 0,
+ "numpy_seed": 1234,
+ "torch_seed": 1234,
+ "fewshot_seed": 1234
+ },
+ "git_hash": "51a7ca9",
+ "date": 1724103447.8610413,
+ "pretty_env_info": "PyTorch version: 2.3.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: CentOS Linux release 7.9.2009 (Core) (x86_64)\nGCC version: (GCC) 12.1.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.17\n\nPython version: 3.12.1 (main, Jan 12 2024, 16:49:08) [GCC 12.1.0] (64-bit runtime)\nPython platform: Linux-3.10.0-1160.119.1.el7.tuxcare.els2.x86_64-x86_64-with-glibc2.17\nIs CUDA available: True\nCUDA runtime version: 12.4.99\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: GPU 0: Tesla V100-PCIE-32GB\nNvidia driver version: 550.90.07\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nByte Order: Little Endian\nCPU(s): 24\nOn-line CPU(s) list: 0-23\nThread(s) per core: 1\nCore(s) per socket: 12\nSocket(s): 2\nNUMA node(s): 2\nVendor ID: GenuineIntel\nCPU family: 6\nModel: 85\nModel name: Intel(R) Xeon(R) Gold 5118 CPU @ 2.30GHz\nStepping: 4\nCPU MHz: 2723.809\nCPU max MHz: 3200.0000\nCPU min MHz: 1000.0000\nBogoMIPS: 4600.00\nVirtualization: VT-x\nL1d cache: 32K\nL1i cache: 32K\nL2 cache: 1024K\nL3 cache: 16896K\nNUMA node0 CPU(s): 0,2,4,6,8,10,12,14,16,18,20,22\nNUMA node1 CPU(s): 1,3,5,7,9,11,13,15,17,19,21,23\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx pdpe1gb rdtscp lm constant_tsc art arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc aperfmperf eagerfpu pni pclmulqdq dtes64 monitor ds_cpl vmx smx est tm2 ssse3 sdbg fma cx16 xtpr pdcm pcid dca sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand lahf_lm abm 3dnowprefetch epb cat_l3 cdp_l3 invpcid_single intel_ppin ssbd mba ibrs ibpb stibp tpr_shadow vnmi flexpriority ept vpid fsgsbase tsc_adjust bmi1 hle avx2 smep bmi2 erms invpcid rtm cqm mpx rdt_a avx512f avx512dq rdseed adx smap clflushopt clwb intel_pt avx512cd avx512bw avx512vl xsaveopt xsavec xgetbv1 cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local dtherm ida arat pln pts pku ospke md_clear spec_ctrl intel_stibp flush_l1d arch_capabilities\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.3.0\n[conda] Could not collect",
+ "transformers_version": "4.40.2",
+ "upper_git_hash": null,
+ "task_hashes": {},
+ "model_source": "hf",
+ "model_name": "EleutherAI/pythia-410m-seed1",
+ "model_name_sanitized": "EleutherAI__pythia-410m-seed1",
+ "start_time": 1482918.318040931,
+ "end_time": 1483040.168048426,
+ "total_evaluation_time_seconds": "121.85000749514438"
+ }
pythia-410m-seed1/step129000/EleutherAI__pythia-410m-seed1/results_2024-08-19T14-41-17.419545.json ADDED
@@ -0,0 +1,96 @@
+ {
+ "results": {
+ "lambada_openai": {
+ "perplexity,none": 10.932974553577269,
+ "perplexity_stderr,none": 0.32985608971117786,
+ "acc,none": 0.5148457209392587,
+ "acc_stderr,none": 0.006962906440875493,
+ "alias": "lambada_openai"
+ }
+ },
+ "group_subtasks": {
+ "lambada_openai": []
+ },
+ "configs": {
+ "lambada_openai": {
+ "task": "lambada_openai",
+ "group": [
+ "lambada"
+ ],
+ "dataset_path": "EleutherAI/lambada_openai",
+ "dataset_name": "default",
+ "dataset_kwargs": {
+ "trust_remote_code": true
+ },
+ "test_split": "test",
+ "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
+ "doc_to_target": "{{' '+text.split(' ')[-1]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "num_fewshot": 0,
+ "metric_list": [
+ {
+ "metric": "perplexity",
+ "aggregation": "perplexity",
+ "higher_is_better": false
+ },
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "loglikelihood",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{text}}",
+ "metadata": {
+ "version": 1.0
+ }
+ }
+ },
+ "versions": {
+ "lambada_openai": 1.0
+ },
+ "n-shot": {
+ "lambada_openai": 0
+ },
+ "n-samples": {
+ "lambada_openai": {
+ "original": 5153,
+ "effective": 5153
+ }
+ },
+ "config": {
+ "model": "hf",
+ "model_args": "pretrained=EleutherAI/pythia-410m-seed1,revision=step129000,",
+ "model_num_parameters": 405334016,
+ "model_dtype": "torch.float16",
+ "model_revision": "step129000",
+ "model_sha": "f5f99e5cef34b3d365fb2326b1edab3f9dd0bbbd",
+ "batch_size": "128",
+ "batch_sizes": [],
+ "device": "cuda",
+ "use_cache": null,
+ "limit": null,
+ "bootstrap_iters": 100000,
+ "gen_kwargs": null,
+ "random_seed": 0,
+ "numpy_seed": 1234,
+ "torch_seed": 1234,
+ "fewshot_seed": 1234
+ },
+ "git_hash": "51a7ca9",
+ "date": 1724103573.050569,
+ "pretty_env_info": "PyTorch version: 2.3.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: CentOS Linux release 7.9.2009 (Core) (x86_64)\nGCC version: (GCC) 12.1.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.17\n\nPython version: 3.12.1 (main, Jan 12 2024, 16:49:08) [GCC 12.1.0] (64-bit runtime)\nPython platform: Linux-3.10.0-1160.119.1.el7.tuxcare.els2.x86_64-x86_64-with-glibc2.17\nIs CUDA available: True\nCUDA runtime version: 12.4.99\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: GPU 0: Tesla V100-PCIE-32GB\nNvidia driver version: 550.90.07\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nByte Order: Little Endian\nCPU(s): 24\nOn-line CPU(s) list: 0-23\nThread(s) per core: 1\nCore(s) per socket: 12\nSocket(s): 2\nNUMA node(s): 2\nVendor ID: GenuineIntel\nCPU family: 6\nModel: 85\nModel name: Intel(R) Xeon(R) Gold 5118 CPU @ 2.30GHz\nStepping: 4\nCPU MHz: 2835.974\nCPU max MHz: 3200.0000\nCPU min MHz: 1000.0000\nBogoMIPS: 4600.00\nVirtualization: VT-x\nL1d cache: 32K\nL1i cache: 32K\nL2 cache: 1024K\nL3 cache: 16896K\nNUMA node0 CPU(s): 0,2,4,6,8,10,12,14,16,18,20,22\nNUMA node1 CPU(s): 1,3,5,7,9,11,13,15,17,19,21,23\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx pdpe1gb rdtscp lm constant_tsc art arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc aperfmperf eagerfpu pni pclmulqdq dtes64 monitor ds_cpl vmx smx est tm2 ssse3 sdbg fma cx16 xtpr pdcm pcid dca sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand lahf_lm abm 3dnowprefetch epb cat_l3 cdp_l3 invpcid_single intel_ppin ssbd mba ibrs ibpb stibp tpr_shadow vnmi flexpriority ept vpid fsgsbase tsc_adjust bmi1 hle avx2 smep bmi2 erms invpcid rtm cqm mpx rdt_a avx512f avx512dq rdseed adx smap clflushopt clwb intel_pt avx512cd avx512bw avx512vl xsaveopt xsavec xgetbv1 cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local dtherm ida arat pln pts pku ospke md_clear spec_ctrl intel_stibp flush_l1d arch_capabilities\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.3.0\n[conda] Could not collect",
+ "transformers_version": "4.40.2",
+ "upper_git_hash": null,
+ "task_hashes": {},
+ "model_source": "hf",
+ "model_name": "EleutherAI/pythia-410m-seed1",
+ "model_name_sanitized": "EleutherAI__pythia-410m-seed1",
+ "start_time": 1483048.063962735,
+ "end_time": 1483163.950415313,
+ "total_evaluation_time_seconds": "115.88645257800817"
+ }