CultriX commited on
Commit
d6d1ad3
·
verified ·
1 Parent(s): 6b68288

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +1610 -1609
app.py CHANGED
@@ -319,1808 +319,1809 @@ def download_all_data():
319
  # --------------------------------------------------------------------
320
  # This is your larger dataset, rank = 44..105
321
  benchmark_data = [
322
- {
 
323
  "rank": 1,
324
  "name": "wanlige/li-14b-v0.4",
325
  "scores": {
326
- "average": 43.66,
327
- "IFEval": 81.33,
328
- "BBH": 50.38,
329
- "MATH": 55.74,
330
- "GPQA": 11.86,
331
- "MUSR": 16.35,
332
- "MMLU_PRO": 46.30,
333
- "Architecture": "Qwen2ForCausalLM",
334
- "Parameters": "14.77B",
335
- "Chat_Template": "Yes"
336
  },
337
  "hf_url": "https://huggingface.co/wanlige/li-14b-v0.4",
338
  "known_config": null
339
- },
340
- {
341
  "rank": 2,
342
  "name": "suayptalha/Lamarckvergence-14B",
343
  "scores": {
344
- "average": 43.32,
345
- "IFEval": 76.56,
346
- "BBH": 50.33,
347
- "MATH": 54.0,
348
- "GPQA": 15.1,
349
- "MUSR": 16.34,
350
- "MMLU_PRO": 47.59,
351
- "Architecture": "Qwen2ForCausalLM",
352
- "Parameters": "14.766B",
353
- "Chat_Template": "Yes"
354
  },
355
  "hf_url": "https://huggingface.co/suayptalha/Lamarckvergence-14B",
356
  "known_config": null
357
- },
358
- {
359
  "rank": 3,
360
  "name": "wanlige/li-14b-v0.4-slerp0.1",
361
  "scores": {
362
- "average": 42.91,
363
- "IFEval": 79.23,
364
- "BBH": 50.88,
365
- "MATH": 53.32,
366
- "GPQA": 14.54,
367
- "MUSR": 11.75,
368
- "MMLU_PRO": 47.71,
369
- "Architecture": "Qwen2ForCausalLM",
370
- "Parameters": "14.766B",
371
- "Chat_Template": "Yes"
372
  },
373
  "hf_url": "https://huggingface.co/wanlige/li-14b-v0.4-slerp0.1",
374
  "known_config": null
375
- },
376
- {
377
  "rank": 4,
378
  "name": "sthenno-com/miscii-14b-0218",
379
  "scores": {
380
- "average": 42.90,
381
- "IFEval": 76.56,
382
- "BBH": 50.64,
383
- "MATH": 51.44,
384
- "GPQA": 17.79,
385
- "MUSR": 13.21,
386
- "MMLU_PRO": 47.75,
387
- "Architecture": "Qwen2ForCausalLM",
388
- "Parameters": "14.766B",
389
- "Chat_Template": "Yes"
390
  },
391
  "hf_url": "https://huggingface.co/sthenno-com/miscii-14b-0218",
392
  "known_config": null
393
- },
394
- {
395
  "rank": 5,
396
  "name": "sthenno/tempesthenno-ppo-ckpt40",
397
  "scores": {
398
- "average": 42.74,
399
- "IFEval": 79.23,
400
- "BBH": 50.57,
401
- "MATH": 47.36,
402
- "GPQA": 17.0,
403
- "MUSR": 14.56,
404
- "MMLU_PRO": 47.69,
405
- "Architecture": "Qwen2ForCausalLM",
406
- "Parameters": "14.766B",
407
- "Chat_Template": "Yes"
408
  },
409
  "hf_url": "https://huggingface.co/sthenno/tempesthenno-ppo-ckpt40",
410
  "known_config": null
411
- },
412
- {
413
  "rank": 6,
414
  "name": "tanliboy/lambda-qwen2.5-14b-dpo-test",
415
  "scores": {
416
- "average": 42.62,
417
- "IFEval": 82.31,
418
- "BBH": 48.45,
419
- "MATH": 54.61,
420
- "GPQA": 14.99,
421
- "MUSR": 12.59,
422
- "MMLU_PRO": 42.75,
423
- "Architecture": "Qwen2ForCausalLM",
424
- "Parameters": "14.77B",
425
- "Chat_Template": "Yes"
426
  },
427
  "hf_url": "https://huggingface.co/tanliboy/lambda-qwen2.5-14b-dpo-test",
428
  "known_config": null
429
- },
430
- {
431
  "rank": 7,
432
  "name": "sthenno/tempesthenno-nuslerp-001",
433
  "scores": {
434
- "average": 42.59,
435
- "IFEval": 79.26,
436
- "BBH": 51.04,
437
- "MATH": 47.58,
438
- "GPQA": 16.44,
439
- "MUSR": 13.88,
440
- "MMLU_PRO": 47.30,
441
- "Architecture": "Qwen2ForCausalLM",
442
- "Parameters": "14.766B",
443
- "Chat_Template": "Yes"
444
  },
445
  "hf_url": "https://huggingface.co/sthenno/tempesthenno-nuslerp-001",
446
  "known_config": null
447
- },
448
- {
449
  "rank": 8,
450
  "name": "YOYO-AI/Qwen2.5-14B-1M-YOYO-V3",
451
  "scores": {
452
- "average": 42.56,
453
- "IFEval": 83.98,
454
- "BBH": 49.47,
455
- "MATH": 53.55,
456
- "GPQA": 10.51,
457
- "MUSR": 11.10,
458
- "MMLU_PRO": 46.74,
459
- "Architecture": "Qwen2ForCausalLM",
460
- "Parameters": "14.766B",
461
- "Chat_Template": "Yes"
462
  },
463
  "hf_url": "https://huggingface.co/YOYO-AI/Qwen2.5-14B-1M-YOYO-V3",
464
  "known_config": null
465
- },
466
- {
467
  "rank": 9,
468
  "name": "Goekdeniz-Guelmez/Josiefied-Qwen2.5-14B-Instruct-abliterated-v4",
469
  "scores": {
470
- "average": 42.55,
471
- "IFEval": 82.92,
472
- "BBH": 48.05,
473
- "MATH": 54.23,
474
- "GPQA": 12.30,
475
- "MUSR": 13.15,
476
- "MMLU_PRO": 44.65,
477
- "Architecture": "Qwen2ForCausalLM",
478
- "Parameters": "14.77B",
479
- "Chat_Template": "Yes"
480
  },
481
  "hf_url": "https://huggingface.co/Goekdeniz-Guelmez/Josiefied-Qwen2.5-14B-Instruct-abliterated-v4",
482
  "known_config": null
483
- },
484
- {
485
  "rank": 10,
486
  "name": "djuna/Q2.5-Veltha-14B",
487
  "scores": {
488
- "average": 42.52,
489
- "IFEval": 82.92,
490
- "BBH": 49.75,
491
- "MATH": 47.89,
492
- "GPQA": 14.54,
493
- "MUSR": 12.26,
494
- "MMLU_PRO": 47.76,
495
- "Architecture": "Qwen2ForCausalLM",
496
- "Parameters": "14.766B",
497
- "Chat_Template": "Yes"
498
  },
499
  "hf_url": "https://huggingface.co/djuna/Q2.5-Veltha-14B",
500
  "known_config": null
501
- },
502
- {
503
  "rank": 11,
504
  "name": "arcee-ai/Virtuoso-Small-v2",
505
  "scores": {
506
- "average": 42.48,
507
- "IFEval": 82.73,
508
- "BBH": 50.95,
509
- "MATH": 46.60,
510
- "GPQA": 13.76,
511
- "MUSR": 14.28,
512
- "MMLU_PRO": 46.53,
513
- "Architecture": "Qwen2ForCausalLM",
514
- "Parameters": "14.766B",
515
- "Chat_Template": "Yes"
516
  },
517
  "hf_url": "https://huggingface.co/arcee-ai/Virtuoso-Small-v2",
518
  "known_config": null
519
- },
520
- {
521
  "rank": 12,
522
  "name": "YOYO-AI/Qwen2.5-14B-YOYO-V4-p1",
523
  "scores": {
524
- "average": 42.46,
525
- "IFEval": 82.03,
526
- "BBH": 50.25,
527
- "MATH": 53.32,
528
- "GPQA": 12.75,
529
- "MUSR": 11.73,
530
- "MMLU_PRO": 44.67,
531
- "Architecture": "Qwen2ForCausalLM",
532
- "Parameters": "14.766B",
533
- "Chat_Template": "Yes"
534
  },
535
  "hf_url": "https://huggingface.co/YOYO-AI/Qwen2.5-14B-YOYO-V4-p1",
536
  "known_config": null
537
- },
538
- {
539
  "rank": 13,
540
  "name": "jpacifico/Chocolatine-14B-Instruct-DPO-v1.3",
541
  "scores": {
542
- "average": 42.42,
543
- "IFEval": 70.40,
544
- "BBH": 54.85,
545
- "MATH": 56.19,
546
- "GPQA": 12.19,
547
- "MUSR": 12.29,
548
- "MMLU_PRO": 48.60,
549
- "Architecture": "Phi3ForCausalLM",
550
- "Parameters": "14.66B",
551
- "Chat_Template": "Yes"
552
  },
553
  "hf_url": "https://huggingface.co/jpacifico/Chocolatine-14B-Instruct-DPO-v1.3",
554
  "known_config": null
555
- },
556
- {
557
  "rank": 14,
558
  "name": "sthenno-com/miscii-14b-1028",
559
  "scores": {
560
- "average": 42.38,
561
- "IFEval": 82.37,
562
- "BBH": 49.26,
563
- "MATH": 50.30,
564
- "GPQA": 14.21,
565
- "MUSR": 12.00,
566
- "MMLU_PRO": 46.14,
567
- "Architecture": "Qwen2ForCausalLM",
568
- "Parameters": "14.77B",
569
- "Chat_Template": "Yes"
570
  },
571
  "hf_url": "https://huggingface.co/sthenno-com/miscii-14b-1028",
572
  "known_config": null
573
- },
574
- {
575
  "rank": 15,
576
  "name": "sthenno-com/miscii-14b-1225",
577
  "scores": {
578
- "average": 42.35,
579
- "IFEval": 78.78,
580
- "BBH": 50.91,
581
- "MATH": 45.17,
582
- "GPQA": 17.00,
583
- "MUSR": 14.77,
584
- "MMLU_PRO": 47.46,
585
- "Architecture": "Qwen2ForCausalLM",
586
- "Parameters": "14.766B",
587
- "Chat_Template": "Yes"
588
  },
589
  "hf_url": "https://huggingface.co/sthenno-com/miscii-14b-1225",
590
  "known_config": null
591
- },
592
- {
593
  "rank": 16,
594
  "name": "prithivMLmods/Sombrero-Opus-14B-Elite5",
595
  "scores": {
596
- "average": 42.32,
597
- "IFEval": 78.81,
598
- "BBH": 50.17,
599
- "MATH": 53.55,
600
- "GPQA": 11.52,
601
- "MUSR": 13.22,
602
- "MMLU_PRO": 46.67,
603
- "Architecture": "Qwen2ForCausalLM",
604
- "Parameters": "14.766B",
605
- "Chat_Template": "Yes"
606
  },
607
  "hf_url": "https://huggingface.co/prithivMLmods/Sombrero-Opus-14B-Elite5",
608
  "known_config": null
609
- },
610
- {
611
  "rank": 17,
612
  "name": "Lunzima/NQLSG-Qwen2.5-14B-MegaFusion-v8",
613
  "scores": {
614
- "average": 42.26,
615
- "IFEval": 73.84,
616
- "BBH": 49.31,
617
- "MATH": 41.69,
618
- "GPQA": 18.23,
619
- "MUSR": 21.96,
620
- "MMLU_PRO": 48.50,
621
- "Architecture": "Qwen2ForCausalLM",
622
- "Parameters": "14.766B",
623
- "Chat_Template": "No"
624
  },
625
  "hf_url": "https://huggingface.co/Lunzima/NQLSG-Qwen2.5-14B-MegaFusion-v8",
626
  "known_config": null
627
- },
628
- {
629
  "rank": 18,
630
  "name": "prithivMLmods/Equuleus-Opus-14B-Exp",
631
  "scores": {
632
- "average": 42.20,
633
- "IFEval": 70.01,
634
- "BBH": 48.62,
635
- "MATH": 45.85,
636
- "GPQA": 18.23,
637
- "MUSR": 21.90,
638
- "MMLU_PRO": 48.60,
639
- "Architecture": "Qwen2ForCausalLM",
640
- "Parameters": "14.766B",
641
- "Chat_Template": "No"
642
  },
643
  "hf_url": "https://huggingface.co/prithivMLmods/Equuleus-Opus-14B-Exp",
644
  "known_config": null
645
- },
646
- {
647
  "rank": 19,
648
  "name": "rombodawg/Rombos-LLM-V2.6-Qwen-14b",
649
  "scores": {
650
- "average": 42.20,
651
- "IFEval": 84.32,
652
- "BBH": 49.28,
653
- "MATH": 52.11,
654
- "GPQA": 11.19,
655
- "MUSR": 12.29,
656
- "MMLU_PRO": 44.01,
657
- "Architecture": "Qwen2ForCausalLM",
658
- "Parameters": "14.77B",
659
- "Chat_Template": "Yes"
660
  },
661
  "hf_url": "https://huggingface.co/rombodawg/Rombos-LLM-V2.6-Qwen-14b",
662
  "known_config": null
663
- },
664
- {
665
  "rank": 20,
666
  "name": "nbeerbower/EVA-abliterated-TIES-Qwen2.5-14B",
667
  "scores": {
668
- "average": 42.16,
669
- "IFEval": 78.36,
670
- "BBH": 48.52,
671
- "MATH": 50.45,
672
- "GPQA": 13.98,
673
- "MUSR": 14.88,
674
- "MMLU_PRO": 46.79,
675
- "Architecture": "Qwen2ForCausalLM",
676
- "Parameters": "14.77B",
677
- "Chat_Template": "Yes"
678
  },
679
  "hf_url": "https://huggingface.co/nbeerbower/EVA-abliterated-TIES-Qwen2.5-14B",
680
  "known_config": null
681
- },
682
- {
683
  "rank": 21,
684
  "name": "sometimesanotion/LamarckInfusion-14B-v1",
685
  "scores": {
686
- "average": 42.06,
687
- "IFEval": 71.98,
688
- "BBH": 50.35,
689
- "MATH": 41.69,
690
- "GPQA": 18.79,
691
- "MUSR": 20.90,
692
- "MMLU_PRO": 48.63,
693
- "Architecture": "Qwen2ForCausalLM",
694
- "Parameters": "14.766B",
695
- "Chat_Template": "No"
696
  },
697
  "hf_url": "https://huggingface.co/sometimesanotion/LamarckInfusion-14B-v1",
698
  "known_config": null
699
- },
700
- {
701
  "rank": 22,
702
  "name": "tensopolis/virtuoso-small-v2-tensopolis-v1",
703
  "scores": {
704
- "average": 41.99,
705
- "IFEval": 82.40,
706
- "BBH": 50.53,
707
- "MATH": 46.53,
708
- "GPQA": 12.53,
709
- "MUSR": 13.88,
710
- "MMLU_PRO": 46.07,
711
- "Architecture": "Qwen2ForCausalLM",
712
- "Parameters": "14.766B",
713
- "Chat_Template": "Yes"
714
  },
715
  "hf_url": "https://huggingface.co/tensopolis/virtuoso-small-v2-tensopolis-v1",
716
  "known_config": null
717
- },
718
- {
719
  "rank": 23,
720
  "name": "Quazim0t0/Fugazi14b",
721
  "scores": {
722
- "average": 41.94,
723
- "IFEval": 69.98,
724
- "BBH": 56.09,
725
- "MATH": 46.53,
726
- "GPQA": 13.53,
727
- "MUSR": 16.42,
728
- "MMLU_PRO": 49.08,
729
- "Architecture": "LlamaForCausalLM",
730
- "Parameters": "14.66B",
731
- "Chat_Template": "Yes"
732
  },
733
  "hf_url": "https://huggingface.co/Quazim0t0/Fugazi14b",
734
  "known_config": null
735
- },
736
- {
737
  "rank": 24,
738
  "name": "1024m/QWEN-14B-B100",
739
  "scores": {
740
- "average": 41.92,
741
- "IFEval": 77.62,
742
- "BBH": 49.78,
743
- "MATH": 54.38,
744
- "GPQA": 13.42,
745
- "MUSR": 9.88,
746
- "MMLU_PRO": 46.43,
747
- "Architecture": "Qwen2ForCausalLM",
748
- "Parameters": "14.77B",
749
- "Chat_Template": "Yes"
750
  },
751
  "hf_url": "https://huggingface.co/1024m/QWEN-14B-B100",
752
  "known_config": null
753
- },
754
- {
755
  "rank": 25,
756
  "name": "Sakalti/Saka-14B",
757
  "scores": {
758
- "average": 41.91,
759
- "IFEval": 71.74,
760
- "BBH": 49.72,
761
- "MATH": 40.94,
762
- "GPQA": 19.46,
763
- "MUSR": 20.74,
764
- "MMLU_PRO": 48.84,
765
- "Architecture": "Qwen2ForCausalLM",
766
- "Parameters": "14.766B",
767
- "Chat_Template": "No"
768
  },
769
  "hf_url": "https://huggingface.co/Sakalti/Saka-14B",
770
  "known_config": null
771
- },
772
- {
773
  "rank": 26,
774
  "name": "prithivMLmods/Sombrero-Opus-14B-Elite6",
775
  "scores": {
776
- "average": 41.88,
777
- "IFEval": 72.26,
778
- "BBH": 49.60,
779
- "MATH": 40.79,
780
- "GPQA": 19.13,
781
- "MUSR": 20.74,
782
- "MMLU_PRO": 48.78,
783
- "Architecture": "Qwen2ForCausalLM",
784
- "Parameters": "14.766B",
785
- "Chat_Template": "No"
786
  },
787
  "hf_url": "https://huggingface.co/prithivMLmods/Sombrero-Opus-14B-Elite6",
788
  "known_config": null
789
- },
790
- {
791
  "rank": 27,
792
  "name": "YOYO-AI/Qwen2.5-14B-YOYO-latest-V2",
793
  "scores": {
794
- "average": 41.85,
795
- "IFEval": 77.71,
796
- "BBH": 47.30,
797
- "MATH": 51.59,
798
- "GPQA": 13.87,
799
- "MUSR": 13.68,
800
- "MMLU_PRO": 46.93,
801
- "Architecture": "Qwen2ForCausalLM",
802
- "Parameters": "14.766B",
803
- "Chat_Template": "Yes"
804
  },
805
  "hf_url": "https://huggingface.co/YOYO-AI/Qwen2.5-14B-YOYO-latest-V2",
806
  "known_config": null
807
- },
808
- {
809
  "rank": 28,
810
  "name": "Tsunami-th/Tsunami-1.0-14B-Instruct",
811
  "scores": {
812
- "average": 41.84,
813
- "IFEval": 78.29,
814
- "BBH": 49.15,
815
- "MATH": 45.85,
816
- "GPQA": 14.21,
817
- "MUSR": 16.34,
818
- "MMLU_PRO": 47.21,
819
- "Architecture": "Qwen2ForCausalLM",
820
- "Parameters": "14.77B",
821
- "Chat_Template": "Yes"
822
  },
823
  "hf_url": "https://huggingface.co/Tsunami-th/Tsunami-1.0-14B-Instruct",
824
  "known_config": null
825
- },
826
- {
827
  "rank": 29,
828
  "name": "sthenno/tempesthenno-kto-0205-ckpt80",
829
  "scores": {
830
- "average": 41.79,
831
- "IFEval": 80.54,
832
- "BBH": 50.64,
833
- "MATH": 45.92,
834
- "GPQA": 13.09,
835
- "MUSR": 12.93,
836
- "MMLU_PRO": 47.62,
837
- "Architecture": "Qwen2ForCausalLM",
838
- "Parameters": "14.766B",
839
- "Chat_Template": "No"
840
  },
841
  "hf_url": "https://huggingface.co/sthenno/tempesthenno-kto-0205-ckpt80",
842
  "known_config": null
843
- },
844
- {
845
  "rank": 30,
846
  "name": "sometimesanotion/Lamarck-14B-v0.7-rc4",
847
  "scores": {
848
- "average": 41.79,
849
- "IFEval": 72.11,
850
- "BBH": 49.85,
851
- "MATH": 40.26,
852
- "GPQA": 18.57,
853
- "MUSR": 21.07,
854
- "MMLU_PRO": 48.89,
855
- "Architecture": "Qwen2ForCausalLM",
856
- "Parameters": "14.766B",
857
- "Chat_Template": "No"
858
  },
859
  "hf_url": "https://huggingface.co/sometimesanotion/Lamarck-14B-v0.7-rc4",
860
  "known_config": null
861
- },
862
- {
863
  "rank": 31,
864
  "name": "prithivMLmods/Porpoise-Opus-14B-Exp",
865
  "scores": {
866
- "average": 41.77,
867
- "IFEval": 70.98,
868
- "BBH": 49.95,
869
- "MATH": 40.41,
870
- "GPQA": 19.13,
871
- "MUSR": 21.30,
872
- "MMLU_PRO": 48.85,
873
- "Architecture": "Qwen2ForCausalLM",
874
- "Parameters": "14.766B",
875
- "Chat_Template": "No"
876
  },
877
  "hf_url": "https://huggingface.co/prithivMLmods/Porpoise-Opus-14B-Exp",
878
  "known_config": null
879
- },
880
- {
881
  "rank": 32,
882
  "name": "CombinHorizon/Josiefied-abliteratedV4-Qwen2.5-14B-Inst-BaseMerge-TIES",
883
  "scores": {
884
- "average": 41.77,
885
- "IFEval": 82.40,
886
- "BBH": 48.20,
887
- "MATH": 53.17,
888
- "GPQA": 9.96,
889
- "MUSR": 12.65,
890
- "MMLU_PRO": 44.21,
891
- "Architecture": "Qwen2ForCausalLM",
892
- "Parameters": "14.77B",
893
- "Chat_Template": "Yes"
894
  },
895
  "hf_url": "https://huggingface.co/CombinHorizon/Josiefied-abliteratedV4-Qwen2.5-14B-Inst-BaseMerge-TIES",
896
  "known_config": null
897
- },
898
- {
899
- "rank": 33,
900
- "name": "suayptalha/Lamarckvergence-14B",
901
- "scores": {
902
- "average": 43.32,
903
- "IFEval": 76.56,
904
- "BBH": 50.33,
905
- "MATH": 54.0,
906
- "GPQA": 15.1,
907
- "MUSR": 16.34,
908
- "MMLU_PRO": 47.59,
909
- "Architecture": "Qwen2ForCausalLM",
910
- "Parameters": "14.766B",
911
- "Chat_Template": "Yes"
912
- },
913
- "hf_url": "https://huggingface.co/suayptalha/Lamarckvergence-14B",
914
- "known_config": null
915
- },
916
- {
917
- "rank": 34,
918
- "name": "sthenno/tempesthenno-ppo-ckpt40",
919
- "scores": {
920
- "average": 42.74,
921
- "IFEval": 79.23,
922
- "BBH": 50.57,
923
- "MATH": 47.36,
924
- "GPQA": 17.0,
925
- "MUSR": 14.56,
926
- "MMLU_PRO": 47.69,
927
- "Architecture": "Qwen2ForCausalLM",
928
- "Parameters": "14.766B",
929
- "Chat_Template": "Yes"
930
- },
931
- "hf_url": "https://huggingface.co/sthenno/tempesthenno-ppo-ckpt40",
932
- "known_config": null
933
- },
934
- {
935
- "rank": 35,
936
- "name": "tanliboy/lambda-qwen2.5-14b-dpo-test",
937
- "scores": {
938
- "average": 42.62,
939
- "IFEval": 82.31,
940
- "BBH": 48.45,
941
- "MATH": 54.61,
942
- "GPQA": 14.99,
943
- "MUSR": 12.59,
944
- "MMLU_PRO": 42.75,
945
- "Architecture": "Qwen2ForCausalLM",
946
- "Parameters": "14.77B",
947
- "Chat_Template": "Yes"
948
- },
949
- "hf_url": "https://huggingface.co/tanliboy/lambda-qwen2.5-14b-dpo-test",
950
- "known_config": null
951
- },
952
- {
953
- "rank": 36,
954
- "name": "sthenno/tempesthenno-nuslerp-001",
955
- "scores": {
956
- "average": 42.59,
957
- "IFEval": 79.26,
958
- "BBH": 51.04,
959
- "MATH": 47.58,
960
- "GPQA": 16.44,
961
- "MUSR": 13.88,
962
- "MMLU_PRO": 47.3,
963
- "Architecture": "Qwen2ForCausalLM",
964
- "Parameters": "14.766B",
965
- "Chat_Template": "Yes"
966
- },
967
- "hf_url": "https://huggingface.co/sthenno/tempesthenno-nuslerp-001",
968
- "known_config": null
969
- },
970
- {
971
- "rank": 37,
972
- "name": "Goekdeniz-Guelmez/Josiefied-Qwen2.5-14B-Instruct-abliterated-v4",
973
- "scores": {
974
- "average": 42.55,
975
- "IFEval": 82.92,
976
- "BBH": 48.05,
977
- "MATH": 54.23,
978
- "GPQA": 12.3,
979
- "MUSR": 13.15,
980
- "MMLU_PRO": 44.65,
981
- "Architecture": "Qwen2ForCausalLM",
982
- "Parameters": "14.77B",
983
- "Chat_Template": "Yes"
984
- },
985
- "hf_url": "https://huggingface.co/Goekdeniz-Guelmez/Josiefied-Qwen2.5-14B-Instruct-abliterated-v4",
986
- "known_config": null
987
- },
988
- {
989
- "rank": 38,
990
- "name": "djuna/Q2.5-Veltha-14B",
991
- "scores": {
992
- "average": 42.52,
993
- "IFEval": 82.92,
994
- "BBH": 49.75,
995
- "MATH": 47.89,
996
- "GPQA": 14.54,
997
- "MUSR": 12.26,
998
- "MMLU_PRO": 47.76,
999
- "Architecture": "Qwen2ForCausalLM",
1000
- "Parameters": "14.766B",
1001
- "Chat_Template": "Yes"
1002
- },
1003
- "hf_url": "https://huggingface.co/djuna/Q2.5-Veltha-14B",
1004
- "known_config": null
1005
- },
1006
- {
1007
- "rank": 39,
1008
- "name": "arcee-ai/Virtuoso-Small-v2",
1009
- "scores": {
1010
- "average": 42.48,
1011
- "IFEval": 82.73,
1012
- "BBH": 50.95,
1013
- "MATH": 46.6,
1014
- "GPQA": 13.76,
1015
- "MUSR": 14.28,
1016
- "MMLU_PRO": 46.53,
1017
- "Architecture": "Qwen2ForCausalLM",
1018
- "Parameters": "14.766B",
1019
- "Chat_Template": "Yes"
1020
- },
1021
- "hf_url": "https://huggingface.co/arcee-ai/Virtuoso-Small-v2",
1022
- "known_config": null
1023
- },
1024
- {
1025
- "rank": 40,
1026
- "name": "jpacifico/Chocolatine-14B-Instruct-DPO-v1.3",
1027
- "scores": {
1028
- "average": 42.42,
1029
- "IFEval": 70.4,
1030
- "BBH": 54.85,
1031
- "MATH": 56.19,
1032
- "GPQA": 12.19,
1033
- "MUSR": 12.29,
1034
- "MMLU_PRO": 48.6,
1035
- "Architecture": "Phi3ForCausalLM",
1036
- "Parameters": "14.66B",
1037
- "Chat_Template": "Yes"
1038
- },
1039
- "hf_url": "https://huggingface.co/jpacifico/Chocolatine-14B-Instruct-DPO-v1.3",
1040
- "known_config": null
1041
- },
1042
- {
1043
- "rank": 41,
1044
- "name": "sthenno-com/miscii-14b-1028",
1045
- "scores": {
1046
- "average": 42.38,
1047
- "IFEval": 82.37,
1048
- "BBH": 49.26,
1049
- "MATH": 50.3,
1050
- "GPQA": 14.21,
1051
- "MUSR": 12.0,
1052
- "MMLU_PRO": 46.14,
1053
- "Architecture": "Qwen2ForCausalLM",
1054
- "Parameters": "14.77B",
1055
- "Chat_Template": "Yes"
1056
- },
1057
- "hf_url": "https://huggingface.co/sthenno-com/miscii-14b-1028",
1058
- "known_config": null
1059
- },
1060
- {
1061
- "rank": 42,
1062
- "name": "sthenno-com/miscii-14b-1225",
1063
- "scores": {
1064
- "average": 42.35,
1065
- "IFEval": 78.78,
1066
- "BBH": 50.91,
1067
- "MATH": 45.17,
1068
- "GPQA": 17.0,
1069
- "MUSR": 14.77,
1070
- "MMLU_PRO": 47.46,
1071
- "Architecture": "Qwen2ForCausalLM",
1072
- "Parameters": "14.766B",
1073
- "Chat_Template": "Yes"
1074
- },
1075
- "hf_url": "https://huggingface.co/sthenno-com/miscii-14b-1225",
1076
- "known_config": null
1077
- },
1078
- {
1079
- "rank": 43,
1080
- "name": "tensopolis/virtuoso-small-v2-tensopolis-v1",
1081
- "scores": {
1082
- "average": 42.34,
1083
- "IFEval": 83.4,
1084
- "BBH": 50.99,
1085
- "MATH": 46.6,
1086
- "GPQA": 12.98,
1087
- "MUSR": 13.38,
1088
- "MMLU_PRO": 46.67,
1089
- "Architecture": "Qwen2ForCausalLM",
1090
- "Parameters": "14.766B",
1091
- "Chat_Template": "Yes"
1092
- },
1093
- "hf_url": "https://huggingface.co/tensopolis/virtuoso-small-v2-tensopolis-v1",
1094
- "known_config": null
1095
- },
1096
- {
1097
- "rank": 44,
1098
- "name": "rombodawg/Rombos-LLM-V2.6-Qwen-14b",
1099
- "scores": {
1100
- "average": 42.2,
1101
- "IFEval": 84.32,
1102
- "BBH": 49.28,
1103
- "MATH": 52.11,
1104
- "GPQA": 11.19,
1105
- "MUSR": 12.29,
1106
- "MMLU_PRO": 44.01,
1107
- "Architecture": "Qwen2ForCausalLM",
1108
- "Parameters": "14.77B",
1109
- "Chat_Template": "Yes"
1110
- },
1111
- "hf_url": "https://huggingface.co/rombodawg/Rombos-LLM-V2.6-Qwen-14b",
1112
- "known_config": null
1113
- },
1114
- {
1115
- "rank": 45,
1116
- "name": "1024m/QWEN-14B-B100",
1117
- "scores": {
1118
- "average": 41.92,
1119
- "IFEval": 77.62,
1120
- "BBH": 49.78,
1121
- "MATH": 54.38,
1122
- "GPQA": 13.42,
1123
- "MUSR": 9.88,
1124
- "MMLU_PRO": 46.43,
1125
- "Architecture": "Qwen2ForCausalLM",
1126
- "Parameters": "14.77B",
1127
- "Chat_Template": "Yes"
1128
- },
1129
- "hf_url": "https://huggingface.co/1024m/QWEN-14B-B100",
1130
- "known_config": null
1131
- },
1132
- {
1133
- "rank": 46,
1134
- "name": "Sakalti/Saka-14B",
1135
- "scores": {
1136
- "average": 41.91,
1137
- "IFEval": 71.74,
1138
- "BBH": 49.72,
1139
- "MATH": 40.94,
1140
- "GPQA": 19.46,
1141
- "MUSR": 20.74,
1142
- "MMLU_PRO": 48.84,
1143
- "Architecture": "Qwen2ForCausalLM",
1144
- "Parameters": "14.766B",
1145
- "Chat_Template": "No"
1146
- },
1147
- "hf_url": "https://huggingface.co/Sakalti/Saka-14B",
1148
- "known_config": null
1149
- },
1150
- {
1151
- "rank": 47,
1152
- "name": "Tsunami-th/Tsunami-1.0-14B-Instruct",
1153
- "scores": {
1154
- "average": 41.84,
1155
- "IFEval": 78.29,
1156
- "BBH": 49.15,
1157
- "MATH": 45.85,
1158
- "GPQA": 14.21,
1159
- "MUSR": 16.34,
1160
- "MMLU_PRO": 47.21,
1161
- "Architecture": "Qwen2ForCausalLM",
1162
- "Parameters": "14.77B",
1163
- "Chat_Template": "Yes"
1164
- },
1165
- "hf_url": "https://huggingface.co/Tsunami-th/Tsunami-1.0-14B-Instruct",
1166
- "known_config": null
1167
- },
1168
- {
1169
- "rank": 48,
1170
- "name": "sthenno/tempesthenno-kto-0205-ckpt80",
1171
- "scores": {
1172
- "average": 41.79,
1173
- "IFEval": 80.54,
1174
- "BBH": 50.64,
1175
- "MATH": 45.92,
1176
- "GPQA": 13.09,
1177
- "MUSR": 12.93,
1178
- "MMLU_PRO": 47.62,
1179
- "Architecture": "Qwen2ForCausalLM",
1180
- "Parameters": "14.766B",
1181
- "Chat_Template": "No"
1182
- },
1183
- "hf_url": "https://huggingface.co/sthenno/tempesthenno-kto-0205-ckpt80",
1184
- "known_config": null
1185
- },
1186
- {
1187
- "rank": 49,
1188
- "name": "sometimesanotion/Lamarck-14B-v0.7-rc4",
1189
- "scores": {
1190
- "average": 41.79,
1191
- "IFEval": 72.11,
1192
- "BBH": 49.85,
1193
- "MATH": 40.26,
1194
- "GPQA": 18.57,
1195
- "MUSR": 21.07,
1196
- "MMLU_PRO": 48.89,
1197
- "Architecture": "Qwen2ForCausalLM",
1198
- "Parameters": "14.766B",
1199
- "Chat_Template": "No"
1200
- },
1201
- "hf_url": "https://huggingface.co/sometimesanotion/Lamarck-14B-v0.7-rc4",
1202
- "known_config": null
1203
- },
1204
- {
1205
- "rank": 50,
1206
- "name": "CombinHorizon/Josiefied-abliteratedV4-Qwen2.5-14B-Inst-BaseMerge-TIES",
1207
- "scores": {
1208
- "average": 41.77,
1209
- "IFEval": 82.4,
1210
- "BBH": 48.2,
1211
- "MATH": 53.17,
1212
- "GPQA": 9.96,
1213
- "MUSR": 12.65,
1214
- "MMLU_PRO": 44.21,
1215
- "Architecture": "Qwen2ForCausalLM",
1216
- "Parameters": "14.77B",
1217
- "Chat_Template": "Yes"
1218
- },
1219
- "hf_url": "https://huggingface.co/CombinHorizon/Josiefied-abliteratedV4-Qwen2.5-14B-Inst-BaseMerge-TIES",
1220
- "known_config": null
1221
- },
1222
- {
1223
- "rank": 51,
1224
- "name": "suayptalha/Luminis-phi-4",
1225
- "scores": {
1226
- "average": 41.76,
1227
- "IFEval": 69.0,
1228
- "BBH": 55.8,
1229
- "MATH": 46.37,
1230
- "GPQA": 13.53,
1231
- "MUSR": 16.68,
1232
- "MMLU_PRO": 49.15,
1233
- "Architecture": "LlamaForCausalLM",
1234
- "Parameters": "14.66B",
1235
- "Chat_Template": "Yes"
1236
- },
1237
- "hf_url": "https://huggingface.co/suayptalha/Luminis-phi-4",
1238
- "known_config": null
1239
- },
1240
- {
1241
- "rank": 52,
1242
- "name": "huihui-ai/Qwen2.5-14B-Instruct-abliterated-v2",
1243
- "scores": {
1244
- "average": 41.75,
1245
- "IFEval": 83.28,
1246
- "BBH": 47.41,
1247
- "MATH": 53.02,
1248
- "GPQA": 11.19,
1249
- "MUSR": 11.58,
1250
- "MMLU_PRO": 44.02,
1251
- "Architecture": "Qwen2ForCausalLM",
1252
- "Parameters": "14.77B",
1253
- "Chat_Template": "Yes"
1254
- },
1255
- "hf_url": "https://huggingface.co/huihui-ai/Qwen2.5-14B-Instruct-abliterated-v2",
1256
- "known_config": null
1257
- },
1258
- {
1259
- "rank": 53,
1260
- "name": "djuna/Q2.5-Veltha-14B-0.5",
1261
- "scores": {
1262
- "average": 41.61,
1263
- "IFEval": 77.96,
1264
- "BBH": 50.32,
1265
- "MATH": 43.73,
1266
- "GPQA": 15.77,
1267
- "MUSR": 14.17,
1268
- "MMLU_PRO": 47.72,
1269
- "Architecture": "Qwen2ForCausalLM",
1270
- "Parameters": "14.766B",
1271
- "Chat_Template": "Yes"
1272
- },
1273
- "hf_url": "https://huggingface.co/djuna/Q2.5-Veltha-14B-0.5",
1274
- "known_config": null
1275
- },
1276
- {
1277
- "rank": 54,
1278
- "name": "Qwen/Qwen2.5-14B-Instruct-1M",
1279
- "scores": {
1280
- "average": 41.56,
1281
- "IFEval": 84.14,
1282
- "BBH": 45.66,
1283
- "MATH": 53.02,
1284
- "GPQA": 12.42,
1285
- "MUSR": 11.35,
1286
- "MMLU_PRO": 42.77,
1287
- "Architecture": "Qwen2ForCausalLM",
1288
- "Parameters": "14.77B",
1289
- "Chat_Template": "Yes"
1290
- },
1291
- "hf_url": "https://huggingface.co/Qwen/Qwen2.5-14B-Instruct-1M",
1292
- "known_config": null
1293
- },
1294
- {
1295
- "rank": 55,
1296
- "name": "notbdq/Qwen2.5-14B-Instruct-1M-GRPO-Reasoning",
1297
- "scores": {
1298
- "average": 41.56,
1299
- "IFEval": 84.14,
1300
- "BBH": 45.66,
1301
- "MATH": 53.02,
1302
- "GPQA": 12.42,
1303
- "MUSR": 11.35,
1304
- "MMLU_PRO": 42.77,
1305
- "Architecture": "Qwen2ForCausalLM",
1306
- "Parameters": "14.77B",
1307
- "Chat_Template": "Yes"
1308
- },
1309
- "hf_url": "https://huggingface.co/notbdq/Qwen2.5-14B-Instruct-1M-GRPO-Reasoning",
1310
- "known_config": null
1311
- },
1312
- {
1313
- "rank": 56,
1314
- "name": "sometimesanotion/Qwenvergence-14B-v11",
1315
- "scores": {
1316
- "average": 41.52,
1317
- "IFEval": 71.92,
1318
- "BBH": 47.55,
1319
- "MATH": 46.45,
1320
- "GPQA": 16.33,
1321
- "MUSR": 18.76,
1322
- "MMLU_PRO": 48.08,
1323
- "Architecture": "Qwen2ForCausalLM",
1324
- "Parameters": "14.766B",
1325
- "Chat_Template": "No"
1326
- },
1327
- "hf_url": "https://huggingface.co/sometimesanotion/Qwenvergence-14B-v11",
1328
- "known_config": null
1329
- },
1330
- {
1331
- "rank": 57,
1332
- "name": "sometimesanotion/Qwenvergence-14B-v10",
1333
- "scores": {
1334
- "average": 41.48,
1335
- "IFEval": 67.57,
1336
- "BBH": 46.75,
1337
- "MATH": 47.89,
1338
- "GPQA": 17.23,
1339
- "MUSR": 22.33,
1340
- "MMLU_PRO": 47.1,
1341
- "Architecture": "Qwen2ForCausalLM",
1342
- "Parameters": "14.766B",
1343
- "Chat_Template": "No"
1344
- },
1345
- "hf_url": "https://huggingface.co/sometimesanotion/Qwenvergence-14B-v10",
1346
- "known_config": null
1347
- },
1348
- {
1349
- "rank": 58,
1350
- "name": "CombinHorizon/huihui-ai-abliteratedV2-Qwen2.5-14B-Inst-BaseMerge-TIES",
1351
- "scores": {
1352
- "average": 41.47,
1353
- "IFEval": 81.76,
1354
- "BBH": 47.77,
1355
- "MATH": 54.76,
1356
- "GPQA": 8.61,
1357
- "MUSR": 12.45,
1358
- "MMLU_PRO": 43.45,
1359
- "Architecture": "Qwen2ForCausalLM",
1360
- "Parameters": "14.77B",
1361
- "Chat_Template": "Yes"
1362
- },
1363
- "hf_url": "https://huggingface.co/CombinHorizon/huihui-ai-abliteratedV2-Qwen2.5-14B-Inst-BaseMerge-TIES",
1364
- "known_config": null
1365
- },
1366
- {
1367
- "rank": 59,
1368
- "name": "RDson/WomboCombo-R1-Coder-14B-Preview",
1369
- "scores": {
1370
- "average": 41.46,
1371
- "IFEval": 62.86,
1372
- "BBH": 48.15,
1373
- "MATH": 59.89,
1374
- "GPQA": 9.51,
1375
- "MUSR": 22.01,
1376
- "MMLU_PRO": 46.31,
1377
- "Architecture": "Qwen2ForCausalLM",
1378
- "Parameters": "14.77B",
1379
- "Chat_Template": "Yes"
1380
- },
1381
- "hf_url": "https://huggingface.co/RDson/WomboCombo-R1-Coder-14B-Preview",
1382
- "known_config": null
1383
- },
1384
- {
1385
- "rank": 60,
1386
- "name": "jpacifico/Chocolatine-2-14B-Instruct-v2.0b3",
1387
- "scores": {
1388
- "average": 41.43,
1389
- "IFEval": 73.23,
1390
- "BBH": 49.57,
1391
- "MATH": 41.09,
1392
- "GPQA": 17.23,
1393
- "MUSR": 19.3,
1394
- "MMLU_PRO": 48.19,
1395
- "Architecture": "Qwen2ForCausalLM",
1396
- "Parameters": "14.766B",
1397
- "Chat_Template": "No"
1398
- },
1399
- "hf_url": "https://huggingface.co/jpacifico/Chocolatine-2-14B-Instruct-v2.0b3",
1400
- "known_config": null
1401
- },
1402
- {
1403
- "rank": 61,
1404
- "name": "Quazim0t0/Nova-14b-sce",
1405
- "scores": {
1406
- "average": 41.41,
1407
- "IFEval": 70.22,
1408
- "BBH": 56.03,
1409
- "MATH": 41.62,
1410
- "GPQA": 15.1,
1411
- "MUSR": 16.43,
1412
- "MMLU_PRO": 49.03,
1413
- "Architecture": "LlamaForCausalLM",
1414
- "Parameters": "14.66B",
1415
- "Chat_Template": "Yes"
1416
- },
1417
- "hf_url": "https://huggingface.co/Quazim0t0/Nova-14b-sce",
1418
- "known_config": null
1419
- },
1420
- {
1421
- "rank": 62,
1422
- "name": "v000000/Qwen2.5-14B-Gutenberg-Instruct-Slerpeno",
1423
- "scores": {
1424
- "average": 41.36,
1425
- "IFEval": 81.97,
1426
- "BBH": 48.45,
1427
- "MATH": 53.25,
1428
- "GPQA": 10.85,
1429
- "MUSR": 10.05,
1430
- "MMLU_PRO": 43.59,
1431
- "Architecture": "Qwen2ForCausalLM",
1432
- "Parameters": "14.77B",
1433
- "Chat_Template": "Yes"
1434
- },
1435
- "hf_url": "https://huggingface.co/v000000/Qwen2.5-14B-Gutenberg-Instruct-Slerpeno",
1436
- "known_config": null
1437
- },
1438
- {
1439
- "rank": 63,
1440
- "name": "Quazim0t0/NovaScotia-14b-stock",
1441
- "scores": {
1442
- "average": 41.35,
1443
- "IFEval": 67.87,
1444
- "BBH": 56.03,
1445
- "MATH": 46.3,
1446
- "GPQA": 13.2,
1447
- "MUSR": 15.7,
1448
- "MMLU_PRO": 48.99,
1449
- "Architecture": "LlamaForCausalLM",
1450
- "Parameters": "14.66B",
1451
- "Chat_Template": "Yes"
1452
- },
1453
- "hf_url": "https://huggingface.co/Quazim0t0/NovaScotia-14b-stock",
1454
- "known_config": null
1455
- },
1456
- {
1457
- "rank": 64,
1458
- "name": "Quazim0t0/ODB-14b-sce",
1459
- "scores": {
1460
- "average": 41.34,
1461
- "IFEval": 70.16,
1462
- "BBH": 56.19,
1463
- "MATH": 41.16,
1464
- "GPQA": 14.99,
1465
- "MUSR": 16.5,
1466
- "MMLU_PRO": 49.02,
1467
- "Architecture": "LlamaForCausalLM",
1468
- "Parameters": "14.66B",
1469
- "Chat_Template": "Yes"
1470
- },
1471
- "hf_url": "https://huggingface.co/Quazim0t0/ODB-14b-sce",
1472
- "known_config": null
1473
- },
1474
- {
1475
- "rank": 65,
1476
- "name": "LightningRodLabs/Flashlight-v1.1",
1477
- "scores": {
1478
- "average": 40.99,
1479
- "IFEval": 67.21,
1480
- "BBH": 55.43,
1481
- "MATH": 53.25,
1482
- "GPQA": 11.97,
1483
- "MUSR": 9.0,
1484
- "MMLU_PRO": 49.06,
1485
- "Architecture": "Phi3ForCausalLM",
1486
- "Parameters": "14.66B",
1487
- "Chat_Template": "Yes"
1488
- },
1489
- "hf_url": "https://huggingface.co/LightningRodLabs/Flashlight-v1.1",
1490
- "known_config": null
1491
- },
1492
- {
1493
- "rank": 66,
1494
- "name": "Quazim0t0/Mithril-14B-sce",
1495
- "scores": {
1496
- "average": 40.98,
1497
- "IFEval": 69.58,
1498
- "BBH": 55.93,
1499
- "MATH": 38.22,
1500
- "GPQA": 15.88,
1501
- "MUSR": 17.37,
1502
- "MMLU_PRO": 48.92,
1503
- "Architecture": "LlamaForCausalLM",
1504
- "Parameters": "14.66B",
1505
- "Chat_Template": "Yes"
1506
- },
1507
- "hf_url": "https://huggingface.co/Quazim0t0/Mithril-14B-sce",
1508
- "known_config": null
1509
- },
1510
- {
1511
- "rank": 67,
1512
- "name": "Sakalti/ultiima-14B-v0.2",
1513
- "scores": {
1514
- "average": 40.96,
1515
- "IFEval": 70.7,
1516
- "BBH": 49.51,
1517
- "MATH": 39.95,
1518
- "GPQA": 17.67,
1519
- "MUSR": 19.19,
1520
- "MMLU_PRO": 48.75,
1521
- "Architecture": "Qwen2ForCausalLM",
1522
- "Parameters": "14.766B",
1523
- "Chat_Template": "No"
1524
- },
1525
- "hf_url": "https://huggingface.co/Sakalti/ultiima-14B-v0.2",
1526
- "known_config": null
1527
- },
1528
- {
1529
- "rank": 68,
1530
- "name": "bunnycore/Phi-4-ReasoningRP",
1531
- "scores": {
1532
- "average": 40.95,
1533
- "IFEval": 67.36,
1534
- "BBH": 55.88,
1535
- "MATH": 45.69,
1536
- "GPQA": 12.53,
1537
- "MUSR": 15.14,
1538
- "MMLU_PRO": 49.12,
1539
- "Architecture": "LlamaForCausalLM",
1540
- "Parameters": "14.66B",
1541
- "Chat_Template": "Yes"
1542
- },
1543
- "hf_url": "https://huggingface.co/bunnycore/Phi-4-ReasoningRP",
1544
- "known_config": null
1545
- },
1546
- {
1547
- "rank": 69,
1548
- "name": "dwikitheduck/gen-inst-1",
1549
- "scores": {
1550
- "average": 40.88,
1551
- "IFEval": 77.5,
1552
- "BBH": 48.32,
1553
- "MATH": 45.54,
1554
- "GPQA": 16.22,
1555
- "MUSR": 12.27,
1556
- "MMLU_PRO": 45.43,
1557
- "Architecture": "Qwen2ForCausalLM",
1558
- "Parameters": "14.77B",
1559
- "Chat_Template": "Yes"
1560
- },
1561
- "hf_url": "https://huggingface.co/dwikitheduck/gen-inst-1",
1562
- "known_config": null
1563
- },
1564
- {
1565
- "rank": 70,
1566
- "name": "v000000/Qwen2.5-14B-Gutenberg-1e-Delta",
1567
- "scores": {
1568
- "average": 40.88,
1569
- "IFEval": 80.45,
1570
- "BBH": 48.62,
1571
- "MATH": 52.64,
1572
- "GPQA": 10.51,
1573
- "MUSR": 9.38,
1574
- "MMLU_PRO": 43.67,
1575
- "Architecture": "Qwen2ForCausalLM",
1576
- "Parameters": "14.77B",
1577
- "Chat_Template": "Yes"
1578
- },
1579
- "hf_url": "https://huggingface.co/v000000/Qwen2.5-14B-Gutenberg-1e-Delta",
1580
- "known_config": null
1581
- },
1582
- {
1583
- "rank": 60,
1584
- "name": "hotmailuser/QwenSlerp2-14B",
1585
- "scores": {
1586
- "average": 40.86,
1587
- "IFEval": 70.37,
1588
- "BBH": 49.68,
1589
- "MATH": 39.65,
1590
- "GPQA": 17.45,
1591
- "MUSR": 19.35,
1592
- "MMLU_PRO": 48.66,
1593
- "Architecture": "Qwen2ForCausalLM",
1594
- "Parameters": "14.766B",
1595
- "Chat_Template": "No"
1596
- },
1597
- "hf_url": "https://huggingface.co/hotmailuser/QwenSlerp2-14B",
1598
- "known_config": null
1599
- },
1600
- {
1601
- "rank": 71,
1602
- "name": "Quazim0t0/Loke-14B-sce",
1603
- "scores": {
1604
- "average": 40.86,
1605
- "IFEval": 68.48,
1606
- "BBH": 55.83,
1607
- "MATH": 39.05,
1608
- "GPQA": 15.32,
1609
- "MUSR": 17.56,
1610
- "MMLU_PRO": 48.9,
1611
- "Architecture": "LlamaForCausalLM",
1612
- "Parameters": "14.66B",
1613
- "Chat_Template": "Yes"
1614
- },
1615
- "hf_url": "https://huggingface.co/Quazim0t0/Loke-14B-sce",
1616
- "known_config": null
1617
- },
1618
- {
1619
- "rank": 72,
1620
- "name": "Quazim0t0/mosaic-14b-sce",
1621
- "scores": {
1622
- "average": 40.83,
1623
- "IFEval": 68.76,
1624
- "BBH": 55.69,
1625
- "MATH": 40.26,
1626
- "GPQA": 14.99,
1627
- "MUSR": 16.44,
1628
- "MMLU_PRO": 48.85,
1629
- "Architecture": "LlamaForCausalLM",
1630
- "Parameters": "14.66B",
1631
- "Chat_Template": "Yes"
1632
- },
1633
- "hf_url": "https://huggingface.co/Quazim0t0/mosaic-14b-sce",
1634
- "known_config": null
1635
- },
1636
- {
1637
- "rank": 73,
1638
- "name": "bunnycore/Phi-4-Model-Stock",
1639
- "scores": {
1640
- "average": 40.79,
1641
- "IFEval": 68.79,
1642
- "BBH": 55.32,
1643
- "MATH": 42.98,
1644
- "GPQA": 13.98,
1645
- "MUSR": 15.12,
1646
- "MMLU_PRO": 48.54,
1647
- "Architecture": "LlamaForCausalLM",
1648
- "Parameters": "14.66B",
1649
- "Chat_Template": "Yes"
1650
- },
1651
- "hf_url": "https://huggingface.co/bunnycore/Phi-4-Model-Stock",
1652
- "known_config": null
1653
- },
1654
- {
1655
- "rank": 74,
1656
- "name": "unsloth/phi-4",
1657
- "scores": {
1658
- "average": 40.73,
1659
- "IFEval": 68.82,
1660
- "BBH": 55.25,
1661
- "MATH": 50.0,
1662
- "GPQA": 11.52,
1663
- "MUSR": 10.13,
1664
- "MMLU_PRO": 48.65,
1665
- "Architecture": "LlamaForCausalLM",
1666
- "Parameters": "14.66B",
1667
- "Chat_Template": "Yes"
1668
- },
1669
- "hf_url": "https://huggingface.co/unsloth/phi-4",
1670
- "known_config": null
1671
- },
1672
- {
1673
- "rank": 75,
1674
- "name": "pankajmathur/orca_mini_phi-4",
1675
- "scores": {
1676
- "average": 40.68,
1677
- "IFEval": 77.81,
1678
- "BBH": 54.63,
1679
- "MATH": 29.53,
1680
- "GPQA": 16.55,
1681
- "MUSR": 18.25,
1682
- "MMLU_PRO": 47.28,
1683
- "Architecture": "LlamaForCausalLM",
1684
- "Parameters": "14.66B",
1685
- "Chat_Template": "Yes"
1686
- },
1687
- "hf_url": "https://huggingface.co/pankajmathur/orca_mini_phi-4",
1688
- "known_config": null
1689
- },
1690
- {
1691
- "rank": 76,
1692
- "name": "pankajmathur/orca_mini_v9_2_14B",
1693
- "scores": {
1694
- "average": 40.68,
1695
- "IFEval": 77.81,
1696
- "BBH": 54.63,
1697
- "MATH": 29.53,
1698
- "GPQA": 16.55,
1699
- "MUSR": 18.25,
1700
- "MMLU_PRO": 47.28,
1701
- "Architecture": "LlamaForCausalLM",
1702
- "Parameters": "14.66B",
1703
- "Chat_Template": "Yes"
1704
- },
1705
- "hf_url": "https://huggingface.co/pankajmathur/orca_mini_v9_2_14B",
1706
- "known_config": null
1707
- },
1708
- {
1709
- "rank": 77,
1710
- "name": "sometimesanotion/Lamarck-14B-v0.6-model_stock",
1711
- "scores": {
1712
- "average": 40.68,
1713
- "IFEval": 67.9,
1714
- "BBH": 46.49,
1715
- "MATH": 42.45,
1716
- "GPQA": 17.9,
1717
- "MUSR": 22.68,
1718
- "MMLU_PRO": 46.64,
1719
- "Architecture": "Qwen2ForCausalLM",
1720
- "Parameters": "14B",
1721
- "Chat_Template": "No"
1722
- },
1723
- "hf_url": "https://huggingface.co/sometimesanotion/Lamarck-14B-v0.6-model_stock",
1724
- "known_config": null
1725
- },
1726
- {
1727
- "rank": 78,
1728
- "name": "sometimesanotion/Qwenvergence-14B-v0.6-004-model_stock",
1729
- "scores": {
1730
- "average": 40.6,
1731
- "IFEval": 68.6,
1732
- "BBH": 46.37,
1733
- "MATH": 40.94,
1734
- "GPQA": 17.79,
1735
- "MUSR": 23.35,
1736
- "MMLU_PRO": 46.59,
1737
- "Architecture": "Qwen2ForCausalLM",
1738
- "Parameters": "14B",
1739
- "Chat_Template": "No"
1740
- },
1741
- "hf_url": "https://huggingface.co/sometimesanotion/Qwenvergence-14B-v0.6-004-model_stock",
1742
- "known_config": null
1743
- },
1744
- {
1745
- "rank": 79,
1746
- "name": "Quazim0t0/Oasis-14B-ties",
1747
- "scores": {
1748
- "average": 40.59,
1749
- "IFEval": 69.37,
1750
- "BBH": 55.75,
1751
- "MATH": 37.54,
1752
- "GPQA": 15.32,
1753
- "MUSR": 16.63,
1754
- "MMLU_PRO": 48.94,
1755
- "Architecture": "LlamaForCausalLM",
1756
- "Parameters": "14.66B",
1757
- "Chat_Template": "Yes"
1758
- },
1759
- "hf_url": "https://huggingface.co/Quazim0t0/Oasis-14B-ties",
1760
- "known_config": null
1761
- },
1762
- {
1763
- "rank": 80,
1764
- "name": "LightningRodLabs/Flashlight-v1.0",
1765
- "scores": {
1766
- "average": 40.57,
1767
- "IFEval": 67.45,
1768
- "BBH": 55.15,
1769
- "MATH": 49.7,
1770
- "GPQA": 12.3,
1771
- "MUSR": 9.93,
1772
- "MMLU_PRO": 48.91,
1773
- "Architecture": "LlamaForCausalLM",
1774
- "Parameters": "14.66B",
1775
- "Chat_Template": "Yes"
1776
- },
1777
- "hf_url": "https://huggingface.co/LightningRodLabs/Flashlight-v1.0",
1778
- "known_config": null
1779
- },
1780
- {
1781
- "rank": 81,
1782
- "name": "arcee-ai/Virtuoso-Small",
1783
- "scores": {
1784
- "average": 40.54,
1785
- "IFEval": 79.35,
1786
- "BBH": 50.4,
1787
- "MATH": 40.94,
1788
- "GPQA": 11.52,
1789
- "MUSR": 14.44,
1790
- "MMLU_PRO": 46.57,
1791
- "Architecture": "Qwen2ForCausalLM",
1792
- "Parameters": "14.77B",
1793
- "Chat_Template": "Yes"
1794
- },
1795
- "hf_url": "https://huggingface.co/arcee-ai/Virtuoso-Small",
1796
- "known_config": null
1797
- },
1798
- {
1799
- "rank": 82,
1800
- "name": "Quazim0t0/GuiltySpark-14B-ties",
1801
- "scores": {
1802
- "average": 40.52,
1803
- "IFEval": 68.54,
1804
- "BBH": 55.72,
1805
- "MATH": 38.37,
1806
- "GPQA": 15.32,
1807
- "MUSR": 16.3,
1808
- "MMLU_PRO": 48.89,
1809
- "Architecture": "LlamaForCausalLM",
1810
- "Parameters": "14.66B",
1811
- "Chat_Template": "Yes"
1812
- },
1813
- "hf_url": "https://huggingface.co/Quazim0t0/GuiltySpark-14B-ties",
1814
- "known_config": null
1815
- },
1816
- {
1817
- "rank": 83,
1818
- "name": "ozone-ai/0x-lite",
1819
- "scores": {
1820
- "average": 40.48,
1821
- "IFEval": 77.4,
1822
- "BBH": 47.53,
1823
- "MATH": 50.45,
1824
- "GPQA": 9.28,
1825
- "MUSR": 11.76,
1826
- "MMLU_PRO": 46.49,
1827
- "Architecture": "Qwen2ForCausalLM",
1828
- "Parameters": "14.77B",
1829
- "Chat_Template": "Yes"
1830
- },
1831
- "hf_url": "https://huggingface.co/ozone-ai/0x-lite",
1832
- "known_config": null
1833
- },
1834
- {
1835
- "rank": 84,
1836
- "name": "Quazim0t0/Casa-14b-sce",
1837
- "scores": {
1838
- "average": 40.41,
1839
- "IFEval": 66.54,
1840
- "BBH": 55.4,
1841
- "MATH": 46.98,
1842
- "GPQA": 11.07,
1843
- "MUSR": 13.31,
1844
- "MMLU_PRO": 49.17,
1845
- "Architecture": "LlamaForCausalLM",
1846
- "Parameters": "14.66B",
1847
- "Chat_Template": "Yes"
1848
- },
1849
- "hf_url": "https://huggingface.co/Quazim0t0/Casa-14b-sce",
1850
- "known_config": null
1851
- },
1852
- {
1853
- "rank": 85,
1854
- "name": "Sakalti/ultiima-14B-v0.3",
1855
- "scores": {
1856
- "average": 40.38,
1857
- "IFEval": 70.4,
1858
- "BBH": 48.45,
1859
- "MATH": 39.65,
1860
- "GPQA": 16.89,
1861
- "MUSR": 18.73,
1862
- "MMLU_PRO": 48.18,
1863
- "Architecture": "Qwen2ForCausalLM",
1864
- "Parameters": "14.766B",
1865
- "Chat_Template": "No"
1866
- },
1867
- "hf_url": "https://huggingface.co/Sakalti/ultiima-14B-v0.3",
1868
- "known_config": null
1869
- },
1870
- {
1871
- "rank": 86,
1872
- "name": "ehristoforu/fp4-14b-v1-fix",
1873
- "scores": {
1874
- "average": 40.37,
1875
- "IFEval": 67.42,
1876
- "BBH": 54.33,
1877
- "MATH": 42.07,
1878
- "GPQA": 13.87,
1879
- "MUSR": 16.18,
1880
- "MMLU_PRO": 48.37,
1881
- "Architecture": "LlamaForCausalLM",
1882
- "Parameters": "14.66B",
1883
- "Chat_Template": "Yes"
1884
- },
1885
- "hf_url": "https://huggingface.co/ehristoforu/fp4-14b-v1-fix",
1886
- "known_config": null
1887
- },
1888
- {
1889
- "rank": 87,
1890
- "name": "FINGU-AI/Chocolatine-Fusion-14B",
1891
- "scores": {
1892
- "average": 40.36,
1893
- "IFEval": 69.49,
1894
- "BBH": 48.6,
1895
- "MATH": 38.52,
1896
- "GPQA": 16.22,
1897
- "MUSR": 21.99,
1898
- "MMLU_PRO": 47.35,
1899
- "Architecture": "Qwen2ForCausalLM",
1900
- "Parameters": "8.367B",
1901
- "Chat_Template": "No"
1902
- },
1903
- "hf_url": "https://huggingface.co/FINGU-AI/Chocolatine-Fusion-14B",
1904
- "known_config": null
1905
- },
1906
- {
1907
- "rank": 88,
1908
- "name": "hotmailuser/QwenSlerp-14B",
1909
- "scores": {
1910
- "average": 40.35,
1911
- "IFEval": 70.25,
1912
- "BBH": 49.42,
1913
- "MATH": 38.37,
1914
- "GPQA": 18.34,
1915
- "MUSR": 16.83,
1916
- "MMLU_PRO": 48.89,
1917
- "Architecture": "Qwen2ForCausalLM",
1918
- "Parameters": "14.766B",
1919
- "Chat_Template": "No"
1920
- },
1921
- "hf_url": "https://huggingface.co/hotmailuser/QwenSlerp-14B",
1922
- "known_config": null
1923
- },
1924
- {
1925
- "rank": 89,
1926
- "name": "Triangle104/Robo-Gutenberg_V1.0",
1927
- "scores": {
1928
- "average": 40.35,
1929
- "IFEval": 60.08,
1930
- "BBH": 50.29,
1931
- "MATH": 45.62,
1932
- "GPQA": 18.12,
1933
- "MUSR": 19.2,
1934
- "MMLU_PRO": 48.79,
1935
- "Architecture": "Qwen2ForCausalLM",
1936
- "Parameters": "14.77B",
1937
- "Chat_Template": "No"
1938
- },
1939
- "hf_url": "https://huggingface.co/Triangle104/Robo-Gutenberg_V1.0",
1940
- "known_config": null
1941
- },
1942
- {
1943
- "rank": 90,
1944
- "name": "Quazim0t0/Adamant-14B-sce",
1945
- "scores": {
1946
- "average": 40.32,
1947
- "IFEval": 68.58,
1948
- "BBH": 54.97,
1949
- "MATH": 39.88,
1950
- "GPQA": 13.42,
1951
- "MUSR": 16.51,
1952
- "MMLU_PRO": 48.57,
1953
- "Architecture": "LlamaForCausalLM",
1954
- "Parameters": "14.66B",
1955
- "Chat_Template": "Yes"
1956
- },
1957
- "hf_url": "https://huggingface.co/Quazim0t0/Adamant-14B-sce",
1958
- "known_config": null
1959
- },
1960
- {
1961
- "rank": 91,
1962
- "name": "Quazim0t0/Phi4Basis-14B-sce",
1963
- "scores": {
1964
- "average": 40.31,
1965
- "IFEval": 65.02,
1966
- "BBH": 55.67,
1967
- "MATH": 47.89,
1968
- "GPQA": 10.51,
1969
- "MUSR": 14.02,
1970
- "MMLU_PRO": 48.78,
1971
- "Architecture": "LlamaForCausalLM",
1972
- "Parameters": "14.66B",
1973
- "Chat_Template": "Yes"
1974
- },
1975
- "hf_url": "https://huggingface.co/Quazim0t0/Phi4Basis-14B-sce",
1976
- "known_config": null
1977
- },
1978
- {
1979
- "rank": 92,
1980
- "name": "Quazim0t0/bloom-14b-stock",
1981
- "scores": {
1982
- "average": 40.29,
1983
- "IFEval": 65.75,
1984
- "BBH": 55.27,
1985
- "MATH": 48.11,
1986
- "GPQA": 10.85,
1987
- "MUSR": 13.17,
1988
- "MMLU_PRO": 48.59,
1989
- "Architecture": "LlamaForCausalLM",
1990
- "Parameters": "14.66B",
1991
- "Chat_Template": "Yes"
1992
- },
1993
- "hf_url": "https://huggingface.co/Quazim0t0/bloom-14b-stock",
1994
- "known_config": null
1995
- },
1996
- {
1997
- "rank": 93,
1998
- "name": "sometimesanotion/Qwen2.5-14B-Vimarckoso-v3-Prose01",
1999
- "scores": {
2000
- "average": 40.28,
2001
- "IFEval": 68.72,
2002
- "BBH": 47.71,
2003
- "MATH": 39.95,
2004
- "GPQA": 18.23,
2005
- "MUSR": 19.56,
2006
- "MMLU_PRO": 47.5,
2007
- "Architecture": "Qwen2ForCausalLM",
2008
- "Parameters": "14B",
2009
- "Chat_Template": "No"
2010
- },
2011
- "hf_url": "https://huggingface.co/sometimesanotion/Qwen2.5-14B-Vimarckoso-v3-Prose01",
2012
- "known_config": null
2013
- },
2014
- {
2015
- "rank": 94,
2016
- "name": "Quazim0t0/Halo-14B-sce",
2017
- "scores": {
2018
- "average": 40.26,
2019
- "IFEval": 67.54,
2020
- "BBH": 55.27,
2021
- "MATH": 42.9,
2022
- "GPQA": 12.98,
2023
- "MUSR": 14.24,
2024
- "MMLU_PRO": 48.63,
2025
- "Architecture": "LlamaForCausalLM",
2026
- "Parameters": "14.66B",
2027
- "Chat_Template": "Yes"
2028
- },
2029
- "hf_url": "https://huggingface.co/Quazim0t0/Halo-14B-sce",
2030
- "known_config": null
2031
- },
2032
- {
2033
- "rank": 95,
2034
- "name": "prithivMLmods/Calcium-Opus-14B-Elite2",
2035
- "scores": {
2036
- "average": 40.25,
2037
- "IFEval": 61.76,
2038
- "BBH": 46.81,
2039
- "MATH": 46.9,
2040
- "GPQA": 16.0,
2041
- "MUSR": 22.24,
2042
- "MMLU_PRO": 47.79,
2043
- "Architecture": "Qwen2ForCausalLM",
2044
- "Parameters": "14.766B",
2045
- "Chat_Template": "No"
2046
- },
2047
- "hf_url": "https://huggingface.co/prithivMLmods/Calcium-Opus-14B-Elite2",
2048
- "known_config": null
2049
- },
2050
- {
2051
- "rank": 96,
2052
- "name": "SicariusSicariiStuff/Impish_QWEN_14B-1M",
2053
- "scores": {
2054
- "average": 40.24,
2055
- "IFEval": 78.68,
2056
- "BBH": 47.22,
2057
- "MATH": 39.65,
2058
- "GPQA": 13.42,
2059
- "MUSR": 17.52,
2060
- "MMLU_PRO": 44.93,
2061
- "Architecture": "Qwen2ForCausalLM",
2062
- "Parameters": "14.77B",
2063
- "Chat_Template": "Yes"
2064
- },
2065
- "hf_url": "https://huggingface.co/SicariusSicariiStuff/Impish_QWEN_14B-1M",
2066
- "known_config": null
2067
- },
2068
- {
2069
- "rank": 97,
2070
- "name": "bunnycore/Phi-4-Stock-Ex",
2071
- "scores": {
2072
- "average": 40.22,
2073
- "IFEval": 65.75,
2074
- "BBH": 55.2,
2075
- "MATH": 40.86,
2076
- "GPQA": 13.42,
2077
- "MUSR": 17.46,
2078
- "MMLU_PRO": 48.61,
2079
- "Architecture": "LlamaForCausalLM",
2080
- "Parameters": "14.66B",
2081
- "Chat_Template": "Yes"
2082
- },
2083
- "hf_url": "https://huggingface.co/bunnycore/Phi-4-Stock-Ex",
2084
- "known_config": null
2085
- },
2086
- {
2087
- "rank": 98,
2088
- "name": "sometimesanotion/Qwenvergence-14B-qv256",
2089
- "scores": {
2090
- "average": 40.12,
2091
- "IFEval": 70.06,
2092
- "BBH": 47.08,
2093
- "MATH": 38.97,
2094
- "GPQA": 17.11,
2095
- "MUSR": 21.07,
2096
- "MMLU_PRO": 46.42,
2097
- "Architecture": "Qwen2ForCausalLM",
2098
- "Parameters": "14B",
2099
- "Chat_Template": "No"
2100
- },
2101
- "hf_url": "https://huggingface.co/sometimesanotion/Qwenvergence-14B-qv256",
2102
- "known_config": null
2103
- },
2104
- {
2105
- "rank": 99,
2106
- "name": "tensopolis/virtuoso-small-tensopolis-v2",
2107
- "scores": {
2108
- "average": 40.11,
2109
- "IFEval": 80.2,
2110
- "BBH": 50.23,
2111
- "MATH": 38.75,
2112
- "GPQA": 10.51,
2113
- "MUSR": 14.84,
2114
- "MMLU_PRO": 46.15,
2115
- "Architecture": "Qwen2ForCausalLM",
2116
- "Parameters": "14.77B",
2117
- "Chat_Template": "Yes"
2118
- },
2119
- "hf_url": "https://huggingface.co/tensopolis/virtuoso-small-tensopolis-v2",
2120
- "known_config": null
2121
- }
 
2122
  ]
2123
-
2124
 
2125
 
2126
 
 
319
  # --------------------------------------------------------------------
320
  # This is your larger dataset, rank = 44..105
321
  benchmark_data = [
322
+ [
323
+ {
324
  "rank": 1,
325
  "name": "wanlige/li-14b-v0.4",
326
  "scores": {
327
+ "average": 43.66,
328
+ "IFEval": 81.33,
329
+ "BBH": 50.38,
330
+ "MATH": 55.74,
331
+ "GPQA": 11.86,
332
+ "MUSR": 16.35,
333
+ "MMLU_PRO": 46.3,
334
+ "Architecture": "Qwen2ForCausalLM",
335
+ "Parameters": "14.77B",
336
+ "Chat_Template": "Yes"
337
  },
338
  "hf_url": "https://huggingface.co/wanlige/li-14b-v0.4",
339
  "known_config": null
340
+ },
341
+ {
342
  "rank": 2,
343
  "name": "suayptalha/Lamarckvergence-14B",
344
  "scores": {
345
+ "average": 43.32,
346
+ "IFEval": 76.56,
347
+ "BBH": 50.33,
348
+ "MATH": 54,
349
+ "GPQA": 15.1,
350
+ "MUSR": 16.34,
351
+ "MMLU_PRO": 47.59,
352
+ "Architecture": "Qwen2ForCausalLM",
353
+ "Parameters": "14.766B",
354
+ "Chat_Template": "Yes"
355
  },
356
  "hf_url": "https://huggingface.co/suayptalha/Lamarckvergence-14B",
357
  "known_config": null
358
+ },
359
+ {
360
  "rank": 3,
361
  "name": "wanlige/li-14b-v0.4-slerp0.1",
362
  "scores": {
363
+ "average": 42.91,
364
+ "IFEval": 79.23,
365
+ "BBH": 50.88,
366
+ "MATH": 53.32,
367
+ "GPQA": 14.54,
368
+ "MUSR": 11.75,
369
+ "MMLU_PRO": 47.71,
370
+ "Architecture": "Qwen2ForCausalLM",
371
+ "Parameters": "14.766B",
372
+ "Chat_Template": "Yes"
373
  },
374
  "hf_url": "https://huggingface.co/wanlige/li-14b-v0.4-slerp0.1",
375
  "known_config": null
376
+ },
377
+ {
378
  "rank": 4,
379
  "name": "sthenno-com/miscii-14b-0218",
380
  "scores": {
381
+ "average": 42.9,
382
+ "IFEval": 76.56,
383
+ "BBH": 50.64,
384
+ "MATH": 51.44,
385
+ "GPQA": 17.79,
386
+ "MUSR": 13.21,
387
+ "MMLU_PRO": 47.75,
388
+ "Architecture": "Qwen2ForCausalLM",
389
+ "Parameters": "14.766B",
390
+ "Chat_Template": "Yes"
391
  },
392
  "hf_url": "https://huggingface.co/sthenno-com/miscii-14b-0218",
393
  "known_config": null
394
+ },
395
+ {
396
  "rank": 5,
397
  "name": "sthenno/tempesthenno-ppo-ckpt40",
398
  "scores": {
399
+ "average": 42.74,
400
+ "IFEval": 79.23,
401
+ "BBH": 50.57,
402
+ "MATH": 47.36,
403
+ "GPQA": 17,
404
+ "MUSR": 14.56,
405
+ "MMLU_PRO": 47.69,
406
+ "Architecture": "Qwen2ForCausalLM",
407
+ "Parameters": "14.766B",
408
+ "Chat_Template": "Yes"
409
  },
410
  "hf_url": "https://huggingface.co/sthenno/tempesthenno-ppo-ckpt40",
411
  "known_config": null
412
+ },
413
+ {
414
  "rank": 6,
415
  "name": "tanliboy/lambda-qwen2.5-14b-dpo-test",
416
  "scores": {
417
+ "average": 42.62,
418
+ "IFEval": 82.31,
419
+ "BBH": 48.45,
420
+ "MATH": 54.61,
421
+ "GPQA": 14.99,
422
+ "MUSR": 12.59,
423
+ "MMLU_PRO": 42.75,
424
+ "Architecture": "Qwen2ForCausalLM",
425
+ "Parameters": "14.77B",
426
+ "Chat_Template": "Yes"
427
  },
428
  "hf_url": "https://huggingface.co/tanliboy/lambda-qwen2.5-14b-dpo-test",
429
  "known_config": null
430
+ },
431
+ {
432
  "rank": 7,
433
  "name": "sthenno/tempesthenno-nuslerp-001",
434
  "scores": {
435
+ "average": 42.59,
436
+ "IFEval": 79.26,
437
+ "BBH": 51.04,
438
+ "MATH": 47.58,
439
+ "GPQA": 16.44,
440
+ "MUSR": 13.88,
441
+ "MMLU_PRO": 47.3,
442
+ "Architecture": "Qwen2ForCausalLM",
443
+ "Parameters": "14.766B",
444
+ "Chat_Template": "Yes"
445
  },
446
  "hf_url": "https://huggingface.co/sthenno/tempesthenno-nuslerp-001",
447
  "known_config": null
448
+ },
449
+ {
450
  "rank": 8,
451
  "name": "YOYO-AI/Qwen2.5-14B-1M-YOYO-V3",
452
  "scores": {
453
+ "average": 42.56,
454
+ "IFEval": 83.98,
455
+ "BBH": 49.47,
456
+ "MATH": 53.55,
457
+ "GPQA": 10.51,
458
+ "MUSR": 11.1,
459
+ "MMLU_PRO": 46.74,
460
+ "Architecture": "Qwen2ForCausalLM",
461
+ "Parameters": "14.766B",
462
+ "Chat_Template": "Yes"
463
  },
464
  "hf_url": "https://huggingface.co/YOYO-AI/Qwen2.5-14B-1M-YOYO-V3",
465
  "known_config": null
466
+ },
467
+ {
468
  "rank": 9,
469
  "name": "Goekdeniz-Guelmez/Josiefied-Qwen2.5-14B-Instruct-abliterated-v4",
470
  "scores": {
471
+ "average": 42.55,
472
+ "IFEval": 82.92,
473
+ "BBH": 48.05,
474
+ "MATH": 54.23,
475
+ "GPQA": 12.3,
476
+ "MUSR": 13.15,
477
+ "MMLU_PRO": 44.65,
478
+ "Architecture": "Qwen2ForCausalLM",
479
+ "Parameters": "14.77B",
480
+ "Chat_Template": "Yes"
481
  },
482
  "hf_url": "https://huggingface.co/Goekdeniz-Guelmez/Josiefied-Qwen2.5-14B-Instruct-abliterated-v4",
483
  "known_config": null
484
+ },
485
+ {
486
  "rank": 10,
487
  "name": "djuna/Q2.5-Veltha-14B",
488
  "scores": {
489
+ "average": 42.52,
490
+ "IFEval": 82.92,
491
+ "BBH": 49.75,
492
+ "MATH": 47.89,
493
+ "GPQA": 14.54,
494
+ "MUSR": 12.26,
495
+ "MMLU_PRO": 47.76,
496
+ "Architecture": "Qwen2ForCausalLM",
497
+ "Parameters": "14.766B",
498
+ "Chat_Template": "Yes"
499
  },
500
  "hf_url": "https://huggingface.co/djuna/Q2.5-Veltha-14B",
501
  "known_config": null
502
+ },
503
+ {
504
  "rank": 11,
505
  "name": "arcee-ai/Virtuoso-Small-v2",
506
  "scores": {
507
+ "average": 42.48,
508
+ "IFEval": 82.73,
509
+ "BBH": 50.95,
510
+ "MATH": 46.6,
511
+ "GPQA": 13.76,
512
+ "MUSR": 14.28,
513
+ "MMLU_PRO": 46.53,
514
+ "Architecture": "Qwen2ForCausalLM",
515
+ "Parameters": "14.766B",
516
+ "Chat_Template": "Yes"
517
  },
518
  "hf_url": "https://huggingface.co/arcee-ai/Virtuoso-Small-v2",
519
  "known_config": null
520
+ },
521
+ {
522
  "rank": 12,
523
  "name": "YOYO-AI/Qwen2.5-14B-YOYO-V4-p1",
524
  "scores": {
525
+ "average": 42.46,
526
+ "IFEval": 82.03,
527
+ "BBH": 50.25,
528
+ "MATH": 53.32,
529
+ "GPQA": 12.75,
530
+ "MUSR": 11.73,
531
+ "MMLU_PRO": 44.67,
532
+ "Architecture": "Qwen2ForCausalLM",
533
+ "Parameters": "14.766B",
534
+ "Chat_Template": "Yes"
535
  },
536
  "hf_url": "https://huggingface.co/YOYO-AI/Qwen2.5-14B-YOYO-V4-p1",
537
  "known_config": null
538
+ },
539
+ {
540
  "rank": 13,
541
  "name": "jpacifico/Chocolatine-14B-Instruct-DPO-v1.3",
542
  "scores": {
543
+ "average": 42.42,
544
+ "IFEval": 70.4,
545
+ "BBH": 54.85,
546
+ "MATH": 56.19,
547
+ "GPQA": 12.19,
548
+ "MUSR": 12.29,
549
+ "MMLU_PRO": 48.6,
550
+ "Architecture": "Phi3ForCausalLM",
551
+ "Parameters": "14.66B",
552
+ "Chat_Template": "Yes"
553
  },
554
  "hf_url": "https://huggingface.co/jpacifico/Chocolatine-14B-Instruct-DPO-v1.3",
555
  "known_config": null
556
+ },
557
+ {
558
  "rank": 14,
559
  "name": "sthenno-com/miscii-14b-1028",
560
  "scores": {
561
+ "average": 42.38,
562
+ "IFEval": 82.37,
563
+ "BBH": 49.26,
564
+ "MATH": 50.3,
565
+ "GPQA": 14.21,
566
+ "MUSR": 12,
567
+ "MMLU_PRO": 46.14,
568
+ "Architecture": "Qwen2ForCausalLM",
569
+ "Parameters": "14.77B",
570
+ "Chat_Template": "Yes"
571
  },
572
  "hf_url": "https://huggingface.co/sthenno-com/miscii-14b-1028",
573
  "known_config": null
574
+ },
575
+ {
576
  "rank": 15,
577
  "name": "sthenno-com/miscii-14b-1225",
578
  "scores": {
579
+ "average": 42.35,
580
+ "IFEval": 78.78,
581
+ "BBH": 50.91,
582
+ "MATH": 45.17,
583
+ "GPQA": 17,
584
+ "MUSR": 14.77,
585
+ "MMLU_PRO": 47.46,
586
+ "Architecture": "Qwen2ForCausalLM",
587
+ "Parameters": "14.766B",
588
+ "Chat_Template": "Yes"
589
  },
590
  "hf_url": "https://huggingface.co/sthenno-com/miscii-14b-1225",
591
  "known_config": null
592
+ },
593
+ {
594
  "rank": 16,
595
  "name": "prithivMLmods/Sombrero-Opus-14B-Elite5",
596
  "scores": {
597
+ "average": 42.32,
598
+ "IFEval": 78.81,
599
+ "BBH": 50.17,
600
+ "MATH": 53.55,
601
+ "GPQA": 11.52,
602
+ "MUSR": 13.22,
603
+ "MMLU_PRO": 46.67,
604
+ "Architecture": "Qwen2ForCausalLM",
605
+ "Parameters": "14.766B",
606
+ "Chat_Template": "Yes"
607
  },
608
  "hf_url": "https://huggingface.co/prithivMLmods/Sombrero-Opus-14B-Elite5",
609
  "known_config": null
610
+ },
611
+ {
612
  "rank": 17,
613
  "name": "Lunzima/NQLSG-Qwen2.5-14B-MegaFusion-v8",
614
  "scores": {
615
+ "average": 42.26,
616
+ "IFEval": 73.84,
617
+ "BBH": 49.31,
618
+ "MATH": 41.69,
619
+ "GPQA": 18.23,
620
+ "MUSR": 21.96,
621
+ "MMLU_PRO": 48.5,
622
+ "Architecture": "Qwen2ForCausalLM",
623
+ "Parameters": "14.766B",
624
+ "Chat_Template": "No"
625
  },
626
  "hf_url": "https://huggingface.co/Lunzima/NQLSG-Qwen2.5-14B-MegaFusion-v8",
627
  "known_config": null
628
+ },
629
+ {
630
  "rank": 18,
631
  "name": "prithivMLmods/Equuleus-Opus-14B-Exp",
632
  "scores": {
633
+ "average": 42.2,
634
+ "IFEval": 70.01,
635
+ "BBH": 48.62,
636
+ "MATH": 45.85,
637
+ "GPQA": 18.23,
638
+ "MUSR": 21.9,
639
+ "MMLU_PRO": 48.6,
640
+ "Architecture": "Qwen2ForCausalLM",
641
+ "Parameters": "14.766B",
642
+ "Chat_Template": "No"
643
  },
644
  "hf_url": "https://huggingface.co/prithivMLmods/Equuleus-Opus-14B-Exp",
645
  "known_config": null
646
+ },
647
+ {
648
  "rank": 19,
649
  "name": "rombodawg/Rombos-LLM-V2.6-Qwen-14b",
650
  "scores": {
651
+ "average": 42.2,
652
+ "IFEval": 84.32,
653
+ "BBH": 49.28,
654
+ "MATH": 52.11,
655
+ "GPQA": 11.19,
656
+ "MUSR": 12.29,
657
+ "MMLU_PRO": 44.01,
658
+ "Architecture": "Qwen2ForCausalLM",
659
+ "Parameters": "14.77B",
660
+ "Chat_Template": "Yes"
661
  },
662
  "hf_url": "https://huggingface.co/rombodawg/Rombos-LLM-V2.6-Qwen-14b",
663
  "known_config": null
664
+ },
665
+ {
666
  "rank": 20,
667
  "name": "nbeerbower/EVA-abliterated-TIES-Qwen2.5-14B",
668
  "scores": {
669
+ "average": 42.16,
670
+ "IFEval": 78.36,
671
+ "BBH": 48.52,
672
+ "MATH": 50.45,
673
+ "GPQA": 13.98,
674
+ "MUSR": 14.88,
675
+ "MMLU_PRO": 46.79,
676
+ "Architecture": "Qwen2ForCausalLM",
677
+ "Parameters": "14.77B",
678
+ "Chat_Template": "Yes"
679
  },
680
  "hf_url": "https://huggingface.co/nbeerbower/EVA-abliterated-TIES-Qwen2.5-14B",
681
  "known_config": null
682
+ },
683
+ {
684
  "rank": 21,
685
  "name": "sometimesanotion/LamarckInfusion-14B-v1",
686
  "scores": {
687
+ "average": 42.06,
688
+ "IFEval": 71.98,
689
+ "BBH": 50.35,
690
+ "MATH": 41.69,
691
+ "GPQA": 18.79,
692
+ "MUSR": 20.9,
693
+ "MMLU_PRO": 48.63,
694
+ "Architecture": "Qwen2ForCausalLM",
695
+ "Parameters": "14.766B",
696
+ "Chat_Template": "No"
697
  },
698
  "hf_url": "https://huggingface.co/sometimesanotion/LamarckInfusion-14B-v1",
699
  "known_config": null
700
+ },
701
+ {
702
  "rank": 22,
703
  "name": "tensopolis/virtuoso-small-v2-tensopolis-v1",
704
  "scores": {
705
+ "average": 41.99,
706
+ "IFEval": 82.4,
707
+ "BBH": 50.53,
708
+ "MATH": 46.53,
709
+ "GPQA": 12.53,
710
+ "MUSR": 13.88,
711
+ "MMLU_PRO": 46.07,
712
+ "Architecture": "Qwen2ForCausalLM",
713
+ "Parameters": "14.766B",
714
+ "Chat_Template": "Yes"
715
  },
716
  "hf_url": "https://huggingface.co/tensopolis/virtuoso-small-v2-tensopolis-v1",
717
  "known_config": null
718
+ },
719
+ {
720
  "rank": 23,
721
  "name": "Quazim0t0/Fugazi14b",
722
  "scores": {
723
+ "average": 41.94,
724
+ "IFEval": 69.98,
725
+ "BBH": 56.09,
726
+ "MATH": 46.53,
727
+ "GPQA": 13.53,
728
+ "MUSR": 16.42,
729
+ "MMLU_PRO": 49.08,
730
+ "Architecture": "LlamaForCausalLM",
731
+ "Parameters": "14.66B",
732
+ "Chat_Template": "Yes"
733
  },
734
  "hf_url": "https://huggingface.co/Quazim0t0/Fugazi14b",
735
  "known_config": null
736
+ },
737
+ {
738
  "rank": 24,
739
  "name": "1024m/QWEN-14B-B100",
740
  "scores": {
741
+ "average": 41.92,
742
+ "IFEval": 77.62,
743
+ "BBH": 49.78,
744
+ "MATH": 54.38,
745
+ "GPQA": 13.42,
746
+ "MUSR": 9.88,
747
+ "MMLU_PRO": 46.43,
748
+ "Architecture": "Qwen2ForCausalLM",
749
+ "Parameters": "14.77B",
750
+ "Chat_Template": "Yes"
751
  },
752
  "hf_url": "https://huggingface.co/1024m/QWEN-14B-B100",
753
  "known_config": null
754
+ },
755
+ {
756
  "rank": 25,
757
  "name": "Sakalti/Saka-14B",
758
  "scores": {
759
+ "average": 41.91,
760
+ "IFEval": 71.74,
761
+ "BBH": 49.72,
762
+ "MATH": 40.94,
763
+ "GPQA": 19.46,
764
+ "MUSR": 20.74,
765
+ "MMLU_PRO": 48.84,
766
+ "Architecture": "Qwen2ForCausalLM",
767
+ "Parameters": "14.766B",
768
+ "Chat_Template": "No"
769
  },
770
  "hf_url": "https://huggingface.co/Sakalti/Saka-14B",
771
  "known_config": null
772
+ },
773
+ {
774
  "rank": 26,
775
  "name": "prithivMLmods/Sombrero-Opus-14B-Elite6",
776
  "scores": {
777
+ "average": 41.88,
778
+ "IFEval": 72.26,
779
+ "BBH": 49.6,
780
+ "MATH": 40.79,
781
+ "GPQA": 19.13,
782
+ "MUSR": 20.74,
783
+ "MMLU_PRO": 48.78,
784
+ "Architecture": "Qwen2ForCausalLM",
785
+ "Parameters": "14.766B",
786
+ "Chat_Template": "No"
787
  },
788
  "hf_url": "https://huggingface.co/prithivMLmods/Sombrero-Opus-14B-Elite6",
789
  "known_config": null
790
+ },
791
+ {
792
  "rank": 27,
793
  "name": "YOYO-AI/Qwen2.5-14B-YOYO-latest-V2",
794
  "scores": {
795
+ "average": 41.85,
796
+ "IFEval": 77.71,
797
+ "BBH": 47.3,
798
+ "MATH": 51.59,
799
+ "GPQA": 13.87,
800
+ "MUSR": 13.68,
801
+ "MMLU_PRO": 46.93,
802
+ "Architecture": "Qwen2ForCausalLM",
803
+ "Parameters": "14.766B",
804
+ "Chat_Template": "Yes"
805
  },
806
  "hf_url": "https://huggingface.co/YOYO-AI/Qwen2.5-14B-YOYO-latest-V2",
807
  "known_config": null
808
+ },
809
+ {
810
  "rank": 28,
811
  "name": "Tsunami-th/Tsunami-1.0-14B-Instruct",
812
  "scores": {
813
+ "average": 41.84,
814
+ "IFEval": 78.29,
815
+ "BBH": 49.15,
816
+ "MATH": 45.85,
817
+ "GPQA": 14.21,
818
+ "MUSR": 16.34,
819
+ "MMLU_PRO": 47.21,
820
+ "Architecture": "Qwen2ForCausalLM",
821
+ "Parameters": "14.77B",
822
+ "Chat_Template": "Yes"
823
  },
824
  "hf_url": "https://huggingface.co/Tsunami-th/Tsunami-1.0-14B-Instruct",
825
  "known_config": null
826
+ },
827
+ {
828
  "rank": 29,
829
  "name": "sthenno/tempesthenno-kto-0205-ckpt80",
830
  "scores": {
831
+ "average": 41.79,
832
+ "IFEval": 80.54,
833
+ "BBH": 50.64,
834
+ "MATH": 45.92,
835
+ "GPQA": 13.09,
836
+ "MUSR": 12.93,
837
+ "MMLU_PRO": 47.62,
838
+ "Architecture": "Qwen2ForCausalLM",
839
+ "Parameters": "14.766B",
840
+ "Chat_Template": "No"
841
  },
842
  "hf_url": "https://huggingface.co/sthenno/tempesthenno-kto-0205-ckpt80",
843
  "known_config": null
844
+ },
845
+ {
846
  "rank": 30,
847
  "name": "sometimesanotion/Lamarck-14B-v0.7-rc4",
848
  "scores": {
849
+ "average": 41.79,
850
+ "IFEval": 72.11,
851
+ "BBH": 49.85,
852
+ "MATH": 40.26,
853
+ "GPQA": 18.57,
854
+ "MUSR": 21.07,
855
+ "MMLU_PRO": 48.89,
856
+ "Architecture": "Qwen2ForCausalLM",
857
+ "Parameters": "14.766B",
858
+ "Chat_Template": "No"
859
  },
860
  "hf_url": "https://huggingface.co/sometimesanotion/Lamarck-14B-v0.7-rc4",
861
  "known_config": null
862
+ },
863
+ {
864
  "rank": 31,
865
  "name": "prithivMLmods/Porpoise-Opus-14B-Exp",
866
  "scores": {
867
+ "average": 41.77,
868
+ "IFEval": 70.98,
869
+ "BBH": 49.95,
870
+ "MATH": 40.41,
871
+ "GPQA": 19.13,
872
+ "MUSR": 21.3,
873
+ "MMLU_PRO": 48.85,
874
+ "Architecture": "Qwen2ForCausalLM",
875
+ "Parameters": "14.766B",
876
+ "Chat_Template": "No"
877
  },
878
  "hf_url": "https://huggingface.co/prithivMLmods/Porpoise-Opus-14B-Exp",
879
  "known_config": null
880
+ },
881
+ {
882
  "rank": 32,
883
  "name": "CombinHorizon/Josiefied-abliteratedV4-Qwen2.5-14B-Inst-BaseMerge-TIES",
884
  "scores": {
885
+ "average": 41.77,
886
+ "IFEval": 82.4,
887
+ "BBH": 48.2,
888
+ "MATH": 53.17,
889
+ "GPQA": 9.96,
890
+ "MUSR": 12.65,
891
+ "MMLU_PRO": 44.21,
892
+ "Architecture": "Qwen2ForCausalLM",
893
+ "Parameters": "14.77B",
894
+ "Chat_Template": "Yes"
895
  },
896
  "hf_url": "https://huggingface.co/CombinHorizon/Josiefied-abliteratedV4-Qwen2.5-14B-Inst-BaseMerge-TIES",
897
  "known_config": null
898
+ },
899
+ {
900
+ "rank": 33,
901
+ "name": "suayptalha/Lamarckvergence-14B",
902
+ "scores": {
903
+ "average": 43.32,
904
+ "IFEval": 76.56,
905
+ "BBH": 50.33,
906
+ "MATH": 54,
907
+ "GPQA": 15.1,
908
+ "MUSR": 16.34,
909
+ "MMLU_PRO": 47.59,
910
+ "Architecture": "Qwen2ForCausalLM",
911
+ "Parameters": "14.766B",
912
+ "Chat_Template": "Yes"
913
+ },
914
+ "hf_url": "https://huggingface.co/suayptalha/Lamarckvergence-14B",
915
+ "known_config": null
916
+ },
917
+ {
918
+ "rank": 34,
919
+ "name": "sthenno/tempesthenno-ppo-ckpt40",
920
+ "scores": {
921
+ "average": 42.74,
922
+ "IFEval": 79.23,
923
+ "BBH": 50.57,
924
+ "MATH": 47.36,
925
+ "GPQA": 17,
926
+ "MUSR": 14.56,
927
+ "MMLU_PRO": 47.69,
928
+ "Architecture": "Qwen2ForCausalLM",
929
+ "Parameters": "14.766B",
930
+ "Chat_Template": "Yes"
931
+ },
932
+ "hf_url": "https://huggingface.co/sthenno/tempesthenno-ppo-ckpt40",
933
+ "known_config": null
934
+ },
935
+ {
936
+ "rank": 35,
937
+ "name": "tanliboy/lambda-qwen2.5-14b-dpo-test",
938
+ "scores": {
939
+ "average": 42.62,
940
+ "IFEval": 82.31,
941
+ "BBH": 48.45,
942
+ "MATH": 54.61,
943
+ "GPQA": 14.99,
944
+ "MUSR": 12.59,
945
+ "MMLU_PRO": 42.75,
946
+ "Architecture": "Qwen2ForCausalLM",
947
+ "Parameters": "14.77B",
948
+ "Chat_Template": "Yes"
949
+ },
950
+ "hf_url": "https://huggingface.co/tanliboy/lambda-qwen2.5-14b-dpo-test",
951
+ "known_config": null
952
+ },
953
+ {
954
+ "rank": 36,
955
+ "name": "sthenno/tempesthenno-nuslerp-001",
956
+ "scores": {
957
+ "average": 42.59,
958
+ "IFEval": 79.26,
959
+ "BBH": 51.04,
960
+ "MATH": 47.58,
961
+ "GPQA": 16.44,
962
+ "MUSR": 13.88,
963
+ "MMLU_PRO": 47.3,
964
+ "Architecture": "Qwen2ForCausalLM",
965
+ "Parameters": "14.766B",
966
+ "Chat_Template": "Yes"
967
+ },
968
+ "hf_url": "https://huggingface.co/sthenno/tempesthenno-nuslerp-001",
969
+ "known_config": null
970
+ },
971
+ {
972
+ "rank": 37,
973
+ "name": "Goekdeniz-Guelmez/Josiefied-Qwen2.5-14B-Instruct-abliterated-v4",
974
+ "scores": {
975
+ "average": 42.55,
976
+ "IFEval": 82.92,
977
+ "BBH": 48.05,
978
+ "MATH": 54.23,
979
+ "GPQA": 12.3,
980
+ "MUSR": 13.15,
981
+ "MMLU_PRO": 44.65,
982
+ "Architecture": "Qwen2ForCausalLM",
983
+ "Parameters": "14.77B",
984
+ "Chat_Template": "Yes"
985
+ },
986
+ "hf_url": "https://huggingface.co/Goekdeniz-Guelmez/Josiefied-Qwen2.5-14B-Instruct-abliterated-v4",
987
+ "known_config": null
988
+ },
989
+ {
990
+ "rank": 38,
991
+ "name": "djuna/Q2.5-Veltha-14B",
992
+ "scores": {
993
+ "average": 42.52,
994
+ "IFEval": 82.92,
995
+ "BBH": 49.75,
996
+ "MATH": 47.89,
997
+ "GPQA": 14.54,
998
+ "MUSR": 12.26,
999
+ "MMLU_PRO": 47.76,
1000
+ "Architecture": "Qwen2ForCausalLM",
1001
+ "Parameters": "14.766B",
1002
+ "Chat_Template": "Yes"
1003
+ },
1004
+ "hf_url": "https://huggingface.co/djuna/Q2.5-Veltha-14B",
1005
+ "known_config": null
1006
+ },
1007
+ {
1008
+ "rank": 39,
1009
+ "name": "arcee-ai/Virtuoso-Small-v2",
1010
+ "scores": {
1011
+ "average": 42.48,
1012
+ "IFEval": 82.73,
1013
+ "BBH": 50.95,
1014
+ "MATH": 46.6,
1015
+ "GPQA": 13.76,
1016
+ "MUSR": 14.28,
1017
+ "MMLU_PRO": 46.53,
1018
+ "Architecture": "Qwen2ForCausalLM",
1019
+ "Parameters": "14.766B",
1020
+ "Chat_Template": "Yes"
1021
+ },
1022
+ "hf_url": "https://huggingface.co/arcee-ai/Virtuoso-Small-v2",
1023
+ "known_config": null
1024
+ },
1025
+ {
1026
+ "rank": 40,
1027
+ "name": "jpacifico/Chocolatine-14B-Instruct-DPO-v1.3",
1028
+ "scores": {
1029
+ "average": 42.42,
1030
+ "IFEval": 70.4,
1031
+ "BBH": 54.85,
1032
+ "MATH": 56.19,
1033
+ "GPQA": 12.19,
1034
+ "MUSR": 12.29,
1035
+ "MMLU_PRO": 48.6,
1036
+ "Architecture": "Phi3ForCausalLM",
1037
+ "Parameters": "14.66B",
1038
+ "Chat_Template": "Yes"
1039
+ },
1040
+ "hf_url": "https://huggingface.co/jpacifico/Chocolatine-14B-Instruct-DPO-v1.3",
1041
+ "known_config": null
1042
+ },
1043
+ {
1044
+ "rank": 41,
1045
+ "name": "sthenno-com/miscii-14b-1028",
1046
+ "scores": {
1047
+ "average": 42.38,
1048
+ "IFEval": 82.37,
1049
+ "BBH": 49.26,
1050
+ "MATH": 50.3,
1051
+ "GPQA": 14.21,
1052
+ "MUSR": 12,
1053
+ "MMLU_PRO": 46.14,
1054
+ "Architecture": "Qwen2ForCausalLM",
1055
+ "Parameters": "14.77B",
1056
+ "Chat_Template": "Yes"
1057
+ },
1058
+ "hf_url": "https://huggingface.co/sthenno-com/miscii-14b-1028",
1059
+ "known_config": null
1060
+ },
1061
+ {
1062
+ "rank": 42,
1063
+ "name": "sthenno-com/miscii-14b-1225",
1064
+ "scores": {
1065
+ "average": 42.35,
1066
+ "IFEval": 78.78,
1067
+ "BBH": 50.91,
1068
+ "MATH": 45.17,
1069
+ "GPQA": 17,
1070
+ "MUSR": 14.77,
1071
+ "MMLU_PRO": 47.46,
1072
+ "Architecture": "Qwen2ForCausalLM",
1073
+ "Parameters": "14.766B",
1074
+ "Chat_Template": "Yes"
1075
+ },
1076
+ "hf_url": "https://huggingface.co/sthenno-com/miscii-14b-1225",
1077
+ "known_config": null
1078
+ },
1079
+ {
1080
+ "rank": 43,
1081
+ "name": "tensopolis/virtuoso-small-v2-tensopolis-v1",
1082
+ "scores": {
1083
+ "average": 42.34,
1084
+ "IFEval": 83.4,
1085
+ "BBH": 50.99,
1086
+ "MATH": 46.6,
1087
+ "GPQA": 12.98,
1088
+ "MUSR": 13.38,
1089
+ "MMLU_PRO": 46.67,
1090
+ "Architecture": "Qwen2ForCausalLM",
1091
+ "Parameters": "14.766B",
1092
+ "Chat_Template": "Yes"
1093
+ },
1094
+ "hf_url": "https://huggingface.co/tensopolis/virtuoso-small-v2-tensopolis-v1",
1095
+ "known_config": null
1096
+ },
1097
+ {
1098
+ "rank": 44,
1099
+ "name": "rombodawg/Rombos-LLM-V2.6-Qwen-14b",
1100
+ "scores": {
1101
+ "average": 42.2,
1102
+ "IFEval": 84.32,
1103
+ "BBH": 49.28,
1104
+ "MATH": 52.11,
1105
+ "GPQA": 11.19,
1106
+ "MUSR": 12.29,
1107
+ "MMLU_PRO": 44.01,
1108
+ "Architecture": "Qwen2ForCausalLM",
1109
+ "Parameters": "14.77B",
1110
+ "Chat_Template": "Yes"
1111
+ },
1112
+ "hf_url": "https://huggingface.co/rombodawg/Rombos-LLM-V2.6-Qwen-14b",
1113
+ "known_config": null
1114
+ },
1115
+ {
1116
+ "rank": 45,
1117
+ "name": "1024m/QWEN-14B-B100",
1118
+ "scores": {
1119
+ "average": 41.92,
1120
+ "IFEval": 77.62,
1121
+ "BBH": 49.78,
1122
+ "MATH": 54.38,
1123
+ "GPQA": 13.42,
1124
+ "MUSR": 9.88,
1125
+ "MMLU_PRO": 46.43,
1126
+ "Architecture": "Qwen2ForCausalLM",
1127
+ "Parameters": "14.77B",
1128
+ "Chat_Template": "Yes"
1129
+ },
1130
+ "hf_url": "https://huggingface.co/1024m/QWEN-14B-B100",
1131
+ "known_config": null
1132
+ },
1133
+ {
1134
+ "rank": 46,
1135
+ "name": "Sakalti/Saka-14B",
1136
+ "scores": {
1137
+ "average": 41.91,
1138
+ "IFEval": 71.74,
1139
+ "BBH": 49.72,
1140
+ "MATH": 40.94,
1141
+ "GPQA": 19.46,
1142
+ "MUSR": 20.74,
1143
+ "MMLU_PRO": 48.84,
1144
+ "Architecture": "Qwen2ForCausalLM",
1145
+ "Parameters": "14.766B",
1146
+ "Chat_Template": "No"
1147
+ },
1148
+ "hf_url": "https://huggingface.co/Sakalti/Saka-14B",
1149
+ "known_config": null
1150
+ },
1151
+ {
1152
+ "rank": 47,
1153
+ "name": "Tsunami-th/Tsunami-1.0-14B-Instruct",
1154
+ "scores": {
1155
+ "average": 41.84,
1156
+ "IFEval": 78.29,
1157
+ "BBH": 49.15,
1158
+ "MATH": 45.85,
1159
+ "GPQA": 14.21,
1160
+ "MUSR": 16.34,
1161
+ "MMLU_PRO": 47.21,
1162
+ "Architecture": "Qwen2ForCausalLM",
1163
+ "Parameters": "14.77B",
1164
+ "Chat_Template": "Yes"
1165
+ },
1166
+ "hf_url": "https://huggingface.co/Tsunami-th/Tsunami-1.0-14B-Instruct",
1167
+ "known_config": null
1168
+ },
1169
+ {
1170
+ "rank": 48,
1171
+ "name": "sthenno/tempesthenno-kto-0205-ckpt80",
1172
+ "scores": {
1173
+ "average": 41.79,
1174
+ "IFEval": 80.54,
1175
+ "BBH": 50.64,
1176
+ "MATH": 45.92,
1177
+ "GPQA": 13.09,
1178
+ "MUSR": 12.93,
1179
+ "MMLU_PRO": 47.62,
1180
+ "Architecture": "Qwen2ForCausalLM",
1181
+ "Parameters": "14.766B",
1182
+ "Chat_Template": "No"
1183
+ },
1184
+ "hf_url": "https://huggingface.co/sthenno/tempesthenno-kto-0205-ckpt80",
1185
+ "known_config": null
1186
+ },
1187
+ {
1188
+ "rank": 49,
1189
+ "name": "sometimesanotion/Lamarck-14B-v0.7-rc4",
1190
+ "scores": {
1191
+ "average": 41.79,
1192
+ "IFEval": 72.11,
1193
+ "BBH": 49.85,
1194
+ "MATH": 40.26,
1195
+ "GPQA": 18.57,
1196
+ "MUSR": 21.07,
1197
+ "MMLU_PRO": 48.89,
1198
+ "Architecture": "Qwen2ForCausalLM",
1199
+ "Parameters": "14.766B",
1200
+ "Chat_Template": "No"
1201
+ },
1202
+ "hf_url": "https://huggingface.co/sometimesanotion/Lamarck-14B-v0.7-rc4",
1203
+ "known_config": null
1204
+ },
1205
+ {
1206
+ "rank": 50,
1207
+ "name": "CombinHorizon/Josiefied-abliteratedV4-Qwen2.5-14B-Inst-BaseMerge-TIES",
1208
+ "scores": {
1209
+ "average": 41.77,
1210
+ "IFEval": 82.4,
1211
+ "BBH": 48.2,
1212
+ "MATH": 53.17,
1213
+ "GPQA": 9.96,
1214
+ "MUSR": 12.65,
1215
+ "MMLU_PRO": 44.21,
1216
+ "Architecture": "Qwen2ForCausalLM",
1217
+ "Parameters": "14.77B",
1218
+ "Chat_Template": "Yes"
1219
+ },
1220
+ "hf_url": "https://huggingface.co/CombinHorizon/Josiefied-abliteratedV4-Qwen2.5-14B-Inst-BaseMerge-TIES",
1221
+ "known_config": null
1222
+ },
1223
+ {
1224
+ "rank": 51,
1225
+ "name": "suayptalha/Luminis-phi-4",
1226
+ "scores": {
1227
+ "average": 41.76,
1228
+ "IFEval": 69,
1229
+ "BBH": 55.8,
1230
+ "MATH": 46.37,
1231
+ "GPQA": 13.53,
1232
+ "MUSR": 16.68,
1233
+ "MMLU_PRO": 49.15,
1234
+ "Architecture": "LlamaForCausalLM",
1235
+ "Parameters": "14.66B",
1236
+ "Chat_Template": "Yes"
1237
+ },
1238
+ "hf_url": "https://huggingface.co/suayptalha/Luminis-phi-4",
1239
+ "known_config": null
1240
+ },
1241
+ {
1242
+ "rank": 52,
1243
+ "name": "huihui-ai/Qwen2.5-14B-Instruct-abliterated-v2",
1244
+ "scores": {
1245
+ "average": 41.75,
1246
+ "IFEval": 83.28,
1247
+ "BBH": 47.41,
1248
+ "MATH": 53.02,
1249
+ "GPQA": 11.19,
1250
+ "MUSR": 11.58,
1251
+ "MMLU_PRO": 44.02,
1252
+ "Architecture": "Qwen2ForCausalLM",
1253
+ "Parameters": "14.77B",
1254
+ "Chat_Template": "Yes"
1255
+ },
1256
+ "hf_url": "https://huggingface.co/huihui-ai/Qwen2.5-14B-Instruct-abliterated-v2",
1257
+ "known_config": null
1258
+ },
1259
+ {
1260
+ "rank": 53,
1261
+ "name": "djuna/Q2.5-Veltha-14B-0.5",
1262
+ "scores": {
1263
+ "average": 41.61,
1264
+ "IFEval": 77.96,
1265
+ "BBH": 50.32,
1266
+ "MATH": 43.73,
1267
+ "GPQA": 15.77,
1268
+ "MUSR": 14.17,
1269
+ "MMLU_PRO": 47.72,
1270
+ "Architecture": "Qwen2ForCausalLM",
1271
+ "Parameters": "14.766B",
1272
+ "Chat_Template": "Yes"
1273
+ },
1274
+ "hf_url": "https://huggingface.co/djuna/Q2.5-Veltha-14B-0.5",
1275
+ "known_config": null
1276
+ },
1277
+ {
1278
+ "rank": 54,
1279
+ "name": "Qwen/Qwen2.5-14B-Instruct-1M",
1280
+ "scores": {
1281
+ "average": 41.56,
1282
+ "IFEval": 84.14,
1283
+ "BBH": 45.66,
1284
+ "MATH": 53.02,
1285
+ "GPQA": 12.42,
1286
+ "MUSR": 11.35,
1287
+ "MMLU_PRO": 42.77,
1288
+ "Architecture": "Qwen2ForCausalLM",
1289
+ "Parameters": "14.77B",
1290
+ "Chat_Template": "Yes"
1291
+ },
1292
+ "hf_url": "https://huggingface.co/Qwen/Qwen2.5-14B-Instruct-1M",
1293
+ "known_config": null
1294
+ },
1295
+ {
1296
+ "rank": 55,
1297
+ "name": "notbdq/Qwen2.5-14B-Instruct-1M-GRPO-Reasoning",
1298
+ "scores": {
1299
+ "average": 41.56,
1300
+ "IFEval": 84.14,
1301
+ "BBH": 45.66,
1302
+ "MATH": 53.02,
1303
+ "GPQA": 12.42,
1304
+ "MUSR": 11.35,
1305
+ "MMLU_PRO": 42.77,
1306
+ "Architecture": "Qwen2ForCausalLM",
1307
+ "Parameters": "14.77B",
1308
+ "Chat_Template": "Yes"
1309
+ },
1310
+ "hf_url": "https://huggingface.co/notbdq/Qwen2.5-14B-Instruct-1M-GRPO-Reasoning",
1311
+ "known_config": null
1312
+ },
1313
+ {
1314
+ "rank": 56,
1315
+ "name": "sometimesanotion/Qwenvergence-14B-v11",
1316
+ "scores": {
1317
+ "average": 41.52,
1318
+ "IFEval": 71.92,
1319
+ "BBH": 47.55,
1320
+ "MATH": 46.45,
1321
+ "GPQA": 16.33,
1322
+ "MUSR": 18.76,
1323
+ "MMLU_PRO": 48.08,
1324
+ "Architecture": "Qwen2ForCausalLM",
1325
+ "Parameters": "14.766B",
1326
+ "Chat_Template": "No"
1327
+ },
1328
+ "hf_url": "https://huggingface.co/sometimesanotion/Qwenvergence-14B-v11",
1329
+ "known_config": null
1330
+ },
1331
+ {
1332
+ "rank": 57,
1333
+ "name": "sometimesanotion/Qwenvergence-14B-v10",
1334
+ "scores": {
1335
+ "average": 41.48,
1336
+ "IFEval": 67.57,
1337
+ "BBH": 46.75,
1338
+ "MATH": 47.89,
1339
+ "GPQA": 17.23,
1340
+ "MUSR": 22.33,
1341
+ "MMLU_PRO": 47.1,
1342
+ "Architecture": "Qwen2ForCausalLM",
1343
+ "Parameters": "14.766B",
1344
+ "Chat_Template": "No"
1345
+ },
1346
+ "hf_url": "https://huggingface.co/sometimesanotion/Qwenvergence-14B-v10",
1347
+ "known_config": null
1348
+ },
1349
+ {
1350
+ "rank": 58,
1351
+ "name": "CombinHorizon/huihui-ai-abliteratedV2-Qwen2.5-14B-Inst-BaseMerge-TIES",
1352
+ "scores": {
1353
+ "average": 41.47,
1354
+ "IFEval": 81.76,
1355
+ "BBH": 47.77,
1356
+ "MATH": 54.76,
1357
+ "GPQA": 8.61,
1358
+ "MUSR": 12.45,
1359
+ "MMLU_PRO": 43.45,
1360
+ "Architecture": "Qwen2ForCausalLM",
1361
+ "Parameters": "14.77B",
1362
+ "Chat_Template": "Yes"
1363
+ },
1364
+ "hf_url": "https://huggingface.co/CombinHorizon/huihui-ai-abliteratedV2-Qwen2.5-14B-Inst-BaseMerge-TIES",
1365
+ "known_config": null
1366
+ },
1367
+ {
1368
+ "rank": 59,
1369
+ "name": "RDson/WomboCombo-R1-Coder-14B-Preview",
1370
+ "scores": {
1371
+ "average": 41.46,
1372
+ "IFEval": 62.86,
1373
+ "BBH": 48.15,
1374
+ "MATH": 59.89,
1375
+ "GPQA": 9.51,
1376
+ "MUSR": 22.01,
1377
+ "MMLU_PRO": 46.31,
1378
+ "Architecture": "Qwen2ForCausalLM",
1379
+ "Parameters": "14.77B",
1380
+ "Chat_Template": "Yes"
1381
+ },
1382
+ "hf_url": "https://huggingface.co/RDson/WomboCombo-R1-Coder-14B-Preview",
1383
+ "known_config": null
1384
+ },
1385
+ {
1386
+ "rank": 60,
1387
+ "name": "jpacifico/Chocolatine-2-14B-Instruct-v2.0b3",
1388
+ "scores": {
1389
+ "average": 41.43,
1390
+ "IFEval": 73.23,
1391
+ "BBH": 49.57,
1392
+ "MATH": 41.09,
1393
+ "GPQA": 17.23,
1394
+ "MUSR": 19.3,
1395
+ "MMLU_PRO": 48.19,
1396
+ "Architecture": "Qwen2ForCausalLM",
1397
+ "Parameters": "14.766B",
1398
+ "Chat_Template": "No"
1399
+ },
1400
+ "hf_url": "https://huggingface.co/jpacifico/Chocolatine-2-14B-Instruct-v2.0b3",
1401
+ "known_config": null
1402
+ },
1403
+ {
1404
+ "rank": 61,
1405
+ "name": "Quazim0t0/Nova-14b-sce",
1406
+ "scores": {
1407
+ "average": 41.41,
1408
+ "IFEval": 70.22,
1409
+ "BBH": 56.03,
1410
+ "MATH": 41.62,
1411
+ "GPQA": 15.1,
1412
+ "MUSR": 16.43,
1413
+ "MMLU_PRO": 49.03,
1414
+ "Architecture": "LlamaForCausalLM",
1415
+ "Parameters": "14.66B",
1416
+ "Chat_Template": "Yes"
1417
+ },
1418
+ "hf_url": "https://huggingface.co/Quazim0t0/Nova-14b-sce",
1419
+ "known_config": null
1420
+ },
1421
+ {
1422
+ "rank": 62,
1423
+ "name": "v000000/Qwen2.5-14B-Gutenberg-Instruct-Slerpeno",
1424
+ "scores": {
1425
+ "average": 41.36,
1426
+ "IFEval": 81.97,
1427
+ "BBH": 48.45,
1428
+ "MATH": 53.25,
1429
+ "GPQA": 10.85,
1430
+ "MUSR": 10.05,
1431
+ "MMLU_PRO": 43.59,
1432
+ "Architecture": "Qwen2ForCausalLM",
1433
+ "Parameters": "14.77B",
1434
+ "Chat_Template": "Yes"
1435
+ },
1436
+ "hf_url": "https://huggingface.co/v000000/Qwen2.5-14B-Gutenberg-Instruct-Slerpeno",
1437
+ "known_config": null
1438
+ },
1439
+ {
1440
+ "rank": 63,
1441
+ "name": "Quazim0t0/NovaScotia-14b-stock",
1442
+ "scores": {
1443
+ "average": 41.35,
1444
+ "IFEval": 67.87,
1445
+ "BBH": 56.03,
1446
+ "MATH": 46.3,
1447
+ "GPQA": 13.2,
1448
+ "MUSR": 15.7,
1449
+ "MMLU_PRO": 48.99,
1450
+ "Architecture": "LlamaForCausalLM",
1451
+ "Parameters": "14.66B",
1452
+ "Chat_Template": "Yes"
1453
+ },
1454
+ "hf_url": "https://huggingface.co/Quazim0t0/NovaScotia-14b-stock",
1455
+ "known_config": null
1456
+ },
1457
+ {
1458
+ "rank": 64,
1459
+ "name": "Quazim0t0/ODB-14b-sce",
1460
+ "scores": {
1461
+ "average": 41.34,
1462
+ "IFEval": 70.16,
1463
+ "BBH": 56.19,
1464
+ "MATH": 41.16,
1465
+ "GPQA": 14.99,
1466
+ "MUSR": 16.5,
1467
+ "MMLU_PRO": 49.02,
1468
+ "Architecture": "LlamaForCausalLM",
1469
+ "Parameters": "14.66B",
1470
+ "Chat_Template": "Yes"
1471
+ },
1472
+ "hf_url": "https://huggingface.co/Quazim0t0/ODB-14b-sce",
1473
+ "known_config": null
1474
+ },
1475
+ {
1476
+ "rank": 65,
1477
+ "name": "LightningRodLabs/Flashlight-v1.1",
1478
+ "scores": {
1479
+ "average": 40.99,
1480
+ "IFEval": 67.21,
1481
+ "BBH": 55.43,
1482
+ "MATH": 53.25,
1483
+ "GPQA": 11.97,
1484
+ "MUSR": 9,
1485
+ "MMLU_PRO": 49.06,
1486
+ "Architecture": "Phi3ForCausalLM",
1487
+ "Parameters": "14.66B",
1488
+ "Chat_Template": "Yes"
1489
+ },
1490
+ "hf_url": "https://huggingface.co/LightningRodLabs/Flashlight-v1.1",
1491
+ "known_config": null
1492
+ },
1493
+ {
1494
+ "rank": 66,
1495
+ "name": "Quazim0t0/Mithril-14B-sce",
1496
+ "scores": {
1497
+ "average": 40.98,
1498
+ "IFEval": 69.58,
1499
+ "BBH": 55.93,
1500
+ "MATH": 38.22,
1501
+ "GPQA": 15.88,
1502
+ "MUSR": 17.37,
1503
+ "MMLU_PRO": 48.92,
1504
+ "Architecture": "LlamaForCausalLM",
1505
+ "Parameters": "14.66B",
1506
+ "Chat_Template": "Yes"
1507
+ },
1508
+ "hf_url": "https://huggingface.co/Quazim0t0/Mithril-14B-sce",
1509
+ "known_config": null
1510
+ },
1511
+ {
1512
+ "rank": 67,
1513
+ "name": "Sakalti/ultiima-14B-v0.2",
1514
+ "scores": {
1515
+ "average": 40.96,
1516
+ "IFEval": 70.7,
1517
+ "BBH": 49.51,
1518
+ "MATH": 39.95,
1519
+ "GPQA": 17.67,
1520
+ "MUSR": 19.19,
1521
+ "MMLU_PRO": 48.75,
1522
+ "Architecture": "Qwen2ForCausalLM",
1523
+ "Parameters": "14.766B",
1524
+ "Chat_Template": "No"
1525
+ },
1526
+ "hf_url": "https://huggingface.co/Sakalti/ultiima-14B-v0.2",
1527
+ "known_config": null
1528
+ },
1529
+ {
1530
+ "rank": 68,
1531
+ "name": "bunnycore/Phi-4-ReasoningRP",
1532
+ "scores": {
1533
+ "average": 40.95,
1534
+ "IFEval": 67.36,
1535
+ "BBH": 55.88,
1536
+ "MATH": 45.69,
1537
+ "GPQA": 12.53,
1538
+ "MUSR": 15.14,
1539
+ "MMLU_PRO": 49.12,
1540
+ "Architecture": "LlamaForCausalLM",
1541
+ "Parameters": "14.66B",
1542
+ "Chat_Template": "Yes"
1543
+ },
1544
+ "hf_url": "https://huggingface.co/bunnycore/Phi-4-ReasoningRP",
1545
+ "known_config": null
1546
+ },
1547
+ {
1548
+ "rank": 69,
1549
+ "name": "dwikitheduck/gen-inst-1",
1550
+ "scores": {
1551
+ "average": 40.88,
1552
+ "IFEval": 77.5,
1553
+ "BBH": 48.32,
1554
+ "MATH": 45.54,
1555
+ "GPQA": 16.22,
1556
+ "MUSR": 12.27,
1557
+ "MMLU_PRO": 45.43,
1558
+ "Architecture": "Qwen2ForCausalLM",
1559
+ "Parameters": "14.77B",
1560
+ "Chat_Template": "Yes"
1561
+ },
1562
+ "hf_url": "https://huggingface.co/dwikitheduck/gen-inst-1",
1563
+ "known_config": null
1564
+ },
1565
+ {
1566
+ "rank": 70,
1567
+ "name": "v000000/Qwen2.5-14B-Gutenberg-1e-Delta",
1568
+ "scores": {
1569
+ "average": 40.88,
1570
+ "IFEval": 80.45,
1571
+ "BBH": 48.62,
1572
+ "MATH": 52.64,
1573
+ "GPQA": 10.51,
1574
+ "MUSR": 9.38,
1575
+ "MMLU_PRO": 43.67,
1576
+ "Architecture": "Qwen2ForCausalLM",
1577
+ "Parameters": "14.77B",
1578
+ "Chat_Template": "Yes"
1579
+ },
1580
+ "hf_url": "https://huggingface.co/v000000/Qwen2.5-14B-Gutenberg-1e-Delta",
1581
+ "known_config": null
1582
+ },
1583
+ {
1584
+ "rank": 71,
1585
+ "name": "hotmailuser/QwenSlerp2-14B",
1586
+ "scores": {
1587
+ "average": 40.86,
1588
+ "IFEval": 70.37,
1589
+ "BBH": 49.68,
1590
+ "MATH": 39.65,
1591
+ "GPQA": 17.45,
1592
+ "MUSR": 19.35,
1593
+ "MMLU_PRO": 48.66,
1594
+ "Architecture": "Qwen2ForCausalLM",
1595
+ "Parameters": "14.766B",
1596
+ "Chat_Template": "No"
1597
+ },
1598
+ "hf_url": "https://huggingface.co/hotmailuser/QwenSlerp2-14B",
1599
+ "known_config": null
1600
+ },
1601
+ {
1602
+ "rank": 71,
1603
+ "name": "Quazim0t0/Loke-14B-sce",
1604
+ "scores": {
1605
+ "average": 40.86,
1606
+ "IFEval": 68.48,
1607
+ "BBH": 55.83,
1608
+ "MATH": 39.05,
1609
+ "GPQA": 15.32,
1610
+ "MUSR": 17.56,
1611
+ "MMLU_PRO": 48.9,
1612
+ "Architecture": "LlamaForCausalLM",
1613
+ "Parameters": "14.66B",
1614
+ "Chat_Template": "Yes"
1615
+ },
1616
+ "hf_url": "https://huggingface.co/Quazim0t0/Loke-14B-sce",
1617
+ "known_config": null
1618
+ },
1619
+ {
1620
+ "rank": 72,
1621
+ "name": "Quazim0t0/mosaic-14b-sce",
1622
+ "scores": {
1623
+ "average": 40.83,
1624
+ "IFEval": 68.76,
1625
+ "BBH": 55.69,
1626
+ "MATH": 40.26,
1627
+ "GPQA": 14.99,
1628
+ "MUSR": 16.44,
1629
+ "MMLU_PRO": 48.85,
1630
+ "Architecture": "LlamaForCausalLM",
1631
+ "Parameters": "14.66B",
1632
+ "Chat_Template": "Yes"
1633
+ },
1634
+ "hf_url": "https://huggingface.co/Quazim0t0/mosaic-14b-sce",
1635
+ "known_config": null
1636
+ },
1637
+ {
1638
+ "rank": 73,
1639
+ "name": "bunnycore/Phi-4-Model-Stock",
1640
+ "scores": {
1641
+ "average": 40.79,
1642
+ "IFEval": 68.79,
1643
+ "BBH": 55.32,
1644
+ "MATH": 42.98,
1645
+ "GPQA": 13.98,
1646
+ "MUSR": 15.12,
1647
+ "MMLU_PRO": 48.54,
1648
+ "Architecture": "LlamaForCausalLM",
1649
+ "Parameters": "14.66B",
1650
+ "Chat_Template": "Yes"
1651
+ },
1652
+ "hf_url": "https://huggingface.co/bunnycore/Phi-4-Model-Stock",
1653
+ "known_config": null
1654
+ },
1655
+ {
1656
+ "rank": 74,
1657
+ "name": "unsloth/phi-4",
1658
+ "scores": {
1659
+ "average": 40.73,
1660
+ "IFEval": 68.82,
1661
+ "BBH": 55.25,
1662
+ "MATH": 50,
1663
+ "GPQA": 11.52,
1664
+ "MUSR": 10.13,
1665
+ "MMLU_PRO": 48.65,
1666
+ "Architecture": "LlamaForCausalLM",
1667
+ "Parameters": "14.66B",
1668
+ "Chat_Template": "Yes"
1669
+ },
1670
+ "hf_url": "https://huggingface.co/unsloth/phi-4",
1671
+ "known_config": null
1672
+ },
1673
+ {
1674
+ "rank": 75,
1675
+ "name": "pankajmathur/orca_mini_phi-4",
1676
+ "scores": {
1677
+ "average": 40.68,
1678
+ "IFEval": 77.81,
1679
+ "BBH": 54.63,
1680
+ "MATH": 29.53,
1681
+ "GPQA": 16.55,
1682
+ "MUSR": 18.25,
1683
+ "MMLU_PRO": 47.28,
1684
+ "Architecture": "LlamaForCausalLM",
1685
+ "Parameters": "14.66B",
1686
+ "Chat_Template": "Yes"
1687
+ },
1688
+ "hf_url": "https://huggingface.co/pankajmathur/orca_mini_phi-4",
1689
+ "known_config": null
1690
+ },
1691
+ {
1692
+ "rank": 76,
1693
+ "name": "pankajmathur/orca_mini_v9_2_14B",
1694
+ "scores": {
1695
+ "average": 40.68,
1696
+ "IFEval": 77.81,
1697
+ "BBH": 54.63,
1698
+ "MATH": 29.53,
1699
+ "GPQA": 16.55,
1700
+ "MUSR": 18.25,
1701
+ "MMLU_PRO": 47.28,
1702
+ "Architecture": "LlamaForCausalLM",
1703
+ "Parameters": "14.66B",
1704
+ "Chat_Template": "Yes"
1705
+ },
1706
+ "hf_url": "https://huggingface.co/pankajmathur/orca_mini_v9_2_14B",
1707
+ "known_config": null
1708
+ },
1709
+ {
1710
+ "rank": 77,
1711
+ "name": "sometimesanotion/Lamarck-14B-v0.6-model_stock",
1712
+ "scores": {
1713
+ "average": 40.68,
1714
+ "IFEval": 67.9,
1715
+ "BBH": 46.49,
1716
+ "MATH": 42.45,
1717
+ "GPQA": 17.9,
1718
+ "MUSR": 22.68,
1719
+ "MMLU_PRO": 46.64,
1720
+ "Architecture": "Qwen2ForCausalLM",
1721
+ "Parameters": "14B",
1722
+ "Chat_Template": "No"
1723
+ },
1724
+ "hf_url": "https://huggingface.co/sometimesanotion/Lamarck-14B-v0.6-model_stock",
1725
+ "known_config": null
1726
+ },
1727
+ {
1728
+ "rank": 78,
1729
+ "name": "sometimesanotion/Qwenvergence-14B-v0.6-004-model_stock",
1730
+ "scores": {
1731
+ "average": 40.6,
1732
+ "IFEval": 68.6,
1733
+ "BBH": 46.37,
1734
+ "MATH": 40.94,
1735
+ "GPQA": 17.79,
1736
+ "MUSR": 23.35,
1737
+ "MMLU_PRO": 46.59,
1738
+ "Architecture": "Qwen2ForCausalLM",
1739
+ "Parameters": "14B",
1740
+ "Chat_Template": "No"
1741
+ },
1742
+ "hf_url": "https://huggingface.co/sometimesanotion/Qwenvergence-14B-v0.6-004-model_stock",
1743
+ "known_config": null
1744
+ },
1745
+ {
1746
+ "rank": 79,
1747
+ "name": "Quazim0t0/Oasis-14B-ties",
1748
+ "scores": {
1749
+ "average": 40.59,
1750
+ "IFEval": 69.37,
1751
+ "BBH": 55.75,
1752
+ "MATH": 37.54,
1753
+ "GPQA": 15.32,
1754
+ "MUSR": 16.63,
1755
+ "MMLU_PRO": 48.94,
1756
+ "Architecture": "LlamaForCausalLM",
1757
+ "Parameters": "14.66B",
1758
+ "Chat_Template": "Yes"
1759
+ },
1760
+ "hf_url": "https://huggingface.co/Quazim0t0/Oasis-14B-ties",
1761
+ "known_config": null
1762
+ },
1763
+ {
1764
+ "rank": 80,
1765
+ "name": "LightningRodLabs/Flashlight-v1.0",
1766
+ "scores": {
1767
+ "average": 40.57,
1768
+ "IFEval": 67.45,
1769
+ "BBH": 55.15,
1770
+ "MATH": 49.7,
1771
+ "GPQA": 12.3,
1772
+ "MUSR": 9.93,
1773
+ "MMLU_PRO": 48.91,
1774
+ "Architecture": "LlamaForCausalLM",
1775
+ "Parameters": "14.66B",
1776
+ "Chat_Template": "Yes"
1777
+ },
1778
+ "hf_url": "https://huggingface.co/LightningRodLabs/Flashlight-v1.0",
1779
+ "known_config": null
1780
+ },
1781
+ {
1782
+ "rank": 81,
1783
+ "name": "arcee-ai/Virtuoso-Small",
1784
+ "scores": {
1785
+ "average": 40.54,
1786
+ "IFEval": 79.35,
1787
+ "BBH": 50.4,
1788
+ "MATH": 40.94,
1789
+ "GPQA": 11.52,
1790
+ "MUSR": 14.44,
1791
+ "MMLU_PRO": 46.57,
1792
+ "Architecture": "Qwen2ForCausalLM",
1793
+ "Parameters": "14.77B",
1794
+ "Chat_Template": "Yes"
1795
+ },
1796
+ "hf_url": "https://huggingface.co/arcee-ai/Virtuoso-Small",
1797
+ "known_config": null
1798
+ },
1799
+ {
1800
+ "rank": 82,
1801
+ "name": "Quazim0t0/GuiltySpark-14B-ties",
1802
+ "scores": {
1803
+ "average": 40.52,
1804
+ "IFEval": 68.54,
1805
+ "BBH": 55.72,
1806
+ "MATH": 38.37,
1807
+ "GPQA": 15.32,
1808
+ "MUSR": 16.3,
1809
+ "MMLU_PRO": 48.89,
1810
+ "Architecture": "LlamaForCausalLM",
1811
+ "Parameters": "14.66B",
1812
+ "Chat_Template": "Yes"
1813
+ },
1814
+ "hf_url": "https://huggingface.co/Quazim0t0/GuiltySpark-14B-ties",
1815
+ "known_config": null
1816
+ },
1817
+ {
1818
+ "rank": 83,
1819
+ "name": "ozone-ai/0x-lite",
1820
+ "scores": {
1821
+ "average": 40.48,
1822
+ "IFEval": 77.4,
1823
+ "BBH": 47.53,
1824
+ "MATH": 50.45,
1825
+ "GPQA": 9.28,
1826
+ "MUSR": 11.76,
1827
+ "MMLU_PRO": 46.49,
1828
+ "Architecture": "Qwen2ForCausalLM",
1829
+ "Parameters": "14.77B",
1830
+ "Chat_Template": "Yes"
1831
+ },
1832
+ "hf_url": "https://huggingface.co/ozone-ai/0x-lite",
1833
+ "known_config": null
1834
+ },
1835
+ {
1836
+ "rank": 84,
1837
+ "name": "Quazim0t0/Casa-14b-sce",
1838
+ "scores": {
1839
+ "average": 40.41,
1840
+ "IFEval": 66.54,
1841
+ "BBH": 55.4,
1842
+ "MATH": 46.98,
1843
+ "GPQA": 11.07,
1844
+ "MUSR": 13.31,
1845
+ "MMLU_PRO": 49.17,
1846
+ "Architecture": "LlamaForCausalLM",
1847
+ "Parameters": "14.66B",
1848
+ "Chat_Template": "Yes"
1849
+ },
1850
+ "hf_url": "https://huggingface.co/Quazim0t0/Casa-14b-sce",
1851
+ "known_config": null
1852
+ },
1853
+ {
1854
+ "rank": 85,
1855
+ "name": "Sakalti/ultiima-14B-v0.3",
1856
+ "scores": {
1857
+ "average": 40.38,
1858
+ "IFEval": 70.4,
1859
+ "BBH": 48.45,
1860
+ "MATH": 39.65,
1861
+ "GPQA": 16.89,
1862
+ "MUSR": 18.73,
1863
+ "MMLU_PRO": 48.18,
1864
+ "Architecture": "Qwen2ForCausalLM",
1865
+ "Parameters": "14.766B",
1866
+ "Chat_Template": "No"
1867
+ },
1868
+ "hf_url": "https://huggingface.co/Sakalti/ultiima-14B-v0.3",
1869
+ "known_config": null
1870
+ },
1871
+ {
1872
+ "rank": 86,
1873
+ "name": "ehristoforu/fp4-14b-v1-fix",
1874
+ "scores": {
1875
+ "average": 40.37,
1876
+ "IFEval": 67.42,
1877
+ "BBH": 54.33,
1878
+ "MATH": 42.07,
1879
+ "GPQA": 13.87,
1880
+ "MUSR": 16.18,
1881
+ "MMLU_PRO": 48.37,
1882
+ "Architecture": "LlamaForCausalLM",
1883
+ "Parameters": "14.66B",
1884
+ "Chat_Template": "Yes"
1885
+ },
1886
+ "hf_url": "https://huggingface.co/ehristoforu/fp4-14b-v1-fix",
1887
+ "known_config": null
1888
+ },
1889
+ {
1890
+ "rank": 87,
1891
+ "name": "FINGU-AI/Chocolatine-Fusion-14B",
1892
+ "scores": {
1893
+ "average": 40.36,
1894
+ "IFEval": 69.49,
1895
+ "BBH": 48.6,
1896
+ "MATH": 38.52,
1897
+ "GPQA": 16.22,
1898
+ "MUSR": 21.99,
1899
+ "MMLU_PRO": 47.35,
1900
+ "Architecture": "Qwen2ForCausalLM",
1901
+ "Parameters": "8.367B",
1902
+ "Chat_Template": "No"
1903
+ },
1904
+ "hf_url": "https://huggingface.co/FINGU-AI/Chocolatine-Fusion-14B",
1905
+ "known_config": null
1906
+ },
1907
+ {
1908
+ "rank": 88,
1909
+ "name": "hotmailuser/QwenSlerp-14B",
1910
+ "scores": {
1911
+ "average": 40.35,
1912
+ "IFEval": 70.25,
1913
+ "BBH": 49.42,
1914
+ "MATH": 38.37,
1915
+ "GPQA": 18.34,
1916
+ "MUSR": 16.83,
1917
+ "MMLU_PRO": 48.89,
1918
+ "Architecture": "Qwen2ForCausalLM",
1919
+ "Parameters": "14.766B",
1920
+ "Chat_Template": "No"
1921
+ },
1922
+ "hf_url": "https://huggingface.co/hotmailuser/QwenSlerp-14B",
1923
+ "known_config": null
1924
+ },
1925
+ {
1926
+ "rank": 89,
1927
+ "name": "Triangle104/Robo-Gutenberg_V1.0",
1928
+ "scores": {
1929
+ "average": 40.35,
1930
+ "IFEval": 60.08,
1931
+ "BBH": 50.29,
1932
+ "MATH": 45.62,
1933
+ "GPQA": 18.12,
1934
+ "MUSR": 19.2,
1935
+ "MMLU_PRO": 48.79,
1936
+ "Architecture": "Qwen2ForCausalLM",
1937
+ "Parameters": "14.77B",
1938
+ "Chat_Template": "No"
1939
+ },
1940
+ "hf_url": "https://huggingface.co/Triangle104/Robo-Gutenberg_V1.0",
1941
+ "known_config": null
1942
+ },
1943
+ {
1944
+ "rank": 90,
1945
+ "name": "Quazim0t0/Adamant-14B-sce",
1946
+ "scores": {
1947
+ "average": 40.32,
1948
+ "IFEval": 68.58,
1949
+ "BBH": 54.97,
1950
+ "MATH": 39.88,
1951
+ "GPQA": 13.42,
1952
+ "MUSR": 16.51,
1953
+ "MMLU_PRO": 48.57,
1954
+ "Architecture": "LlamaForCausalLM",
1955
+ "Parameters": "14.66B",
1956
+ "Chat_Template": "Yes"
1957
+ },
1958
+ "hf_url": "https://huggingface.co/Quazim0t0/Adamant-14B-sce",
1959
+ "known_config": null
1960
+ },
1961
+ {
1962
+ "rank": 91,
1963
+ "name": "Quazim0t0/Phi4Basis-14B-sce",
1964
+ "scores": {
1965
+ "average": 40.31,
1966
+ "IFEval": 65.02,
1967
+ "BBH": 55.67,
1968
+ "MATH": 47.89,
1969
+ "GPQA": 10.51,
1970
+ "MUSR": 14.02,
1971
+ "MMLU_PRO": 48.78,
1972
+ "Architecture": "LlamaForCausalLM",
1973
+ "Parameters": "14.66B",
1974
+ "Chat_Template": "Yes"
1975
+ },
1976
+ "hf_url": "https://huggingface.co/Quazim0t0/Phi4Basis-14B-sce",
1977
+ "known_config": null
1978
+ },
1979
+ {
1980
+ "rank": 92,
1981
+ "name": "Quazim0t0/bloom-14b-stock",
1982
+ "scores": {
1983
+ "average": 40.29,
1984
+ "IFEval": 65.75,
1985
+ "BBH": 55.27,
1986
+ "MATH": 48.11,
1987
+ "GPQA": 10.85,
1988
+ "MUSR": 13.17,
1989
+ "MMLU_PRO": 48.59,
1990
+ "Architecture": "LlamaForCausalLM",
1991
+ "Parameters": "14.66B",
1992
+ "Chat_Template": "Yes"
1993
+ },
1994
+ "hf_url": "https://huggingface.co/Quazim0t0/bloom-14b-stock",
1995
+ "known_config": null
1996
+ },
1997
+ {
1998
+ "rank": 93,
1999
+ "name": "sometimesanotion/Qwen2.5-14B-Vimarckoso-v3-Prose01",
2000
+ "scores": {
2001
+ "average": 40.28,
2002
+ "IFEval": 68.72,
2003
+ "BBH": 47.71,
2004
+ "MATH": 39.95,
2005
+ "GPQA": 18.23,
2006
+ "MUSR": 19.56,
2007
+ "MMLU_PRO": 47.5,
2008
+ "Architecture": "Qwen2ForCausalLM",
2009
+ "Parameters": "14B",
2010
+ "Chat_Template": "No"
2011
+ },
2012
+ "hf_url": "https://huggingface.co/sometimesanotion/Qwen2.5-14B-Vimarckoso-v3-Prose01",
2013
+ "known_config": null
2014
+ },
2015
+ {
2016
+ "rank": 94,
2017
+ "name": "Quazim0t0/Halo-14B-sce",
2018
+ "scores": {
2019
+ "average": 40.26,
2020
+ "IFEval": 67.54,
2021
+ "BBH": 55.27,
2022
+ "MATH": 42.9,
2023
+ "GPQA": 12.98,
2024
+ "MUSR": 14.24,
2025
+ "MMLU_PRO": 48.63,
2026
+ "Architecture": "LlamaForCausalLM",
2027
+ "Parameters": "14.66B",
2028
+ "Chat_Template": "Yes"
2029
+ },
2030
+ "hf_url": "https://huggingface.co/Quazim0t0/Halo-14B-sce",
2031
+ "known_config": null
2032
+ },
2033
+ {
2034
+ "rank": 95,
2035
+ "name": "prithivMLmods/Calcium-Opus-14B-Elite2",
2036
+ "scores": {
2037
+ "average": 40.25,
2038
+ "IFEval": 61.76,
2039
+ "BBH": 46.81,
2040
+ "MATH": 46.9,
2041
+ "GPQA": 16,
2042
+ "MUSR": 22.24,
2043
+ "MMLU_PRO": 47.79,
2044
+ "Architecture": "Qwen2ForCausalLM",
2045
+ "Parameters": "14.766B",
2046
+ "Chat_Template": "No"
2047
+ },
2048
+ "hf_url": "https://huggingface.co/prithivMLmods/Calcium-Opus-14B-Elite2",
2049
+ "known_config": null
2050
+ },
2051
+ {
2052
+ "rank": 96,
2053
+ "name": "SicariusSicariiStuff/Impish_QWEN_14B-1M",
2054
+ "scores": {
2055
+ "average": 40.24,
2056
+ "IFEval": 78.68,
2057
+ "BBH": 47.22,
2058
+ "MATH": 39.65,
2059
+ "GPQA": 13.42,
2060
+ "MUSR": 17.52,
2061
+ "MMLU_PRO": 44.93,
2062
+ "Architecture": "Qwen2ForCausalLM",
2063
+ "Parameters": "14.77B",
2064
+ "Chat_Template": "Yes"
2065
+ },
2066
+ "hf_url": "https://huggingface.co/SicariusSicariiStuff/Impish_QWEN_14B-1M",
2067
+ "known_config": null
2068
+ },
2069
+ {
2070
+ "rank": 97,
2071
+ "name": "bunnycore/Phi-4-Stock-Ex",
2072
+ "scores": {
2073
+ "average": 40.22,
2074
+ "IFEval": 65.75,
2075
+ "BBH": 55.2,
2076
+ "MATH": 40.86,
2077
+ "GPQA": 13.42,
2078
+ "MUSR": 17.46,
2079
+ "MMLU_PRO": 48.61,
2080
+ "Architecture": "LlamaForCausalLM",
2081
+ "Parameters": "14.66B",
2082
+ "Chat_Template": "Yes"
2083
+ },
2084
+ "hf_url": "https://huggingface.co/bunnycore/Phi-4-Stock-Ex",
2085
+ "known_config": null
2086
+ },
2087
+ {
2088
+ "rank": 98,
2089
+ "name": "sometimesanotion/Qwenvergence-14B-qv256",
2090
+ "scores": {
2091
+ "average": 40.12,
2092
+ "IFEval": 70.06,
2093
+ "BBH": 47.08,
2094
+ "MATH": 38.97,
2095
+ "GPQA": 17.11,
2096
+ "MUSR": 21.07,
2097
+ "MMLU_PRO": 46.42,
2098
+ "Architecture": "Qwen2ForCausalLM",
2099
+ "Parameters": "14B",
2100
+ "Chat_Template": "No"
2101
+ },
2102
+ "hf_url": "https://huggingface.co/sometimesanotion/Qwenvergence-14B-qv256",
2103
+ "known_config": null
2104
+ },
2105
+ {
2106
+ "rank": 99,
2107
+ "name": "tensopolis/virtuoso-small-tensopolis-v2",
2108
+ "scores": {
2109
+ "average": 40.11,
2110
+ "IFEval": 80.2,
2111
+ "BBH": 50.23,
2112
+ "MATH": 38.75,
2113
+ "GPQA": 10.51,
2114
+ "MUSR": 14.84,
2115
+ "MMLU_PRO": 46.15,
2116
+ "Architecture": "Qwen2ForCausalLM",
2117
+ "Parameters": "14.77B",
2118
+ "Chat_Template": "Yes"
2119
+ },
2120
+ "hf_url": "https://huggingface.co/tensopolis/virtuoso-small-tensopolis-v2",
2121
+ "known_config": null
2122
+ }
2123
  ]
 
2125
 
2126
 
2127