Column schema for this results dump:

| column | dtype | stats |
|---|---|---|
| model | string | length 4–89 |
| revision | string | 1 distinct value |
| model_sha | string | length 0–40 |
| results | dict | per-benchmark scores |
| commit | string | length 40 |
| date | timestamp[ns] | |
| score | float64 | 21.8–83 |
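A minimal sketch of how rows with this schema could be loaded and inspected with the Hugging Face `datasets` library; the repository id and split name below are placeholders (this dump does not name its source repository), so substitute the actual dataset before running.

```python
# Sketch only: "your-namespace/leaderboard-results" and the "train" split are
# assumed placeholders, not the confirmed source of this dump.
from datasets import load_dataset

ds = load_dataset("your-namespace/leaderboard-results", split="train")

# Each row carries the columns described above: model, revision, model_sha,
# results (a dict of per-benchmark scores), commit, date, and score.
row = ds[0]
print(row["model"], row["score"])
print(row["results"])  # e.g. {"arc:challenge": ..., "hellaswag": ..., ...}
```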
All rows below share `revision = main`, `commit = 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3`, and `date = 2023-09-20T10:22:33`; the `results` dict is expanded into one column per benchmark.

| model | model_sha | arc:challenge | hellaswag | hendrycksTest | truthfulqa:mc | score |
|---|---|---|---|---|---|---|
| kingbri/airolima-chronos-grad-l2-13B | d2ad57b2b50361485b2b04e59a989161599cb08b | 59.6 | 83.5 | 55.8 | 44.7 | 60.9 |
| TFLai/llama-7b-4bit-alpaca | 74fddbcad2dfc24d476efda6bf97b08194625e91 | 52.6 | 77.8 | 34.6 | 34.2 | 49.8 |
| TFLai/OpenOrca-Platypus2-13B-QLoRA-0.80-epoch | 39ae03b77b4f1d453b02468ce6bb4ddeb6526b77 | 62.4 | 83 | 59.4 | 52.2 | 64.2 |
| TFLai/EnsembleV5-Nova-13B | 7ba38d309709d35149b4a18f94096875885035ae | 62.7 | 82.6 | 56.8 | 49.9 | 63 |
| TFLai/Orca-Nova-13B | 5a6c3686749ecb76971a915403da8c07a98078a6 | 62.4 | 82.5 | 57.4 | 46 | 62.1 |
| TFLai/Nous-Hermes-Platypus2-13B-QLoRA-0.80-epoch | 6e49d3d205e7f2e15c01ace0901da8931bbaab3b | 59.9 | 83.3 | 56.7 | 51.1 | 62.7 |
| TFLai/pythia-2.8b-4bit-alpaca | 40e84b6d38aac92a0302c2a682498794ef0fd901 | 34.7 | 59 | 25.5 | 39.1 | 39.6 |
| TFLai/Platypus2-13B-QLoRA-0.80-epoch | 114eb8efd2de1c9eae85d92de490b95c854dfae9 | 57.8 | 81.6 | 55.6 | 39.7 | 58.7 |
| TFLai/gpt-neox-20b-4bit-alpaca | | 43.9 | 67.4 | 25.1 | 35.7 | 43 |
| TFLai/Athena-Platypus2-13B-QLora-0.80-epoch | f7b6c11b4df16079dfdd1e8dd8c489a8835c7cc4 | 56.7 | 80.6 | 55.4 | 53.6 | 61.6 |
| TFLai/Limarp-Platypus2-13B-QLoRA-0.80-epoch | 0a8560232ff73ca3c3f8e217b4517fa6c4f55558 | 60.5 | 82.8 | 56.5 | 44.1 | 61 |
| TFLai/gpt-neo-1.3B-4bit-alpaca | 137d483d1dc757c81c59bd190016f7c5df01f978 | 28.2 | 46.3 | 25.2 | 39.3 | 34.8 |
| TFLai/MythoMix-Platypus2-13B-QLoRA-0.80-epoch | 3d91f63d82abd598d5b80d24d74feb6b00b7d80f | 60.3 | 83.7 | 55.7 | 52.2 | 63 |
| TFLai/llama-2-13b-4bit-alpaca-gpt4 | ccf1ad19b07196fa3fab67261b7a0f9bcf28638f | 57.7 | 81 | 51.8 | 45.7 | 59 |
| TFLai/PuddleJumper-Platypus2-13B-QLoRA-0.80-epoch | 4b5aabc51907e4cba49f373c6dc09a2634f2fb8a | 54.5 | 79.4 | 55.1 | 54.3 | 60.8 |
| TFLai/Luban-Platypus2-13B-QLora-0.80-epoch | 15a99bc147cf9b744cbab7a7c8c5f232cd0c8d10 | 60.2 | 82.2 | 58 | 55.3 | 63.9 |
| TFLai/Stable-Platypus2-13B-QLoRA-0.80-epoch | 0c15b8540335b3e21a976a5fc5c33b47927fea6c | 62.3 | 82.5 | 57.1 | 51.4 | 63.3 |
| TFLai/Nova-13B-50-step | 1a827ccb7f00157b3cc9ce538d61a6ba8d5a65db | 61.6 | 82.3 | 57.3 | 51.5 | 63.2 |
| TFLai/OpenOrcaPlatypus2-Platypus2-13B-QLora-0.80-epoch | 5427ceec420f943a0b011a4d96f3efc292306933 | 59.8 | 82.7 | 57 | 52.9 | 63.1 |
| open-llm-leaderboard/bloom-560m-4bit-alpaca-auto-eval-adapter-applied | 61e0b861d59319a96bba5af8c246e69d82e8e6e6 | 24 | 29.2 | 25.2 | 45.2 | 30.9 |
| TFLai/llama-13b-4bit-alpaca | d717be9d77986fb9100597dc78fbbfbde77bc2b1 | 55.7 | 80.9 | 42.4 | 44.8 | 56 |
| TFLai/MythicalDestroyerV2-Platypus2-13B-QLora-0.80-epoch | ada55b32fe8ed55b7691d997ad2e86f232c91aad | 57.3 | 81.2 | 55.6 | 56 | 62.5 |
| TFLai/OrcaMini-Platypus2-13B-QLoRA-0.80-epoch | 1f81c0439f60d848e3cbc7f06fcd58b5161a8557 | 60.8 | 82.6 | 56.4 | 53.3 | 63.3 |
| TFLai/SpeechlessV1-Nova-13B | fbe6f0e32b5ecf9d75510d0b11a286466f46d79e | 61.8 | 82.7 | 57.7 | 51.4 | 63.4 |
| TFLai/gpt2-turkish-uncased | 4807e7df1dfb9d60c6d98e3cfeff62cb6b9a1579 | 24.5 | 25.1 | 26.6 | 52.3 | 32.1 |
| open-llm-leaderboard/bloomz-1b7-4bit-alpaca-auto-eval-adapter-applied | 7c46d9e7aa05a8f711a93603199f9476742fe9d7 | 29.1 | 47.4 | 31.8 | 41.1 | 37.4 |
| TFLai/Ensemble5-Platypus2-13B-QLora-0.80-epoch | 2af03c3287c60c4ba2fb6afa86c26cf722ab001d | 59.7 | 82.7 | 56.9 | 52.9 | 63 |
| TFLai/Airboros2.1-Platypus2-13B-QLora-0.80-epoch | 45bd1e47218ba2e075e03f6407980eb839e67eb3 | 59 | 82.5 | 54.6 | 47.7 | 61 |
| TFLai/Nova-13B | ae1145f9fa846ab8d39d8b7da888287ef917efb5 | 62.7 | 82.6 | 58 | 51.3 | 63.6 |
| pszemraj/pythia-31m-simplewiki-scratch-bf16 | 4eaec0542e7609fd3f364cb34491f05d7c61a3d0 | 22.8 | 25.6 | 23.1 | 49.7 | 30.3 |
| pszemraj/pythia-31m-simplepile-lite-2048-scratch-2e | 91f011eb99502e667ebc2803f354ce5f5209ccf1 | 21.6 | 25.8 | 25 | 50.6 | 30.8 |
| pszemraj/pythia-31m-simplewiki-2048 | 95d47818055661250b55144c7d9beaf05dc126d8 | 22.2 | 25.6 | 23.1 | 49.4 | 30.1 |
| pszemraj/pythia-31m-KI_v1-2048-scratch | b29a3229f8d5317adeabafeb20677ec7bea9d703 | 23.1 | 25.2 | 23.1 | 51.7 | 30.8 |
| pszemraj/pythia-6.9b-HC3 | c5c60ea656e921e6c5415f6feaebac4dd9b2aa2a | 36.5 | 61.8 | 26.9 | 45 | 42.6 |
| pszemraj/pythia-31m-goodwiki-deduped-2048-scratch | 01a3cd918dd7c233bc0c3c0c948a9a462a5359d1 | 23.1 | 25.7 | 23.1 | 51.3 | 30.8 |
| elinas/chronos-33b | 3c11f81d9180618f13777276b1eb0eb70ab99cf0 | 62.2 | 83.5 | 55.9 | 46.7 | 62.1 |
| elinas/chronos-70b-v2 | 373af41ca0b2855972b8d471fd63e72b63e4c9fc | 68.1 | 86.5 | 68.3 | 53.7 | 69.1 |
| elinas/chronos-13b-v2 | e5d411138e72370c5613dfea0f66ded99f6e62f9 | 58.7 | 82.5 | 53.4 | 50.6 | 61.3 |
| TigerResearch/tigerbot-7b-base | 300831494aa1eb16e59799310a09531f60dcc904 | 47.7 | 72.1 | 45.1 | 42.3 | 51.8 |
| TigerResearch/tigerbot-70b-chat | 7e506c4a056821e5d151a0e46572cd74d04194be | 76.8 | 87.8 | 66.1 | 55.1 | 71.4 |
| TigerResearch/tigerbot-70b-base | 8af85526293eb8625375f3f7a1bab69825176e48 | 62.5 | 83.6 | 65.5 | 52.8 | 66.1 |
| Yhyu13/llama-30B-hf-openassitant | fba493af11a73cf5a2ee7857dd7aecb98c659dc4 | 61.3 | 84.7 | 58.5 | 42.3 | 61.7 |
| Yhyu13/chimera-inst-chat-13b-hf | a6943d2d30d0af904b3321559157d589e60f9e0f | 55.4 | 78.9 | 50.6 | 50.1 | 58.8 |
| huggyllama/llama-13b | bf57045473f207bb1de1ed035ace226f4d9f9bba | 56.1 | 80.9 | 47.6 | 39.5 | 56 |
| huggyllama/llama-30b | 2b1edcdb3c7ced7bce6c1aa75c94545777c3118b | 61.4 | 84.7 | 58.4 | 42.3 | 61.7 |
| huggyllama/llama-65b | 49707c5313d34d1c5a846e29cf2a2a650c22c8ee | 63.5 | 86.1 | 63.9 | 43.4 | 64.2 |
| NYTK/PULI-GPTrio | c85efce322a0f6d93d64f7b9096525753da6913e | 30.7 | 53.5 | 24.7 | 39 | 37 |
| nthngdy/pythia-owt2-70m-100k | b288893319b6cdce499148f4482043c350116560 | 20.9 | 28.3 | 25 | 45.1 | 29.8 |
| nthngdy/pythia-owt2-70m-50k | 9fce9b8252f7891dbd50299a8c3bd71cd25454db | 21.5 | 28.2 | 25.7 | 44.5 | 30 |
| dhmeltzer/llama-7b-SFT_eli5_wiki65k_1024_r_64_alpha_16_merged | 03ed53d4eb389dcb6c1227e642e5682b8677e7e6 | 53.8 | 78.8 | 46 | 43.3 | 55.5 |
| dhmeltzer/llama-7b-SFT_ds_wiki65k_1024_r_64_alpha_16_merged | 684c4f4612fadae47c2c7db9fe9e9be4aaafc7e2 | 54.3 | 78.2 | 44.6 | 38.6 | 53.9 |
| dhmeltzer/Llama-2-13b-hf-eli5-wiki-1024_r_64_alpha_16 | 6a0a2b6672c7b36c714a66c4a836e0b50c6cb5e6 | 60 | 82.4 | 55.4 | 39.9 | 59.4 |
| dhmeltzer/Llama-2-13b-hf-ds_wiki_1024_full_r_64_alpha_16 | a3ed7416156963f49bf4dc056188e006c0c214d2 | 59 | 82.3 | 55.4 | 35.7 | 58.1 |
| dhmeltzer/Llama-2-13b-hf-ds_eli5_1024_r_64_alpha_16 | 891be2d8f205baa04c8a92f6ab1225f0d0c3e5bd | 60.4 | 82.6 | 55.9 | 43.6 | 60.6 |
| dhmeltzer/llama-7b-SFT-qlora-eli5-wiki_DPO_ds_RM_top_2_1024_r_64_alpha_16 | f1f3b9fdb1e2d8d8fa913d57a8fe15d7bdf72c20 | 54.1 | 78.7 | 45.4 | 43.4 | 55.4 |
| dhmeltzer/llama-7b-SFT_ds_eli5_1024_r_64_alpha_16_merged | 6ca41503b383c654aee8d5496e70fbdfaa33db10 | 53.4 | 77.9 | 43.6 | 40.8 | 53.9 |
| openBuddy/openbuddy-llama2-34b-v11.1-bf16 | 21ac0d26c0097e5ac5b4a757493574b156da7731 | 50 | 71.2 | 55.7 | 53 | 57.5 |
| speechlessai/speechless-codellama-34b-v1.0 | 1d64d871cd56da3031e19bc267ef8bd0b85b9936 | 52.5 | 74.1 | 53.5 | 47.1 | 56.8 |
| speechlessai/speechless-codellama-dolphin-orca-platypus-13b | 25e1c346c2a01588a728307d5c35fbeecd58b51b | 45.8 | 67.7 | 45.9 | 44.7 | 51 |
| gywy/llama2-13b-chinese-v2 | 8f6b11ca4344ac230d6b55defa4e04e60a39f9b5 | 53.9 | 74.6 | 49.7 | 45.4 | 55.9 |
| gywy/llama2-13b-chinese-v1 | c65de036628bb9024a74a85df3cd80aa6ccaf15c | 59.8 | 75.7 | 54.2 | 45.7 | 58.8 |
| Secbone/llama-2-13B-instructed | e676fbd9015beacfba5d71426beace7605200477 | 59.4 | 83.9 | 55.6 | 46.9 | 61.4 |
| PygmalionAI/pygmalion-1.3b | bef2c90128c00ff6f16c0f397463423b7d988e17 | 28.1 | 47 | 24.1 | 37.6 | 34.2 |
| PygmalionAI/pygmalion-6b | 30e2405100eac6bd53f75964cc7345eeafd19f7d | 38.9 | 64.8 | 28 | 40.4 | 43 |
| PygmalionAI/metharme-1.3b | 62ec4ff53042f692ef0661e54f371747214707a4 | 34.4 | 55.9 | 25.1 | 37.7 | 38.3 |
| PygmalionAI/pygmalion-2.7b | 9533805293bc48e8ddfe9dc1940d8cbc5662113e | 32.8 | 54.1 | 23.3 | 37.2 | 36.8 |
| PygmalionAI/mythalion-13b | 24916f62b8243a7e4646ea53eeb45d890cbd308f | 61.3 | 83.8 | 56.5 | 46.6 | 62 |
| PygmalionAI/pygmalion-350m | d65832d913f6b396e2ffb64c373d9383c9da9303 | 25 | 37.8 | 25.7 | 40.4 | 32.2 |
| euclaise/falcon_1b_stage1 | f85d91ff3f6cadc93f7222a19b9c4930c8842366 | 35.2 | 62.4 | 24.5 | 40 | 40.5 |
| euclaise/gpt-neox-122m-minipile-digits | 3e9187385d31234b04021ddc8b03cbd5cfef9fb4 | 20.7 | 27 | 25.3 | 49.2 | 30.6 |
| euclaise/falcon_1b_stage2 | c3ef73a8c9dc06fae4bfe4460d2f293147aecbb0 | 35.5 | 65.6 | 23.8 | 38.3 | 40.8 |
| LoupGarou/WizardCoder-Guanaco-15B-V1.0 | ab5ea678d63eb2324658dcc8cfae267eabc366ef | 30.5 | 45.6 | 26.8 | 46.4 | 37.3 |
| LoupGarou/WizardCoder-Guanaco-15B-V1.1 | 979531c84ec0b4e1712d6a5cec6907126a21e605 | 32.6 | 45.4 | 25.9 | 42.3 | 36.6 |
| luffycodes/mcq-vicuna-13b-v1.5 | f769a92cfeffe8ee07beee8814ce7eca7cd62805 | 56.2 | 81.1 | 53.4 | 44.1 | 58.7 |
| luffycodes/nash-vicuna-13b-v1dot5-ep2-w-rag-w-simple | 848ef91ab46a72260542283918a971347c6bfa93 | 59.1 | 80.6 | 56.1 | 51.3 | 61.8 |
| luffycodes/mcq-hal-vicuna-13b-v1.5 | bb3029bce8347b09c2fd6908475b195bcabe53e3 | 56.1 | 80.7 | 52.9 | 45.1 | 58.7 |
| uukuguy/speechless-llama2-hermes-orca-platypus-13b | f227ad33b16726b099e35e5dc47f4db1f22665a7 | 60.9 | 83.5 | 59.4 | 54.3 | 64.5 |
| uukuguy/speechless-codellama-orca-13b | 6fdfeabe817235df3d560a6e6465c3722bc3a4ba | 44.4 | 65.2 | 43.5 | 45.9 | 49.8 |
| uukuguy/speechless-hermes-coig-lite-13b | 2ee11d9c7acaefb723796227e2ad099b165f0dd9 | 59.6 | 82.3 | 55.3 | 47.6 | 61.2 |
| uukuguy/speechless-codellama-orca-airoboros-13b-0.10e | dbd1d1f7ad7b6b359f8246141650b25ca0bb8cbb | 29.3 | 25.7 | 25.7 | 49.6 | 32.6 |
| uukuguy/speechless-llama2-hermes-orca-platypus-wizardlm-13b | 4410d8a20871927e9fe981c01bc8314b451b2fcd | 59.6 | 82.6 | 58.3 | 56 | 64.1 |
| uukuguy/speechless-orca-platypus-coig-lite-4k-0.5e-13b | 081d1da5cfa2f6ad43abdf4fb5e41f8ec5846224 | 58 | 80.2 | 57.3 | 48 | 60.9 |
| uukuguy/speechless-llama2-13b | 5341819accf229a625b163b5611aa973cf9f9718 | 62.2 | 81.9 | 58.7 | 55.6 | 64.6 |
| uukuguy/speechless-codellama-platypus-13b | 7a771bd8899b9ef4ba9680e96f84dc85810a67d6 | 46.2 | 68.9 | 44.5 | 45 | 51.2 |
| uukuguy/speechless-orca-platypus-coig-lite-2k-0.6e-13b | 65214c9923d55795ecd6e7f9e0fcee5ba5f26929 | 59.9 | 80.8 | 58.3 | 48 | 61.8 |
| uukuguy/speechless-codellama-dolphin-orca-platypus-13b | 0c41023f8f665946a2c46c3823afee431408bcbd | 44.8 | 68.6 | 44 | 46.3 | 50.9 |
| uukuguy/speechless-codellama-orca-platypus-13b-0.10e | 119abfc73f9ce541a40779f167fe21e95faed4e8 | 28.8 | 25.9 | 25.4 | 49.3 | 32.3 |
| uukuguy/speechless-llama2-luban-orca-platypus-13b | 908cfb670611875b52045c4bab81cff53f0279a7 | 62.5 | 82.8 | 59.2 | 54.7 | 64.8 |
| uukuguy/speechless-orca-platypus-coig-lite-4k-0.6e-13b | 6bf4cf6211489bdbea70585a4a5c0f39deefb4e5 | 58.8 | 79.9 | 56.8 | 48.3 | 61 |
| aiplanet/effi-7b | d58c62ee27cae60392bd0bd53e1fd05ea82e273b | 55.1 | 78.1 | 35.9 | 39.7 | 52.2 |
| aiplanet/effi-13b | 1b4b4c72dd41ddc1a80f2db6c85170e50a91ed7a | 53.3 | 81.2 | 53.6 | 44.9 | 58.2 |
| rishiraj/bloom-560m-guanaco | 17b886fe53bdb4cea75a7f40da1e8e987124edef | 27.9 | 26.1 | 24.5 | 49.4 | 32 |
| hakurei/lotus-12B | f212b695aabf5dafb5dccf5013ddb765ba1e47d7 | 30.7 | 52.7 | 24.5 | 40.1 | 37 |
| hakurei/instruct-12b | ff4699b502b79c716330b6f761002588a65dcba6 | 42.6 | 66.8 | 26.8 | 32 | 42.1 |
| Weyaxi/ChatAYT-Lora-Assamble-Marcoroni | 51c9b600023cd26c4eb3754b9a89c60dde959ccc | 62.5 | 83.1 | 58.7 | 56.1 | 65.1 |
| Weyaxi/Luban-Marcoroni-13B-v2 | d7c704a08218dcc03963bc08e9113e281c056f53 | 63.5 | 82.9 | 58.7 | 55.6 | 65.2 |
| Weyaxi/Luban-Marcoroni-13B | bf152c36935acd67a9029c017f0c1ff2d7a92314 | 63.7 | 82.9 | 58.7 | 55.6 | 65.2 |
| Weyaxi/llama-2-alpacagpt4-1000step | | 66.4 | 84.5 | 62.7 | 55.6 | 67.3 |
| Weyaxi/Luban-Marcoroni-13B-v3 | 9b68680ed8351ef8ef6948169e69a888af40002e | 63.7 | 82.9 | 58.6 | 55.6 | 65.2 |
| lloorree/jfdslijsijdgis | 1e67eaa4ef618a5a0d8c52e5e107635c706b34c5 | 69.6 | 87.3 | 70 | 59.2 | 71.5 |
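The `score` column is consistent with an unweighted mean of the four benchmark entries in `results`; for the first row, (59.6 + 83.5 + 55.8 + 44.7) / 4 = 60.9, matching the stored score. A minimal sketch of that computation:

```python
# Sketch: recompute the aggregate score as the plain mean of the four
# per-benchmark values, rounded to one decimal as in the table above.
results = {"arc:challenge": 59.6, "hellaswag": 83.5,
           "hendrycksTest": 55.8, "truthfulqa:mc": 44.7}

score = round(sum(results.values()) / len(results), 1)
print(score)  # 60.9, matching the first row's score
```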