Upload folder using huggingface_hub
- .gitattributes +1 -0
- README.md +244 -0
- tinymistral-248m-v3.Q4_0.gguf +3 -0
.gitattributes
CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+tinymistral-248m-v3.Q4_0.gguf filter=lfs diff=lfs merge=lfs -text
README.md
ADDED
@@ -0,0 +1,244 @@
---
language:
- en
license: apache-2.0
datasets:
- Locutusque/TM-DATA-V2
- LLM360/TxT360
- mlfoundations/dclm-baseline-1.0
- Skylion007/openwebtext
- JeanKaddour/minipile
- eminorhan/gutenberg_en
model-index:
- name: TinyMistral-248M-v3
  results:
  - task:
      type: text-generation
      name: Text Generation
    dataset:
      name: IFEval (0-Shot)
      type: HuggingFaceH4/ifeval
      args:
        num_few_shot: 0
    metrics:
    - type: inst_level_strict_acc and prompt_level_strict_acc
      value: 16.39
      name: strict accuracy
    source:
      url: https://huggingface.co/spaces/open-llm-leaderboard/open_llm_leaderboard?query=M4-ai/TinyMistral-248M-v3
      name: Open LLM Leaderboard
  - task:
      type: text-generation
      name: Text Generation
    dataset:
      name: BBH (3-Shot)
      type: BBH
      args:
        num_few_shot: 3
    metrics:
    - type: acc_norm
      value: 1.78
      name: normalized accuracy
    source:
      url: https://huggingface.co/spaces/open-llm-leaderboard/open_llm_leaderboard?query=M4-ai/TinyMistral-248M-v3
      name: Open LLM Leaderboard
  - task:
      type: text-generation
      name: Text Generation
    dataset:
      name: MATH Lvl 5 (4-Shot)
      type: hendrycks/competition_math
      args:
        num_few_shot: 4
    metrics:
    - type: exact_match
      value: 0.0
      name: exact match
    source:
      url: https://huggingface.co/spaces/open-llm-leaderboard/open_llm_leaderboard?query=M4-ai/TinyMistral-248M-v3
      name: Open LLM Leaderboard
  - task:
      type: text-generation
      name: Text Generation
    dataset:
      name: GPQA (0-shot)
      type: Idavidrein/gpqa
      args:
        num_few_shot: 0
    metrics:
    - type: acc_norm
      value: 0.0
      name: acc_norm
    source:
      url: https://huggingface.co/spaces/open-llm-leaderboard/open_llm_leaderboard?query=M4-ai/TinyMistral-248M-v3
      name: Open LLM Leaderboard
  - task:
      type: text-generation
      name: Text Generation
    dataset:
      name: MuSR (0-shot)
      type: TAUR-Lab/MuSR
      args:
        num_few_shot: 0
    metrics:
    - type: acc_norm
      value: 5.15
      name: acc_norm
    source:
      url: https://huggingface.co/spaces/open-llm-leaderboard/open_llm_leaderboard?query=M4-ai/TinyMistral-248M-v3
      name: Open LLM Leaderboard
  - task:
      type: text-generation
      name: Text Generation
    dataset:
      name: MMLU-PRO (5-shot)
      type: TIGER-Lab/MMLU-Pro
      config: main
      split: test
      args:
        num_few_shot: 5
    metrics:
    - type: acc
      value: 1.47
      name: accuracy
    source:
      url: https://huggingface.co/spaces/open-llm-leaderboard/open_llm_leaderboard?query=M4-ai/TinyMistral-248M-v3
      name: Open LLM Leaderboard
---

Still in training. Trained on roughly 21 billion tokens so far.

| Tasks |Version| Filter |n-shot| Metric | | Value | |Stderr|
|----------------------------------------|-------|----------------|-----:|-----------|---|------:|---|-----:|
|Open LLM Leaderboard | N/A| | | | | | | |
| - arc_challenge | 1|none | 25|acc |↑ | 0.2005|± |0.0117|
| | |none | 25|acc_norm |↑ | 0.2406|± |0.0125|
| - gsm8k | 3|flexible-extract| 5|exact_match|↑ | 0.0083|± |0.0025|
| | |strict-match | 5|exact_match|↑ | 0.0000|± |0.0000|
| - hellaswag | 1|none | 10|acc |↑ | 0.2724|± |0.0044|
| | |none | 10|acc_norm |↑ | 0.2838|± |0.0045|
| - mmlu | 2|none | |acc |↑ | 0.2290|± |0.0035|
| - humanities | 2|none | |acc |↑ | 0.2380|± |0.0062|
| - formal_logic | 1|none | 5|acc |↑ | 0.2460|± |0.0385|
| - high_school_european_history | 1|none | 5|acc |↑ | 0.1818|± |0.0301|
| - high_school_us_history | 1|none | 5|acc |↑ | 0.2647|± |0.0310|
| - high_school_world_history | 1|none | 5|acc |↑ | 0.2911|± |0.0296|
| - international_law | 1|none | 5|acc |↑ | 0.2149|± |0.0375|
| - jurisprudence | 1|none | 5|acc |↑ | 0.2685|± |0.0428|
| - logical_fallacies | 1|none | 5|acc |↑ | 0.2209|± |0.0326|
| - moral_disputes | 1|none | 5|acc |↑ | 0.2457|± |0.0232|
| - moral_scenarios | 1|none | 5|acc |↑ | 0.2369|± |0.0142|
| - philosophy | 1|none | 5|acc |↑ | 0.1865|± |0.0221|
| - prehistory | 1|none | 5|acc |↑ | 0.1975|± |0.0222|
| - professional_law | 1|none | 5|acc |↑ | 0.2432|± |0.0110|
| - world_religions | 1|none | 5|acc |↑ | 0.3099|± |0.0355|
| - other | 2|none | |acc |↑ | 0.2375|± |0.0076|
| - business_ethics | 1|none | 5|acc |↑ | 0.3200|± |0.0469|
| - clinical_knowledge | 1|none | 5|acc |↑ | 0.2226|± |0.0256|
| - college_medicine | 1|none | 5|acc |↑ | 0.1965|± |0.0303|
| - global_facts | 1|none | 5|acc |↑ | 0.1800|± |0.0386|
| - human_aging | 1|none | 5|acc |↑ | 0.3004|± |0.0308|
| - management | 1|none | 5|acc |↑ | 0.1942|± |0.0392|
| - marketing | 1|none | 5|acc |↑ | 0.2735|± |0.0292|
| - medical_genetics | 1|none | 5|acc |↑ | 0.3000|± |0.0461|
| - miscellaneous | 1|none | 5|acc |↑ | 0.2478|± |0.0154|
| - nutrition | 1|none | 5|acc |↑ | 0.2222|± |0.0238|
| - professional_accounting | 1|none | 5|acc |↑ | 0.2021|± |0.0240|
| - professional_medicine | 1|none | 5|acc |↑ | 0.1912|± |0.0239|
| - virology | 1|none | 5|acc |↑ | 0.2590|± |0.0341|
| - social sciences | 2|none | |acc |↑ | 0.2203|± |0.0075|
| - econometrics | 1|none | 5|acc |↑ | 0.2368|± |0.0400|
| - high_school_geography | 1|none | 5|acc |↑ | 0.2020|± |0.0286|
| - high_school_government_and_politics| 1|none | 5|acc |↑ | 0.1865|± |0.0281|
| - high_school_macroeconomics | 1|none | 5|acc |↑ | 0.2205|± |0.0210|
| - high_school_microeconomics | 1|none | 5|acc |↑ | 0.2143|± |0.0267|
| - high_school_psychology | 1|none | 5|acc |↑ | 0.1908|± |0.0168|
| - human_sexuality | 1|none | 5|acc |↑ | 0.2672|± |0.0388|
| - professional_psychology | 1|none | 5|acc |↑ | 0.2386|± |0.0172|
| - public_relations | 1|none | 5|acc |↑ | 0.1727|± |0.0362|
| - security_studies | 1|none | 5|acc |↑ | 0.2367|± |0.0272|
| - sociology | 1|none | 5|acc |↑ | 0.2488|± |0.0306|
| - us_foreign_policy | 1|none | 5|acc |↑ | 0.2600|± |0.0441|
| - stem | 2|none | |acc |↑ | 0.2157|± |0.0073|
| - abstract_algebra | 1|none | 5|acc |↑ | 0.2200|± |0.0416|
| - anatomy | 1|none | 5|acc |↑ | 0.1778|± |0.0330|
| - astronomy | 1|none | 5|acc |↑ | 0.1908|± |0.0320|
| - college_biology | 1|none | 5|acc |↑ | 0.2778|± |0.0375|
| - college_chemistry | 1|none | 5|acc |↑ | 0.2200|± |0.0416|
| - college_computer_science | 1|none | 5|acc |↑ | 0.2100|± |0.0409|
| - college_mathematics | 1|none | 5|acc |↑ | 0.2100|± |0.0409|
| - college_physics | 1|none | 5|acc |↑ | 0.2157|± |0.0409|
| - computer_security | 1|none | 5|acc |↑ | 0.2700|± |0.0446|
| - conceptual_physics | 1|none | 5|acc |↑ | 0.2638|± |0.0288|
| - electrical_engineering | 1|none | 5|acc |↑ | 0.2483|± |0.0360|
| - elementary_mathematics | 1|none | 5|acc |↑ | 0.2037|± |0.0207|
| - high_school_biology | 1|none | 5|acc |↑ | 0.1774|± |0.0217|
| - high_school_chemistry | 1|none | 5|acc |↑ | 0.2020|± |0.0282|
| - high_school_computer_science | 1|none | 5|acc |↑ | 0.2500|± |0.0435|
| - high_school_mathematics | 1|none | 5|acc |↑ | 0.2148|± |0.0250|
| - high_school_physics | 1|none | 5|acc |↑ | 0.2053|± |0.0330|
| - high_school_statistics | 1|none | 5|acc |↑ | 0.1481|± |0.0242|
| - machine_learning | 1|none | 5|acc |↑ | 0.3125|± |0.0440|
| - truthfulqa_gen | 3|none | 0|bleu_acc |↑ | 0.2362|± |0.0149|
| | |none | 0|bleu_diff |↑ |-1.0138|± |0.2569|
| | |none | 0|bleu_max |↑ | 7.9522|± |0.4088|
| | |none | 0|rouge1_acc |↑ | 0.2595|± |0.0153|
| | |none | 0|rouge1_diff|↑ |-1.9129|± |0.4349|
| | |none | 0|rouge1_max |↑ |21.7885|± |0.7307|
| | |none | 0|rouge2_acc |↑ | 0.1200|± |0.0114|
| | |none | 0|rouge2_diff|↑ |-1.9771|± |0.3475|
| | |none | 0|rouge2_max |↑ | 9.0199|± |0.5842|
| | |none | 0|rougeL_acc |↑ | 0.2570|± |0.0153|
| | |none | 0|rougeL_diff|↑ |-1.8812|± |0.4185|
| | |none | 0|rougeL_max |↑ |19.6284|± |0.6850|
| - truthfulqa_mc1 | 2|none | 0|acc |↑ | 0.1983|± |0.0140|
| - truthfulqa_mc2 | 2|none | 0|acc |↑ | 0.3861|± |0.0147|
| - winogrande | 1|none | 5|acc |↑ | 0.4972|± |0.0141|

| Groups |Version|Filter|n-shot|Metric| |Value | |Stderr|
|-------------------|------:|------|------|------|---|-----:|---|-----:|
| - mmlu | 2|none | |acc |↑ |0.2290|± |0.0035|
| - humanities | 2|none | |acc |↑ |0.2380|± |0.0062|
| - other | 2|none | |acc |↑ |0.2375|± |0.0076|
| - social sciences| 2|none | |acc |↑ |0.2203|± |0.0075|
| - stem | 2|none | |acc |↑ |0.2157|± |0.0073|

| Tasks |Version|Filter|n-shot| Metric | |Value | |Stderr|
|---------------------------------|------:|------|-----:|--------|---|-----:|---|-----:|
|agieval_nous | 0|none | |acc_norm|↑ |0.2133|± |0.0081|
| - agieval_aqua_rat | 1|none | 0|acc |↑ |0.2047|± |0.0254|
| | |none | 0|acc_norm|↑ |0.1969|± |0.0250|
| - agieval_logiqa_en | 1|none | 0|acc |↑ |0.2043|± |0.0158|
| | |none | 0|acc_norm|↑ |0.2304|± |0.0165|
| - agieval_lsat_ar | 1|none | 0|acc |↑ |0.1739|± |0.0250|
| | |none | 0|acc_norm|↑ |0.1957|± |0.0262|
| - agieval_lsat_lr | 1|none | 0|acc |↑ |0.1549|± |0.0160|
| | |none | 0|acc_norm|↑ |0.1608|± |0.0163|
| - agieval_lsat_rc | 1|none | 0|acc |↑ |0.1636|± |0.0226|
| | |none | 0|acc_norm|↑ |0.2119|± |0.0250|
| - agieval_sat_en | 1|none | 0|acc |↑ |0.2670|± |0.0309|
| | |none | 0|acc_norm|↑ |0.2621|± |0.0307|
| - agieval_sat_en_without_passage| 1|none | 0|acc |↑ |0.2670|± |0.0309|
| | |none | 0|acc_norm|↑ |0.2621|± |0.0307|
| - agieval_sat_math | 1|none | 0|acc |↑ |0.2182|± |0.0279|
| | |none | 0|acc_norm|↑ |0.2318|± |0.0285|
|arc_challenge | 1|none | 0|acc |↑ |0.1945|± |0.0116|
| | |none | 0|acc_norm|↑ |0.2372|± |0.0124|
|truthfulqa_mc2 | 2|none | 0|acc |↑ |0.3861|± |0.0147|

| Groups |Version|Filter|n-shot| Metric | |Value | |Stderr|
|------------|------:|------|------|--------|---|-----:|---|-----:|
|agieval_nous| 0|none | |acc_norm|↑ |0.2133|± |0.0081|
# [Open LLM Leaderboard Evaluation Results](https://huggingface.co/spaces/open-llm-leaderboard/open_llm_leaderboard)
Detailed results can be found [here](https://huggingface.co/datasets/open-llm-leaderboard/details_M4-ai__TinyMistral-248M-v3)

| Metric |Value|
|-------------------|----:|
|Avg. | 4.13|
|IFEval (0-Shot) |16.39|
|BBH (3-Shot) | 1.78|
|MATH Lvl 5 (4-Shot)| 0.00|
|GPQA (0-shot) | 0.00|
|MuSR (0-shot) | 5.15|
|MMLU-PRO (5-shot) | 1.47|

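To inspect the per-task leaderboard results programmatically, a minimal Python sketch is shown below. It assumes the `datasets` library and network access; the repository id comes from the link above, while the config and split handling is kept generic because their exact names vary by leaderboard version.

```python
# Minimal sketch: discover and load the per-task splits of the details dataset
# linked above. Config/split names are looked up at runtime rather than hard-coded.
from datasets import get_dataset_config_names, load_dataset

repo = "open-llm-leaderboard/details_M4-ai__TinyMistral-248M-v3"

configs = get_dataset_config_names(repo)
print(configs)  # typically one config per evaluated task

# Load the first config; the returned DatasetDict lists the available splits.
ds_dict = load_dataset(repo, configs[0])
print(ds_dict)

first_split = next(iter(ds_dict.values()))
print(first_split[0])  # one evaluated example with its recorded scores
```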
tinymistral-248m-v3.Q4_0.gguf
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:51729500f91b5d89e56b0d4952af4401777c4437a0a48ea9cbaabd9e22d78ddb
size 148776608
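
A minimal sketch of running the uploaded Q4_0 file locally, assuming `llama-cpp-python` is installed and the GGUF has been fetched through Git LFS or `huggingface_hub`; the prompt and sampling settings are illustrative only, not part of this repository.

```python
# Minimal sketch: load the Q4_0 quant with llama-cpp-python and generate a short
# completion. Assumes `pip install llama-cpp-python` and that the GGUF file added
# in this commit has already been downloaded next to this script.
from llama_cpp import Llama

llm = Llama(
    model_path="tinymistral-248m-v3.Q4_0.gguf",  # file uploaded in this commit
    n_ctx=2048,      # context window; adjust to taste
    verbose=False,
)

out = llm("Once upon a time", max_tokens=64, temperature=0.8)
print(out["choices"][0]["text"])
```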