puneeshkhanna
committed on
Commit
•
ff07b22
1
Parent(s):
44788f8
Update README.md
Browse files
README.md
CHANGED
@@ -31,7 +31,7 @@ Falcon3-10B-Base supports 4 languages (english, french, spanish, portuguese) and
|
|
31 |
- Use SwiGLU and RMSNorm
|
32 |
- 32K context length
|
33 |
- 131K vocab size
|
34 |
-
- Depth-up-scaled from **Falcon3-7B-Base** with 2
|
35 |
- Supports EN, FR, ES, PT
|
36 |
- Developed by [Technology Innovation Institute](https://www.tii.ae)
|
37 |
- License: TII Falcon-LLM License 2.0
|
@@ -87,24 +87,24 @@ We report in the following table our internal pipeline benchmarks:
|
|
87 |
<tr>
|
88 |
<td rowspan="3">General</td>
|
89 |
<td>MMLU (5-shot)</td>
|
90 |
-
<td>
|
91 |
<td>69.6</td>
|
92 |
<td>68.8</td>
|
93 |
-
<td>73.1</td>
|
94 |
</tr>
|
95 |
<tr>
|
96 |
<td>MMLU-PRO (5-shot)</td>
|
97 |
-
<td>
|
98 |
<td>39.3</td>
|
99 |
<td>34.7</td>
|
100 |
-
<td>42.5</td>
|
101 |
</tr>
|
102 |
<tr>
|
103 |
<td>IFEval</td>
|
104 |
-
<td>
|
105 |
<td>29.1</td>
|
106 |
<td>16.1</td>
|
107 |
-
<td>36.4</td>
|
108 |
</tr>
|
109 |
<tr>
|
110 |
<td rowspan="2">Math</td>
|
@@ -112,72 +112,72 @@ We report in the following table our internal pipeline benchmarks:
|
|
112 |
<td>69.1</td>
|
113 |
<td>63.8</td>
|
114 |
<td>55.3</td>
|
115 |
-
<td>81.4</td>
|
116 |
</tr>
|
117 |
<tr>
|
118 |
<td>MATH(4-shot)</td>
|
119 |
-
<td>
|
120 |
<td>9.2</td>
|
121 |
<td>4.9</td>
|
122 |
-
<td>22.9</td>
|
123 |
</tr>
|
124 |
<tr>
|
125 |
<td rowspan="4">Reasoning</td>
|
126 |
<td>Arc Challenge (25-shot)</td>
|
127 |
-
<td>
|
128 |
-
<td>
|
129 |
-
<td>
|
130 |
-
<td>
|
131 |
</tr>
|
132 |
<tr>
|
133 |
<td>GPQA (0-shot)</td>
|
134 |
-
<td>
|
135 |
-
<td>36.6</td>
|
136 |
<td>28.8</td>
|
137 |
<td>34.1</td>
|
138 |
</tr>
|
139 |
<tr>
|
140 |
<td>MUSR (0-shot)</td>
|
141 |
-
<td>
|
142 |
<td>43.3</td>
|
143 |
<td>39.2</td>
|
144 |
<td>44.2</td>
|
145 |
</tr>
|
146 |
<tr>
|
147 |
<td>BBH (3-shot)</td>
|
148 |
-
<td>
|
149 |
<td>51.3</td>
|
150 |
<td>50.2</td>
|
151 |
-
<td>59.7</td>
|
152 |
</tr>
|
153 |
<tr>
|
154 |
<td rowspan="4">CommonSense Understanding</td>
|
155 |
<td>PIQA (0-shot)</td>
|
156 |
-
<td>
|
157 |
-
<td>
|
158 |
-
<td>
|
159 |
-
<td>79.
|
160 |
</tr>
|
161 |
<tr>
|
162 |
<td>SciQ (0-shot)</td>
|
163 |
-
<td>97.
|
164 |
-
<td>95.
|
165 |
-
<td>
|
166 |
-
<td>
|
167 |
</tr>
|
168 |
<tr>
|
169 |
<td>Winogrande (0-shot)</td>
|
170 |
-
<td>74.2</td>
|
171 |
<td>72.7</td>
|
172 |
<td>73.2</td>
|
173 |
<td>73.6</td>
|
174 |
</tr>
|
175 |
<tr>
|
176 |
<td>OpenbookQA (0-shot)</td>
|
177 |
-
<td>
|
178 |
-
<td>
|
179 |
-
<td>
|
180 |
-
<td>
|
181 |
</tr>
|
182 |
</tbody>
|
183 |
</table>
|
|
|
31 |
- Use SwiGLU and RMSNorm
|
32 |
- 32K context length
|
33 |
- 131K vocab size
|
34 |
+
- Depth-up-scaled from **Falcon3-7B-Base** with 2 Teratokens of datasets comprising web, code, STEM, high quality and multilingual data using 2048 H100 GPU chips
|
35 |
- Supports EN, FR, ES, PT
|
36 |
- Developed by [Technology Innovation Institute](https://www.tii.ae)
|
37 |
- License: TII Falcon-LLM License 2.0
|
|
|
87 |
<tr>
|
88 |
<td rowspan="3">General</td>
|
89 |
<td>MMLU (5-shot)</td>
|
90 |
+
<td>70.8</td>
|
91 |
<td>69.6</td>
|
92 |
<td>68.8</td>
|
93 |
+
<td><b>73.1</b></td>
|
94 |
</tr>
|
95 |
<tr>
|
96 |
<td>MMLU-PRO (5-shot)</td>
|
97 |
+
<td>41.4</td>
|
98 |
<td>39.3</td>
|
99 |
<td>34.7</td>
|
100 |
+
<td><b>42.5</b></td>
|
101 |
</tr>
|
102 |
<tr>
|
103 |
<td>IFEval</td>
|
104 |
+
<td>21.2</td>
|
105 |
<td>29.1</td>
|
106 |
<td>16.1</td>
|
107 |
+
<td><b>36.4</b></td>
|
108 |
</tr>
|
109 |
<tr>
|
110 |
<td rowspan="2">Math</td>
|
|
|
112 |
<td>69.1</td>
|
113 |
<td>63.8</td>
|
114 |
<td>55.3</td>
|
115 |
+
<td><b>81.4</b></td>
|
116 |
</tr>
|
117 |
<tr>
|
118 |
<td>MATH(4-shot)</td>
|
119 |
+
<td>10.5</td>
|
120 |
<td>9.2</td>
|
121 |
<td>4.9</td>
|
122 |
+
<td><b>22.9</b></td>
|
123 |
</tr>
|
124 |
<tr>
|
125 |
<td rowspan="4">Reasoning</td>
|
126 |
<td>Arc Challenge (25-shot)</td>
|
127 |
+
<td>67.5</td>
|
128 |
+
<td>61.7</td>
|
129 |
+
<td>64.4</td>
|
130 |
+
<td><b>66.8</b></td>
|
131 |
</tr>
|
132 |
<tr>
|
133 |
<td>GPQA (0-shot)</td>
|
134 |
+
<td>33.4</td>
|
135 |
+
<td><b>36.6</b></td>
|
136 |
<td>28.8</td>
|
137 |
<td>34.1</td>
|
138 |
</tr>
|
139 |
<tr>
|
140 |
<td>MUSR (0-shot)</td>
|
141 |
+
<td><b>45.2</b></td>
|
142 |
<td>43.3</td>
|
143 |
<td>39.2</td>
|
144 |
<td>44.2</td>
|
145 |
</tr>
|
146 |
<tr>
|
147 |
<td>BBH (3-shot)</td>
|
148 |
+
<td>54.3</td>
|
149 |
<td>51.3</td>
|
150 |
<td>50.2</td>
|
151 |
+
<td><b>59.7</b></td>
|
152 |
</tr>
|
153 |
<tr>
|
154 |
<td rowspan="4">CommonSense Understanding</td>
|
155 |
<td>PIQA (0-shot)</td>
|
156 |
+
<td><b>83.0</b></td>
|
157 |
+
<td>80.5</td>
|
158 |
+
<td>82.1</td>
|
159 |
+
<td>79.5</td>
|
160 |
</tr>
|
161 |
<tr>
|
162 |
<td>SciQ (0-shot)</td>
|
163 |
+
<td>97.1</td>
|
164 |
+
<td>95.2</td>
|
165 |
+
<td>95.2</td>
|
166 |
+
<td>93.5</td>
|
167 |
</tr>
|
168 |
<tr>
|
169 |
<td>Winogrande (0-shot)</td>
|
170 |
+
<td><b>74.2</b></td>
|
171 |
<td>72.7</td>
|
172 |
<td>73.2</td>
|
173 |
<td>73.6</td>
|
174 |
</tr>
|
175 |
<tr>
|
176 |
<td>OpenbookQA (0-shot)</td>
|
177 |
+
<td>47.2</td>
|
178 |
+
<td>45.2</td>
|
179 |
+
<td>47.2</td>
|
180 |
+
<td>45.0</td>
|
181 |
</tr>
|
182 |
</tbody>
|
183 |
</table>
|