koderfpv committed on
Commit 48ab5dd · 1 Parent(s): 90ff805

tricky_questions_and_avg_calcs

Files changed (3)
  1. .gitignore +1 -0
  2. app.py +58 -10
  3. data.json +119 -61
.gitignore ADDED
@@ -0,0 +1 @@
+ venv/
app.py CHANGED
@@ -12,6 +12,8 @@ AVERAGE_COLUMN_NAME = "Average"
 SENTIMENT_COLUMN_NAME = "Sentiment"
 UNDERSTANDING_COLUMN_NAME = "Language understanding"
 PHRASEOLOGY_COLUMN_NAME = "Phraseology"
+ TRICKY_QUESTIONS_COLUMN_NAME = "Tricky questions"
+ IMPLICATURES_AVERAGE_COLUMN_NAME = "Implicatures average"
 
 # Function to load data from JSON file
 @st.cache_data
@@ -23,23 +25,63 @@ def load_data(file_path):
 # Function to style the DataFrame
 @st.cache_data
 def style_dataframe(df: pd.DataFrame):
- df[RESULTS_COLUMN_NAME] = df.apply(lambda row: [row[SENTIMENT_COLUMN_NAME], row[UNDERSTANDING_COLUMN_NAME], row[PHRASEOLOGY_COLUMN_NAME]], axis=1)
+ # Calculate Implicatures average from the three columns
+ df[IMPLICATURES_AVERAGE_COLUMN_NAME] = df.apply(
+ lambda row: (row[SENTIMENT_COLUMN_NAME] + row[UNDERSTANDING_COLUMN_NAME] + row[PHRASEOLOGY_COLUMN_NAME]) / 3,
+ axis=1
+ )
+
+ # Calculate Average from all four columns
+ df[AVERAGE_COLUMN_NAME] = df.apply(
+ lambda row: (row[SENTIMENT_COLUMN_NAME] + row[UNDERSTANDING_COLUMN_NAME] +
+ row[PHRASEOLOGY_COLUMN_NAME] + row[TRICKY_QUESTIONS_COLUMN_NAME]) / 4,
+ axis=1
+ )
+
+ df[RESULTS_COLUMN_NAME] = df.apply(
+ lambda row: [row[SENTIMENT_COLUMN_NAME], row[UNDERSTANDING_COLUMN_NAME],
+ row[PHRASEOLOGY_COLUMN_NAME], row[TRICKY_QUESTIONS_COLUMN_NAME]],
+ axis=1
+ )
+
 # Insert the new column after the 'Average' column
 cols = list(df.columns)
- cols.insert(cols.index(AVERAGE_COLUMN_NAME) + 1, cols.pop(cols.index(RESULTS_COLUMN_NAME)))
+ avg_index = cols.index(AVERAGE_COLUMN_NAME)
+
+ # Remove columns from their current positions if they exist
+ if IMPLICATURES_AVERAGE_COLUMN_NAME in cols:
+ cols.pop(cols.index(IMPLICATURES_AVERAGE_COLUMN_NAME))
+ if RESULTS_COLUMN_NAME in cols:
+ cols.pop(cols.index(RESULTS_COLUMN_NAME))
+
+ # Insert columns in the desired order
+ cols.insert(avg_index + 1, IMPLICATURES_AVERAGE_COLUMN_NAME)
+ cols.insert(avg_index + 2, RESULTS_COLUMN_NAME)
+
 df = df[cols]
- # Create a color ramp using Seaborn
 return df
 
 def styler(df: pd.DataFrame):
 palette = sns.color_palette("RdYlGn", as_cmap=True)
 # Apply reverse color gradient to the "Params" column
 params_palette = sns.color_palette("RdYlGn_r", as_cmap=True)  # Reversed RdYlGn palette
- styled_df = df.style.background_gradient(cmap=palette, subset=[AVERAGE_COLUMN_NAME, SENTIMENT_COLUMN_NAME, PHRASEOLOGY_COLUMN_NAME, UNDERSTANDING_COLUMN_NAME]
- ).background_gradient(cmap=params_palette, subset=["Params"]
- ).set_properties(**{'text-align': 'center'}, subset=[AVERAGE_COLUMN_NAME, SENTIMENT_COLUMN_NAME, PHRASEOLOGY_COLUMN_NAME, UNDERSTANDING_COLUMN_NAME]
- ).format("{:.2f}".center(10), subset=[AVERAGE_COLUMN_NAME, SENTIMENT_COLUMN_NAME, PHRASEOLOGY_COLUMN_NAME, UNDERSTANDING_COLUMN_NAME]
- ).format("{:.1f}".center(10), subset=["Params"])
+ styled_df = df.style.background_gradient(
+ cmap=palette,
+ subset=[AVERAGE_COLUMN_NAME, IMPLICATURES_AVERAGE_COLUMN_NAME, SENTIMENT_COLUMN_NAME,
+ PHRASEOLOGY_COLUMN_NAME, UNDERSTANDING_COLUMN_NAME, TRICKY_QUESTIONS_COLUMN_NAME]
+ ).background_gradient(
+ cmap=params_palette, subset=["Params"]
+ ).set_properties(
+ **{'text-align': 'center'},
+ subset=[AVERAGE_COLUMN_NAME, IMPLICATURES_AVERAGE_COLUMN_NAME, SENTIMENT_COLUMN_NAME,
+ PHRASEOLOGY_COLUMN_NAME, UNDERSTANDING_COLUMN_NAME, TRICKY_QUESTIONS_COLUMN_NAME]
+ ).format(
+ "{:.2f}".center(10),
+ subset=[AVERAGE_COLUMN_NAME, IMPLICATURES_AVERAGE_COLUMN_NAME, SENTIMENT_COLUMN_NAME,
+ PHRASEOLOGY_COLUMN_NAME, UNDERSTANDING_COLUMN_NAME, TRICKY_QUESTIONS_COLUMN_NAME]
+ ).format(
+ "{:.1f}".center(10), subset=["Params"]
+ )
 return styled_df
 
 
@@ -149,7 +191,7 @@ with tab1:
 # Closing filters in a expander
 with st.expander("Filtering benchmark data", icon='🔍'):
 # Filtering data, e.g. slider for params, average score, etc.
- col_filter_params, col_filter_average, col_filter_sentiment, col_filter_understanding, col_filter_phraseology = st.columns(5, gap='medium')
+ col_filter_params, col_filter_average, col_filter_sentiment, col_filter_understanding, col_filter_phraseology, col_filter_tricky = st.columns(6, gap='medium')
 
 with col_filter_params:
 params_slider = st.slider("Models Size [B]", min_value=0.0, max_value=float(data['Params'].max()), value=(0.0, float(data['Params'].max())), step=0.1, format="%.1f")
@@ -170,6 +212,10 @@ with tab1:
 with col_filter_phraseology:
 phraseology_slider = st.slider("Phraseology score", step=0.1, min_value=0.0, max_value=5.0, value=(0.0, 5.0))
 data = data[(data[PHRASEOLOGY_COLUMN_NAME] >= phraseology_slider[0]) & (data[PHRASEOLOGY_COLUMN_NAME] <= phraseology_slider[1])]
+
+ with col_filter_tricky:
+ tricky_slider = st.slider("Tricky questions score", step=0.1, min_value=0.0, max_value=5.0, value=(0.0, 5.0))
+ data = data[(data[TRICKY_QUESTIONS_COLUMN_NAME] >= tricky_slider[0]) & (data[TRICKY_QUESTIONS_COLUMN_NAME] <= tricky_slider[1])]
 
 # Extract unique provider names from the "Model" column
 providers = data["Model"].apply(lambda x: x.split('/')[0].lower()).unique()
@@ -191,6 +237,8 @@ with tab1:
 SENTIMENT_COLUMN_NAME: st.column_config.NumberColumn(SENTIMENT_COLUMN_NAME, help='Ability to analyze sentiment'),
 UNDERSTANDING_COLUMN_NAME: st.column_config.NumberColumn(UNDERSTANDING_COLUMN_NAME, help='Ability to understand language'),
 PHRASEOLOGY_COLUMN_NAME: st.column_config.NumberColumn(PHRASEOLOGY_COLUMN_NAME, help='Ability to understand phraseological compounds'),
+ TRICKY_QUESTIONS_COLUMN_NAME: st.column_config.NumberColumn(TRICKY_QUESTIONS_COLUMN_NAME, help='Ability to understand tricky questions'),
+ IMPLICATURES_AVERAGE_COLUMN_NAME: st.column_config.NumberColumn(IMPLICATURES_AVERAGE_COLUMN_NAME, help='Average of sentiment, understanding, and phraseology'),
 }, hide_index=True, disabled=True, height=500)
 
 # Add selection for models and create a bar chart for selected models using the AVERAGE_COLUMN_NAME, SENTIMENT_COLUMN_NAME, PHRASEOLOGY_COLUMN_NAME, UNDERSTANDING_COLUMN_NAME
@@ -201,7 +249,7 @@ with tab1:
 default_models.append(bielik_model)
 selected_models = st.multiselect("Select models to compare", data["Model"].unique(), default=default_models)
 selected_data = data[data["Model"].isin(selected_models)]
- categories = [AVERAGE_COLUMN_NAME, SENTIMENT_COLUMN_NAME, PHRASEOLOGY_COLUMN_NAME, UNDERSTANDING_COLUMN_NAME]
+ categories = [AVERAGE_COLUMN_NAME, IMPLICATURES_AVERAGE_COLUMN_NAME, SENTIMENT_COLUMN_NAME, PHRASEOLOGY_COLUMN_NAME, UNDERSTANDING_COLUMN_NAME, TRICKY_QUESTIONS_COLUMN_NAME]
 
 if selected_models:
 # Kolorki do wyboru:
data.json CHANGED
@@ -5,7 +5,8 @@
 "Average": 4.03025641025641,
 "Sentiment": 4.230769230769231,
 "Language understanding": 4.0,
- "Phraseology": 3.86
+ "Phraseology": 3.86,
+ "Tricky questions": 4.0
 },
 {
 "Model": "alpindale/WizardLM-2-8x22B",
@@ -13,7 +14,8 @@
 "Average": 3.9133760683760683,
 "Sentiment": 3.7051282051282053,
 "Language understanding": 3.815,
- "Phraseology": 4.22
+ "Phraseology": 4.22,
+ "Tricky questions": 4.0
 },
 {
 "Model": "meta-llama/Meta-Llama-3.1-70B-Instruct",
@@ -21,7 +23,8 @@
 "Average": 3.828974358974359,
 "Sentiment": 4.326923076923077,
 "Language understanding": 3.91,
- "Phraseology": 3.25
+ "Phraseology": 3.25,
+ "Tricky questions": 4.0
 },
 {
 "Model": "meta-llama/Meta-Llama-3-70B-Instruct",
@@ -29,7 +32,8 @@
 "Average": 3.806538461538462,
 "Sentiment": 4.134615384615385,
 "Language understanding": 3.82,
- "Phraseology": 3.465
+ "Phraseology": 3.465,
+ "Tricky questions": 4.0
 },
 {
 "Model": "speakleash/Bielik-11B-v2.3-Instruct",
@@ -37,7 +41,8 @@
 "Average": 3.7697863247863252,
 "Sentiment": 3.9743589743589745,
 "Language understanding": 3.785,
- "Phraseology": 3.55
+ "Phraseology": 3.55,
+ "Tricky questions": 4.0
 },
 {
 "Model": "mistralai/Mixtral-8x22B-Instruct-v0.1",
@@ -45,7 +50,8 @@
 "Average": 3.6690170940170943,
 "Sentiment": 3.782051282051282,
 "Language understanding": 3.675,
- "Phraseology": 3.55
+ "Phraseology": 3.55,
+ "Tricky questions": 4.0
 },
 {
 "Model": "speakleash/Bielik-11B-v2.1-Instruct",
@@ -53,7 +59,8 @@
 "Average": 3.6583760683760684,
 "Sentiment": 3.9551282051282053,
 "Language understanding": 3.915,
- "Phraseology": 3.105
+ "Phraseology": 3.105,
+ "Tricky questions": 4.0
 },
 {
 "Model": "Qwen/Qwen2-72B-Instruct",
@@ -61,7 +68,8 @@
 "Average": 3.6442735042735044,
 "Sentiment": 3.7628205128205128,
 "Language understanding": 3.89,
- "Phraseology": 3.28
+ "Phraseology": 3.28,
+ "Tricky questions": 4.0
 },
 {
 "Model": "speakleash/Bielik-11B-v2.0-Instruct",
@@ -69,7 +77,8 @@
 "Average": 3.614786324786325,
 "Sentiment": 3.9743589743589745,
 "Language understanding": 3.745,
- "Phraseology": 3.125
+ "Phraseology": 3.125,
+ "Tricky questions": 4.0
 },
 {
 "Model": "speakleash/Bielik-11B-v2.2-Instruct",
@@ -77,7 +86,8 @@
 "Average": 3.565982905982906,
 "Sentiment": 3.717948717948718,
 "Language understanding": 3.73,
- "Phraseology": 3.25
+ "Phraseology": 3.25,
+ "Tricky questions": 4.0
 },
 {
 "Model": "Qwen/Qwen1.5-72B-Chat",
@@ -85,7 +95,8 @@
 "Average": 3.3214529914529916,
 "Sentiment": 3.4743589743589745,
 "Language understanding": 3.515,
- "Phraseology": 2.975
+ "Phraseology": 2.975,
+ "Tricky questions": 4.0
 },
 {
 "Model": "meta-llama/Meta-Llama-3.1-8B-Instruct",
@@ -93,7 +104,8 @@
 "Average": 3.3114529914529918,
 "Sentiment": 3.9743589743589745,
 "Language understanding": 3.38,
- "Phraseology": 2.58
+ "Phraseology": 2.58,
+ "Tricky questions": 4.0
 },
 {
 "Model": "THUDM/glm-4-9b-chat",
@@ -101,7 +113,8 @@
 "Average": 3.2749145299145295,
 "Sentiment": 3.58974358974359,
 "Language understanding": 3.455,
- "Phraseology": 2.78
+ "Phraseology": 2.78,
+ "Tricky questions": 4.0
 },
 {
 "Model": "mistralai/Mistral-Nemo-Instruct-2407",
@@ -109,7 +122,8 @@
 "Average": 3.223675213675214,
 "Sentiment": 3.641025641025641,
 "Language understanding": 3.29,
- "Phraseology": 2.74
+ "Phraseology": 2.74,
+ "Tricky questions": 4.0
 },
 {
 "Model": "meta-llama/Meta-Llama-3-8B-Instruct",
@@ -117,7 +131,8 @@
 "Average": 3.172777777777778,
 "Sentiment": 3.3333333333333335,
 "Language understanding": 3.15,
- "Phraseology": 3.035
+ "Phraseology": 3.035,
+ "Tricky questions": 4.0
 },
 {
 "Model": "upstage/SOLAR-10.7B-Instruct-v1.0",
@@ -125,7 +140,8 @@
 "Average": 3.1343162393162394,
 "Sentiment": 2.967948717948718,
 "Language understanding": 3.18,
- "Phraseology": 3.255
+ "Phraseology": 3.255,
+ "Tricky questions": 4.0
 },
 {
 "Model": "speakleash/Bielik-7B-Instruct-v0.1",
@@ -133,7 +149,8 @@
 "Average": 3.126581196581197,
 "Sentiment": 3.58974358974359,
 "Language understanding": 3.475,
- "Phraseology": 2.315
+ "Phraseology": 2.315,
+ "Tricky questions": 4.0
 },
 {
 "Model": "openchat/openchat-3.5-0106-gemma",
@@ -141,7 +158,8 @@
 "Average": 3.08525641025641,
 "Sentiment": 3.730769230769231,
 "Language understanding": 3.08,
- "Phraseology": 2.445
+ "Phraseology": 2.445,
+ "Tricky questions": 4.0
 },
 {
 "Model": "mistralai/Mixtral-8x7B-Instruct-v0.1",
@@ -149,7 +167,8 @@
 "Average": 3.039230769230769,
 "Sentiment": 3.0576923076923075,
 "Language understanding": 3.175,
- "Phraseology": 2.885
+ "Phraseology": 2.885,
+ "Tricky questions": 4.0
 },
 {
 "Model": "mistralai/Mistral-7B-Instruct-v0.3",
@@ -157,7 +176,8 @@
 "Average": 3.022307692307692,
 "Sentiment": 3.326923076923077,
 "Language understanding": 3.06,
- "Phraseology": 2.68
+ "Phraseology": 2.68,
+ "Tricky questions": 4.0
 },
 {
 "Model": "berkeley-nest/Starling-LM-7B-alpha",
@@ -165,7 +185,8 @@
 "Average": 2.945897435897436,
 "Sentiment": 3.0576923076923075,
 "Language understanding": 2.925,
- "Phraseology": 2.855
+ "Phraseology": 2.855,
+ "Tricky questions": 4.0
 },
 {
 "Model": "openchat/openchat-3.5-0106",
@@ -173,7 +194,8 @@
 "Average": 2.8500854700854696,
 "Sentiment": 3.16025641025641,
 "Language understanding": 2.835,
- "Phraseology": 2.555
+ "Phraseology": 2.555,
+ "Tricky questions": 4.0
 },
 {
 "Model": "internlm/internlm2-chat-20b",
@@ -181,7 +203,8 @@
 "Average": 2.8237606837606837,
 "Sentiment": 3.301282051282051,
 "Language understanding": 2.785,
- "Phraseology": 2.385
+ "Phraseology": 2.385,
+ "Tricky questions": 4.0
 },
 {
 "Model": "01-ai/Yi-1.5-34B-Chat",
@@ -189,7 +212,8 @@
 "Average": 2.7756410256410255,
 "Sentiment": 3.076923076923077,
 "Language understanding": 2.87,
- "Phraseology": 2.38
+ "Phraseology": 2.38,
+ "Tricky questions": 4.0
 },
 {
 "Model": "Voicelab/trurl-2-13b-academic",
@@ -197,7 +221,8 @@
 "Average": 2.74042735042735,
 "Sentiment": 3.301282051282051,
 "Language understanding": 2.755,
- "Phraseology": 2.165
+ "Phraseology": 2.165,
+ "Tricky questions": 4.0
 },
 {
 "Model": "google/gemma-2-2b-it",
@@ -205,7 +230,8 @@
 "Average": 2.7974786324786325,
 "Sentiment": 3.3974358974359,
 "Language understanding": 2.9,
- "Phraseology": 2.095
+ "Phraseology": 2.095,
+ "Tricky questions": 4.0
 },
 {
 "Model": "Qwen/Qwen2.5-3B-Instruct",
@@ -213,7 +239,8 @@
 "Average": 2.734572649572649,
 "Sentiment": 2.948717948717949,
 "Language understanding": 2.455,
- "Phraseology": 2.8
+ "Phraseology": 2.8,
+ "Tricky questions": 4.0
 },
 {
 "Model": "NousResearch/Hermes-3-Llama-3.2-3B",
@@ -221,7 +248,8 @@
 "Average": 2.695128205128205,
 "Sentiment": 2.6153846153846154,
 "Language understanding": 2.705,
- "Phraseology": 2.765
+ "Phraseology": 2.765,
+ "Tricky questions": 4.0
 },
 {
 "Model": "ibm-granite/granite-3.1-2b-instruct",
@@ -229,7 +257,8 @@
 "Average": 2.397307692307692,
 "Sentiment": 3.076923076923077,
 "Language understanding": 2.235,
- "Phraseology": 1.88
+ "Phraseology": 1.88,
+ "Tricky questions": 4.0
 },
 {
 "Model": "meta-llama/Llama-3.2-1B-Instruct",
@@ -237,7 +266,8 @@
 "Average": 2.383974358974359,
 "Sentiment": 3.076923076923077,
 "Language understanding": 1.735,
- "Phraseology": 2.34
+ "Phraseology": 2.34,
+ "Tricky questions": 4.0
 },
 {
 "Model": "microsoft/Phi-3.5-mini-instruct",
@@ -245,7 +275,8 @@
 "Average": 2.331965811965812,
 "Sentiment": 2.435897435897436,
 "Language understanding": 2.135,
- "Phraseology": 2.425
+ "Phraseology": 2.425,
+ "Tricky questions": 4.0
 },
 {
 "Model": "meta-llama/Llama-3.2-3B-Instruct",
@@ -253,7 +284,8 @@
 "Average": 2.257136752136752,
 "Sentiment": 2.7564102564102564,
 "Language understanding": 2.295,
- "Phraseology": 1.72
+ "Phraseology": 1.72,
+ "Tricky questions": 4.0
 },
 {
 "Model": "h2oai/h2o-danube2-1.8b-chat",
@@ -261,7 +293,8 @@
 "Average": 2.1455982905982904,
 "Sentiment": 2.371794871794872,
 "Language understanding": 1.595,
- "Phraseology": 2.47
+ "Phraseology": 2.47,
+ "Tricky questions": 4.0
 },
 {
 "Model": "Qwen/Qwen2.5-1.5B-Instruct",
@@ -269,7 +302,8 @@
 "Average": 2.1232905982905983,
 "Sentiment": 2.7948717948717947,
 "Language understanding": 1.35,
- "Phraseology": 2.225
+ "Phraseology": 2.225,
+ "Tricky questions": 4.0
 },
 {
 "Model": "utter-project/EuroLLM-1.7B-Instruct",
@@ -277,7 +311,8 @@
 "Average": 2.097863247863248,
 "Sentiment": 2.243589743589744,
 "Language understanding": 1.79,
- "Phraseology": 2.26
+ "Phraseology": 2.26,
+ "Tricky questions": 4.0
 },
 {
 "Model": "LGAI-EXAONE/EXAONE-3.5-2.4B-Instruct",
@@ -285,7 +320,8 @@
 "Average": 2.062846282695529,
 "Sentiment": 1.9423076923076923,
 "Language understanding": 2.1155778894472363,
- "Phraseology": 2.130653266331658
+ "Phraseology": 2.130653266331658,
+ "Tricky questions": 4.0
 },
 {
 "Model": "HuggingFaceTB/SmolLM2-1.7B-Instruct",
@@ -293,7 +329,8 @@
 "Average": 1.9102136752136751,
 "Sentiment": 2.275641025641025,
 "Language understanding": 1.1,
- "Phraseology": 2.355
+ "Phraseology": 2.355,
+ "Tricky questions": 4.0
 },
 {
 "Model": "Qwen/Qwen2.5-0.5B-Instruct",
@@ -301,7 +338,8 @@
 "Average": 1.7950427350427354,
 "Sentiment": 1.955128205128205,
 "Language understanding": 0.835,
- "Phraseology": 2.595
+ "Phraseology": 2.595,
+ "Tricky questions": 4.0
 },
 {
 "Model": "CYFRAGOVPL/Llama-PLLuM-70B-chat",
@@ -309,7 +347,8 @@
 "Average": 3.63,
 "Sentiment": 3.94,
 "Language understanding": 3.61,
- "Phraseology": 3.35
+ "Phraseology": 3.35,
+ "Tricky questions": 4.0
 },
 {
 "Model": "CYFRAGOVPL/PLLuM-8x7B-nc-instruct",
@@ -317,7 +356,8 @@
 "Average": 3.56,
 "Sentiment": 3.88,
 "Language understanding": 3.59,
- "Phraseology": 3.22
+ "Phraseology": 3.22,
+ "Tricky questions": 4.0
 },
 {
 "Model": "CYFRAGOVPL/Llama-PLLuM-70B-instruct",
@@ -325,15 +365,17 @@
 "Average": 3.56,
 "Sentiment": 3.78,
 "Language understanding": 3.63,
- "Phraseology": 3.26
+ "Phraseology": 3.26,
+ "Tricky questions": 4.0
 },
 {
 "Model": "CYFRAGOVPL/PLLuM-8x7B-instruct",
 "Params": "46.7B",
- "Average": 3.50,
+ "Average": 3.5,
 "Sentiment": 3.59,
 "Language understanding": 3.47,
- "Phraseology": 3.46
+ "Phraseology": 3.46,
+ "Tricky questions": 4.0
 },
 {
 "Model": "CYFRAGOVPL/PLLuM-12B-instruct",
@@ -341,7 +383,8 @@
 "Average": 3.49,
 "Sentiment": 3.71,
 "Language understanding": 3.17,
- "Phraseology": 3.59
+ "Phraseology": 3.59,
+ "Tricky questions": 4.0
 },
 {
 "Model": "CYFRAGOVPL/PLLuM-8x7B-nc-chat",
@@ -349,7 +392,8 @@
 "Average": 3.44,
 "Sentiment": 3.76,
 "Language understanding": 3.48,
- "Phraseology": 3.08
+ "Phraseology": 3.08,
+ "Tricky questions": 4.0
 },
 {
 "Model": "CYFRAGOVPL/PLLuM-8x7B-chat",
@@ -357,7 +401,8 @@
 "Average": 3.41,
 "Sentiment": 3.44,
 "Language understanding": 3.45,
- "Phraseology": 3.35
+ "Phraseology": 3.35,
+ "Tricky questions": 4.0
 },
 {
 "Model": "CYFRAGOVPL/PLLuM-12B-chat",
@@ -365,7 +410,8 @@
 "Average": 3.32,
 "Sentiment": 3.32,
 "Language understanding": 3.21,
- "Phraseology": 3.43
+ "Phraseology": 3.43,
+ "Tricky questions": 4.0
 },
 {
 "Model": "CYFRAGOVPL/PLLuM-12B-nc-instruct",
@@ -373,15 +419,17 @@
 "Average": 3.29,
 "Sentiment": 3.24,
 "Language understanding": 3.31,
- "Phraseology": 3.32
+ "Phraseology": 3.32,
+ "Tricky questions": 4.0
 },
 {
 "Model": "CYFRAGOVPL/Llama-PLLuM-8B-instruct",
 "Params": "8.03B",
- "Average": 3.20,
+ "Average": 3.2,
 "Sentiment": 3.24,
- "Language understanding": 2.90,
- "Phraseology": 3.46
+ "Language understanding": 2.9,
+ "Phraseology": 3.46,
+ "Tricky questions": 4.0
 },
 {
 "Model": "CYFRAGOVPL/Llama-PLLuM-8B-chat",
@@ -389,7 +437,8 @@
 "Average": 3.14,
 "Sentiment": 3.13,
 "Language understanding": 2.93,
- "Phraseology": 3.36
+ "Phraseology": 3.36,
+ "Tricky questions": 4.0
 },
 {
 "Model": "CYFRAGOVPL/PLLuM-12B-nc-chat",
@@ -397,7 +446,8 @@
 "Average": 3.33,
 "Sentiment": 3.22,
 "Language understanding": 3.23,
- "Phraseology": 3.54
+ "Phraseology": 3.54,
+ "Tricky questions": 4.0
 },
 {
 "Model": "Qwen/Qwen2.5-72B-Instruct",
@@ -405,7 +455,8 @@
 "Average": 3.9923076923076923,
 "Sentiment": 4.076923076923077,
 "Language understanding": 3.97,
- "Phraseology": 3.93
+ "Phraseology": 3.93,
+ "Tricky questions": 4.0
 },
 {
 "Model": "Qwen/Qwen2.5-32B-Instruct",
@@ -413,7 +464,8 @@
 "Average": 3.8047008547008545,
 "Sentiment": 3.8141025641025643,
 "Language understanding": 3.565,
- "Phraseology": 4.035
+ "Phraseology": 4.035,
+ "Tricky questions": 4.0
 },
 {
 "Model": "mistralai/Mistral-Small-24B-Instruct-2501",
@@ -421,7 +473,8 @@
 "Average": 3.79508547008547,
 "Sentiment": 3.91025641025641,
 "Language understanding": 3.6,
- "Phraseology": 3.875
+ "Phraseology": 3.875,
+ "Tricky questions": 4.0
 },
 {
 "Model": "meta-llama/Llama-3.3-70B-Instruct",
@@ -429,7 +482,8 @@
 "Average": 3.7332905982905977,
 "Sentiment": 4.294871794871795,
 "Language understanding": 3.865,
- "Phraseology": 3.04
+ "Phraseology": 3.04,
+ "Tricky questions": 4.0
 },
 {
 "Model": "Qwen/Qwen2.5-14B-Instruct",
@@ -437,7 +491,8 @@
 "Average": 3.61508547008547,
 "Sentiment": 3.91025641025641,
 "Language understanding": 3.565,
- "Phraseology": 3.37
+ "Phraseology": 3.37,
+ "Tricky questions": 4.0
 },
 {
 "Model": "microsoft/phi-4",
@@ -445,7 +500,8 @@
 "Average": 3.4976495726495727,
 "Sentiment": 3.717948717948718,
 "Language understanding": 3.54,
- "Phraseology": 3.235
+ "Phraseology": 3.235,
+ "Tricky questions": 4.0
 },
 {
 "Model": "Qwen/Qwen2.5-7B-Instruct",
@@ -453,7 +509,8 @@
 "Average": 3.2258974358974357,
 "Sentiment": 3.5576923076923075,
 "Language understanding": 3.025,
- "Phraseology": 3.095
+ "Phraseology": 3.095,
+ "Tricky questions": 4.0
 },
 {
 "Model": "microsoft/Phi-4-mini-instruct",
@@ -461,6 +518,7 @@
 "Average": 2.455769230769231,
 "Sentiment": 2.6923076923076925,
 "Language understanding": 2.43,
- "Phraseology": 2.245
+ "Phraseology": 2.245,
+ "Tricky questions": 4.0
 }
 ]