barthfab committed on
Commit: c17af60
Parent(s): aa76947

Delete LeoLM/leo-mistral-hessianai-7b

LeoLM/leo-mistral-hessianai-7b/results_2024_06_03T12-54-52.json DELETED
@@ -1,564 +0,0 @@
- {
- "config_general": {
- "lighteval_sha": "",
- "num_few_shot_default": 0,
- "num_fewshot_seeds": 1,
- "override_batch_size": "auto:6",
- "max_samples": "null",
- "job_id": "",
- "model_name": "LeoLM/leo-mistral-hessianai-7b",
- "model_sha": "",
- "model_dtype": "torch.bfloat16",
- "model_size": ""
- },
- "results": {
- "harness|hellaswag_fr|10": {
- "acc,none": 0.43671021632041124,
- "acc_stderr,none": 0.0051328542662624314,
- "acc_norm,none": 0.5965945598629256,
- "acc_norm_stderr,none": 0.005076996223223659,
- "alias": "hellaswag_fr"
- },
- "harness|hellaswag_es|10": {
- "acc,none": 0.44058032856838064,
- "acc_stderr,none": 0.005127930254173133,
- "acc_norm,none": 0.5971836995946235,
- "acc_norm_stderr,none": 0.005066034956620756,
- "alias": "hellaswag_es"
- },
- "harness|belebele_spa_Latn|5": {
- "acc,none": 0.6188888888888889,
- "acc_stderr,none": 0.016197660884519086,
- "acc_norm,none": 0.6188888888888889,
- "acc_norm_stderr,none": 0.016197660884519086,
- "alias": "belebele_spa_Latn"
- },
- "harness|truthfulqa_mc2_m_de|0": {
- "acc,none": 0.25253807106598986,
- "acc_stderr,none": 0.015487117806488176,
- "alias": "truthfulqa_mc2_m_de"
- },
- "harness|hellaswag_it|10": {
- "acc,none": 0.40998585880561295,
- "acc_stderr,none": 0.0051299205759801824,
- "acc_norm,none": 0.5574893941042097,
- "acc_norm_stderr,none": 0.005180541284770694,
- "alias": "hellaswag_it"
- },
- "harness|mmlu_m_de|5": {
- "acc,none": 0.4883089455423141,
- "acc_stderr,none": 0.004341388228400307,
- "alias": "mmlu_m_de"
- },
- "harness|truthfulqa_mc2_m_fr|0": {
- "acc,none": 0.2604828462515883,
- "acc_stderr,none": 0.01565497640803747,
- "alias": "truthfulqa_mc2_m_fr"
- },
- "harness|arc_challenge|25": {
- "acc,none": 0.48890784982935154,
- "acc_stderr,none": 0.014607794914013048,
- "acc_norm,none": 0.5221843003412969,
- "acc_norm_stderr,none": 0.014597001927076135,
- "alias": "arc_challenge"
- },
- "harness|hendrycksTest|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-humanities|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-formal_logic|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-high_school_european_history|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-high_school_us_history|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-high_school_world_history|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-international_law|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-jurisprudence|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-logical_fallacies|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-moral_disputes|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-moral_scenarios|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-philosophy|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-prehistory|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-professional_law|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-world_religions|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-other|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-business_ethics|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-clinical_knowledge|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-college_medicine|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-global_facts|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-human_aging|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-management|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-marketing|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-medical_genetics|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-miscellaneous|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-nutrition|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-professional_accounting|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-professional_medicine|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-virology|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-social_sciences|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-econometrics|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-high_school_geography|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-high_school_government_and_politics|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-high_school_macroeconomics|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-high_school_microeconomics|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-high_school_psychology|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-human_sexuality|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-professional_psychology|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-public_relations|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-security_studies|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-sociology|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-us_foreign_policy|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-stem|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-abstract_algebra|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-anatomy|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-astronomy|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-college_biology|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-college_chemistry|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-college_computer_science|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-college_mathematics|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-college_physics|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-computer_security|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-conceptual_physics|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-electrical_engineering|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-elementary_mathematics|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-high_school_biology|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-high_school_chemistry|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-high_school_computer_science|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-high_school_mathematics|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-high_school_physics|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-high_school_statistics|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-machine_learning|5": {
- "acc,none": 0.5388121350235009,
- "acc_stderr,none": 0.12523575513464408,
- "alias": "mmlu"
- },
- "harness|belebele_deu_Latn|5": {
- "acc,none": 0.6911111111111111,
- "acc_stderr,none": 0.015409737297257866,
- "acc_norm,none": 0.6911111111111111,
- "acc_norm_stderr,none": 0.015409737297257866,
- "alias": "belebele_deu_Latn"
- },
- "harness|truthfulqa_mc2_m_it|0": {
- "acc,none": 0.2413793103448276,
- "acc_stderr,none": 0.01530238012354208,
- "alias": "truthfulqa_mc2_m_it"
- },
- "harness|arc_challenge_m_es|25": {
- "acc,none": 0.39914529914529917,
- "acc_stderr,none": 0.014323296609432226,
- "acc_norm,none": 0.44358974358974357,
- "acc_norm_stderr,none": 0.014530516631461753,
- "alias": "arc_challenge_m_es"
- },
- "harness|hellaswag_de|10": {
- "acc,none": 0.5105678906917165,
- "acc_stderr,none": 0.005165028452585056,
- "acc_norm,none": 0.6821093082835183,
- "acc_norm_stderr,none": 0.004811335409067059,
- "alias": "hellaswag_de"
- },
- "harness|mmlu_m_fr|5": {
- "acc,none": 0.4328928271331449,
- "acc_stderr,none": 0.004330648564157281,
- "alias": "mmlu_m_fr"
- },
- "harness|belebele_fra_Latn|5": {
- "acc,none": 0.6222222222222222,
- "acc_stderr,none": 0.01617004083743038,
- "acc_norm,none": 0.6222222222222222,
- "acc_norm_stderr,none": 0.01617004083743038,
- "alias": "belebele_fra_Latn"
- },
- "harness|arc_challenge_m_fr|25": {
- "acc,none": 0.3849443969204448,
- "acc_stderr,none": 0.014237533846641056,
- "acc_norm,none": 0.437125748502994,
- "acc_norm_stderr,none": 0.0145140115491507,
- "alias": "arc_challenge_m_fr"
- },
- "harness|hellaswag|10": {
- "acc,none": 0.5785700059749054,
- "acc_stderr,none": 0.0049277900367266534,
- "acc_norm,none": 0.7778331009759012,
- "acc_norm_stderr,none": 0.00414853160898155,
- "alias": "hellaswag"
- },
- "harness|belebele_eng_Latn|5": {
- "acc,none": 0.7366666666666667,
- "acc_stderr,none": 0.01468955304734252,
- "acc_norm,none": 0.7366666666666667,
- "acc_norm_stderr,none": 0.01468955304734252,
- "alias": "belebele_eng_Latn"
- },
- "harness|arc_challenge_m_de|25": {
- "acc,none": 0.4448246364414029,
- "acc_stderr,none": 0.014540792856560819,
- "acc_norm,none": 0.47476475620188197,
- "acc_norm_stderr,none": 0.014611498054709703,
- "alias": "arc_challenge_m_de"
- },
- "harness|truthfulqa_mc2_m_es|0": {
- "acc,none": 0.2509505703422053,
- "acc_stderr,none": 0.015444939932767895,
- "alias": "truthfulqa_mc2_m_es"
- },
- "harness|arc_challenge_m_it|25": {
- "acc,none": 0.3669803250641574,
- "acc_stderr,none": 0.0141029047721974,
- "acc_norm,none": 0.4328485885372113,
- "acc_norm_stderr,none": 0.014497599232598588,
- "alias": "arc_challenge_m_it"
- },
- "harness|mmlu_m_it|5": {
- "acc,none": 0.43121553222029163,
- "acc_stderr,none": 0.004304697942055455,
- "alias": "mmlu_m_it"
- },
- "harness|mmlu_m_es|5": {
- "acc,none": 0.43602819859007047,
- "acc_stderr,none": 0.004294593314287693,
- "alias": "mmlu_m_es"
- },
- "harness|truthfulqa_mc2|0": {
- "acc,none": 0.429248126348752,
- "acc_stderr,none": 0.01423929114778046,
- "alias": "truthfulqa_mc2"
- },
- "harness|belebele_ita_Latn|5": {
- "acc,none": 0.5988888888888889,
- "acc_stderr,none": 0.016346531972046816,
- "acc_norm,none": 0.5988888888888889,
- "acc_norm_stderr,none": 0.016346531972046816,
- "alias": "belebele_ita_Latn"
- }
- },
- "versions": {
- "harness|hellaswag_fr|10": 1.0,
- "harness|hellaswag_es|10": 1.0,
- "harness|belebele_spa_Latn|5": 0.0,
- "harness|truthfulqa_mc2_m_de|0": "Yaml",
- "harness|hellaswag_it|10": 1.0,
- "harness|mmlu_m_de|5": "Yaml",
- "harness|truthfulqa_mc2_m_fr|0": "Yaml",
- "harness|arc_challenge|25": 1.0,
- "harness|hendrycksTest|5": "N/A",
- "harness|hendrycksTest-humanities|5": "N/A",
- "harness|hendrycksTest-formal_logic|5": "N/A",
- "harness|hendrycksTest-high_school_european_history|5": "N/A",
- "harness|hendrycksTest-high_school_us_history|5": "N/A",
- "harness|hendrycksTest-high_school_world_history|5": "N/A",
- "harness|hendrycksTest-international_law|5": "N/A",
- "harness|hendrycksTest-jurisprudence|5": "N/A",
- "harness|hendrycksTest-logical_fallacies|5": "N/A",
- "harness|hendrycksTest-moral_disputes|5": "N/A",
- "harness|hendrycksTest-moral_scenarios|5": "N/A",
- "harness|hendrycksTest-philosophy|5": "N/A",
- "harness|hendrycksTest-prehistory|5": "N/A",
- "harness|hendrycksTest-professional_law|5": "N/A",
- "harness|hendrycksTest-world_religions|5": "N/A",
- "harness|hendrycksTest-other|5": "N/A",
- "harness|hendrycksTest-business_ethics|5": "N/A",
- "harness|hendrycksTest-clinical_knowledge|5": "N/A",
- "harness|hendrycksTest-college_medicine|5": "N/A",
- "harness|hendrycksTest-global_facts|5": "N/A",
- "harness|hendrycksTest-human_aging|5": "N/A",
- "harness|hendrycksTest-management|5": "N/A",
- "harness|hendrycksTest-marketing|5": "N/A",
- "harness|hendrycksTest-medical_genetics|5": "N/A",
- "harness|hendrycksTest-miscellaneous|5": "N/A",
- "harness|hendrycksTest-nutrition|5": "N/A",
- "harness|hendrycksTest-professional_accounting|5": "N/A",
- "harness|hendrycksTest-professional_medicine|5": "N/A",
- "harness|hendrycksTest-virology|5": "N/A",
- "harness|hendrycksTest-social_sciences|5": "N/A",
- "harness|hendrycksTest-econometrics|5": "N/A",
- "harness|hendrycksTest-high_school_geography|5": "N/A",
- "harness|hendrycksTest-high_school_government_and_politics|5": "N/A",
- "harness|hendrycksTest-high_school_macroeconomics|5": "N/A",
- "harness|hendrycksTest-high_school_microeconomics|5": "N/A",
- "harness|hendrycksTest-high_school_psychology|5": "N/A",
- "harness|hendrycksTest-human_sexuality|5": "N/A",
- "harness|hendrycksTest-professional_psychology|5": "N/A",
- "harness|hendrycksTest-public_relations|5": "N/A",
- "harness|hendrycksTest-security_studies|5": "N/A",
- "harness|hendrycksTest-sociology|5": "N/A",
- "harness|hendrycksTest-us_foreign_policy|5": "N/A",
- "harness|hendrycksTest-stem|5": "N/A",
- "harness|hendrycksTest-abstract_algebra|5": "N/A",
- "harness|hendrycksTest-anatomy|5": "N/A",
- "harness|hendrycksTest-astronomy|5": "N/A",
- "harness|hendrycksTest-college_biology|5": "N/A",
- "harness|hendrycksTest-college_chemistry|5": "N/A",
- "harness|hendrycksTest-college_computer_science|5": "N/A",
- "harness|hendrycksTest-college_mathematics|5": "N/A",
- "harness|hendrycksTest-college_physics|5": "N/A",
- "harness|hendrycksTest-computer_security|5": "N/A",
- "harness|hendrycksTest-conceptual_physics|5": "N/A",
- "harness|hendrycksTest-electrical_engineering|5": "N/A",
- "harness|hendrycksTest-elementary_mathematics|5": "N/A",
- "harness|hendrycksTest-high_school_biology|5": "N/A",
- "harness|hendrycksTest-high_school_chemistry|5": "N/A",
- "harness|hendrycksTest-high_school_computer_science|5": "N/A",
- "harness|hendrycksTest-high_school_mathematics|5": "N/A",
- "harness|hendrycksTest-high_school_physics|5": "N/A",
- "harness|hendrycksTest-high_school_statistics|5": "N/A",
- "harness|hendrycksTest-machine_learning|5": "N/A",
- "harness|belebele_deu_Latn|5": 0.0,
- "harness|truthfulqa_mc2_m_it|0": "Yaml",
- "harness|arc_challenge_m_es|25": 1.0,
- "harness|hellaswag_de|10": 1.0,
- "harness|mmlu_m_fr|5": "Yaml",
- "harness|belebele_fra_Latn|5": 0.0,
- "harness|arc_challenge_m_fr|25": 1.0,
- "harness|hellaswag|10": 1.0,
- "harness|belebele_eng_Latn|5": 0.0,
- "harness|arc_challenge_m_de|25": 1.0,
- "harness|truthfulqa_mc2_m_es|0": "Yaml",
- "harness|arc_challenge_m_it|25": 1.0,
- "harness|mmlu_m_it|5": "Yaml",
- "harness|mmlu_m_es|5": "Yaml",
- "harness|truthfulqa_mc2|0": 2.0,
- "harness|belebele_ita_Latn|5": 0.0
- }
- }
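
The deleted file follows the usual lighteval results layout: a "config_general" block, per-task entries under "results" keyed as "harness|<task>|<n_fewshot>" with "acc,none" / "acc_stderr,none" metrics, and a "versions" map. A minimal sketch of how such a file could be read back, assuming the path and key names shown above (the script itself is illustrative and not part of this repository):

import json

# Minimal sketch: load a lighteval results file like the one deleted above
# and print each task's accuracy with its standard error. The path and the
# key names ("config_general", "results", "acc,none", "acc_stderr,none")
# come from that file; everything else here is an illustrative assumption.
path = "LeoLM/leo-mistral-hessianai-7b/results_2024_06_03T12-54-52.json"

with open(path) as f:
    data = json.load(f)

model = data["config_general"]["model_name"]
for task, metrics in data["results"].items():
    acc = metrics.get("acc,none")
    stderr = metrics.get("acc_stderr,none", 0.0)
    if acc is not None:
        print(f"{model} | {task}: acc={acc:.4f} +/- {stderr:.4f}")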