barthfab committed
Commit 10bf4d8
Parent: 7231ccf

Delete croissantllm/CroissantLLMChat-v0.1

croissantllm/CroissantLLMChat-v0.1/results_2024_05_31T12-01-50.json DELETED
@@ -1,564 +0,0 @@
- {
- "config_general": {
- "lighteval_sha": "",
- "num_few_shot_default": 0,
- "num_fewshot_seeds": 1,
- "override_batch_size": "auto:6",
- "max_samples": "null",
- "job_id": "",
- "model_name": "croissantllm/CroissantLLMChat-v0.1",
- "model_sha": "",
- "model_dtype": "torch.bfloat16",
- "model_size": ""
- },
- "results": {
- "harness|hellaswag_it|10": {
- "acc,none": 0.30218644620907215,
- "acc_stderr,none": 0.004789632401308303,
- "acc_norm,none": 0.34439247253344935,
- "acc_norm_stderr,none": 0.004956142479858007,
- "alias": "hellaswag_it"
- },
- "harness|belebele_spa_Latn|5": {
- "acc,none": 0.25666666666666665,
- "acc_stderr,none": 0.014567891342380039,
- "acc_norm,none": 0.25666666666666665,
- "acc_norm_stderr,none": 0.014567891342380039,
- "alias": "belebele_spa_Latn"
- },
- "harness|arc_challenge_m_it|25": {
- "acc,none": 0.22583404619332764,
- "acc_stderr,none": 0.012234615366330675,
- "acc_norm,none": 0.262617621899059,
- "acc_norm_stderr,none": 0.012876175520452837,
- "alias": "arc_challenge_m_it"
- },
- "harness|arc_challenge_m_fr|25": {
- "acc,none": 0.2840034217279726,
- "acc_stderr,none": 0.013194588131940972,
- "acc_norm,none": 0.32677502138579984,
- "acc_norm_stderr,none": 0.013724076021999806,
- "alias": "arc_challenge_m_fr"
- },
- "harness|belebele_deu_Latn|5": {
- "acc,none": 0.2388888888888889,
- "acc_stderr,none": 0.0142213937312762,
- "acc_norm,none": 0.2388888888888889,
- "acc_norm_stderr,none": 0.0142213937312762,
- "alias": "belebele_deu_Latn"
- },
- "harness|hellaswag_es|10": {
- "acc,none": 0.32099423938553445,
- "acc_stderr,none": 0.0048222091210664165,
- "acc_norm,none": 0.36921271602304245,
- "acc_norm_stderr,none": 0.004984716707710809,
- "alias": "hellaswag_es"
- },
- "harness|arc_challenge_m_de|25": {
- "acc,none": 0.19931565440547477,
- "acc_stderr,none": 0.011689069809394484,
- "acc_norm,none": 0.2446535500427716,
- "acc_norm_stderr,none": 0.01257845892181575,
- "alias": "arc_challenge_m_de"
- },
- "harness|belebele_ita_Latn|5": {
- "acc,none": 0.2911111111111111,
- "acc_stderr,none": 0.015150906906440088,
- "acc_norm,none": 0.2911111111111111,
- "acc_norm_stderr,none": 0.015150906906440088,
- "alias": "belebele_ita_Latn"
- },
- "harness|truthfulqa_mc2_m_de|0": {
- "acc,none": 0.20558375634517767,
- "acc_stderr,none": 0.014405591330836916,
- "alias": "truthfulqa_mc2_m_de"
- },
- "harness|hendrycksTest|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-humanities|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-formal_logic|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-high_school_european_history|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-high_school_us_history|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-high_school_world_history|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-international_law|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-jurisprudence|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-logical_fallacies|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-moral_disputes|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-moral_scenarios|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-philosophy|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-prehistory|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-professional_law|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-world_religions|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-other|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-business_ethics|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-clinical_knowledge|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-college_medicine|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-global_facts|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-human_aging|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-management|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-marketing|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-medical_genetics|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-miscellaneous|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-nutrition|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-professional_accounting|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-professional_medicine|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-virology|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-social_sciences|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-econometrics|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-high_school_geography|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-high_school_government_and_politics|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-high_school_macroeconomics|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-high_school_microeconomics|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-high_school_psychology|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-human_sexuality|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-professional_psychology|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-public_relations|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-security_studies|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-sociology|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-us_foreign_policy|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-stem|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-abstract_algebra|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-anatomy|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-astronomy|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-college_biology|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-college_chemistry|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-college_computer_science|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-college_mathematics|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-college_physics|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-computer_security|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-conceptual_physics|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-electrical_engineering|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-elementary_mathematics|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-high_school_biology|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-high_school_chemistry|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-high_school_computer_science|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-high_school_mathematics|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-high_school_physics|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-high_school_statistics|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|hendrycksTest-machine_learning|5": {
- "acc,none": 0.25081897165645917,
- "acc_stderr,none": 0.03887949265403574,
- "alias": "mmlu"
- },
- "harness|arc_challenge|25": {
- "acc,none": 0.302901023890785,
- "acc_stderr,none": 0.013428241573185349,
- "acc_norm,none": 0.3250853242320819,
- "acc_norm_stderr,none": 0.013688147309729117,
- "alias": "arc_challenge"
- },
- "harness|hellaswag|10": {
- "acc,none": 0.4283011352320255,
- "acc_stderr,none": 0.004938212723748209,
- "acc_norm,none": 0.5612427803226449,
- "acc_norm_stderr,none": 0.0049522098318565775,
- "alias": "hellaswag"
- },
- "harness|hellaswag_de|10": {
- "acc,none": 0.28949615713065757,
- "acc_stderr,none": 0.004686022365371444,
- "acc_norm,none": 0.3123398804440649,
- "acc_norm_stderr,none": 0.0047885097822260945,
- "alias": "hellaswag_de"
- },
- "harness|belebele_fra_Latn|5": {
- "acc,none": 0.26,
- "acc_stderr,none": 0.014629271097998376,
- "acc_norm,none": 0.26,
- "acc_norm_stderr,none": 0.014629271097998376,
- "alias": "belebele_fra_Latn"
- },
- "harness|truthfulqa_mc2_m_es|0": {
- "acc,none": 0.22433460076045628,
- "acc_stderr,none": 0.01486011711896892,
- "alias": "truthfulqa_mc2_m_es"
- },
- "harness|belebele_eng_Latn|5": {
- "acc,none": 0.28888888888888886,
- "acc_stderr,none": 0.015116606414982333,
- "acc_norm,none": 0.28888888888888886,
- "acc_norm_stderr,none": 0.015116606414982333,
- "alias": "belebele_eng_Latn"
- },
- "harness|truthfulqa_mc2_m_fr|0": {
- "acc,none": 0.19822109275730623,
- "acc_stderr,none": 0.014219717662860107,
- "alias": "truthfulqa_mc2_m_fr"
- },
- "harness|truthfulqa_mc2|0": {
- "acc,none": 0.3965607460783958,
- "acc_stderr,none": 0.014745046690858642,
- "alias": "truthfulqa_mc2"
- },
- "harness|mmlu_m_es|5": {
- "acc,none": 0.2496625168741563,
- "acc_stderr,none": 0.003748357919855524,
- "alias": "mmlu_m_es"
- },
- "harness|mmlu_m_de|5": {
- "acc,none": 0.23472620304721678,
- "acc_stderr,none": 0.0036810064660393487,
- "alias": "mmlu_m_de"
- },
- "harness|truthfulqa_mc2_m_it|0": {
- "acc,none": 0.21328224776500637,
- "acc_stderr,none": 0.014648172749593518,
- "alias": "truthfulqa_mc2_m_it"
- },
- "harness|mmlu_m_it|5": {
- "acc,none": 0.24174661932462038,
- "acc_stderr,none": 0.0037214227061056937,
- "alias": "mmlu_m_it"
- },
- "harness|mmlu_m_fr|5": {
- "acc,none": 0.24352608662439845,
- "acc_stderr,none": 0.00375145636866583,
- "alias": "mmlu_m_fr"
- },
- "harness|arc_challenge_m_es|25": {
- "acc,none": 0.2299145299145299,
- "acc_stderr,none": 0.012306807801471982,
- "acc_norm,none": 0.252991452991453,
- "acc_norm_stderr,none": 0.01271476841834667,
- "alias": "arc_challenge_m_es"
- },
- "harness|hellaswag_fr|10": {
- "acc,none": 0.4185050331976869,
- "acc_stderr,none": 0.005105280908505716,
- "acc_norm,none": 0.5326622403084172,
- "acc_norm_stderr,none": 0.005163423088888286,
- "alias": "hellaswag_fr"
- }
- },
- "versions": {
- "harness|hellaswag_it|10": 1.0,
- "harness|belebele_spa_Latn|5": 0.0,
- "harness|arc_challenge_m_it|25": 1.0,
- "harness|arc_challenge_m_fr|25": 1.0,
- "harness|belebele_deu_Latn|5": 0.0,
- "harness|hellaswag_es|10": 1.0,
- "harness|arc_challenge_m_de|25": 1.0,
- "harness|belebele_ita_Latn|5": 0.0,
- "harness|truthfulqa_mc2_m_de|0": "Yaml",
- "harness|hendrycksTest|5": "N/A",
- "harness|hendrycksTest-humanities|5": "N/A",
- "harness|hendrycksTest-formal_logic|5": "N/A",
- "harness|hendrycksTest-high_school_european_history|5": "N/A",
- "harness|hendrycksTest-high_school_us_history|5": "N/A",
- "harness|hendrycksTest-high_school_world_history|5": "N/A",
- "harness|hendrycksTest-international_law|5": "N/A",
- "harness|hendrycksTest-jurisprudence|5": "N/A",
- "harness|hendrycksTest-logical_fallacies|5": "N/A",
- "harness|hendrycksTest-moral_disputes|5": "N/A",
- "harness|hendrycksTest-moral_scenarios|5": "N/A",
- "harness|hendrycksTest-philosophy|5": "N/A",
- "harness|hendrycksTest-prehistory|5": "N/A",
- "harness|hendrycksTest-professional_law|5": "N/A",
- "harness|hendrycksTest-world_religions|5": "N/A",
- "harness|hendrycksTest-other|5": "N/A",
- "harness|hendrycksTest-business_ethics|5": "N/A",
- "harness|hendrycksTest-clinical_knowledge|5": "N/A",
- "harness|hendrycksTest-college_medicine|5": "N/A",
- "harness|hendrycksTest-global_facts|5": "N/A",
- "harness|hendrycksTest-human_aging|5": "N/A",
- "harness|hendrycksTest-management|5": "N/A",
- "harness|hendrycksTest-marketing|5": "N/A",
- "harness|hendrycksTest-medical_genetics|5": "N/A",
- "harness|hendrycksTest-miscellaneous|5": "N/A",
- "harness|hendrycksTest-nutrition|5": "N/A",
- "harness|hendrycksTest-professional_accounting|5": "N/A",
- "harness|hendrycksTest-professional_medicine|5": "N/A",
- "harness|hendrycksTest-virology|5": "N/A",
- "harness|hendrycksTest-social_sciences|5": "N/A",
- "harness|hendrycksTest-econometrics|5": "N/A",
- "harness|hendrycksTest-high_school_geography|5": "N/A",
- "harness|hendrycksTest-high_school_government_and_politics|5": "N/A",
- "harness|hendrycksTest-high_school_macroeconomics|5": "N/A",
- "harness|hendrycksTest-high_school_microeconomics|5": "N/A",
- "harness|hendrycksTest-high_school_psychology|5": "N/A",
- "harness|hendrycksTest-human_sexuality|5": "N/A",
- "harness|hendrycksTest-professional_psychology|5": "N/A",
- "harness|hendrycksTest-public_relations|5": "N/A",
- "harness|hendrycksTest-security_studies|5": "N/A",
- "harness|hendrycksTest-sociology|5": "N/A",
- "harness|hendrycksTest-us_foreign_policy|5": "N/A",
- "harness|hendrycksTest-stem|5": "N/A",
- "harness|hendrycksTest-abstract_algebra|5": "N/A",
- "harness|hendrycksTest-anatomy|5": "N/A",
- "harness|hendrycksTest-astronomy|5": "N/A",
- "harness|hendrycksTest-college_biology|5": "N/A",
- "harness|hendrycksTest-college_chemistry|5": "N/A",
- "harness|hendrycksTest-college_computer_science|5": "N/A",
- "harness|hendrycksTest-college_mathematics|5": "N/A",
- "harness|hendrycksTest-college_physics|5": "N/A",
- "harness|hendrycksTest-computer_security|5": "N/A",
- "harness|hendrycksTest-conceptual_physics|5": "N/A",
- "harness|hendrycksTest-electrical_engineering|5": "N/A",
- "harness|hendrycksTest-elementary_mathematics|5": "N/A",
- "harness|hendrycksTest-high_school_biology|5": "N/A",
- "harness|hendrycksTest-high_school_chemistry|5": "N/A",
- "harness|hendrycksTest-high_school_computer_science|5": "N/A",
- "harness|hendrycksTest-high_school_mathematics|5": "N/A",
- "harness|hendrycksTest-high_school_physics|5": "N/A",
- "harness|hendrycksTest-high_school_statistics|5": "N/A",
- "harness|hendrycksTest-machine_learning|5": "N/A",
- "harness|arc_challenge|25": 1.0,
- "harness|hellaswag|10": 1.0,
- "harness|hellaswag_de|10": 1.0,
- "harness|belebele_fra_Latn|5": 0.0,
- "harness|truthfulqa_mc2_m_es|0": "Yaml",
- "harness|belebele_eng_Latn|5": 0.0,
- "harness|truthfulqa_mc2_m_fr|0": "Yaml",
- "harness|truthfulqa_mc2|0": 2.0,
- "harness|mmlu_m_es|5": "Yaml",
- "harness|mmlu_m_de|5": "Yaml",
- "harness|truthfulqa_mc2_m_it|0": "Yaml",
- "harness|mmlu_m_it|5": "Yaml",
- "harness|mmlu_m_fr|5": "Yaml",
- "harness|arc_challenge_m_es|25": 1.0,
- "harness|hellaswag_fr|10": 1.0
- }
- }
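
For reference, the deleted file follows the results layout shown above: run metadata under "config_general", per-task metrics under "results" (keyed as harness|<task>|<n_fewshot>), and task versions under "versions". Below is a minimal Python sketch, not part of this commit, of reading the per-task accuracies out of such a file; the filename is simply the one deleted here, and any results_*.json with the same layout would work.

import json

# Minimal sketch: load a results dump with the layout shown above
# (config_general / results / versions) and print accuracy per task.
with open("results_2024_05_31T12-01-50.json") as f:
    data = json.load(f)

print("model:", data["config_general"]["model_name"])
for task, metrics in data["results"].items():
    acc = metrics.get("acc,none")            # plain accuracy
    acc_norm = metrics.get("acc_norm,none")  # normalized accuracy; not reported for every task
    suffix = f", acc_norm={acc_norm}" if acc_norm is not None else ""
    print(f"{task}: acc={acc}{suffix}")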