Ubuntu committed
Commit: 86c2b0b · Parent(s): 481136a
added 4 results
Files changed:
- MaziyarPanahi/calme-3.1-instruct-3b/{results_2024-11-11T12-11-00.982797.json → results_2024-11-22T18-41-54.004959.json} (+59 -59)
- MaziyarPanahi/calme-3.1-instruct-3b/{results_2024-11-11T12-11-00.982797_norm.json → results_2024-11-22T18-41-54.004959_norm.json} (+3 -3)
- MaziyarPanahi/calme-3.2-baguette-3b/results_2024-11-22T19-11-43.556179.json (+0 -0)
- MaziyarPanahi/calme-3.2-baguette-3b/results_2024-11-22T19-11-43.556179_norm.json (+26 -0)
- MaziyarPanahi/calme-3.2-instruct-3b/results_2024-11-22T19-39-13.259226.json (+0 -0)
- MaziyarPanahi/calme-3.2-instruct-3b/results_2024-11-22T19-39-13.259226_norm.json (+26 -0)
- microsoft/Phi-3-small-128k-instruct/results_2024-11-22T20-20-13.815550.json (+0 -0)
- microsoft/Phi-3-small-128k-instruct/results_2024-11-22T20-20-13.815550_norm.json (+26 -0)
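Each model directory keeps one full results_<timestamp>.json per run plus a small *_norm.json summary. As a minimal sketch (not part of the dataset tooling; the local path is a hypothetical checkout), the newest full results file for a model can be located purely from the naming convention shown above:

from pathlib import Path

def latest_results(model_dir):
    """Return the newest full results_*.json in one model directory.

    Relies only on the naming convention visible in this commit
    (results_<timestamp>.json plus a *_norm.json summary), so
    lexicographic order of the file names matches chronological order.
    """
    candidates = [p for p in sorted(Path(model_dir).glob("results_*.json"))
                  if not p.name.endswith("_norm.json")]
    return candidates[-1] if candidates else None

# Hypothetical local checkout of this dataset:
print(latest_results("MaziyarPanahi/calme-3.1-instruct-3b"))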
MaziyarPanahi/calme-3.1-instruct-3b/{results_2024-11-11T12-11-00.982797.json → results_2024-11-22T18-41-54.004959.json}
RENAMED
@@ -11,107 +11,107 @@
"leaderboard_bbh_compréhension_de_la_date": {
"alias": " - leaderboard_bbh_compréhension_de_la_date",
"acc_norm,none": 0.524,
- "acc_norm_stderr,none": 0.
+ "acc_norm_stderr,none": 0.031649688959687824
},
"leaderboard_bbh_compréhension_des_sports": {
"alias": " - leaderboard_bbh_compréhension_des_sports",
"acc_norm,none": 0.564,
- "acc_norm_stderr,none": 0.
+ "acc_norm_stderr,none": 0.03142556706028128
},
"leaderboard_bbh_comptage_d_objets": {
"alias": " - leaderboard_bbh_comptage_d_objets",
"acc_norm,none": 0.332,
- "acc_norm_stderr,none": 0.
+ "acc_norm_stderr,none": 0.02984403904746589
},
"leaderboard_bbh_déduction_logique_cinq_objets": {
"alias": " - leaderboard_bbh_déduction_logique_cinq_objets",
"acc_norm,none": 0.424,
- "acc_norm_stderr,none": 0.
+ "acc_norm_stderr,none": 0.03131803437491615
},
"leaderboard_bbh_déduction_logique_sept_objets": {
"alias": " - leaderboard_bbh_déduction_logique_sept_objets",
"acc_norm,none": 0.404,
- "acc_norm_stderr,none": 0.
+ "acc_norm_stderr,none": 0.031096688184825295
},
"leaderboard_bbh_déduction_logique_trois_objets": {
"alias": " - leaderboard_bbh_déduction_logique_trois_objets",
"acc_norm,none": 0.592,
- "acc_norm_stderr,none": 0.
+ "acc_norm_stderr,none": 0.0311452098465485
},
"leaderboard_bbh_désambiguïsation_qa": {
"alias": " - leaderboard_bbh_désambiguïsation_qa",
"acc_norm,none": 0.508,
- "acc_norm_stderr,none": 0.
+ "acc_norm_stderr,none": 0.0316821564314138
},
"leaderboard_bbh_expressions_booléennes": {
"alias": " - leaderboard_bbh_expressions_booléennes",
"acc_norm,none": 0.752,
- "acc_norm_stderr,none": 0.
+ "acc_norm_stderr,none": 0.027367497504863544
},
"leaderboard_bbh_formes_géométriques": {
"alias": " - leaderboard_bbh_formes_géométriques",
"acc_norm,none": 0.436,
- "acc_norm_stderr,none": 0.
+ "acc_norm_stderr,none": 0.03142556706028128
},
"leaderboard_bbh_hyperbate": {
"alias": " - leaderboard_bbh_hyperbate",
"acc_norm,none": 0.52,
- "acc_norm_stderr,none": 0.
+ "acc_norm_stderr,none": 0.031660853408495185
},
"leaderboard_bbh_jugement_causal": {
"alias": " - leaderboard_bbh_jugement_causal",
"acc_norm,none": 0.6203208556149733,
- "acc_norm_stderr,none": 0.
+ "acc_norm_stderr,none": 0.035584436288016635
},
"leaderboard_bbh_naviguer": {
"alias": " - leaderboard_bbh_naviguer",
"acc_norm,none": 0.652,
- "acc_norm_stderr,none": 0.
+ "acc_norm_stderr,none": 0.030186568464511707
},
"leaderboard_bbh_pingouins_sur_une_table": {
"alias": " - leaderboard_bbh_pingouins_sur_une_table",
"acc_norm,none": 0.4178082191780822,
- "acc_norm_stderr,none": 0.
+ "acc_norm_stderr,none": 0.04095788511220136
},
"leaderboard_bbh_raisonnement_sur_les_objets_colorés": {
"alias": " - leaderboard_bbh_raisonnement_sur_les_objets_colorés",
"acc_norm,none": 0.396,
- "acc_norm_stderr,none": 0.
+ "acc_norm_stderr,none": 0.03099319785457785
},
"leaderboard_bbh_recommandation_de_film": {
"alias": " - leaderboard_bbh_recommandation_de_film",
"acc_norm,none": 0.548,
- "acc_norm_stderr,none": 0.
+ "acc_norm_stderr,none": 0.031539864492556614
},
"leaderboard_bbh_sarcasmes": {
"alias": " - leaderboard_bbh_sarcasmes",
"acc_norm,none": 0.651685393258427,
- "acc_norm_stderr,none": 0.
+ "acc_norm_stderr,none": 0.035811144737534335
},
"leaderboard_bbh_sophismes_formels": {
"alias": " - leaderboard_bbh_sophismes_formels",
"acc_norm,none": 0.504,
- "acc_norm_stderr,none": 0.
+ "acc_norm_stderr,none": 0.03168519855119915
},
"leaderboard_bbh_suivi_objets_mélangés_cinq_objets": {
"alias": " - leaderboard_bbh_suivi_objets_mélangés_cinq_objets",
"acc_norm,none": 0.196,
- "acc_norm_stderr,none": 0.
+ "acc_norm_stderr,none": 0.02515685731325592
},
"leaderboard_bbh_suivi_objets_mélangés_sept_objets": {
"alias": " - leaderboard_bbh_suivi_objets_mélangés_sept_objets",
"acc_norm,none": 0.1,
- "acc_norm_stderr,none": 0.
+ "acc_norm_stderr,none": 0.019011727515734368
},
"leaderboard_bbh_suivi_objets_mélangés_trois_objets": {
"alias": " - leaderboard_bbh_suivi_objets_mélangés_trois_objets",
"acc_norm,none": 0.328,
- "acc_norm_stderr,none": 0.
+ "acc_norm_stderr,none": 0.029752391824475376
},
"leaderboard_bbh_séquences_temporelles": {
"alias": " - leaderboard_bbh_séquences_temporelles",
"acc_norm,none": 0.18,
- "acc_norm_stderr,none": 0.
+ "acc_norm_stderr,none": 0.02434689065029353
},
"leaderboard_bbh_toile_de_mensonges": {
"alias": " - leaderboard_bbh_toile_de_mensonges",
@@ -125,27 +125,27 @@
"leaderboard_gpqa_diamond_fr": {
"alias": " - leaderboard_gpqa_diamond_fr",
"acc_norm,none": 0.28426395939086296,
- "acc_norm_stderr,none": 0.
+ "acc_norm_stderr,none": 0.03221879607182462
},
"leaderboard_gpqa_extended_fr": {
"alias": " - leaderboard_gpqa_extended_fr",
"acc_norm,none": 0.31307550644567217,
- "acc_norm_stderr,none": 0.
+ "acc_norm_stderr,none": 0.019919544524825222
},
"leaderboard_gpqa_main_fr": {
"alias": " - leaderboard_gpqa_main_fr",
"acc_norm,none": 0.289044289044289,
- "acc_norm_stderr,none": 0.
+ "acc_norm_stderr,none": 0.021911978118877663
},
"leaderboard_ifeval_fr": {
"alias": " - leaderboard_ifeval_fr",
"prompt_level_strict_acc,none": 0.017475728155339806,
- "prompt_level_strict_acc_stderr,none": 0.
+ "prompt_level_strict_acc_stderr,none": 0.005779732312541064,
- "inst_level_strict_acc,none": 0.
+ "inst_level_strict_acc,none": 0.2261995430312262,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.02330097087378641,
- "prompt_level_loose_acc_stderr,none": 0.
+ "prompt_level_loose_acc_stderr,none": 0.006654046431364177,
- "inst_level_loose_acc,none": 0.
+ "inst_level_loose_acc,none": 0.22924600152322924,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard_fr": {
@@ -154,38 +154,38 @@
},
"leaderboard_math_algebra_hard_fr": {
"alias": " - leaderboard_math_algebra_hard_fr",
- "exact_match,none": 0.
+ "exact_match,none": 0.28285714285714286,
- "exact_match_stderr,none": 0.
+ "exact_match_stderr,none": 0.024108694860670427
},
"leaderboard_math_counting_and_prob_hard_fr": {
"alias": " - leaderboard_math_counting_and_prob_hard_fr",
- "exact_match,none": 0.
+ "exact_match,none": 0.05102040816326531,
- "exact_match_stderr,none": 0.
+ "exact_match_stderr,none": 0.015757346860192097
},
"leaderboard_math_geometry_hard_fr": {
"alias": " - leaderboard_math_geometry_hard_fr",
- "exact_match,none": 0.
+ "exact_match,none": 0.05339805825242718,
- "exact_match_stderr,none": 0.
+ "exact_match_stderr,none": 0.015702525139063304
},
"leaderboard_math_num_theory_hard_fr": {
"alias": " - leaderboard_math_num_theory_hard_fr",
- "exact_match,none": 0.
+ "exact_match,none": 0.09216589861751152,
- "exact_match_stderr,none": 0.
+ "exact_match_stderr,none": 0.019681649865624786
},
"leaderboard_math_prealgebra_hard_fr": {
"alias": " - leaderboard_math_prealgebra_hard_fr",
- "exact_match,none": 0.
+ "exact_match,none": 0.35398230088495575,
- "exact_match_stderr,none": 0.
+ "exact_match_stderr,none": 0.045186007587007644
},
"leaderboard_math_precalculus_hard_fr": {
"alias": " - leaderboard_math_precalculus_hard_fr",
"exact_match,none": 0.047619047619047616,
- "exact_match_stderr,none": 0.
+ "exact_match_stderr,none": 0.02337527112732456
},
"leaderboard_mmlu_fr": {
"alias": " - leaderboard_mmlu_fr",
- "acc,none": 0.
+ "acc,none": 0.5615296966244124,
- "acc_stderr,none": 0.
+ "acc_stderr,none": 0.004187525235419421
},
"leaderboard_musr_fr": {
" ": " ",
@@ -194,7 +194,7 @@
"leaderboard_musr_murder_mysteries_fr": {
"alias": " - leaderboard_musr_murder_mysteries_fr",
"acc_norm,none": 0.476,
- "acc_norm_stderr,none": 0.
+ "acc_norm_stderr,none": 0.03164968895968782
},
"leaderboard_musr_object_placements_fr": {
"alias": " - leaderboard_musr_object_placements_fr",
@@ -203,8 +203,8 @@
},
"leaderboard_musr_team_allocation_fr": {
"alias": " - leaderboard_musr_team_allocation_fr",
- "acc_norm,none": 0.
+ "acc_norm,none": 0.244,
- "acc_norm_stderr,none": 0.
+ "acc_norm_stderr,none": 0.027217995464553182
}
},
"group_subtasks": {
@@ -1521,7 +1521,7 @@
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n",
- "samples": "<function list_fewshot_samples at
+ "samples": "<function list_fewshot_samples at 0x78c32202d5a0>"
},
"num_fewshot": 4,
"metric_list": [
@@ -1564,7 +1564,7 @@
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n",
- "samples": "<function list_fewshot_samples at
+ "samples": "<function list_fewshot_samples at 0x78c3135e7c70>"
},
"num_fewshot": 4,
"metric_list": [
@@ -1607,7 +1607,7 @@
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n",
- "samples": "<function list_fewshot_samples at
+ "samples": "<function list_fewshot_samples at 0x78c3135e6170>"
},
"num_fewshot": 4,
"metric_list": [
@@ -1650,7 +1650,7 @@
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n",
- "samples": "<function list_fewshot_samples at
+ "samples": "<function list_fewshot_samples at 0x78c3135bd2d0>"
},
"num_fewshot": 4,
"metric_list": [
@@ -1693,7 +1693,7 @@
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n",
- "samples": "<function list_fewshot_samples at
+ "samples": "<function list_fewshot_samples at 0x78c3135e4040>"
},
"num_fewshot": 4,
"metric_list": [
@@ -1736,7 +1736,7 @@
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n",
- "samples": "<function list_fewshot_samples at
+ "samples": "<function list_fewshot_samples at 0x78c3135bcaf0>"
},
"num_fewshot": 4,
"metric_list": [
@@ -2248,8 +2248,8 @@
},
"config": {
"model": "vllm",
- "model_args": "pretrained=MaziyarPanahi/calme-3.1-instruct-3b,tensor_parallel_size=1,dtype=auto,gpu_memory_utilization=0.
+ "model_args": "pretrained=MaziyarPanahi/calme-3.1-instruct-3b,tensor_parallel_size=1,dtype=auto,gpu_memory_utilization=0.7,data_parallel_size=4,enforce_eager=true,max_model_len=4096",
- "batch_size":
+ "batch_size": "auto",
"batch_sizes": [],
"device": null,
"use_cache": null,
@@ -2262,9 +2262,9 @@
"fewshot_seed": 1234
},
"git_hash": "64286c9b",
- "date":
+ "date": 1732299281.6836495,
- "pretty_env_info": "PyTorch version: 2.
+ "pretty_env_info": "PyTorch version: 2.5.1+cu124\nIs debug build: False\nCUDA used to build PyTorch: 12.4\nROCM used to build PyTorch: N/A\n\nOS: Ubuntu 22.04.5 LTS (x86_64)\nGCC version: (Ubuntu 11.4.0-1ubuntu1~22.04) 11.4.0\nClang version: Could not collect\nCMake version: version 3.22.1\nLibc version: glibc-2.35\n\nPython version: 3.10.12 (main, Nov 6 2024, 20:22:13) [GCC 11.4.0] (64-bit runtime)\nPython platform: Linux-6.5.0-1024-azure-x86_64-with-glibc2.35\nIs CUDA available: True\nCUDA runtime version: 12.4.131\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: \nGPU 0: NVIDIA A100 80GB PCIe\nGPU 1: NVIDIA A100 80GB PCIe\nGPU 2: NVIDIA A100 80GB PCIe\nGPU 3: NVIDIA A100 80GB PCIe\n\nNvidia driver version: 550.90.07\ncuDNN version: Could not collect\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nAddress sizes: 48 bits physical, 48 bits virtual\nByte Order: Little Endian\nCPU(s): 96\nOn-line CPU(s) list: 0-95\nVendor ID: AuthenticAMD\nModel name: AMD EPYC 7V13 64-Core Processor\nCPU family: 25\nModel: 1\nThread(s) per core: 1\nCore(s) per socket: 48\nSocket(s): 2\nStepping: 1\nBogoMIPS: 4890.87\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good nopl tsc_reliable nonstop_tsc cpuid extd_apicid aperfmperf pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm cmp_legacy cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw topoext perfctr_core invpcid_single vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid rdseed adx smap clflushopt clwb sha_ni xsaveopt xsavec xgetbv1 xsaves clzero xsaveerptr rdpru arat umip vaes vpclmulqdq rdpid fsrm\nHypervisor vendor: Microsoft\nVirtualization type: full\nL1d cache: 3 MiB (96 instances)\nL1i cache: 3 MiB (96 instances)\nL2 cache: 48 MiB (96 instances)\nL3 cache: 384 MiB (12 instances)\nNUMA node(s): 4\nNUMA node0 CPU(s): 0-23\nNUMA node1 CPU(s): 24-47\nNUMA node2 CPU(s): 48-71\nNUMA node3 CPU(s): 72-95\nVulnerability Gather data sampling: Not affected\nVulnerability Itlb multihit: Not affected\nVulnerability L1tf: Not affected\nVulnerability Mds: Not affected\nVulnerability Meltdown: Not affected\nVulnerability Mmio stale data: Not affected\nVulnerability Retbleed: Not affected\nVulnerability Spec rstack overflow: Vulnerable: Safe RET, no microcode\nVulnerability Spec store bypass: Vulnerable\nVulnerability Spectre v1: Mitigation; usercopy/swapgs barriers and __user pointer sanitization\nVulnerability Spectre v2: Mitigation; Retpolines; STIBP disabled; RSB filling; PBRSB-eIBRS Not affected; BHI Not affected\nVulnerability Srbds: Not affected\nVulnerability Tsx async abort: Not affected\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] torch==2.5.1\n[pip3] torchvision==0.20.1\n[pip3] triton==3.1.0\n[conda] No relevant packages",
- "transformers_version": "4.46.
+ "transformers_version": "4.46.3",
"upper_git_hash": null,
"tokenizer_pad_token": [
"<|endoftext|>",
@@ -2279,7 +2279,7 @@
"None"
],
"eot_token_id": 151645,
- "max_length":
+ "max_length": 4096,
"task_hashes": {},
"model_source": "vllm",
"model_name": "MaziyarPanahi/calme-3.1-instruct-3b",
@@ -2289,7 +2289,7 @@
"fewshot_as_multiturn": false,
"chat_template": null,
"chat_template_sha": null,
- "start_time":
+ "start_time": 7491.564958994,
- "end_time":
+ "end_time": 9129.853715093,
- "total_evaluation_time_seconds": "
+ "total_evaluation_time_seconds": "1638.2887560990002"
}
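The rename above replaces an earlier run's results file with a re-run whose stderr fields and several scores differ (the removed values are truncated in this rendering). A minimal sketch of how such a pair of result files could be compared locally, assuming the usual top-level "results" key shown in the hunks; the old file exists only in the parent commit 481136a, and the diffing helper is an illustration rather than dataset tooling:

import json

def changed_metrics(old_path, new_path):
    """Return {task: {metric: (old, new)}} for metrics whose values differ."""
    with open(old_path) as f:
        old = json.load(f)["results"]
    with open(new_path) as f:
        new = json.load(f)["results"]
    diff = {}
    for task in sorted(set(old) | set(new)):
        old_task, new_task = old.get(task, {}), new.get(task, {})
        for metric in sorted(set(old_task) | set(new_task)):
            if old_task.get(metric) != new_task.get(metric):
                diff.setdefault(task, {})[metric] = (old_task.get(metric), new_task.get(metric))
    return diff

# File names as they appear in this commit (old side checked out from the parent commit):
base = "MaziyarPanahi/calme-3.1-instruct-3b"
print(changed_metrics(
    f"{base}/results_2024-11-11T12-11-00.982797.json",
    f"{base}/results_2024-11-22T18-41-54.004959.json",
))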
MaziyarPanahi/calme-3.1-instruct-3b/{results_2024-11-11T12-11-00.982797_norm.json → results_2024-11-22T18-41-54.004959_norm.json}
RENAMED
@@ -11,16 +11,16 @@
"metric_name": 0.060599999999999994
},
"IFEval-fr": {
- "metric_name": 0.
+ "metric_name": 0.12179999999999999
},
"MUSR-fr": {
"metric_name": 0.0387
},
"MATH Lvl5-fr": {
- "metric_name": 0.
+ "metric_name": 0.14679999999999999
},
"MMMLU-fr": {
- "metric_name": 0.
+ "metric_name": 0.5128
}
}
}
MaziyarPanahi/calme-3.2-baguette-3b/results_2024-11-22T19-11-43.556179.json
ADDED
The diff for this file is too large to render. See raw diff.
MaziyarPanahi/calme-3.2-baguette-3b/results_2024-11-22T19-11-43.556179_norm.json
ADDED
@@ -0,0 +1,26 @@
+ {
+ "config": {
+ "model_name": "MaziyarPanahi/calme-3.2-baguette-3b",
+ "model_dtype": "torch.float16"
+ },
+ "results": {
+ "BBH-fr": {
+ "metric_name": 0.2065
+ },
+ "GPQA-fr": {
+ "metric_name": 0.07519999999999999
+ },
+ "IFEval-fr": {
+ "metric_name": 0.1441
+ },
+ "MUSR-fr": {
+ "metric_name": 0.0176
+ },
+ "MATH Lvl5-fr": {
+ "metric_name": 0.1875
+ },
+ "MMMLU-fr": {
+ "metric_name": 0.5114
+ }
+ }
+ }
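Each *_norm.json added in this commit holds one normalized score per benchmark, keyed under "results" with a "metric_name" value, as shown above. A minimal sketch of reducing those six scores to a single number, assuming a plain unweighted mean for illustration (the leaderboard's actual aggregation rule may differ):

import json

def mean_norm_score(norm_path):
    """Average the per-benchmark normalized scores in a *_norm.json file."""
    with open(norm_path) as f:
        data = json.load(f)
    scores = [entry["metric_name"] for entry in data["results"].values()]
    return sum(scores) / len(scores)

print(mean_norm_score(
    "MaziyarPanahi/calme-3.2-baguette-3b/results_2024-11-22T19-11-43.556179_norm.json"
))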
MaziyarPanahi/calme-3.2-instruct-3b/results_2024-11-22T19-39-13.259226.json
ADDED
The diff for this file is too large to render. See raw diff.
MaziyarPanahi/calme-3.2-instruct-3b/results_2024-11-22T19-39-13.259226_norm.json
ADDED
@@ -0,0 +1,26 @@
+ {
+ "config": {
+ "model_name": "MaziyarPanahi/calme-3.2-instruct-3b",
+ "model_dtype": "torch.float16"
+ },
+ "results": {
+ "BBH-fr": {
+ "metric_name": 0.2309
+ },
+ "GPQA-fr": {
+ "metric_name": 0.0583
+ },
+ "IFEval-fr": {
+ "metric_name": 0.1259
+ },
+ "MUSR-fr": {
+ "metric_name": 0.0518
+ },
+ "MATH Lvl5-fr": {
+ "metric_name": 0.1508
+ },
+ "MMMLU-fr": {
+ "metric_name": 0.5149
+ }
+ }
+ }
microsoft/Phi-3-small-128k-instruct/results_2024-11-22T20-20-13.815550.json
ADDED
The diff for this file is too large to render. See raw diff.
microsoft/Phi-3-small-128k-instruct/results_2024-11-22T20-20-13.815550_norm.json
ADDED
@@ -0,0 +1,26 @@
+ {
+ "config": {
+ "model_name": "microsoft/Phi-3-small-128k-instruct",
+ "model_dtype": "torch.float16"
+ },
+ "results": {
+ "BBH-fr": {
+ "metric_name": 0.3604
+ },
+ "GPQA-fr": {
+ "metric_name": 0.053200000000000004
+ },
+ "IFEval-fr": {
+ "metric_name": 0.12990000000000002
+ },
+ "MUSR-fr": {
+ "metric_name": 0.0827
+ },
+ "MATH Lvl5-fr": {
+ "metric_name": 0.1361
+ },
+ "MMMLU-fr": {
+ "metric_name": 0.6104999999999999
+ }
+ }
+ }
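Taken together, the *_norm.json files form a small per-model summary table. A minimal sketch that walks a local checkout of this dataset (the root path and org/model directory depth are assumptions based on the file names in this commit) and prints one row of normalized scores per model:

import json
from pathlib import Path

def summarize(repo_root="."):
    """Print each model's normalized benchmark scores from its *_norm.json files."""
    for norm_file in sorted(Path(repo_root).glob("*/*/results_*_norm.json")):
        data = json.loads(norm_file.read_text())
        model = data["config"]["model_name"]
        scores = {bench: entry["metric_name"] for bench, entry in data["results"].items()}
        row = "  ".join(f"{bench}={value:.4f}" for bench, value in scores.items())
        print(f"{model}: {row}")

summarize()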