{
"results": {
"anli_r1": {
"acc": 0.325,
"acc_stderr": 0.014818724459095524
},
"anli_r2": {
"acc": 0.331,
"acc_stderr": 0.014888272588203928
},
"anli_r3": {
"acc": 0.3225,
"acc_stderr": 0.013499258621103245
},
"cb": {
"acc": 0.48214285714285715,
"acc_stderr": 0.0673769750864465,
"f1": 0.3461538461538461
},
"copa": {
"acc": 0.66,
"acc_stderr": 0.04760952285695238
},
"hellaswag": {
"acc": 0.37552280422226647,
"acc_stderr": 0.004832679188788792,
"acc_norm": 0.4534953196574388,
"acc_norm_stderr": 0.004968151878211054
},
"rte": {
"acc": 0.48375451263537905,
"acc_stderr": 0.030080573208738064
},
"winogrande": {
"acc": 0.5359116022099447,
"acc_stderr": 0.014016193433958308
},
"storycloze_2016": {
"acc": 0.6419027258150721,
"acc_stderr": 0.011087006809925713
},
"boolq": {
"acc": 0.5094801223241591,
"acc_stderr": 0.008743482951361247
},
"arc_easy": {
"acc": 0.4671717171717172,
"acc_stderr": 0.010237645778853848,
"acc_norm": 0.4166666666666667,
"acc_norm_stderr": 0.010116282977781239
},
"arc_challenge": {
"acc": 0.24232081911262798,
"acc_stderr": 0.012521593295800116,
"acc_norm": 0.24658703071672355,
"acc_norm_stderr": 0.01259572626879013
},
"sciq": {
"acc": 0.703,
"acc_stderr": 0.0144568322948011,
"acc_norm": 0.628,
"acc_norm_stderr": 0.015292149942040577
},
"piqa": {
"acc": 0.6947769314472253,
"acc_stderr": 0.01074426704560648,
"acc_norm": 0.6947769314472253,
"acc_norm_stderr": 0.01074426704560648
}
},
"versions": {
"anli_r1": 0,
"anli_r2": 0,
"anli_r3": 0,
"cb": 1,
"copa": 0,
"hellaswag": 0,
"rte": 0,
"winogrande": 0,
"storycloze_2016": 0,
"boolq": 1,
"arc_easy": 0,
"arc_challenge": 0,
"sciq": 0,
"piqa": 0
}
}