{
"results": {
"anli_r1": {
"acc": 0.352,
"acc_stderr": 0.015110404505648668
},
"anli_r2": {
"acc": 0.327,
"acc_stderr": 0.014842213153411247
},
"anli_r3": {
"acc": 0.3458333333333333,
"acc_stderr": 0.013736245342311012
},
"cb": {
"acc": 0.44642857142857145,
"acc_stderr": 0.06703189227942397,
"f1": 0.3134878193701723
},
"copa": {
"acc": 0.79,
"acc_stderr": 0.040936018074033256
},
"hellaswag": {
"acc": 0.4457279426409082,
"acc_stderr": 0.004960299952519412,
"acc_norm": 0.5867357100179247,
"acc_norm_stderr": 0.00491413085543178
},
"rte": {
"acc": 0.5415162454873647,
"acc_stderr": 0.029992535385373314
},
"winogrande": {
"acc": 0.5730071033938438,
"acc_stderr": 0.013901878072575055
},
"storycloze_2016": {
"acc": 0.7071084981293426,
"acc_stderr": 0.010523873293246309
},
"boolq": {
"acc": 0.5960244648318043,
"acc_stderr": 0.008582268854021401
},
"arc_easy": {
"acc": 0.6153198653198653,
"acc_stderr": 0.00998317170700901,
"acc_norm": 0.5989057239057239,
"acc_norm_stderr": 0.010057051106534385
},
"arc_challenge": {
"acc": 0.27474402730375425,
"acc_stderr": 0.013044617212771227,
"acc_norm": 0.30887372013651876,
"acc_norm_stderr": 0.013501770929344003
},
"sciq": {
"acc": 0.891,
"acc_stderr": 0.009859828407037191,
"acc_norm": 0.863,
"acc_norm_stderr": 0.010878848714333316
},
"piqa": {
"acc": 0.7301414581066377,
"acc_stderr": 0.0103565954218522,
"acc_norm": 0.7312295973884657,
"acc_norm_stderr": 0.01034339294009
}
},
"versions": {
"anli_r1": 0,
"anli_r2": 0,
"anli_r3": 0,
"cb": 1,
"copa": 0,
"hellaswag": 0,
"rte": 0,
"winogrande": 0,
"storycloze_2016": 0,
"boolq": 1,
"arc_easy": 0,
"arc_challenge": 0,
"sciq": 0,
"piqa": 0
}
}