{
  "results": {
    "anli_r1": {
      "acc": 0.332,
      "acc_stderr": 0.014899597242811488
    },
    "anli_r2": {
      "acc": 0.316,
      "acc_stderr": 0.014709193056057118
    },
    "anli_r3": {
      "acc": 0.3308333333333333,
      "acc_stderr": 0.013588208070708995
    },
    "cb": {
      "acc": 0.44642857142857145,
      "acc_stderr": 0.06703189227942398,
      "f1": 0.28154851684263454
    },
    "copa": {
      "acc": 0.81,
      "acc_stderr": 0.03942772444036623
    },
    "hellaswag": {
      "acc": 0.4366660027882892,
      "acc_stderr": 0.004949589567678892,
      "acc_norm": 0.5635331607249552,
      "acc_norm_stderr": 0.0049493353568818635
    },
    "rte": {
      "acc": 0.5342960288808665,
      "acc_stderr": 0.030025579819366426
    },
    "winogrande": {
      "acc": 0.5808997632202052,
      "acc_stderr": 0.013867325192210117
    },
    "storycloze_2016": {
      "acc": 0.7151256012827365,
      "acc_stderr": 0.01043751398661171
    },
    "boolq": {
      "acc": 0.5703363914373089,
      "acc_stderr": 0.00865809540849789
    },
    "arc_easy": {
      "acc": 0.6136363636363636,
      "acc_stderr": 0.009991296778159617,
      "acc_norm": 0.5812289562289562,
      "acc_norm_stderr": 0.010123487160167813
    },
    "arc_challenge": {
      "acc": 0.27303754266211605,
      "acc_stderr": 0.013019332762635746,
      "acc_norm": 0.29436860068259385,
      "acc_norm_stderr": 0.013318528460539426
    },
    "sciq": {
      "acc": 0.88,
      "acc_stderr": 0.010281328012747386,
      "acc_norm": 0.863,
      "acc_norm_stderr": 0.010878848714333318
    },
    "piqa": {
      "acc": 0.7181719260065288,
      "acc_stderr": 0.010496675231258166,
      "acc_norm": 0.7132752992383025,
      "acc_norm_stderr": 0.01055131450310808
    }
  },
  "versions": {
    "anli_r1": 0,
    "anli_r2": 0,
    "anli_r3": 0,
    "cb": 1,
    "copa": 0,
    "hellaswag": 0,
    "rte": 0,
    "winogrande": 0,
    "storycloze_2016": 0,
    "boolq": 1,
    "arc_easy": 0,
    "arc_challenge": 0,
    "sciq": 0,
    "piqa": 0
  }
}