{
"results": {
"anli_r1": {
"acc": 0.322,
"acc_stderr": 0.014782913600996666
},
"anli_r2": {
"acc": 0.329,
"acc_stderr": 0.014865395385928362
},
"anli_r3": {
"acc": 0.3408333333333333,
"acc_stderr": 0.013688600793296932
},
"cb": {
"acc": 0.35714285714285715,
"acc_stderr": 0.0646095738380922,
"f1": 0.22886002886002887
},
"copa": {
"acc": 0.77,
"acc_stderr": 0.04229525846816506
},
"hellaswag": {
"acc": 0.4501095399322844,
"acc_stderr": 0.00496487956351331,
"acc_norm": 0.5963951404102769,
"acc_norm_stderr": 0.004896173035943317
},
"rte": {
"acc": 0.5234657039711191,
"acc_stderr": 0.03006330041190266
},
"winogrande": {
"acc": 0.5785319652722968,
"acc_stderr": 0.013878072377497606
},
"storycloze_2016": {
"acc": 0.7087119187600214,
"acc_stderr": 0.010506919924163615
},
"boolq": {
"acc": 0.5620795107033639,
"acc_stderr": 0.008677388652709263
},
"arc_easy": {
"acc": 0.5904882154882155,
"acc_stderr": 0.01009036816099006,
"acc_norm": 0.5643939393939394,
"acc_norm_stderr": 0.010174341733665226
},
"arc_challenge": {
"acc": 0.26023890784982934,
"acc_stderr": 0.012821930225112573,
"acc_norm": 0.295221843003413,
"acc_norm_stderr": 0.01332975029338232
},
"sciq": {
"acc": 0.877,
"acc_stderr": 0.010391293421849874,
"acc_norm": 0.853,
"acc_norm_stderr": 0.01120341539516033
},
"piqa": {
"acc": 0.7546245919477693,
"acc_stderr": 0.010039831320422396,
"acc_norm": 0.7595212187159956,
"acc_norm_stderr": 0.009971345364651062
}
},
"versions": {
"anli_r1": 0,
"anli_r2": 0,
"anli_r3": 0,
"cb": 1,
"copa": 0,
"hellaswag": 0,
"rte": 0,
"winogrande": 0,
"storycloze_2016": 0,
"boolq": 1,
"arc_easy": 0,
"arc_challenge": 0,
"sciq": 0,
"piqa": 0
}
}