{
"results": {
"arc_easy": {
"acc": 0.43223905723905726,
"acc_stderr": 0.010165130379698762,
"acc_norm": 0.3867845117845118,
"acc_norm_stderr": 0.009993308355370987
},
"boolq": {
"acc": 0.6091743119266055,
"acc_stderr": 0.00853404406545651
},
"lambada_openai": {
"ppl": 74.2398707720553,
"ppl_stderr": 2.903833914079987,
"acc": 0.2604308169998059,
"acc_stderr": 0.0061143129377882795
},
"openbookqa": {
"acc": 0.144,
"acc_stderr": 0.015716934945725767,
"acc_norm": 0.278,
"acc_norm_stderr": 0.020055833888070914
},
"piqa": {
"acc": 0.5908596300326442,
"acc_stderr": 0.011471593460443328,
"acc_norm": 0.5870511425462459,
"acc_norm_stderr": 0.011487658725079094
},
"winogrande": {
"acc": 0.5224940805051302,
"acc_stderr": 0.014038257824059874
}
},
"versions": {
"arc_easy": 0,
"boolq": 1,
"lambada_openai": 0,
"openbookqa": 0,
"piqa": 0,
"winogrande": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=BEE-spoke-data/smol_llama-101M-GQA,revision=main,trust_remote_code=True,dtype='float'",
"num_fewshot": 0,
"batch_size": "8",
"batch_sizes": [],
"device": "cuda",
"no_cache": false,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
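
These numbers appear to come from EleutherAI's lm-evaluation-harness; the "hf-causal-experimental" model type, the per-task "versions" map, and the "config" keys match its v0.3-era output format. The snippet below is a minimal sketch (not part of the original file) showing how one might load such a results JSON and print each task's accuracy with its standard error; the filename "smol_llama-101M-GQA.json" is a hypothetical placeholder for wherever this file is saved.

import json

# Load the results file produced by the harness run (hypothetical path).
with open("smol_llama-101M-GQA.json") as f:
    data = json.load(f)

for task, metrics in data["results"].items():
    # Prefer length-normalized accuracy where the task reports it
    # (arc_easy, openbookqa, piqa above); fall back to plain accuracy.
    acc = metrics.get("acc_norm", metrics.get("acc"))
    err = metrics.get("acc_norm_stderr", metrics.get("acc_stderr"))
    print(f"{task:>15}: {acc:.4f} +/- {err:.4f}")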