{
  "results": {
    "ko_eqbench": {
      "alias": " - ko_eqbench",
      "eqbench,none": 0.0,
      "eqbench_stderr,none": 0.0,
      "percent_parseable,none": 0.0,
      "percent_parseable_stderr,none": 0.0
    },
    "ko_gpqa_diamond_zeroshot": {
      "alias": " - ko_gpqa_diamond_zeroshot",
      "acc_norm,none": 0.23737373737373738,
      "acc_norm_stderr,none": 0.0303137105381989
    },
    "ko_gsm8k": {
      "alias": " - ko_gsm8k",
      "exact_match,strict-match": 0.026535253980288095,
      "exact_match_stderr,strict-match": 0.004427045987265169,
      "exact_match,flexible-extract": 0.04169825625473844,
      "exact_match_stderr,flexible-extract": 0.005506205058175768
    },
    "ko_ifeval": {
      "alias": " - ko_ifeval",
      "prompt_level_strict_acc,none": 0.23279352226720648,
      "prompt_level_strict_acc_stderr,none": 0.019033476340855927,
      "inst_level_strict_acc,none": 0.3249258160237389,
      "inst_level_strict_acc_stderr,none": "N/A",
      "prompt_level_loose_acc,none": 0.23279352226720648,
      "prompt_level_loose_acc_stderr,none": 0.019033476340855927,
      "inst_level_loose_acc,none": 0.3293768545994065,
      "inst_level_loose_acc_stderr,none": "N/A"
    },
    "ko_winogrande": {
      "alias": " - ko_winogrande",
      "acc,none": 0.5485398579321231,
      "acc_stderr,none": 0.01398611030101776
    },
    "kornat_common": {
      "alias": " - kornat_common",
      "acc_norm,none": 0.18908122503328895,
      "acc_norm_stderr,none": 0.005052241076886808
    },
    "kornat_harmless": {
      "alias": " - kornat_harmless",
      "acc_norm,none": 0.5811333333333437,
      "acc_norm_stderr,none": 0.002069876538185862
    },
    "kornat_helpful": {
      "alias": " - kornat_helpful",
      "acc_norm,none": 0.4265,
      "acc_norm_stderr,none": 0.006897293768621575
    },
    "kornat_social": {
      "alias": " - kornat_social",
      "A-SVA,none": 0.5091491378727735,
      "A-SVA_stderr,none": 0.00376634294685384
    }
  },
  "versions": {
    "all": 2,
    "ko_eqbench": 2,
    "ko_gpqa_diamond_zeroshot": 2,
    "ko_gsm8k": 2,
    "ko_ifeval": 2,
    "ko_winogrande": 2,
    "kornat_common": 2,
    "kornat_harmless": 2,
    "kornat_helpful": 2,
    "kornat_social": 2
  },
  "config_general": {
    "model_name": "Taekyoon/llama2-ko-7b-test",
    "model_sha": "6420bc2779920edc7981be42ce88ec1244a11f42",
    "model_dtype": "torch.float16",
    "lighteval_sha": "",
    "num_few_shot_default": 0,
    "num_fewshot_seeds": 1,
    "override_batch_size": 1,
    "max_samples": null
  }
}