results/4yo1/llama3-eng-ko-8b-sl4/result_2024-07-10 12:28:24.json
{
"results": {
"ko_eqbench": {
"alias": " - ko_eqbench",
"eqbench,none": 8.905777557083756,
"eqbench_stderr,none": 2.086819150769451,
"percent_parseable,none": 35.08771929824562,
"percent_parseable_stderr,none": 3.660298834049165
},
"ko_gpqa_diamond_zeroshot": {
"alias": " - ko_gpqa_diamond_zeroshot",
"acc_norm,none": 0.18686868686868688,
"acc_norm_stderr,none": 0.027772533334218988
},
"ko_gsm8k": {
"alias": " - ko_gsm8k",
"exact_match,strict-match": 0.2714177407126611,
"exact_match_stderr,strict-match": 0.012249002026150582,
"exact_match,flexible-extract": 0.2979529946929492,
"exact_match_stderr,flexible-extract": 0.01259793223291452
},
"ko_ifeval": {
"alias": " - ko_ifeval",
"prompt_level_strict_acc,none": 0.3319838056680162,
"prompt_level_strict_acc_stderr,none": 0.021209405011398043,
"inst_level_strict_acc,none": 0.413946587537092,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.4291497975708502,
"prompt_level_loose_acc_stderr,none": 0.02229164283755372,
"inst_level_loose_acc,none": 0.516320474777448,
"inst_level_loose_acc_stderr,none": "N/A"
},
"ko_winogrande": {
"alias": " - ko_winogrande",
"acc,none": 0.5445935280189423,
"acc_stderr,none": 0.01399648503772978
},
"kornat_common": {
"alias": " - kornat_common",
"acc_norm,none": 0.1810918774966711,
"acc_norm_stderr,none": 0.00496864832622276
},
"kornat_harmless": {
"alias": " - kornat_harmless",
"acc_norm,none": 0.6273666666666885,
"acc_norm_stderr,none": 0.0020726747448792378
},
"kornat_helpful": {
"alias": " - kornat_helpful",
"acc_norm,none": 0.49275,
"acc_norm_stderr,none": 0.007101861514313354
},
"kornat_social": {
"alias": " - kornat_social",
"A-SVA,none": 0.4961319620791059,
"A-SVA_stderr,none": 0.003806780014028084
}
},
"versions": {
"all": 2,
"ko_eqbench": 2,
"ko_gpqa_diamond_zeroshot": 2,
"ko_gsm8k": 2,
"ko_ifeval": 2,
"ko_winogrande": 2,
"kornat_common": 2,
"kornat_harmless": 2,
"kornat_helpful": 2,
"kornat_social": 2
},
"config_general": {
"model_name": "4yo1/llama3-eng-ko-8b-sl4",
"model_sha": "25a488e868ecf3d10fef14a534257043b895df7e",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}