results/DBCMLAB/Llama-3-instruction-constructionsafety-layertuning/result_2024-05-24 06:09:39.json
{
  "results": {
    "ko_eqbench": {
      "alias": " - ko_eqbench",
      "eqbench,none": 12.155960092709961,
      "eqbench_stderr,none": 2.4192283368965004,
      "percent_parseable,none": 71.34502923976608,
      "percent_parseable_stderr,none": 3.467826685703822
    },
    "ko_gpqa_diamond_zeroshot": {
      "alias": " - ko_gpqa_diamond_zeroshot",
      "acc_norm,none": 0.24242424242424243,
      "acc_norm_stderr,none": 0.030532892233932032
    },
    "ko_gsm8k": {
      "alias": " - ko_gsm8k",
      "exact_match,strict-match": 0.23730098559514784,
      "exact_match_stderr,strict-match": 0.01171840917873944,
      "exact_match,flexible-extract": 0.2714177407126611,
      "exact_match_stderr,flexible-extract": 0.012249002026150594
    },
    "ko_ifeval": {
      "alias": " - ko_ifeval",
      "prompt_level_strict_acc,none": 0.2732793522267207,
      "prompt_level_strict_acc_stderr,none": 0.020070754554423323,
      "inst_level_strict_acc,none": 0.3427299703264095,
      "inst_level_strict_acc_stderr,none": "N/A",
      "prompt_level_loose_acc,none": 0.2732793522267207,
      "prompt_level_loose_acc_stderr,none": 0.020070754554423323,
      "inst_level_loose_acc,none": 0.34421364985163205,
      "inst_level_loose_acc_stderr,none": "N/A"
    },
    "ko_winogrande": {
      "alias": " - ko_winogrande",
      "acc,none": 0.665351223362273,
      "acc_stderr,none": 0.013261823629558364
    },
    "kornat_common": {
      "alias": " - kornat_common",
      "acc_norm,none": 0.37400133155792276,
      "acc_norm_stderr,none": 0.006243016215347783
    },
    "kornat_harmless": {
      "alias": " - kornat_harmless",
      "acc_norm,none": 0.6126666666666837,
      "acc_norm_stderr,none": 0.00210289694252659
    },
    "kornat_helpful": {
      "alias": " - kornat_helpful",
      "acc_norm,none": 0.43025,
      "acc_norm_stderr,none": 0.006894108580270328
    },
    "kornat_social": {
      "alias": " - kornat_social",
      "A-SVA,none": 0.5127789530893125,
      "A-SVA_stderr,none": 0.0037450658720781897
    }
  },
  "versions": {
    "all": 2,
    "ko_eqbench": 2,
    "ko_gpqa_diamond_zeroshot": 2,
    "ko_gsm8k": 2,
    "ko_ifeval": 2,
    "ko_winogrande": 2,
    "kornat_common": 2,
    "kornat_harmless": 2,
    "kornat_helpful": 2,
    "kornat_social": 2
  },
  "config_general": {
    "model_name": "DBCMLAB/Llama-3-instruction-constructionsafety-layertuning",
    "model_sha": "3a5875d841eb6f95547eca77e519ede6069f6a57",
    "model_dtype": "torch.float16",
    "lighteval_sha": "",
    "num_few_shot_default": 0,
    "num_fewshot_seeds": 1,
    "override_batch_size": 1,
    "max_samples": null
  }
}
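The file groups per-task scores under "results", task versions under "versions", and run settings under "config_general". Below is a minimal sketch, not part of the results file itself, of how one might load this JSON and print one headline metric per task. The local path, the helper name headline_metric, and the choice of which metric key counts as the headline are all assumptions made for illustration.

import json

# Assumed local path to the file shown above; adjust as needed.
PATH = "result_2024-05-24 06:09:39.json"

# Which key to report per task is a choice made here, not something the
# file specifies: prefer common accuracy-style fields, otherwise fall back
# to the first numeric, non-stderr field.
PREFERRED_KEYS = ("acc,none", "acc_norm,none", "exact_match,strict-match")

def headline_metric(task_results):
    for key in PREFERRED_KEYS:
        if key in task_results:
            return key, task_results[key]
    for key, value in task_results.items():
        if key != "alias" and "stderr" not in key and isinstance(value, (int, float)):
            return key, value
    return "n/a", float("nan")

with open(PATH, encoding="utf-8") as f:
    data = json.load(f)

print("model:", data["config_general"]["model_name"])
for task, task_results in data["results"].items():
    key, value = headline_metric(task_results)
    print(f"{task:28s} {key:30s} {value}")

For this particular file the sketch would report, for example, acc,none for ko_winogrande, exact_match,strict-match for ko_gsm8k, and fall back to eqbench,none for ko_eqbench and A-SVA,none for kornat_social.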