results/MNCJ1hun/Mistral-7B-OP-u1k-ver0.5/result_2023-10-29 12:13:11.json
{
  "results": {
    "ko_eqbench": {
      "alias": " - ko_eqbench",
      "eqbench,none": 0.0,
      "eqbench_stderr,none": 0.0,
      "percent_parseable,none": 0.0,
      "percent_parseable_stderr,none": 0.0
    },
    "ko_gpqa_diamond_zeroshot": {
      "alias": " - ko_gpqa_diamond_zeroshot",
      "acc_norm,none": 0.23232323232323232,
      "acc_norm_stderr,none": 0.030088629490217487
    },
    "ko_gsm8k": {
      "alias": " - ko_gsm8k",
      "exact_match,strict-match": 0.10386656557998483,
      "exact_match_stderr,strict-match": 0.00840362222892402,
      "exact_match,flexible-extract": 0.1417740712661107,
      "exact_match_stderr,flexible-extract": 0.009608188527765571
    },
    "ko_ifeval": {
      "alias": " - ko_ifeval",
      "prompt_level_strict_acc,none": 0.1659919028340081,
      "prompt_level_strict_acc_stderr,none": 0.01675734170655029,
      "inst_level_strict_acc,none": 0.2195845697329377,
      "inst_level_strict_acc_stderr,none": "N/A",
      "prompt_level_loose_acc,none": 0.18421052631578946,
      "prompt_level_loose_acc_stderr,none": 0.017459138520290383,
      "inst_level_loose_acc,none": 0.23738872403560832,
      "inst_level_loose_acc_stderr,none": "N/A"
    },
    "ko_winogrande": {
      "alias": " - ko_winogrande",
      "acc,none": 0.5272296764009471,
      "acc_stderr,none": 0.014031631629827701
    },
    "kornat_common": {
      "alias": " - kornat_common",
      "acc_norm,none": 0.17676431424766978,
      "acc_norm_stderr,none": 0.004921874938913182
    },
    "kornat_harmless": {
      "alias": " - kornat_harmless",
      "acc_norm,none": 0.6357333333333542,
      "acc_norm_stderr,none": 0.0020699616815353368
    },
    "kornat_helpful": {
      "alias": " - kornat_helpful",
      "acc_norm,none": 0.4625,
      "acc_norm_stderr,none": 0.007120198284584939
    },
    "kornat_social": {
      "alias": " - kornat_social",
      "A-SVA,none": 0.413782662423322,
      "A-SVA_stderr,none": 0.00394666151789303
    }
  },
  "versions": {
    "all": 2,
    "ko_eqbench": 2,
    "ko_gpqa_diamond_zeroshot": 2,
    "ko_gsm8k": 2,
    "ko_ifeval": 2,
    "ko_winogrande": 2,
    "kornat_common": 2,
    "kornat_harmless": 2,
    "kornat_helpful": 2,
    "kornat_social": 2
  },
  "config_general": {
    "model_name": "MNCJ1hun/Mistral-7B-OP-u1k-ver0.5",
    "model_sha": "3ccdca4afa332d805c50ffbaaa84cd8fa8b9ebe1",
    "model_dtype": "torch.float16",
    "lighteval_sha": "",
    "num_few_shot_default": 0,
    "num_fewshot_seeds": 1,
    "override_batch_size": 1,
    "max_samples": null
  }
}
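
A minimal Python sketch (not part of the result file itself) of how this report might be read back with the standard json module; the local filename is an assumption, and the display-only "alias" field is skipped.

import json

# Hypothetical local copy of this result file; adjust the path as needed.
with open("result_2023-10-29 12:13:11.json") as f:
    report = json.load(f)

print(report["config_general"]["model_name"])  # MNCJ1hun/Mistral-7B-OP-u1k-ver0.5

# Print every task's metrics, skipping the display-only "alias" entries.
for task, metrics in report["results"].items():
    for metric, value in metrics.items():
        if metric != "alias":
            print(f"{task}: {metric} = {value}")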