{
  "results": {
    "harness|ko_arc_challenge|25": {
      "acc": 0.34812286689419797,
      "acc_stderr": 0.01392100859517935,
      "acc_norm": 0.4112627986348123,
      "acc_norm_stderr": 0.014379441068522077
    },
    "harness|ko_hellaswag|10": {
      "acc": 0.39245170284803826,
      "acc_stderr": 0.0048729844929679975,
      "acc_norm": 0.5243975303724357,
      "acc_norm_stderr": 0.004983837641502894
    },
    "harness|ko_mmlu_world_religions|5": {
      "acc": 0.47368421052631576,
      "acc_stderr": 0.03829509868994727,
      "acc_norm": 0.47368421052631576,
      "acc_norm_stderr": 0.03829509868994727
    },
    "harness|ko_mmlu_management|5": {
      "acc": 0.5922330097087378,
      "acc_stderr": 0.04865777570410769,
      "acc_norm": 0.5922330097087378,
      "acc_norm_stderr": 0.04865777570410769
    },
    "harness|ko_mmlu_miscellaneous|5": {
      "acc": 0.5491698595146871,
      "acc_stderr": 0.017793297572699044,
      "acc_norm": 0.5491698595146871,
      "acc_norm_stderr": 0.017793297572699044
    },
    "harness|ko_mmlu_anatomy|5": {
      "acc": 0.4148148148148148,
      "acc_stderr": 0.04256193767901407,
      "acc_norm": 0.4148148148148148,
      "acc_norm_stderr": 0.04256193767901407
    },
    "harness|ko_mmlu_abstract_algebra|5": {
      "acc": 0.37,
      "acc_stderr": 0.048523658709391,
      "acc_norm": 0.37,
      "acc_norm_stderr": 0.048523658709391
    },
    "harness|ko_mmlu_conceptual_physics|5": {
      "acc": 0.425531914893617,
      "acc_stderr": 0.03232146916224468,
      "acc_norm": 0.425531914893617,
      "acc_norm_stderr": 0.03232146916224468
    },
    "harness|ko_mmlu_virology|5": {
      "acc": 0.3433734939759036,
      "acc_stderr": 0.03696584317010601,
      "acc_norm": 0.3433734939759036,
      "acc_norm_stderr": 0.03696584317010601
    },
    "harness|ko_mmlu_philosophy|5": {
      "acc": 0.4790996784565916,
      "acc_stderr": 0.028373270961069414,
      "acc_norm": 0.4790996784565916,
      "acc_norm_stderr": 0.028373270961069414
    },
    "harness|ko_mmlu_human_aging|5": {
      "acc": 0.5022421524663677,
      "acc_stderr": 0.03355746535223263,
      "acc_norm": 0.5022421524663677,
      "acc_norm_stderr": 0.03355746535223263
    },
    "harness|ko_mmlu_human_sexuality|5": {
      "acc": 0.4580152671755725,
      "acc_stderr": 0.04369802690578756,
      "acc_norm": 0.4580152671755725,
      "acc_norm_stderr": 0.04369802690578756
    },
    "harness|ko_mmlu_medical_genetics|5": {
      "acc": 0.42,
      "acc_stderr": 0.04960449637488583,
      "acc_norm": 0.42,
      "acc_norm_stderr": 0.04960449637488583
    },
    "harness|ko_mmlu_high_school_geography|5": {
      "acc": 0.5858585858585859,
      "acc_stderr": 0.03509438348879629,
      "acc_norm": 0.5858585858585859,
      "acc_norm_stderr": 0.03509438348879629
    },
    "harness|ko_mmlu_electrical_engineering|5": {
      "acc": 0.4689655172413793,
      "acc_stderr": 0.04158632762097828,
      "acc_norm": 0.4689655172413793,
      "acc_norm_stderr": 0.04158632762097828
    },
    "harness|ko_mmlu_college_physics|5": {
      "acc": 0.23529411764705882,
      "acc_stderr": 0.04220773659171453,
      "acc_norm": 0.23529411764705882,
      "acc_norm_stderr": 0.04220773659171453
    },
    "harness|ko_mmlu_high_school_microeconomics|5": {
      "acc": 0.4579831932773109,
      "acc_stderr": 0.032363611119519416,
      "acc_norm": 0.4579831932773109,
      "acc_norm_stderr": 0.032363611119519416
    },
    "harness|ko_mmlu_high_school_macroeconomics|5": {
      "acc": 0.41025641025641024,
      "acc_stderr": 0.024939313906940777,
      "acc_norm": 0.41025641025641024,
      "acc_norm_stderr": 0.024939313906940777
    },
    "harness|ko_mmlu_computer_security|5": {
      "acc": 0.59,
      "acc_stderr": 0.049431107042371025,
      "acc_norm": 0.59,
      "acc_norm_stderr": 0.049431107042371025
    },
    "harness|ko_mmlu_global_facts|5": {
      "acc": 0.34,
      "acc_stderr": 0.04760952285695236,
      "acc_norm": 0.34,
      "acc_norm_stderr": 0.04760952285695236
    },
    "harness|ko_mmlu_jurisprudence|5": {
      "acc": 0.49074074074074076,
      "acc_stderr": 0.04832853553437055,
      "acc_norm": 0.49074074074074076,
      "acc_norm_stderr": 0.04832853553437055
    },
    "harness|ko_mmlu_high_school_chemistry|5": {
      "acc": 0.3497536945812808,
      "acc_stderr": 0.03355400904969566,
      "acc_norm": 0.3497536945812808,
      "acc_norm_stderr": 0.03355400904969566
    },
    "harness|ko_mmlu_high_school_biology|5": {
      "acc": 0.4290322580645161,
      "acc_stderr": 0.02815603653823321,
      "acc_norm": 0.4290322580645161,
      "acc_norm_stderr": 0.02815603653823321
    },
    "harness|ko_mmlu_marketing|5": {
      "acc": 0.6794871794871795,
      "acc_stderr": 0.030572811310299607,
      "acc_norm": 0.6794871794871795,
      "acc_norm_stderr": 0.030572811310299607
    },
    "harness|ko_mmlu_clinical_knowledge|5": {
      "acc": 0.4830188679245283,
      "acc_stderr": 0.030755120364119905,
      "acc_norm": 0.4830188679245283,
      "acc_norm_stderr": 0.030755120364119905
    },
    "harness|ko_mmlu_public_relations|5": {
      "acc": 0.5454545454545454,
      "acc_stderr": 0.04769300568972744,
      "acc_norm": 0.5454545454545454,
      "acc_norm_stderr": 0.04769300568972744
    },
    "harness|ko_mmlu_high_school_mathematics|5": {
      "acc": 0.2962962962962963,
      "acc_stderr": 0.027840811495871916,
      "acc_norm": 0.2962962962962963,
      "acc_norm_stderr": 0.027840811495871916
    },
    "harness|ko_mmlu_high_school_physics|5": {
      "acc": 0.31125827814569534,
      "acc_stderr": 0.03780445850526733,
      "acc_norm": 0.31125827814569534,
      "acc_norm_stderr": 0.03780445850526733
    },
    "harness|ko_mmlu_sociology|5": {
      "acc": 0.5621890547263682,
      "acc_stderr": 0.0350808011219984,
      "acc_norm": 0.5621890547263682,
      "acc_norm_stderr": 0.0350808011219984
    },
    "harness|ko_mmlu_college_medicine|5": {
      "acc": 0.36416184971098264,
      "acc_stderr": 0.03669072477416907,
      "acc_norm": 0.36416184971098264,
      "acc_norm_stderr": 0.03669072477416907
    },
    "harness|ko_mmlu_elementary_mathematics|5": {
      "acc": 0.3386243386243386,
      "acc_stderr": 0.024373197867983063,
      "acc_norm": 0.3386243386243386,
      "acc_norm_stderr": 0.024373197867983063
    },
    "harness|ko_mmlu_college_biology|5": {
      "acc": 0.3472222222222222,
      "acc_stderr": 0.03981240543717862,
      "acc_norm": 0.3472222222222222,
      "acc_norm_stderr": 0.03981240543717862
    },
    "harness|ko_mmlu_college_chemistry|5": {
      "acc": 0.29,
      "acc_stderr": 0.04560480215720684,
      "acc_norm": 0.29,
      "acc_norm_stderr": 0.04560480215720684
    },
    "harness|ko_mmlu_us_foreign_policy|5": {
      "acc": 0.6,
      "acc_stderr": 0.04923659639173309,
      "acc_norm": 0.6,
      "acc_norm_stderr": 0.04923659639173309
    },
    "harness|ko_mmlu_moral_disputes|5": {
      "acc": 0.4682080924855491,
      "acc_stderr": 0.026864624366756656,
      "acc_norm": 0.4682080924855491,
      "acc_norm_stderr": 0.026864624366756656
    },
    "harness|ko_mmlu_logical_fallacies|5": {
      "acc": 0.4049079754601227,
      "acc_stderr": 0.03856672163548914,
      "acc_norm": 0.4049079754601227,
      "acc_norm_stderr": 0.03856672163548914
    },
    "harness|ko_mmlu_prehistory|5": {
      "acc": 0.48148148148148145,
      "acc_stderr": 0.027801656212323653,
      "acc_norm": 0.48148148148148145,
      "acc_norm_stderr": 0.027801656212323653
    },
    "harness|ko_mmlu_college_mathematics|5": {
      "acc": 0.31,
      "acc_stderr": 0.04648231987117316,
      "acc_norm": 0.31,
      "acc_norm_stderr": 0.04648231987117316
    },
    "harness|ko_mmlu_high_school_government_and_politics|5": {
      "acc": 0.538860103626943,
      "acc_stderr": 0.035975244117345775,
      "acc_norm": 0.538860103626943,
      "acc_norm_stderr": 0.035975244117345775
    },
    "harness|ko_mmlu_econometrics|5": {
      "acc": 0.2982456140350877,
      "acc_stderr": 0.04303684033537316,
      "acc_norm": 0.2982456140350877,
      "acc_norm_stderr": 0.04303684033537316
    },
    "harness|ko_mmlu_high_school_psychology|5": {
      "acc": 0.5706422018348624,
      "acc_stderr": 0.021222286397236504,
      "acc_norm": 0.5706422018348624,
      "acc_norm_stderr": 0.021222286397236504
    },
    "harness|ko_mmlu_formal_logic|5": {
      "acc": 0.23809523809523808,
      "acc_stderr": 0.038095238095238126,
      "acc_norm": 0.23809523809523808,
      "acc_norm_stderr": 0.038095238095238126
    },
    "harness|ko_mmlu_nutrition|5": {
      "acc": 0.39215686274509803,
      "acc_stderr": 0.027956046165424516,
      "acc_norm": 0.39215686274509803,
      "acc_norm_stderr": 0.027956046165424516
    },
    "harness|ko_mmlu_business_ethics|5": {
      "acc": 0.45,
      "acc_stderr": 0.05,
      "acc_norm": 0.45,
      "acc_norm_stderr": 0.05
    },
    "harness|ko_mmlu_international_law|5": {
      "acc": 0.6363636363636364,
      "acc_stderr": 0.043913262867240704,
      "acc_norm": 0.6363636363636364,
      "acc_norm_stderr": 0.043913262867240704
    },
    "harness|ko_mmlu_astronomy|5": {
      "acc": 0.42105263157894735,
      "acc_stderr": 0.04017901275981748,
      "acc_norm": 0.42105263157894735,
      "acc_norm_stderr": 0.04017901275981748
    },
    "harness|ko_mmlu_professional_psychology|5": {
      "acc": 0.4215686274509804,
      "acc_stderr": 0.019977422600227467,
      "acc_norm": 0.4215686274509804,
      "acc_norm_stderr": 0.019977422600227467
    },
    "harness|ko_mmlu_professional_accounting|5": {
      "acc": 0.32269503546099293,
      "acc_stderr": 0.02788913930053478,
      "acc_norm": 0.32269503546099293,
      "acc_norm_stderr": 0.02788913930053478
    },
    "harness|ko_mmlu_machine_learning|5": {
      "acc": 0.3125,
      "acc_stderr": 0.043994650575715215,
      "acc_norm": 0.3125,
      "acc_norm_stderr": 0.043994650575715215
    },
    "harness|ko_mmlu_high_school_statistics|5": {
      "acc": 0.32407407407407407,
      "acc_stderr": 0.03191923445686186,
      "acc_norm": 0.32407407407407407,
      "acc_norm_stderr": 0.03191923445686186
    },
    "harness|ko_mmlu_moral_scenarios|5": {
      "acc": 0.28044692737430166,
      "acc_stderr": 0.015024083883322884,
      "acc_norm": 0.28044692737430166,
      "acc_norm_stderr": 0.015024083883322884
    },
    "harness|ko_mmlu_college_computer_science|5": {
      "acc": 0.34,
      "acc_stderr": 0.04760952285695235,
      "acc_norm": 0.34,
      "acc_norm_stderr": 0.04760952285695235
    },
    "harness|ko_mmlu_high_school_computer_science|5": {
      "acc": 0.52,
      "acc_stderr": 0.050211673156867795,
      "acc_norm": 0.52,
      "acc_norm_stderr": 0.050211673156867795
    },
    "harness|ko_mmlu_professional_medicine|5": {
      "acc": 0.33455882352941174,
      "acc_stderr": 0.028661996202335314,
      "acc_norm": 0.33455882352941174,
      "acc_norm_stderr": 0.028661996202335314
    },
    "harness|ko_mmlu_security_studies|5": {
      "acc": 0.34285714285714286,
      "acc_stderr": 0.03038726291954773,
      "acc_norm": 0.34285714285714286,
      "acc_norm_stderr": 0.03038726291954773
    },
    "harness|ko_mmlu_high_school_world_history|5": {
      "acc": 0.5907172995780591,
      "acc_stderr": 0.032007041833595914,
      "acc_norm": 0.5907172995780591,
      "acc_norm_stderr": 0.032007041833595914
    },
    "harness|ko_mmlu_professional_law|5": {
      "acc": 0.3350717079530639,
      "acc_stderr": 0.012055499471330368,
      "acc_norm": 0.3350717079530639,
      "acc_norm_stderr": 0.012055499471330368
    },
    "harness|ko_mmlu_high_school_us_history|5": {
      "acc": 0.47058823529411764,
      "acc_stderr": 0.03503235296367992,
      "acc_norm": 0.47058823529411764,
      "acc_norm_stderr": 0.03503235296367992
    },
    "harness|ko_mmlu_high_school_european_history|5": {
      "acc": 0.5757575757575758,
      "acc_stderr": 0.038592681420702636,
      "acc_norm": 0.5757575757575758,
      "acc_norm_stderr": 0.038592681420702636
    },
    "harness|ko_truthfulqa_mc|0": {
      "mc1": 0.28518971848225216,
      "mc1_stderr": 0.015805827874454895,
      "mc2": 0.4259753093204231,
      "mc2_stderr": 0.014983532851791444
    },
    "harness|ko_commongen_v2|2": {
      "acc": 0.5289256198347108,
      "acc_stderr": 0.01716156394991635,
      "acc_norm": 0.6080283353010626,
      "acc_norm_stderr": 0.016784332119424084
    }
  },
  "versions": {
    "all": 0,
    "harness|ko_arc_challenge|25": 0,
    "harness|ko_hellaswag|10": 0,
    "harness|ko_mmlu_world_religions|5": 1,
    "harness|ko_mmlu_management|5": 1,
    "harness|ko_mmlu_miscellaneous|5": 1,
    "harness|ko_mmlu_anatomy|5": 1,
    "harness|ko_mmlu_abstract_algebra|5": 1,
    "harness|ko_mmlu_conceptual_physics|5": 1,
    "harness|ko_mmlu_virology|5": 1,
    "harness|ko_mmlu_philosophy|5": 1,
    "harness|ko_mmlu_human_aging|5": 1,
    "harness|ko_mmlu_human_sexuality|5": 1,
    "harness|ko_mmlu_medical_genetics|5": 1,
    "harness|ko_mmlu_high_school_geography|5": 1,
    "harness|ko_mmlu_electrical_engineering|5": 1,
    "harness|ko_mmlu_college_physics|5": 1,
    "harness|ko_mmlu_high_school_microeconomics|5": 1,
    "harness|ko_mmlu_high_school_macroeconomics|5": 1,
    "harness|ko_mmlu_computer_security|5": 1,
    "harness|ko_mmlu_global_facts|5": 1,
    "harness|ko_mmlu_jurisprudence|5": 1,
    "harness|ko_mmlu_high_school_chemistry|5": 1,
    "harness|ko_mmlu_high_school_biology|5": 1,
    "harness|ko_mmlu_marketing|5": 1,
    "harness|ko_mmlu_clinical_knowledge|5": 1,
    "harness|ko_mmlu_public_relations|5": 1,
    "harness|ko_mmlu_high_school_mathematics|5": 1,
    "harness|ko_mmlu_high_school_physics|5": 1,
    "harness|ko_mmlu_sociology|5": 1,
    "harness|ko_mmlu_college_medicine|5": 1,
    "harness|ko_mmlu_elementary_mathematics|5": 1,
    "harness|ko_mmlu_college_biology|5": 1,
    "harness|ko_mmlu_college_chemistry|5": 1,
    "harness|ko_mmlu_us_foreign_policy|5": 1,
    "harness|ko_mmlu_moral_disputes|5": 1,
    "harness|ko_mmlu_logical_fallacies|5": 1,
    "harness|ko_mmlu_prehistory|5": 1,
    "harness|ko_mmlu_college_mathematics|5": 1,
    "harness|ko_mmlu_high_school_government_and_politics|5": 1,
    "harness|ko_mmlu_econometrics|5": 1,
    "harness|ko_mmlu_high_school_psychology|5": 1,
    "harness|ko_mmlu_formal_logic|5": 1,
    "harness|ko_mmlu_nutrition|5": 1,
    "harness|ko_mmlu_business_ethics|5": 1,
    "harness|ko_mmlu_international_law|5": 1,
    "harness|ko_mmlu_astronomy|5": 1,
    "harness|ko_mmlu_professional_psychology|5": 1,
    "harness|ko_mmlu_professional_accounting|5": 1,
    "harness|ko_mmlu_machine_learning|5": 1,
    "harness|ko_mmlu_high_school_statistics|5": 1,
    "harness|ko_mmlu_moral_scenarios|5": 1,
    "harness|ko_mmlu_college_computer_science|5": 1,
    "harness|ko_mmlu_high_school_computer_science|5": 1,
    "harness|ko_mmlu_professional_medicine|5": 1,
    "harness|ko_mmlu_security_studies|5": 1,
    "harness|ko_mmlu_high_school_world_history|5": 1,
    "harness|ko_mmlu_professional_law|5": 1,
    "harness|ko_mmlu_high_school_us_history|5": 1,
    "harness|ko_mmlu_high_school_european_history|5": 1,
    "harness|ko_truthfulqa_mc|0": 0,
    "harness|ko_commongen_v2|2": 1
  },
  "config_general": {
    "model_name": "AIFT/AIFT-ko-orca-plat-Yi-ko-6b-v1.5",
    "model_sha": "136d3c543af246c9046e17d42c0a357316de8815",
    "model_dtype": "torch.float16",
    "lighteval_sha": "",
    "num_few_shot_default": 0,
    "num_fewshot_seeds": 1,
    "override_batch_size": 1,
    "max_samples": null
  }
}