{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.2986348122866894,
            "acc_stderr": 0.01337407861506875,
            "acc_norm": 0.34982935153583616,
            "acc_norm_stderr": 0.013936809212158292
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.40948018323043217,
            "acc_stderr": 0.004907329270272704,
            "acc_norm": 0.5320653256323441,
            "acc_norm_stderr": 0.004979510001776618
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.2807017543859649,
            "acc_stderr": 0.03446296217088426,
            "acc_norm": 0.2807017543859649,
            "acc_norm_stderr": 0.03446296217088426
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.24271844660194175,
            "acc_stderr": 0.04245022486384495,
            "acc_norm": 0.24271844660194175,
            "acc_norm_stderr": 0.04245022486384495
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.26309067688378035,
            "acc_stderr": 0.01574549716904905,
            "acc_norm": 0.26309067688378035,
            "acc_norm_stderr": 0.01574549716904905
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.32592592592592595,
            "acc_stderr": 0.040491220417025055,
            "acc_norm": 0.32592592592592595,
            "acc_norm_stderr": 0.040491220417025055
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.2,
            "acc_stderr": 0.026148818018424495,
            "acc_norm": 0.2,
            "acc_norm_stderr": 0.026148818018424495
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.19879518072289157,
            "acc_stderr": 0.031069390260789413,
            "acc_norm": 0.19879518072289157,
            "acc_norm_stderr": 0.031069390260789413
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.28938906752411575,
            "acc_stderr": 0.025755865922632938,
            "acc_norm": 0.28938906752411575,
            "acc_norm_stderr": 0.025755865922632938
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.15246636771300448,
            "acc_stderr": 0.02412620481325287,
            "acc_norm": 0.15246636771300448,
            "acc_norm_stderr": 0.02412620481325287
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.3053435114503817,
            "acc_stderr": 0.04039314978724561,
            "acc_norm": 0.3053435114503817,
            "acc_norm_stderr": 0.04039314978724561
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.25,
            "acc_stderr": 0.04351941398892446,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04351941398892446
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.26262626262626265,
            "acc_stderr": 0.031353050095330834,
            "acc_norm": 0.26262626262626265,
            "acc_norm_stderr": 0.031353050095330834
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.32413793103448274,
            "acc_stderr": 0.03900432069185555,
            "acc_norm": 0.32413793103448274,
            "acc_norm_stderr": 0.03900432069185555
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.23529411764705882,
            "acc_stderr": 0.04220773659171451,
            "acc_norm": 0.23529411764705882,
            "acc_norm_stderr": 0.04220773659171451
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.27310924369747897,
            "acc_stderr": 0.02894200404099817,
            "acc_norm": 0.27310924369747897,
            "acc_norm_stderr": 0.02894200404099817
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.25384615384615383,
            "acc_stderr": 0.022066054378726257,
            "acc_norm": 0.25384615384615383,
            "acc_norm_stderr": 0.022066054378726257
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.27,
            "acc_stderr": 0.044619604333847394,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.044619604333847394
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.21296296296296297,
            "acc_stderr": 0.03957835471980981,
            "acc_norm": 0.21296296296296297,
            "acc_norm_stderr": 0.03957835471980981
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.24630541871921183,
            "acc_stderr": 0.03031509928561773,
            "acc_norm": 0.24630541871921183,
            "acc_norm_stderr": 0.03031509928561773
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.2806451612903226,
            "acc_stderr": 0.025560604721022888,
            "acc_norm": 0.2806451612903226,
            "acc_norm_stderr": 0.025560604721022888
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.2692307692307692,
            "acc_stderr": 0.02905858830374884,
            "acc_norm": 0.2692307692307692,
            "acc_norm_stderr": 0.02905858830374884
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.2528301886792453,
            "acc_stderr": 0.026749899771241238,
            "acc_norm": 0.2528301886792453,
            "acc_norm_stderr": 0.026749899771241238
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.2636363636363636,
            "acc_stderr": 0.04220224692971987,
            "acc_norm": 0.2636363636363636,
            "acc_norm_stderr": 0.04220224692971987
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.24444444444444444,
            "acc_stderr": 0.02620276653465215,
            "acc_norm": 0.24444444444444444,
            "acc_norm_stderr": 0.02620276653465215
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2980132450331126,
            "acc_stderr": 0.037345356767871984,
            "acc_norm": 0.2980132450331126,
            "acc_norm_stderr": 0.037345356767871984
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.21393034825870647,
            "acc_stderr": 0.028996909693328923,
            "acc_norm": 0.21393034825870647,
            "acc_norm_stderr": 0.028996909693328923
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.3063583815028902,
            "acc_stderr": 0.03514942551267438,
            "acc_norm": 0.3063583815028902,
            "acc_norm_stderr": 0.03514942551267438
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.2724867724867725,
            "acc_stderr": 0.022930973071633345,
            "acc_norm": 0.2724867724867725,
            "acc_norm_stderr": 0.022930973071633345
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.2638888888888889,
            "acc_stderr": 0.03685651095897532,
            "acc_norm": 0.2638888888888889,
            "acc_norm_stderr": 0.03685651095897532
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.28,
            "acc_stderr": 0.045126085985421276,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.045126085985421276
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.25,
            "acc_stderr": 0.04351941398892446,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04351941398892446
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.22832369942196531,
            "acc_stderr": 0.022598703804321624,
            "acc_norm": 0.22832369942196531,
            "acc_norm_stderr": 0.022598703804321624
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.294478527607362,
            "acc_stderr": 0.03581165790474082,
            "acc_norm": 0.294478527607362,
            "acc_norm_stderr": 0.03581165790474082
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.2839506172839506,
            "acc_stderr": 0.02508947852376513,
            "acc_norm": 0.2839506172839506,
            "acc_norm_stderr": 0.02508947852376513
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.2694300518134715,
            "acc_stderr": 0.03201867122877794,
            "acc_norm": 0.2694300518134715,
            "acc_norm_stderr": 0.03201867122877794
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2543859649122807,
            "acc_stderr": 0.040969851398436695,
            "acc_norm": 0.2543859649122807,
            "acc_norm_stderr": 0.040969851398436695
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.29724770642201837,
            "acc_stderr": 0.019595707224643533,
            "acc_norm": 0.29724770642201837,
            "acc_norm_stderr": 0.019595707224643533
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.15079365079365079,
            "acc_stderr": 0.03200686497287392,
            "acc_norm": 0.15079365079365079,
            "acc_norm_stderr": 0.03200686497287392
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.28104575163398693,
            "acc_stderr": 0.025738854797818737,
            "acc_norm": 0.28104575163398693,
            "acc_norm_stderr": 0.025738854797818737
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.23,
            "acc_stderr": 0.04229525846816506,
            "acc_norm": 0.23,
            "acc_norm_stderr": 0.04229525846816506
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.38016528925619836,
            "acc_stderr": 0.04431324501968431,
            "acc_norm": 0.38016528925619836,
            "acc_norm_stderr": 0.04431324501968431
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.28289473684210525,
            "acc_stderr": 0.03665349695640767,
            "acc_norm": 0.28289473684210525,
            "acc_norm_stderr": 0.03665349695640767
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.28104575163398693,
            "acc_stderr": 0.018185218954318082,
            "acc_norm": 0.28104575163398693,
            "acc_norm_stderr": 0.018185218954318082
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.2695035460992908,
            "acc_stderr": 0.02646903681859063,
            "acc_norm": 0.2695035460992908,
            "acc_norm_stderr": 0.02646903681859063
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.20535714285714285,
            "acc_stderr": 0.03834241021419074,
            "acc_norm": 0.20535714285714285,
            "acc_norm_stderr": 0.03834241021419074
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.26851851851851855,
            "acc_stderr": 0.030225226160012397,
            "acc_norm": 0.26851851851851855,
            "acc_norm_stderr": 0.030225226160012397
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.24804469273743016,
            "acc_stderr": 0.014444157808261453,
            "acc_norm": 0.24804469273743016,
            "acc_norm_stderr": 0.014444157808261453
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.28,
            "acc_stderr": 0.045126085985421276,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.045126085985421276
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.32,
            "acc_stderr": 0.046882617226215034,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.046882617226215034
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.3639705882352941,
            "acc_stderr": 0.029227192460032025,
            "acc_norm": 0.3639705882352941,
            "acc_norm_stderr": 0.029227192460032025
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.24081632653061225,
            "acc_stderr": 0.027372942201788163,
            "acc_norm": 0.24081632653061225,
            "acc_norm_stderr": 0.027372942201788163
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.2911392405063291,
            "acc_stderr": 0.029571601065753374,
            "acc_norm": 0.2911392405063291,
            "acc_norm_stderr": 0.029571601065753374
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.24902216427640156,
            "acc_stderr": 0.01104489226404077,
            "acc_norm": 0.24902216427640156,
            "acc_norm_stderr": 0.01104489226404077
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.2647058823529412,
            "acc_stderr": 0.0309645179269234,
            "acc_norm": 0.2647058823529412,
            "acc_norm_stderr": 0.0309645179269234
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.24848484848484848,
            "acc_stderr": 0.03374402644139404,
            "acc_norm": 0.24848484848484848,
            "acc_norm_stderr": 0.03374402644139404
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2558139534883721,
            "mc1_stderr": 0.015274176219283349,
            "mc2": 0.421210061474517,
            "mc2_stderr": 0.01567952563537008
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.3931924882629108,
            "acc_stderr": 0.016744157492949278,
            "acc_norm": 0.4636150234741784,
            "acc_norm_stderr": 0.01709433745632628
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "tlphams/gollm-tendency-45",
        "model_sha": "a8f12582eb5700e1d45d045df0c38e79364e0b0c",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}