{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.25170648464163825,
            "acc_stderr": 0.01268249633404297,
            "acc_norm": 0.3054607508532423,
            "acc_norm_stderr": 0.013460080478002496
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.3325034853614818,
            "acc_stderr": 0.004701474865207019,
            "acc_norm": 0.4032065325632344,
            "acc_norm_stderr": 0.004895390341445625
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.22807017543859648,
            "acc_stderr": 0.03218093795602357,
            "acc_norm": 0.22807017543859648,
            "acc_norm_stderr": 0.03218093795602357
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.18446601941747573,
            "acc_stderr": 0.03840423627288276,
            "acc_norm": 0.18446601941747573,
            "acc_norm_stderr": 0.03840423627288276
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.22860791826309068,
            "acc_stderr": 0.015016884698539873,
            "acc_norm": 0.22860791826309068,
            "acc_norm_stderr": 0.015016884698539873
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.2740740740740741,
            "acc_stderr": 0.03853254836552003,
            "acc_norm": 0.2740740740740741,
            "acc_norm_stderr": 0.03853254836552003
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.28,
            "acc_stderr": 0.04512608598542129,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.04512608598542129
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.2170212765957447,
            "acc_stderr": 0.026947483121496238,
            "acc_norm": 0.2170212765957447,
            "acc_norm_stderr": 0.026947483121496238
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.3253012048192771,
            "acc_stderr": 0.03647168523683227,
            "acc_norm": 0.3253012048192771,
            "acc_norm_stderr": 0.03647168523683227
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.2765273311897106,
            "acc_stderr": 0.025403832978179622,
            "acc_norm": 0.2765273311897106,
            "acc_norm_stderr": 0.025403832978179622
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.3811659192825112,
            "acc_stderr": 0.032596251184168264,
            "acc_norm": 0.3811659192825112,
            "acc_norm_stderr": 0.032596251184168264
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.29770992366412213,
            "acc_stderr": 0.04010358942462203,
            "acc_norm": 0.29770992366412213,
            "acc_norm_stderr": 0.04010358942462203
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.34,
            "acc_stderr": 0.047609522856952344,
            "acc_norm": 0.34,
            "acc_norm_stderr": 0.047609522856952344
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.25252525252525254,
            "acc_stderr": 0.030954055470365914,
            "acc_norm": 0.25252525252525254,
            "acc_norm_stderr": 0.030954055470365914
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.25517241379310346,
            "acc_stderr": 0.03632984052707842,
            "acc_norm": 0.25517241379310346,
            "acc_norm_stderr": 0.03632984052707842
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.18627450980392157,
            "acc_stderr": 0.03873958714149354,
            "acc_norm": 0.18627450980392157,
            "acc_norm_stderr": 0.03873958714149354
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.23109243697478993,
            "acc_stderr": 0.027381406927868963,
            "acc_norm": 0.23109243697478993,
            "acc_norm_stderr": 0.027381406927868963
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.23333333333333334,
            "acc_stderr": 0.021444547301560476,
            "acc_norm": 0.23333333333333334,
            "acc_norm_stderr": 0.021444547301560476
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.18,
            "acc_stderr": 0.038612291966536955,
            "acc_norm": 0.18,
            "acc_norm_stderr": 0.038612291966536955
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.23148148148148148,
            "acc_stderr": 0.04077494709252627,
            "acc_norm": 0.23148148148148148,
            "acc_norm_stderr": 0.04077494709252627
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.24630541871921183,
            "acc_stderr": 0.030315099285617722,
            "acc_norm": 0.24630541871921183,
            "acc_norm_stderr": 0.030315099285617722
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.2870967741935484,
            "acc_stderr": 0.025736542745594528,
            "acc_norm": 0.2870967741935484,
            "acc_norm_stderr": 0.025736542745594528
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.2948717948717949,
            "acc_stderr": 0.029872577708891162,
            "acc_norm": 0.2948717948717949,
            "acc_norm_stderr": 0.029872577708891162
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.24528301886792453,
            "acc_stderr": 0.02648035717989569,
            "acc_norm": 0.24528301886792453,
            "acc_norm_stderr": 0.02648035717989569
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.22727272727272727,
            "acc_stderr": 0.04013964554072775,
            "acc_norm": 0.22727272727272727,
            "acc_norm_stderr": 0.04013964554072775
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.2777777777777778,
            "acc_stderr": 0.027309140588230165,
            "acc_norm": 0.2777777777777778,
            "acc_norm_stderr": 0.027309140588230165
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.24503311258278146,
            "acc_stderr": 0.03511807571804724,
            "acc_norm": 0.24503311258278146,
            "acc_norm_stderr": 0.03511807571804724
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.29850746268656714,
            "acc_stderr": 0.032357437893550424,
            "acc_norm": 0.29850746268656714,
            "acc_norm_stderr": 0.032357437893550424
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.21965317919075145,
            "acc_stderr": 0.031568093627031744,
            "acc_norm": 0.21965317919075145,
            "acc_norm_stderr": 0.031568093627031744
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.25925925925925924,
            "acc_stderr": 0.022569897074918417,
            "acc_norm": 0.25925925925925924,
            "acc_norm_stderr": 0.022569897074918417
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.2361111111111111,
            "acc_stderr": 0.03551446610810826,
            "acc_norm": 0.2361111111111111,
            "acc_norm_stderr": 0.03551446610810826
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.21,
            "acc_stderr": 0.040936018074033256,
            "acc_norm": 0.21,
            "acc_norm_stderr": 0.040936018074033256
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.22,
            "acc_stderr": 0.04163331998932269,
            "acc_norm": 0.22,
            "acc_norm_stderr": 0.04163331998932269
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.24855491329479767,
            "acc_stderr": 0.023267528432100174,
            "acc_norm": 0.24855491329479767,
            "acc_norm_stderr": 0.023267528432100174
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.26993865030674846,
            "acc_stderr": 0.034878251684978906,
            "acc_norm": 0.26993865030674846,
            "acc_norm_stderr": 0.034878251684978906
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.26851851851851855,
            "acc_stderr": 0.024659685185967277,
            "acc_norm": 0.26851851851851855,
            "acc_norm_stderr": 0.024659685185967277
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.24352331606217617,
            "acc_stderr": 0.030975436386845436,
            "acc_norm": 0.24352331606217617,
            "acc_norm_stderr": 0.030975436386845436
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.24561403508771928,
            "acc_stderr": 0.04049339297748141,
            "acc_norm": 0.24561403508771928,
            "acc_norm_stderr": 0.04049339297748141
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.26055045871559634,
            "acc_stderr": 0.01881918203485007,
            "acc_norm": 0.26055045871559634,
            "acc_norm_stderr": 0.01881918203485007
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.25396825396825395,
            "acc_stderr": 0.038932596106046734,
            "acc_norm": 0.25396825396825395,
            "acc_norm_stderr": 0.038932596106046734
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.238562091503268,
            "acc_stderr": 0.024404394928087873,
            "acc_norm": 0.238562091503268,
            "acc_norm_stderr": 0.024404394928087873
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.26,
            "acc_stderr": 0.044084400227680794,
            "acc_norm": 0.26,
            "acc_norm_stderr": 0.044084400227680794
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.2396694214876033,
            "acc_stderr": 0.03896878985070417,
            "acc_norm": 0.2396694214876033,
            "acc_norm_stderr": 0.03896878985070417
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.20394736842105263,
            "acc_stderr": 0.03279000406310049,
            "acc_norm": 0.20394736842105263,
            "acc_norm_stderr": 0.03279000406310049
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.25980392156862747,
            "acc_stderr": 0.017740899509177795,
            "acc_norm": 0.25980392156862747,
            "acc_norm_stderr": 0.017740899509177795
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.24822695035460993,
            "acc_stderr": 0.025770015644290396,
            "acc_norm": 0.24822695035460993,
            "acc_norm_stderr": 0.025770015644290396
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.2857142857142857,
            "acc_stderr": 0.042878587513404544,
            "acc_norm": 0.2857142857142857,
            "acc_norm_stderr": 0.042878587513404544
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.3194444444444444,
            "acc_stderr": 0.03179876342176849,
            "acc_norm": 0.3194444444444444,
            "acc_norm_stderr": 0.03179876342176849
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2558659217877095,
            "acc_stderr": 0.014593620923210756,
            "acc_norm": 0.2558659217877095,
            "acc_norm_stderr": 0.014593620923210756
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.13,
            "acc_stderr": 0.03379976689896309,
            "acc_norm": 0.13,
            "acc_norm_stderr": 0.03379976689896309
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.23,
            "acc_stderr": 0.04229525846816507,
            "acc_norm": 0.23,
            "acc_norm_stderr": 0.04229525846816507
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.3639705882352941,
            "acc_stderr": 0.02922719246003203,
            "acc_norm": 0.3639705882352941,
            "acc_norm_stderr": 0.02922719246003203
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.24081632653061225,
            "acc_stderr": 0.02737294220178817,
            "acc_norm": 0.24081632653061225,
            "acc_norm_stderr": 0.02737294220178817
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.2616033755274262,
            "acc_stderr": 0.028609516716994934,
            "acc_norm": 0.2616033755274262,
            "acc_norm_stderr": 0.028609516716994934
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.23663624511082137,
            "acc_stderr": 0.010855137351572747,
            "acc_norm": 0.23663624511082137,
            "acc_norm_stderr": 0.010855137351572747
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.22549019607843138,
            "acc_stderr": 0.02933116229425173,
            "acc_norm": 0.22549019607843138,
            "acc_norm_stderr": 0.02933116229425173
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.21212121212121213,
            "acc_stderr": 0.03192271569548297,
            "acc_norm": 0.21212121212121213,
            "acc_norm_stderr": 0.03192271569548297
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.23133414932680538,
            "mc1_stderr": 0.014761945174862685,
            "mc2": 0.4101668259727761,
            "mc2_stderr": 0.01554453474117709
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.06924882629107981,
            "acc_stderr": 0.00870278440176373,
            "acc_norm": 0.1068075117370892,
            "acc_norm_stderr": 0.010587871205074872
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "Nara-Lab/nallm-polyglot-ko-1.3b-base",
        "model_sha": "8fd7fa9b1b5bbe857f65576e2e37bd600e10ce8c",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}