{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.34812286689419797,
            "acc_stderr": 0.013921008595179344,
            "acc_norm": 0.4138225255972696,
            "acc_norm_stderr": 0.014392730009221009
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.3966341366261701,
            "acc_stderr": 0.004881990487628915,
            "acc_norm": 0.534654451304521,
            "acc_norm_stderr": 0.004977782217582457
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.4853801169590643,
            "acc_stderr": 0.038331852752130205,
            "acc_norm": 0.4853801169590643,
            "acc_norm_stderr": 0.038331852752130205
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.5533980582524272,
            "acc_stderr": 0.04922424153458933,
            "acc_norm": 0.5533980582524272,
            "acc_norm_stderr": 0.04922424153458933
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.5261813537675607,
            "acc_stderr": 0.017855434554041986,
            "acc_norm": 0.5261813537675607,
            "acc_norm_stderr": 0.017855434554041986
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.45185185185185184,
            "acc_stderr": 0.042992689054808624,
            "acc_norm": 0.45185185185185184,
            "acc_norm_stderr": 0.042992689054808624
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.28,
            "acc_stderr": 0.045126085985421255,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.045126085985421255
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.4127659574468085,
            "acc_stderr": 0.03218471141400351,
            "acc_norm": 0.4127659574468085,
            "acc_norm_stderr": 0.03218471141400351
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.40963855421686746,
            "acc_stderr": 0.03828401115079022,
            "acc_norm": 0.40963855421686746,
            "acc_norm_stderr": 0.03828401115079022
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.5048231511254019,
            "acc_stderr": 0.028396770444111298,
            "acc_norm": 0.5048231511254019,
            "acc_norm_stderr": 0.028396770444111298
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.5201793721973094,
            "acc_stderr": 0.033530461674123,
            "acc_norm": 0.5201793721973094,
            "acc_norm_stderr": 0.033530461674123
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.5114503816793893,
            "acc_stderr": 0.043841400240780176,
            "acc_norm": 0.5114503816793893,
            "acc_norm_stderr": 0.043841400240780176
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.42,
            "acc_stderr": 0.049604496374885836,
            "acc_norm": 0.42,
            "acc_norm_stderr": 0.049604496374885836
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.6363636363636364,
            "acc_stderr": 0.034273086529999365,
            "acc_norm": 0.6363636363636364,
            "acc_norm_stderr": 0.034273086529999365
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.5103448275862069,
            "acc_stderr": 0.04165774775728762,
            "acc_norm": 0.5103448275862069,
            "acc_norm_stderr": 0.04165774775728762
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.3431372549019608,
            "acc_stderr": 0.04724007352383888,
            "acc_norm": 0.3431372549019608,
            "acc_norm_stderr": 0.04724007352383888
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.5210084033613446,
            "acc_stderr": 0.032449808499900284,
            "acc_norm": 0.5210084033613446,
            "acc_norm_stderr": 0.032449808499900284
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.4794871794871795,
            "acc_stderr": 0.025329663163489943,
            "acc_norm": 0.4794871794871795,
            "acc_norm_stderr": 0.025329663163489943
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.6,
            "acc_stderr": 0.04923659639173309,
            "acc_norm": 0.6,
            "acc_norm_stderr": 0.04923659639173309
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.33,
            "acc_stderr": 0.04725815626252605,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.04725815626252605
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.49074074074074076,
            "acc_stderr": 0.04832853553437055,
            "acc_norm": 0.49074074074074076,
            "acc_norm_stderr": 0.04832853553437055
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.4236453201970443,
            "acc_stderr": 0.03476725747649038,
            "acc_norm": 0.4236453201970443,
            "acc_norm_stderr": 0.03476725747649038
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.532258064516129,
            "acc_stderr": 0.02838474778881334,
            "acc_norm": 0.532258064516129,
            "acc_norm_stderr": 0.02838474778881334
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.7264957264957265,
            "acc_stderr": 0.02920254015343118,
            "acc_norm": 0.7264957264957265,
            "acc_norm_stderr": 0.02920254015343118
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.5169811320754717,
            "acc_stderr": 0.030755120364119905,
            "acc_norm": 0.5169811320754717,
            "acc_norm_stderr": 0.030755120364119905
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.4818181818181818,
            "acc_stderr": 0.04785964010794916,
            "acc_norm": 0.4818181818181818,
            "acc_norm_stderr": 0.04785964010794916
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.3037037037037037,
            "acc_stderr": 0.02803792996911498,
            "acc_norm": 0.3037037037037037,
            "acc_norm_stderr": 0.02803792996911498
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.36423841059602646,
            "acc_stderr": 0.03929111781242741,
            "acc_norm": 0.36423841059602646,
            "acc_norm_stderr": 0.03929111781242741
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.6318407960199005,
            "acc_stderr": 0.03410410565495301,
            "acc_norm": 0.6318407960199005,
            "acc_norm_stderr": 0.03410410565495301
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.44508670520231214,
            "acc_stderr": 0.03789401760283648,
            "acc_norm": 0.44508670520231214,
            "acc_norm_stderr": 0.03789401760283648
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.3412698412698413,
            "acc_stderr": 0.024419234966819064,
            "acc_norm": 0.3412698412698413,
            "acc_norm_stderr": 0.024419234966819064
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.4236111111111111,
            "acc_stderr": 0.04132125019723368,
            "acc_norm": 0.4236111111111111,
            "acc_norm_stderr": 0.04132125019723368
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.37,
            "acc_stderr": 0.04852365870939099,
            "acc_norm": 0.37,
            "acc_norm_stderr": 0.04852365870939099
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.66,
            "acc_stderr": 0.04760952285695238,
            "acc_norm": 0.66,
            "acc_norm_stderr": 0.04760952285695238
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.5057803468208093,
            "acc_stderr": 0.026917296179149116,
            "acc_norm": 0.5057803468208093,
            "acc_norm_stderr": 0.026917296179149116
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.4723926380368098,
            "acc_stderr": 0.039223782906109894,
            "acc_norm": 0.4723926380368098,
            "acc_norm_stderr": 0.039223782906109894
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.48148148148148145,
            "acc_stderr": 0.027801656212323667,
            "acc_norm": 0.48148148148148145,
            "acc_norm_stderr": 0.027801656212323667
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.34,
            "acc_stderr": 0.04760952285695235,
            "acc_norm": 0.34,
            "acc_norm_stderr": 0.04760952285695235
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.5854922279792746,
            "acc_stderr": 0.035553003195576686,
            "acc_norm": 0.5854922279792746,
            "acc_norm_stderr": 0.035553003195576686
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.35964912280701755,
            "acc_stderr": 0.04514496132873632,
            "acc_norm": 0.35964912280701755,
            "acc_norm_stderr": 0.04514496132873632
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.6275229357798165,
            "acc_stderr": 0.020728368457638494,
            "acc_norm": 0.6275229357798165,
            "acc_norm_stderr": 0.020728368457638494
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.2698412698412698,
            "acc_stderr": 0.03970158273235173,
            "acc_norm": 0.2698412698412698,
            "acc_norm_stderr": 0.03970158273235173
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.5196078431372549,
            "acc_stderr": 0.028607893699576073,
            "acc_norm": 0.5196078431372549,
            "acc_norm_stderr": 0.028607893699576073
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.55,
            "acc_stderr": 0.05,
            "acc_norm": 0.55,
            "acc_norm_stderr": 0.05
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.5785123966942148,
            "acc_stderr": 0.04507732278775087,
            "acc_norm": 0.5785123966942148,
            "acc_norm_stderr": 0.04507732278775087
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.4934210526315789,
            "acc_stderr": 0.040685900502249704,
            "acc_norm": 0.4934210526315789,
            "acc_norm_stderr": 0.040685900502249704
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.4166666666666667,
            "acc_stderr": 0.019944914136873583,
            "acc_norm": 0.4166666666666667,
            "acc_norm_stderr": 0.019944914136873583
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.3617021276595745,
            "acc_stderr": 0.0286638201471995,
            "acc_norm": 0.3617021276595745,
            "acc_norm_stderr": 0.0286638201471995
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.25892857142857145,
            "acc_stderr": 0.041577515398656284,
            "acc_norm": 0.25892857142857145,
            "acc_norm_stderr": 0.041577515398656284
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.33796296296296297,
            "acc_stderr": 0.03225941352631295,
            "acc_norm": 0.33796296296296297,
            "acc_norm_stderr": 0.03225941352631295
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.24134078212290502,
            "acc_stderr": 0.014310999547961441,
            "acc_norm": 0.24134078212290502,
            "acc_norm_stderr": 0.014310999547961441
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.33,
            "acc_stderr": 0.047258156262526045,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.047258156262526045
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.51,
            "acc_stderr": 0.05024183937956912,
            "acc_norm": 0.51,
            "acc_norm_stderr": 0.05024183937956912
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.4485294117647059,
            "acc_stderr": 0.0302114796091216,
            "acc_norm": 0.4485294117647059,
            "acc_norm_stderr": 0.0302114796091216
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.4816326530612245,
            "acc_stderr": 0.031987615467631264,
            "acc_norm": 0.4816326530612245,
            "acc_norm_stderr": 0.031987615467631264
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.620253164556962,
            "acc_stderr": 0.0315918875296585,
            "acc_norm": 0.620253164556962,
            "acc_norm_stderr": 0.0315918875296585
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.3305084745762712,
            "acc_stderr": 0.012014142101842974,
            "acc_norm": 0.3305084745762712,
            "acc_norm_stderr": 0.012014142101842974
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.5343137254901961,
            "acc_stderr": 0.03501038327635897,
            "acc_norm": 0.5343137254901961,
            "acc_norm_stderr": 0.03501038327635897
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.5515151515151515,
            "acc_stderr": 0.038835659779569286,
            "acc_norm": 0.5515151515151515,
            "acc_norm_stderr": 0.038835659779569286
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2594859241126071,
            "mc1_stderr": 0.015345409485557956,
            "mc2": 0.41345739770630174,
            "mc2_stderr": 0.014785029688685922
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.5714285714285714,
            "acc_stderr": 0.01701403811929749,
            "acc_norm": 0.6186540731995277,
            "acc_norm_stderr": 0.016699301768828084
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "GAI-LLM/Yi-Ko-6B-smash",
        "model_sha": "8b0f29ce0c792414e986f2c8b1fe59d68cb874cd",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}