Columns: results (dict), versions (dict), config_general (dict)
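Each run in this dump is a triple of JSON objects: `results` holds per-task metrics (`acc`, `acc_stderr`, `acc_norm`, `acc_norm_stderr`, plus `mc1`/`mc2` for TruthfulQA), `versions` records the prompt-format version used for each task, and `config_general` records the model name, commit SHA, dtype, and run settings. A minimal parsing sketch, assuming the triples are stored back to back in a plain-text file; `results_dump.txt`, `PATH`, and `iter_runs` are hypothetical names introduced here, not part of the original dump:

```python
import json

# Hypothetical filename; this sketch assumes the three JSON objects of each run
# (results, versions, config_general) sit back to back in one text file.
PATH = "results_dump.txt"

def iter_runs(path):
    """Yield (results, versions, config_general) triples from the dump."""
    decoder = json.JSONDecoder()
    with open(path, encoding="utf-8") as f:
        text = f.read()
    objs, idx = [], 0
    while idx < len(text):
        while idx < len(text) and text[idx].isspace():
            idx += 1                      # skip whitespace between objects
        if idx >= len(text):
            break
        obj, idx = decoder.raw_decode(text, idx)
        objs.append(obj)
    # Every complete run contributes exactly three consecutive objects.
    for i in range(0, len(objs) - 2, 3):
        yield objs[i], objs[i + 1], objs[i + 2]

if __name__ == "__main__":
    for results, versions, config in iter_runs(PATH):
        print(config["model_name"], config["model_sha"][:8], len(results), "tasks")
```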
{ "harness|ko_arc_challenge|25": { "acc": 0.2790102389078498, "acc_stderr": 0.013106784883601355, "acc_norm": 0.3455631399317406, "acc_norm_stderr": 0.01389693846114568 }, "harness|ko_hellaswag|10": { "acc": 0.33808006373232424, "acc_stderr": 0.004720891597174718, "acc_norm": 0.45120493925512845, "acc_norm_stderr": 0.0049659636472103195 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.34502923976608185, "acc_stderr": 0.036459813773888065, "acc_norm": 0.34502923976608185, "acc_norm_stderr": 0.036459813773888065 }, "harness|ko_mmlu_management|5": { "acc": 0.21359223300970873, "acc_stderr": 0.04058042015646034, "acc_norm": 0.21359223300970873, "acc_norm_stderr": 0.04058042015646034 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.31545338441890164, "acc_stderr": 0.016617501738763394, "acc_norm": 0.31545338441890164, "acc_norm_stderr": 0.016617501738763394 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.3851851851851852, "acc_stderr": 0.042039210401562783, "acc_norm": 0.3851851851851852, "acc_norm_stderr": 0.042039210401562783 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.3829787234042553, "acc_stderr": 0.03177821250236922, "acc_norm": 0.3829787234042553, "acc_norm_stderr": 0.03177821250236922 }, "harness|ko_mmlu_virology|5": { "acc": 0.2710843373493976, "acc_stderr": 0.03460579907553027, "acc_norm": 0.2710843373493976, "acc_norm_stderr": 0.03460579907553027 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.31189710610932475, "acc_stderr": 0.026311858071854155, "acc_norm": 0.31189710610932475, "acc_norm_stderr": 0.026311858071854155 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.3811659192825112, "acc_stderr": 0.03259625118416827, "acc_norm": 0.3811659192825112, "acc_norm_stderr": 0.03259625118416827 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.3053435114503817, "acc_stderr": 0.04039314978724562, "acc_norm": 0.3053435114503817, "acc_norm_stderr": 0.04039314978724562 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.25252525252525254, "acc_stderr": 0.030954055470365907, "acc_norm": 0.25252525252525254, "acc_norm_stderr": 0.030954055470365907 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.296551724137931, "acc_stderr": 0.038061426873099935, "acc_norm": 0.296551724137931, "acc_norm_stderr": 0.038061426873099935 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.24509803921568626, "acc_stderr": 0.04280105837364396, "acc_norm": 0.24509803921568626, "acc_norm_stderr": 0.04280105837364396 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.25210084033613445, "acc_stderr": 0.028205545033277723, "acc_norm": 0.25210084033613445, "acc_norm_stderr": 0.028205545033277723 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.2358974358974359, "acc_stderr": 0.021525965407408726, "acc_norm": 0.2358974358974359, "acc_norm_stderr": 0.021525965407408726 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.2962962962962963, "acc_stderr": 0.04414343666854932, "acc_norm": 0.2962962962962963, 
"acc_norm_stderr": 0.04414343666854932 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.22167487684729065, "acc_stderr": 0.0292255758924896, "acc_norm": 0.22167487684729065, "acc_norm_stderr": 0.0292255758924896 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.2903225806451613, "acc_stderr": 0.025822106119415895, "acc_norm": 0.2903225806451613, "acc_norm_stderr": 0.025822106119415895 }, "harness|ko_mmlu_marketing|5": { "acc": 0.3247863247863248, "acc_stderr": 0.03067902276549883, "acc_norm": 0.3247863247863248, "acc_norm_stderr": 0.03067902276549883 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.29056603773584905, "acc_stderr": 0.02794321998933716, "acc_norm": 0.29056603773584905, "acc_norm_stderr": 0.02794321998933716 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.2545454545454545, "acc_stderr": 0.04172343038705383, "acc_norm": 0.2545454545454545, "acc_norm_stderr": 0.04172343038705383 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.27037037037037037, "acc_stderr": 0.027080372815145668, "acc_norm": 0.27037037037037037, "acc_norm_stderr": 0.027080372815145668 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.2781456953642384, "acc_stderr": 0.03658603262763744, "acc_norm": 0.2781456953642384, "acc_norm_stderr": 0.03658603262763744 }, "harness|ko_mmlu_sociology|5": { "acc": 0.3333333333333333, "acc_stderr": 0.03333333333333334, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.03333333333333334 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.2138728323699422, "acc_stderr": 0.03126511206173042, "acc_norm": 0.2138728323699422, "acc_norm_stderr": 0.03126511206173042 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.26455026455026454, "acc_stderr": 0.022717467897708614, "acc_norm": 0.26455026455026454, "acc_norm_stderr": 0.022717467897708614 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.2847222222222222, "acc_stderr": 0.03773809990686935, "acc_norm": 0.2847222222222222, "acc_norm_stderr": 0.03773809990686935 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.22, "acc_stderr": 0.04163331998932269, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932269 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.2947976878612717, "acc_stderr": 0.024547617794803828, "acc_norm": 0.2947976878612717, "acc_norm_stderr": 0.024547617794803828 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.2822085889570552, "acc_stderr": 0.03536117886664743, "acc_norm": 0.2822085889570552, "acc_norm_stderr": 0.03536117886664743 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.2839506172839506, "acc_stderr": 0.025089478523765127, "acc_norm": 0.2839506172839506, "acc_norm_stderr": 0.025089478523765127 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.27461139896373055, "acc_stderr": 0.032210245080411516, "acc_norm": 0.27461139896373055, "acc_norm_stderr": 0.032210245080411516 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.2543859649122807, "acc_stderr": 0.040969851398436716, "acc_norm": 0.2543859649122807, "acc_norm_stderr": 0.040969851398436716 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.25137614678899084, "acc_stderr": 0.018599206360287415, "acc_norm": 0.25137614678899084, "acc_norm_stderr": 0.018599206360287415 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.19047619047619047, "acc_stderr": 0.03512207412302052, "acc_norm": 0.19047619047619047, "acc_norm_stderr": 0.03512207412302052 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.29411764705882354, "acc_stderr": 0.026090162504279035, "acc_norm": 0.29411764705882354, "acc_norm_stderr": 0.026090162504279035 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.4, "acc_stderr": 0.049236596391733084, "acc_norm": 0.4, "acc_norm_stderr": 0.049236596391733084 }, "harness|ko_mmlu_international_law|5": { "acc": 0.2975206611570248, "acc_stderr": 0.04173349148083499, "acc_norm": 0.2975206611570248, "acc_norm_stderr": 0.04173349148083499 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.17105263157894737, "acc_stderr": 0.030643607071677098, "acc_norm": 0.17105263157894737, "acc_norm_stderr": 0.030643607071677098 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.3022875816993464, "acc_stderr": 0.018579232711113874, "acc_norm": 0.3022875816993464, "acc_norm_stderr": 0.018579232711113874 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.2765957446808511, "acc_stderr": 0.026684564340461014, "acc_norm": 0.2765957446808511, "acc_norm_stderr": 0.026684564340461014 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.21428571428571427, "acc_stderr": 0.03894641120044793, "acc_norm": 0.21428571428571427, "acc_norm_stderr": 0.03894641120044793 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.25925925925925924, "acc_stderr": 0.029886910547626978, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.029886910547626978 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2435754189944134, "acc_stderr": 0.014355911964767857, "acc_norm": 0.2435754189944134, "acc_norm_stderr": 0.014355911964767857 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.34, "acc_stderr": 0.04760952285695236, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695236 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.1801470588235294, "acc_stderr": 0.02334516361654488, "acc_norm": 0.1801470588235294, "acc_norm_stderr": 0.02334516361654488 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.24489795918367346, "acc_stderr": 0.027529637440174934, "acc_norm": 0.24489795918367346, "acc_norm_stderr": 0.027529637440174934 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.33755274261603374, "acc_stderr": 0.030781549102026216, "acc_norm": 0.33755274261603374, "acc_norm_stderr": 0.030781549102026216 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.26401564537157757, "acc_stderr": 0.011258435537723814, "acc_norm": 0.26401564537157757, "acc_norm_stderr": 0.011258435537723814 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.22058823529411764, "acc_stderr": 0.02910225438967407, "acc_norm": 0.22058823529411764, "acc_norm_stderr": 0.02910225438967407 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.24242424242424243, "acc_stderr": 0.03346409881055952, "acc_norm": 0.24242424242424243, "acc_norm_stderr": 0.03346409881055952 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.25703794369645044, "mc1_stderr": 0.015298077509485083, "mc2": 0.42530376345187815, "mc2_stderr": 0.015252754425393767 }, "harness|ko_commongen_v2|2": { "acc": 0.15584415584415584, "acc_stderr": 0.012470141877923077, "acc_norm": 0.3577331759149941, "acc_norm_stderr": 0.016479808935749976 } }
{ "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }
{ "model_name": "beomi/llama-2-ko-7b-emb-dev", "model_sha": "f1ff977bd4ee3f0c2a3ee7dd1c4b7750e3a0766c", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null }
{ "harness|ko_arc_challenge|25": { "acc": 0.30204778156996587, "acc_stderr": 0.013417519144716429, "acc_norm": 0.378839590443686, "acc_norm_stderr": 0.014175915490000324 }, "harness|ko_hellaswag|10": { "acc": 0.35480979884485164, "acc_stderr": 0.0047747781803451845, "acc_norm": 0.47390957976498704, "acc_norm_stderr": 0.0049829835924591935 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.24561403508771928, "acc_stderr": 0.033014059469872487, "acc_norm": 0.24561403508771928, "acc_norm_stderr": 0.033014059469872487 }, "harness|ko_mmlu_management|5": { "acc": 0.24271844660194175, "acc_stderr": 0.04245022486384493, "acc_norm": 0.24271844660194175, "acc_norm_stderr": 0.04245022486384493 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.3269476372924649, "acc_stderr": 0.01677490818013146, "acc_norm": 0.3269476372924649, "acc_norm_stderr": 0.01677490818013146 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.35555555555555557, "acc_stderr": 0.04135176749720386, "acc_norm": 0.35555555555555557, "acc_norm_stderr": 0.04135176749720386 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.35319148936170214, "acc_stderr": 0.031245325202761926, "acc_norm": 0.35319148936170214, "acc_norm_stderr": 0.031245325202761926 }, "harness|ko_mmlu_virology|5": { "acc": 0.3855421686746988, "acc_stderr": 0.03789134424611548, "acc_norm": 0.3855421686746988, "acc_norm_stderr": 0.03789134424611548 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.3215434083601286, "acc_stderr": 0.026527724079528872, "acc_norm": 0.3215434083601286, "acc_norm_stderr": 0.026527724079528872 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.30493273542600896, "acc_stderr": 0.03089861088247751, "acc_norm": 0.30493273542600896, "acc_norm_stderr": 0.03089861088247751 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.2748091603053435, "acc_stderr": 0.039153454088478354, "acc_norm": 0.2748091603053435, "acc_norm_stderr": 0.039153454088478354 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.24, "acc_stderr": 0.04292346959909282, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909282 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.29292929292929293, "acc_stderr": 0.03242497958178817, "acc_norm": 0.29292929292929293, "acc_norm_stderr": 0.03242497958178817 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.2896551724137931, "acc_stderr": 0.03780019230438013, "acc_norm": 0.2896551724137931, "acc_norm_stderr": 0.03780019230438013 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.20588235294117646, "acc_stderr": 0.04023382273617746, "acc_norm": 0.20588235294117646, "acc_norm_stderr": 0.04023382273617746 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.3403361344537815, "acc_stderr": 0.03077805742293167, "acc_norm": 0.3403361344537815, "acc_norm_stderr": 0.03077805742293167 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.3, "acc_stderr": 0.023234581088428494, "acc_norm": 0.3, "acc_norm_stderr": 0.023234581088428494 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.31, "acc_stderr": 0.046482319871173156, "acc_norm": 0.31, "acc_norm_stderr": 0.046482319871173156 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.2777777777777778, "acc_stderr": 0.043300437496507416, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 
0.043300437496507416 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.3251231527093596, "acc_stderr": 0.03295797566311271, "acc_norm": 0.3251231527093596, "acc_norm_stderr": 0.03295797566311271 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.3193548387096774, "acc_stderr": 0.026522709674667768, "acc_norm": 0.3193548387096774, "acc_norm_stderr": 0.026522709674667768 }, "harness|ko_mmlu_marketing|5": { "acc": 0.3034188034188034, "acc_stderr": 0.03011821010694266, "acc_norm": 0.3034188034188034, "acc_norm_stderr": 0.03011821010694266 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.2830188679245283, "acc_stderr": 0.0277242364927009, "acc_norm": 0.2830188679245283, "acc_norm_stderr": 0.0277242364927009 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.3181818181818182, "acc_stderr": 0.04461272175910508, "acc_norm": 0.3181818181818182, "acc_norm_stderr": 0.04461272175910508 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.29259259259259257, "acc_stderr": 0.02773896963217609, "acc_norm": 0.29259259259259257, "acc_norm_stderr": 0.02773896963217609 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.304635761589404, "acc_stderr": 0.03757949922943343, "acc_norm": 0.304635761589404, "acc_norm_stderr": 0.03757949922943343 }, "harness|ko_mmlu_sociology|5": { "acc": 0.3880597014925373, "acc_stderr": 0.0344578996436275, "acc_norm": 0.3880597014925373, "acc_norm_stderr": 0.0344578996436275 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.24277456647398843, "acc_stderr": 0.0326926380614177, "acc_norm": 0.24277456647398843, "acc_norm_stderr": 0.0326926380614177 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.2804232804232804, "acc_stderr": 0.023135287974325635, "acc_norm": 0.2804232804232804, "acc_norm_stderr": 0.023135287974325635 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.2777777777777778, "acc_stderr": 0.037455547914624576, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.037455547914624576 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.22, "acc_stderr": 0.041633319989322695, "acc_norm": 0.22, "acc_norm_stderr": 0.041633319989322695 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.34, "acc_stderr": 0.047609522856952365, "acc_norm": 0.34, "acc_norm_stderr": 0.047609522856952365 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.26878612716763006, "acc_stderr": 0.023868003262500104, "acc_norm": 0.26878612716763006, "acc_norm_stderr": 0.023868003262500104 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.26993865030674846, "acc_stderr": 0.03487825168497892, "acc_norm": 0.26993865030674846, "acc_norm_stderr": 0.03487825168497892 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.3117283950617284, "acc_stderr": 0.025773111169630453, "acc_norm": 0.3117283950617284, "acc_norm_stderr": 0.025773111169630453 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.27, "acc_stderr": 0.044619604333847394, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.26424870466321243, "acc_stderr": 0.03182155050916647, "acc_norm": 0.26424870466321243, "acc_norm_stderr": 0.03182155050916647 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.2719298245614035, "acc_stderr": 0.04185774424022056, "acc_norm": 0.2719298245614035, "acc_norm_stderr": 0.04185774424022056 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.26055045871559634, "acc_stderr": 0.018819182034850068, "acc_norm": 0.26055045871559634, "acc_norm_stderr": 0.018819182034850068 }, "harness|ko_mmlu_formal_logic|5": { "acc": 
0.20634920634920634, "acc_stderr": 0.036196045241242494, "acc_norm": 0.20634920634920634, "acc_norm_stderr": 0.036196045241242494 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.31699346405228757, "acc_stderr": 0.026643278474508755, "acc_norm": 0.31699346405228757, "acc_norm_stderr": 0.026643278474508755 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|ko_mmlu_international_law|5": { "acc": 0.4132231404958678, "acc_stderr": 0.04495087843548408, "acc_norm": 0.4132231404958678, "acc_norm_stderr": 0.04495087843548408 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.19736842105263158, "acc_stderr": 0.03238981601699397, "acc_norm": 0.19736842105263158, "acc_norm_stderr": 0.03238981601699397 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.2565359477124183, "acc_stderr": 0.017667841612379002, "acc_norm": 0.2565359477124183, "acc_norm_stderr": 0.017667841612379002 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.2553191489361702, "acc_stderr": 0.026011992930902006, "acc_norm": 0.2553191489361702, "acc_norm_stderr": 0.026011992930902006 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.25, "acc_stderr": 0.04109974682633932, "acc_norm": 0.25, "acc_norm_stderr": 0.04109974682633932 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.3148148148148148, "acc_stderr": 0.03167468706828977, "acc_norm": 0.3148148148148148, "acc_norm_stderr": 0.03167468706828977 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.23575418994413408, "acc_stderr": 0.014196375686290803, "acc_norm": 0.23575418994413408, "acc_norm_stderr": 0.014196375686290803 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.23, "acc_stderr": 0.042295258468165065, "acc_norm": 0.23, "acc_norm_stderr": 0.042295258468165065 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.4375, "acc_stderr": 0.030134614954403924, "acc_norm": 0.4375, "acc_norm_stderr": 0.030134614954403924 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.4204081632653061, "acc_stderr": 0.03160106993449604, "acc_norm": 0.4204081632653061, "acc_norm_stderr": 0.03160106993449604 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.2869198312236287, "acc_stderr": 0.029443773022594693, "acc_norm": 0.2869198312236287, "acc_norm_stderr": 0.029443773022594693 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.2607561929595828, "acc_stderr": 0.011213471559602334, "acc_norm": 0.2607561929595828, "acc_norm_stderr": 0.011213471559602334 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.25, "acc_stderr": 0.03039153369274154, "acc_norm": 0.25, "acc_norm_stderr": 0.03039153369274154 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.2909090909090909, "acc_stderr": 0.03546563019624337, "acc_norm": 0.2909090909090909, "acc_norm_stderr": 0.03546563019624337 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.24969400244798043, "mc1_stderr": 0.015152286907148123, "mc2": 0.4175794689167079, "mc2_stderr": 0.01513967881843377 }, "harness|ko_commongen_v2|2": { "acc": 0.13695395513577333, "acc_stderr": 0.011820043946570876, "acc_norm": 0.33530106257378983, "acc_norm_stderr": 0.016230981232989817 } }
{ "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }
{ "model_name": "beomi/llama-2-ko-7b-emb-dev", "model_sha": "3796dc4797838aa3c3a9cd22a3d2b73b931fc684", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null }
{ "harness|ko_arc_challenge|25": { "acc": 0.32593856655290104, "acc_stderr": 0.013697432466693239, "acc_norm": 0.40273037542662116, "acc_norm_stderr": 0.014332236306790147 }, "harness|ko_hellaswag|10": { "acc": 0.3641704839673372, "acc_stderr": 0.004802133511654224, "acc_norm": 0.49083847839075884, "acc_norm_stderr": 0.004988943721711207 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.24561403508771928, "acc_stderr": 0.03301405946987249, "acc_norm": 0.24561403508771928, "acc_norm_stderr": 0.03301405946987249 }, "harness|ko_mmlu_management|5": { "acc": 0.21359223300970873, "acc_stderr": 0.04058042015646034, "acc_norm": 0.21359223300970873, "acc_norm_stderr": 0.04058042015646034 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.30395913154533843, "acc_stderr": 0.016448321686769043, "acc_norm": 0.30395913154533843, "acc_norm_stderr": 0.016448321686769043 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.3333333333333333, "acc_stderr": 0.04072314811876837, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.04072314811876837 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.32340425531914896, "acc_stderr": 0.03057944277361034, "acc_norm": 0.32340425531914896, "acc_norm_stderr": 0.03057944277361034 }, "harness|ko_mmlu_virology|5": { "acc": 0.2710843373493976, "acc_stderr": 0.03460579907553027, "acc_norm": 0.2710843373493976, "acc_norm_stderr": 0.03460579907553027 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.2829581993569132, "acc_stderr": 0.02558306248998484, "acc_norm": 0.2829581993569132, "acc_norm_stderr": 0.02558306248998484 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.4080717488789238, "acc_stderr": 0.03298574607842822, "acc_norm": 0.4080717488789238, "acc_norm_stderr": 0.03298574607842822 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.2748091603053435, "acc_stderr": 0.039153454088478354, "acc_norm": 0.2748091603053435, "acc_norm_stderr": 0.039153454088478354 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.27, "acc_stderr": 0.044619604333847394, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.23232323232323232, "acc_stderr": 0.030088629490217483, "acc_norm": 0.23232323232323232, "acc_norm_stderr": 0.030088629490217483 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.2482758620689655, "acc_stderr": 0.03600105692727774, "acc_norm": 0.2482758620689655, "acc_norm_stderr": 0.03600105692727774 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.19607843137254902, "acc_stderr": 0.039505818611799616, "acc_norm": 0.19607843137254902, "acc_norm_stderr": 0.039505818611799616 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.2605042016806723, "acc_stderr": 0.028510251512341923, "acc_norm": 0.2605042016806723, "acc_norm_stderr": 0.028510251512341923 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.26153846153846155, "acc_stderr": 0.02228214120420443, "acc_norm": 0.26153846153846155, "acc_norm_stderr": 0.02228214120420443 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.32, "acc_stderr": 0.04688261722621505, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621505 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.3055555555555556, "acc_stderr": 0.044531975073749834, "acc_norm": 0.3055555555555556, 
"acc_norm_stderr": 0.044531975073749834 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.2315270935960591, "acc_stderr": 0.02967833314144446, "acc_norm": 0.2315270935960591, "acc_norm_stderr": 0.02967833314144446 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.3032258064516129, "acc_stderr": 0.026148685930671742, "acc_norm": 0.3032258064516129, "acc_norm_stderr": 0.026148685930671742 }, "harness|ko_mmlu_marketing|5": { "acc": 0.2692307692307692, "acc_stderr": 0.029058588303748845, "acc_norm": 0.2692307692307692, "acc_norm_stderr": 0.029058588303748845 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.2830188679245283, "acc_stderr": 0.0277242364927009, "acc_norm": 0.2830188679245283, "acc_norm_stderr": 0.0277242364927009 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.39090909090909093, "acc_stderr": 0.04673752333670237, "acc_norm": 0.39090909090909093, "acc_norm_stderr": 0.04673752333670237 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.24814814814814815, "acc_stderr": 0.0263357394040558, "acc_norm": 0.24814814814814815, "acc_norm_stderr": 0.0263357394040558 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.23841059602649006, "acc_stderr": 0.03479185572599661, "acc_norm": 0.23841059602649006, "acc_norm_stderr": 0.03479185572599661 }, "harness|ko_mmlu_sociology|5": { "acc": 0.2935323383084577, "acc_stderr": 0.03220024104534204, "acc_norm": 0.2935323383084577, "acc_norm_stderr": 0.03220024104534204 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.23121387283236994, "acc_stderr": 0.0321473730202947, "acc_norm": 0.23121387283236994, "acc_norm_stderr": 0.0321473730202947 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.25396825396825395, "acc_stderr": 0.022418042891113942, "acc_norm": 0.25396825396825395, "acc_norm_stderr": 0.022418042891113942 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.3055555555555556, "acc_stderr": 0.03852084696008534, "acc_norm": 0.3055555555555556, "acc_norm_stderr": 0.03852084696008534 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.24, "acc_stderr": 0.04292346959909283, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909283 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.24566473988439305, "acc_stderr": 0.02317629820399201, "acc_norm": 0.24566473988439305, "acc_norm_stderr": 0.02317629820399201 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.27607361963190186, "acc_stderr": 0.03512385283705051, "acc_norm": 0.27607361963190186, "acc_norm_stderr": 0.03512385283705051 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.2777777777777778, "acc_stderr": 0.02492200116888633, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.02492200116888633 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.23, "acc_stderr": 0.04229525846816506, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816506 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.20207253886010362, "acc_stderr": 0.02897908979429673, "acc_norm": 0.20207253886010362, "acc_norm_stderr": 0.02897908979429673 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.2631578947368421, "acc_stderr": 0.041424397194893624, "acc_norm": 0.2631578947368421, "acc_norm_stderr": 0.041424397194893624 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.24220183486238533, "acc_stderr": 0.01836817630659862, "acc_norm": 0.24220183486238533, "acc_norm_stderr": 0.01836817630659862 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.20634920634920634, "acc_stderr": 0.036196045241242494, "acc_norm": 0.20634920634920634, "acc_norm_stderr": 0.036196045241242494 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.27450980392156865, "acc_stderr": 0.02555316999182652, "acc_norm": 0.27450980392156865, "acc_norm_stderr": 0.02555316999182652 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.24, "acc_stderr": 0.04292346959909284, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909284 }, "harness|ko_mmlu_international_law|5": { "acc": 0.36363636363636365, "acc_stderr": 0.043913262867240704, "acc_norm": 0.36363636363636365, "acc_norm_stderr": 0.043913262867240704 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.18421052631578946, "acc_stderr": 0.0315469804508223, "acc_norm": 0.18421052631578946, "acc_norm_stderr": 0.0315469804508223 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.2761437908496732, "acc_stderr": 0.018087276935663133, "acc_norm": 0.2761437908496732, "acc_norm_stderr": 0.018087276935663133 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.26595744680851063, "acc_stderr": 0.026358065698880585, "acc_norm": 0.26595744680851063, "acc_norm_stderr": 0.026358065698880585 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.26785714285714285, "acc_stderr": 0.04203277291467764, "acc_norm": 0.26785714285714285, "acc_norm_stderr": 0.04203277291467764 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.2361111111111111, "acc_stderr": 0.028963702570791044, "acc_norm": 0.2361111111111111, "acc_norm_stderr": 0.028963702570791044 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2424581005586592, "acc_stderr": 0.01433352205921789, "acc_norm": 0.2424581005586592, "acc_norm_stderr": 0.01433352205921789 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.22, "acc_stderr": 0.04163331998932269, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932269 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.28, "acc_stderr": 0.04512608598542127, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542127 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.24632352941176472, "acc_stderr": 0.02617343857052, "acc_norm": 0.24632352941176472, "acc_norm_stderr": 0.02617343857052 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.22040816326530613, "acc_stderr": 0.0265370453121453, "acc_norm": 0.22040816326530613, "acc_norm_stderr": 0.0265370453121453 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.31645569620253167, "acc_stderr": 0.03027497488021898, "acc_norm": 0.31645569620253167, "acc_norm_stderr": 0.03027497488021898 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.24902216427640156, "acc_stderr": 0.01104489226404077, "acc_norm": 0.24902216427640156, "acc_norm_stderr": 0.01104489226404077 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.21568627450980393, "acc_stderr": 0.028867431449849313, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.028867431449849313 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.24848484848484848, "acc_stderr": 0.03374402644139405, "acc_norm": 0.24848484848484848, "acc_norm_stderr": 0.03374402644139405 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.2558139534883721, "mc1_stderr": 0.015274176219283347, "mc2": 0.430843038646161, "mc2_stderr": 0.015222244438027463 }, "harness|ko_commongen_v2|2": { "acc": 0.15348288075560804, "acc_stderr": 0.012392606565325119, "acc_norm": 0.3435655253837072, "acc_norm_stderr": 0.016327334806429145 } }
{ "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }
{ "model_name": "beomi/llama-2-ko-7b-emb-dev", "model_sha": "3796dc4797838aa3c3a9cd22a3d2b73b931fc684", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null }
{ "harness|ko_arc_challenge|25": { "acc": 0.3387372013651877, "acc_stderr": 0.013830568927974334, "acc_norm": 0.4197952218430034, "acc_norm_stderr": 0.014422181226303031 }, "harness|ko_hellaswag|10": { "acc": 0.364070902210715, "acc_stderr": 0.0048018528813297484, "acc_norm": 0.49741087432782316, "acc_norm_stderr": 0.004989714512282407 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.21637426900584794, "acc_stderr": 0.03158149539338733, "acc_norm": 0.21637426900584794, "acc_norm_stderr": 0.03158149539338733 }, "harness|ko_mmlu_management|5": { "acc": 0.2621359223300971, "acc_stderr": 0.043546310772605956, "acc_norm": 0.2621359223300971, "acc_norm_stderr": 0.043546310772605956 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.3052362707535121, "acc_stderr": 0.016467711947635123, "acc_norm": 0.3052362707535121, "acc_norm_stderr": 0.016467711947635123 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.37777777777777777, "acc_stderr": 0.04188307537595852, "acc_norm": 0.37777777777777777, "acc_norm_stderr": 0.04188307537595852 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.3404255319148936, "acc_stderr": 0.03097669299853443, "acc_norm": 0.3404255319148936, "acc_norm_stderr": 0.03097669299853443 }, "harness|ko_mmlu_virology|5": { "acc": 0.3674698795180723, "acc_stderr": 0.03753267402120574, "acc_norm": 0.3674698795180723, "acc_norm_stderr": 0.03753267402120574 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.28938906752411575, "acc_stderr": 0.025755865922632924, "acc_norm": 0.28938906752411575, "acc_norm_stderr": 0.025755865922632924 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.3811659192825112, "acc_stderr": 0.032596251184168264, "acc_norm": 0.3811659192825112, "acc_norm_stderr": 0.032596251184168264 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.26717557251908397, "acc_stderr": 0.038808483010823944, "acc_norm": 0.26717557251908397, "acc_norm_stderr": 0.038808483010823944 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.26, "acc_stderr": 0.04408440022768078, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768078 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.25757575757575757, "acc_stderr": 0.031156269519646826, "acc_norm": 0.25757575757575757, "acc_norm_stderr": 0.031156269519646826 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.27586206896551724, "acc_stderr": 0.037245636197746325, "acc_norm": 0.27586206896551724, "acc_norm_stderr": 0.037245636197746325 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.19607843137254902, "acc_stderr": 0.039505818611799616, "acc_norm": 0.19607843137254902, "acc_norm_stderr": 0.039505818611799616 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.2815126050420168, "acc_stderr": 0.02921354941437216, "acc_norm": 0.2815126050420168, "acc_norm_stderr": 0.02921354941437216 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.26153846153846155, "acc_stderr": 0.02228214120420443, "acc_norm": 0.26153846153846155, "acc_norm_stderr": 0.02228214120420443 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.25, "acc_stderr": 0.04186091791394607, "acc_norm": 0.25, "acc_norm_stderr": 
0.04186091791394607 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.3054187192118227, "acc_stderr": 0.032406615658684086, "acc_norm": 0.3054187192118227, "acc_norm_stderr": 0.032406615658684086 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.2870967741935484, "acc_stderr": 0.02573654274559453, "acc_norm": 0.2870967741935484, "acc_norm_stderr": 0.02573654274559453 }, "harness|ko_mmlu_marketing|5": { "acc": 0.2948717948717949, "acc_stderr": 0.029872577708891165, "acc_norm": 0.2948717948717949, "acc_norm_stderr": 0.029872577708891165 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.2981132075471698, "acc_stderr": 0.028152837942493875, "acc_norm": 0.2981132075471698, "acc_norm_stderr": 0.028152837942493875 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.3, "acc_stderr": 0.04389311454644286, "acc_norm": 0.3, "acc_norm_stderr": 0.04389311454644286 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.2814814814814815, "acc_stderr": 0.02742001935094527, "acc_norm": 0.2814814814814815, "acc_norm_stderr": 0.02742001935094527 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.23178807947019867, "acc_stderr": 0.034454062719870546, "acc_norm": 0.23178807947019867, "acc_norm_stderr": 0.034454062719870546 }, "harness|ko_mmlu_sociology|5": { "acc": 0.34328358208955223, "acc_stderr": 0.03357379665433431, "acc_norm": 0.34328358208955223, "acc_norm_stderr": 0.03357379665433431 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.24277456647398843, "acc_stderr": 0.0326926380614177, "acc_norm": 0.24277456647398843, "acc_norm_stderr": 0.0326926380614177 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.25396825396825395, "acc_stderr": 0.022418042891113942, "acc_norm": 0.25396825396825395, "acc_norm_stderr": 0.022418042891113942 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.3333333333333333, "acc_stderr": 0.039420826399272135, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.039420826399272135 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.24, "acc_stderr": 0.04292346959909283, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909283 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.27167630057803466, "acc_stderr": 0.02394851290546836, "acc_norm": 0.27167630057803466, "acc_norm_stderr": 0.02394851290546836 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.25766871165644173, "acc_stderr": 0.03436150827846917, "acc_norm": 0.25766871165644173, "acc_norm_stderr": 0.03436150827846917 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.28703703703703703, "acc_stderr": 0.025171041915309684, "acc_norm": 0.28703703703703703, "acc_norm_stderr": 0.025171041915309684 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.21761658031088082, "acc_stderr": 0.029778663037752954, "acc_norm": 0.21761658031088082, "acc_norm_stderr": 0.029778663037752954 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.24561403508771928, "acc_stderr": 0.04049339297748142, "acc_norm": 0.24561403508771928, "acc_norm_stderr": 0.04049339297748142 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.26605504587155965, "acc_stderr": 0.018946022322225597, "acc_norm": 0.26605504587155965, "acc_norm_stderr": 0.018946022322225597 }, "harness|ko_mmlu_formal_logic|5": { "acc": 
0.21428571428571427, "acc_stderr": 0.03670066451047182, "acc_norm": 0.21428571428571427, "acc_norm_stderr": 0.03670066451047182 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.2908496732026144, "acc_stderr": 0.026004800363952113, "acc_norm": 0.2908496732026144, "acc_norm_stderr": 0.026004800363952113 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.26, "acc_stderr": 0.044084400227680794, "acc_norm": 0.26, "acc_norm_stderr": 0.044084400227680794 }, "harness|ko_mmlu_international_law|5": { "acc": 0.39669421487603307, "acc_stderr": 0.044658697805310094, "acc_norm": 0.39669421487603307, "acc_norm_stderr": 0.044658697805310094 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.19078947368421054, "acc_stderr": 0.031975658210325, "acc_norm": 0.19078947368421054, "acc_norm_stderr": 0.031975658210325 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.25326797385620914, "acc_stderr": 0.017593486895366828, "acc_norm": 0.25326797385620914, "acc_norm_stderr": 0.017593486895366828 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.25886524822695034, "acc_stderr": 0.026129572527180848, "acc_norm": 0.25886524822695034, "acc_norm_stderr": 0.026129572527180848 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.2857142857142857, "acc_stderr": 0.042878587513404565, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.042878587513404565 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.20833333333333334, "acc_stderr": 0.02769691071309394, "acc_norm": 0.20833333333333334, "acc_norm_stderr": 0.02769691071309394 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2424581005586592, "acc_stderr": 0.01433352205921789, "acc_norm": 0.2424581005586592, "acc_norm_stderr": 0.01433352205921789 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.23, "acc_stderr": 0.04229525846816507, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816507 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.28, "acc_stderr": 0.045126085985421276, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421276 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.31985294117647056, "acc_stderr": 0.028332959514031218, "acc_norm": 0.31985294117647056, "acc_norm_stderr": 0.028332959514031218 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.2653061224489796, "acc_stderr": 0.02826388994378461, "acc_norm": 0.2653061224489796, "acc_norm_stderr": 0.02826388994378461 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.27848101265822783, "acc_stderr": 0.029178682304842544, "acc_norm": 0.27848101265822783, "acc_norm_stderr": 0.029178682304842544 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.26401564537157757, "acc_stderr": 0.011258435537723814, "acc_norm": 0.26401564537157757, "acc_norm_stderr": 0.011258435537723814 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.24019607843137256, "acc_stderr": 0.02998373305591361, "acc_norm": 0.24019607843137256, "acc_norm_stderr": 0.02998373305591361 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.2545454545454545, "acc_stderr": 0.03401506715249039, "acc_norm": 0.2545454545454545, "acc_norm_stderr": 0.03401506715249039 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.2521419828641371, "mc1_stderr": 0.015201522246299953, "mc2": 0.4275383331125476, "mc2_stderr": 0.01526305656191646 }, "harness|ko_commongen_v2|2": { "acc": 0.14403778040141677, "acc_stderr": 0.012072030576668953, "acc_norm": 0.3707201889020071, "acc_norm_stderr": 0.016605801289212598 } }
{ "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }
{ "model_name": "beomi/llama-2-ko-7b-emb-dev", "model_sha": "d0e8d08d5f41082f3f48ec990edc2eb521ac2e73", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null }
{ "harness|ko_arc_challenge|25": { "acc": 0.32593856655290104, "acc_stderr": 0.013697432466693246, "acc_norm": 0.38139931740614336, "acc_norm_stderr": 0.014194389086685272 }, "harness|ko_hellaswag|10": { "acc": 0.3543118900617407, "acc_stderr": 0.0047732675101127406, "acc_norm": 0.4435371439952201, "acc_norm_stderr": 0.004957863944093121 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.45614035087719296, "acc_stderr": 0.03820042586602966, "acc_norm": 0.45614035087719296, "acc_norm_stderr": 0.03820042586602966 }, "harness|ko_mmlu_management|5": { "acc": 0.5339805825242718, "acc_stderr": 0.04939291447273481, "acc_norm": 0.5339805825242718, "acc_norm_stderr": 0.04939291447273481 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.48020434227330777, "acc_stderr": 0.017865944827291615, "acc_norm": 0.48020434227330777, "acc_norm_stderr": 0.017865944827291615 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.32592592592592595, "acc_stderr": 0.04049122041702506, "acc_norm": 0.32592592592592595, "acc_norm_stderr": 0.04049122041702506 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.27, "acc_stderr": 0.044619604333847415, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847415 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.4297872340425532, "acc_stderr": 0.03236214467715564, "acc_norm": 0.4297872340425532, "acc_norm_stderr": 0.03236214467715564 }, "harness|ko_mmlu_virology|5": { "acc": 0.3795180722891566, "acc_stderr": 0.037777988227480165, "acc_norm": 0.3795180722891566, "acc_norm_stderr": 0.037777988227480165 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.43729903536977494, "acc_stderr": 0.02817391776176287, "acc_norm": 0.43729903536977494, "acc_norm_stderr": 0.02817391776176287 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.4170403587443946, "acc_stderr": 0.03309266936071721, "acc_norm": 0.4170403587443946, "acc_norm_stderr": 0.03309266936071721 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.4198473282442748, "acc_stderr": 0.04328577215262972, "acc_norm": 0.4198473282442748, "acc_norm_stderr": 0.04328577215262972 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.48484848484848486, "acc_stderr": 0.03560716516531061, "acc_norm": 0.48484848484848486, "acc_norm_stderr": 0.03560716516531061 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.35172413793103446, "acc_stderr": 0.03979236637497411, "acc_norm": 0.35172413793103446, "acc_norm_stderr": 0.03979236637497411 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.23529411764705882, "acc_stderr": 0.04220773659171453, "acc_norm": 0.23529411764705882, "acc_norm_stderr": 0.04220773659171453 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.4327731092436975, "acc_stderr": 0.032183581077426124, "acc_norm": 0.4327731092436975, "acc_norm_stderr": 0.032183581077426124 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.3923076923076923, "acc_stderr": 0.02475600038213094, "acc_norm": 0.3923076923076923, "acc_norm_stderr": 0.02475600038213094 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.46, "acc_stderr": 0.05009082659620333, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620333 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.5370370370370371, "acc_stderr": 0.04820403072760628, "acc_norm": 0.5370370370370371, 
"acc_norm_stderr": 0.04820403072760628 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.29064039408866993, "acc_stderr": 0.031947400722655395, "acc_norm": 0.29064039408866993, "acc_norm_stderr": 0.031947400722655395 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.432258064516129, "acc_stderr": 0.028181739720019416, "acc_norm": 0.432258064516129, "acc_norm_stderr": 0.028181739720019416 }, "harness|ko_mmlu_marketing|5": { "acc": 0.6196581196581197, "acc_stderr": 0.03180425204384099, "acc_norm": 0.6196581196581197, "acc_norm_stderr": 0.03180425204384099 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.4528301886792453, "acc_stderr": 0.030635627957961827, "acc_norm": 0.4528301886792453, "acc_norm_stderr": 0.030635627957961827 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.5454545454545454, "acc_stderr": 0.04769300568972743, "acc_norm": 0.5454545454545454, "acc_norm_stderr": 0.04769300568972743 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.2851851851851852, "acc_stderr": 0.027528599210340492, "acc_norm": 0.2851851851851852, "acc_norm_stderr": 0.027528599210340492 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.2582781456953642, "acc_stderr": 0.035737053147634576, "acc_norm": 0.2582781456953642, "acc_norm_stderr": 0.035737053147634576 }, "harness|ko_mmlu_sociology|5": { "acc": 0.5472636815920398, "acc_stderr": 0.03519702717576915, "acc_norm": 0.5472636815920398, "acc_norm_stderr": 0.03519702717576915 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.36416184971098264, "acc_stderr": 0.03669072477416907, "acc_norm": 0.36416184971098264, "acc_norm_stderr": 0.03669072477416907 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.2777777777777778, "acc_stderr": 0.02306818884826111, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.02306818884826111 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.3611111111111111, "acc_stderr": 0.040166600304512336, "acc_norm": 0.3611111111111111, "acc_norm_stderr": 0.040166600304512336 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.28, "acc_stderr": 0.04512608598542129, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542129 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.55, "acc_stderr": 0.05, "acc_norm": 0.55, "acc_norm_stderr": 0.05 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.5028901734104047, "acc_stderr": 0.02691864538323901, "acc_norm": 0.5028901734104047, "acc_norm_stderr": 0.02691864538323901 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.36809815950920244, "acc_stderr": 0.03789213935838396, "acc_norm": 0.36809815950920244, "acc_norm_stderr": 0.03789213935838396 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.4660493827160494, "acc_stderr": 0.027756535257347663, "acc_norm": 0.4660493827160494, "acc_norm_stderr": 0.027756535257347663 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.45595854922279794, "acc_stderr": 0.03594413711272438, "acc_norm": 0.45595854922279794, "acc_norm_stderr": 0.03594413711272438 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.21929824561403508, "acc_stderr": 0.03892431106518753, "acc_norm": 0.21929824561403508, "acc_norm_stderr": 0.03892431106518753 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.44954128440366975, "acc_stderr": 0.021327881417823373, "acc_norm": 0.44954128440366975, "acc_norm_stderr": 0.021327881417823373 }, "harness|ko_mmlu_formal_logic|5": { "acc": 
0.3253968253968254, "acc_stderr": 0.041905964388711366, "acc_norm": 0.3253968253968254, "acc_norm_stderr": 0.041905964388711366 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.4117647058823529, "acc_stderr": 0.028180596328259287, "acc_norm": 0.4117647058823529, "acc_norm_stderr": 0.028180596328259287 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|ko_mmlu_international_law|5": { "acc": 0.6363636363636364, "acc_stderr": 0.043913262867240704, "acc_norm": 0.6363636363636364, "acc_norm_stderr": 0.043913262867240704 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.35526315789473684, "acc_stderr": 0.038947344870133176, "acc_norm": 0.35526315789473684, "acc_norm_stderr": 0.038947344870133176 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.3349673202614379, "acc_stderr": 0.01909422816700031, "acc_norm": 0.3349673202614379, "acc_norm_stderr": 0.01909422816700031 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.3333333333333333, "acc_stderr": 0.02812163604063989, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.02812163604063989 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.2857142857142857, "acc_stderr": 0.04287858751340456, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.04287858751340456 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.03214952147802747, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.03214952147802747 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.25921787709497207, "acc_stderr": 0.014655780837497722, "acc_norm": 0.25921787709497207, "acc_norm_stderr": 0.014655780837497722 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.34191176470588236, "acc_stderr": 0.02881472242225417, "acc_norm": 0.34191176470588236, "acc_norm_stderr": 0.02881472242225417 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.4448979591836735, "acc_stderr": 0.031814251181977865, "acc_norm": 0.4448979591836735, "acc_norm_stderr": 0.031814251181977865 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.5822784810126582, "acc_stderr": 0.032103530322412685, "acc_norm": 0.5822784810126582, "acc_norm_stderr": 0.032103530322412685 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.363754889178618, "acc_stderr": 0.012286991879902887, "acc_norm": 0.363754889178618, "acc_norm_stderr": 0.012286991879902887 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.5, "acc_stderr": 0.03509312031717982, "acc_norm": 0.5, "acc_norm_stderr": 0.03509312031717982 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.5151515151515151, "acc_stderr": 0.03902551007374448, "acc_norm": 0.5151515151515151, "acc_norm_stderr": 0.03902551007374448 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.2839657282741738, "mc1_stderr": 0.015785370858396708, "mc2": 0.444838685797901, "mc2_stderr": 0.015532530203119514 }, "harness|ko_commongen_v2|2": { "acc": 0.3742621015348288, "acc_stderr": 0.016637917789798735, "acc_norm": 0.4179456906729634, "acc_norm_stderr": 0.016957292005279723 } }
{ "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }
{ "model_name": "jjourney1125/llama2-dev", "model_sha": "66931bf246639e144dcd1e8b255a2222e210e2f0", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null }
{ "harness|ko_arc_challenge|25": { "acc": 0.31569965870307165, "acc_stderr": 0.013582571095815291, "acc_norm": 0.3575085324232082, "acc_norm_stderr": 0.014005494275916573 }, "harness|ko_hellaswag|10": { "acc": 0.36227843059151565, "acc_stderr": 0.004796763521045229, "acc_norm": 0.4538936466839275, "acc_norm_stderr": 0.004968521608065472 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.25146198830409355, "acc_stderr": 0.033275044238468436, "acc_norm": 0.25146198830409355, "acc_norm_stderr": 0.033275044238468436 }, "harness|ko_mmlu_management|5": { "acc": 0.2524271844660194, "acc_stderr": 0.04301250399690878, "acc_norm": 0.2524271844660194, "acc_norm_stderr": 0.04301250399690878 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.2656449553001277, "acc_stderr": 0.015794302487888715, "acc_norm": 0.2656449553001277, "acc_norm_stderr": 0.015794302487888715 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.26666666666666666, "acc_stderr": 0.038201699145179055, "acc_norm": 0.26666666666666666, "acc_norm_stderr": 0.038201699145179055 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.26, "acc_stderr": 0.044084400227680794, "acc_norm": 0.26, "acc_norm_stderr": 0.044084400227680794 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.2297872340425532, "acc_stderr": 0.027501752944412424, "acc_norm": 0.2297872340425532, "acc_norm_stderr": 0.027501752944412424 }, "harness|ko_mmlu_virology|5": { "acc": 0.3313253012048193, "acc_stderr": 0.03664314777288085, "acc_norm": 0.3313253012048193, "acc_norm_stderr": 0.03664314777288085 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.26688102893890675, "acc_stderr": 0.02512263760881664, "acc_norm": 0.26688102893890675, "acc_norm_stderr": 0.02512263760881664 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.3004484304932735, "acc_stderr": 0.030769352008229143, "acc_norm": 0.3004484304932735, "acc_norm_stderr": 0.030769352008229143 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.2748091603053435, "acc_stderr": 0.039153454088478354, "acc_norm": 0.2748091603053435, "acc_norm_stderr": 0.039153454088478354 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.27, "acc_stderr": 0.044619604333847394, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.25757575757575757, "acc_stderr": 0.031156269519646836, "acc_norm": 0.25757575757575757, "acc_norm_stderr": 0.031156269519646836 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.2896551724137931, "acc_stderr": 0.037800192304380156, "acc_norm": 0.2896551724137931, "acc_norm_stderr": 0.037800192304380156 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.20588235294117646, "acc_stderr": 0.04023382273617747, "acc_norm": 0.20588235294117646, "acc_norm_stderr": 0.04023382273617747 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.28991596638655465, "acc_stderr": 0.029472485833136074, "acc_norm": 0.28991596638655465, "acc_norm_stderr": 0.029472485833136074 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.2923076923076923, "acc_stderr": 0.023060438380857744, "acc_norm": 0.2923076923076923, "acc_norm_stderr": 0.023060438380857744 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.23148148148148148, "acc_stderr": 0.04077494709252626, "acc_norm": 
0.23148148148148148, "acc_norm_stderr": 0.04077494709252626 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.22660098522167488, "acc_stderr": 0.02945486383529297, "acc_norm": 0.22660098522167488, "acc_norm_stderr": 0.02945486383529297 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.2806451612903226, "acc_stderr": 0.0255606047210229, "acc_norm": 0.2806451612903226, "acc_norm_stderr": 0.0255606047210229 }, "harness|ko_mmlu_marketing|5": { "acc": 0.3247863247863248, "acc_stderr": 0.03067902276549883, "acc_norm": 0.3247863247863248, "acc_norm_stderr": 0.03067902276549883 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.2, "acc_stderr": 0.024618298195866514, "acc_norm": 0.2, "acc_norm_stderr": 0.024618298195866514 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.37272727272727274, "acc_stderr": 0.046313813194254635, "acc_norm": 0.37272727272727274, "acc_norm_stderr": 0.046313813194254635 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.24814814814814815, "acc_stderr": 0.026335739404055803, "acc_norm": 0.24814814814814815, "acc_norm_stderr": 0.026335739404055803 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.271523178807947, "acc_stderr": 0.03631329803969654, "acc_norm": 0.271523178807947, "acc_norm_stderr": 0.03631329803969654 }, "harness|ko_mmlu_sociology|5": { "acc": 0.29850746268656714, "acc_stderr": 0.03235743789355041, "acc_norm": 0.29850746268656714, "acc_norm_stderr": 0.03235743789355041 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.1907514450867052, "acc_stderr": 0.029957851329869334, "acc_norm": 0.1907514450867052, "acc_norm_stderr": 0.029957851329869334 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.24867724867724866, "acc_stderr": 0.02226181769240017, "acc_norm": 0.24867724867724866, "acc_norm_stderr": 0.02226181769240017 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.3402777777777778, "acc_stderr": 0.039621355734862175, "acc_norm": 0.3402777777777778, "acc_norm_stderr": 0.039621355734862175 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.29, "acc_stderr": 0.04560480215720684, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720684 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.2514450867052023, "acc_stderr": 0.023357365785874044, "acc_norm": 0.2514450867052023, "acc_norm_stderr": 0.023357365785874044 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.2883435582822086, "acc_stderr": 0.03559039531617342, "acc_norm": 0.2883435582822086, "acc_norm_stderr": 0.03559039531617342 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.2932098765432099, "acc_stderr": 0.025329888171900926, "acc_norm": 0.2932098765432099, "acc_norm_stderr": 0.025329888171900926 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.23, "acc_stderr": 0.04229525846816506, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816506 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.26424870466321243, "acc_stderr": 0.03182155050916647, "acc_norm": 0.26424870466321243, "acc_norm_stderr": 0.03182155050916647 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.2543859649122807, "acc_stderr": 0.040969851398436695, "acc_norm": 0.2543859649122807, "acc_norm_stderr": 0.040969851398436695 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.25504587155963304, "acc_stderr": 0.018688500856535846, "acc_norm": 0.25504587155963304, "acc_norm_stderr": 0.018688500856535846 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.3333333333333333, "acc_stderr": 0.042163702135578345, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.042163702135578345 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.28104575163398693, "acc_stderr": 0.02573885479781873, "acc_norm": 0.28104575163398693, "acc_norm_stderr": 0.02573885479781873 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.36, "acc_stderr": 0.048241815132442176, "acc_norm": 0.36, "acc_norm_stderr": 0.048241815132442176 }, "harness|ko_mmlu_international_law|5": { "acc": 0.2066115702479339, "acc_stderr": 0.036959801280988254, "acc_norm": 0.2066115702479339, "acc_norm_stderr": 0.036959801280988254 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.24342105263157895, "acc_stderr": 0.034923496688842384, "acc_norm": 0.24342105263157895, "acc_norm_stderr": 0.034923496688842384 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.238562091503268, "acc_stderr": 0.0172423858287796, "acc_norm": 0.238562091503268, "acc_norm_stderr": 0.0172423858287796 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.2695035460992908, "acc_stderr": 0.026469036818590627, "acc_norm": 0.2695035460992908, "acc_norm_stderr": 0.026469036818590627 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.30357142857142855, "acc_stderr": 0.04364226155841044, "acc_norm": 0.30357142857142855, "acc_norm_stderr": 0.04364226155841044 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.33796296296296297, "acc_stderr": 0.03225941352631295, "acc_norm": 0.33796296296296297, "acc_norm_stderr": 0.03225941352631295 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.26256983240223464, "acc_stderr": 0.014716824273017763, "acc_norm": 0.26256983240223464, "acc_norm_stderr": 0.014716824273017763 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.23, "acc_stderr": 0.042295258468165065, "acc_norm": 0.23, "acc_norm_stderr": 0.042295258468165065 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.24, "acc_stderr": 0.042923469599092816, "acc_norm": 0.24, "acc_norm_stderr": 0.042923469599092816 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.29044117647058826, "acc_stderr": 0.027576468622740522, "acc_norm": 0.29044117647058826, "acc_norm_stderr": 0.027576468622740522 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.23673469387755103, "acc_stderr": 0.02721283588407316, "acc_norm": 0.23673469387755103, "acc_norm_stderr": 0.02721283588407316 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.26582278481012656, "acc_stderr": 0.028756799629658342, "acc_norm": 0.26582278481012656, "acc_norm_stderr": 0.028756799629658342 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.26597131681877445, "acc_stderr": 0.011285033165551276, "acc_norm": 0.26597131681877445, "acc_norm_stderr": 0.011285033165551276 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.25, "acc_stderr": 0.03039153369274154, "acc_norm": 0.25, "acc_norm_stderr": 0.03039153369274154 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.24242424242424243, "acc_stderr": 0.033464098810559534, "acc_norm": 0.24242424242424243, "acc_norm_stderr": 0.033464098810559534 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.2729498164014688, "mc1_stderr": 0.015594753632006518, "mc2": 0.4351036475713623, "mc2_stderr": 0.01572968169308527 }, "harness|ko_commongen_v2|2": { "acc": 0.3010625737898465, "acc_stderr": 0.015771113299945457, "acc_norm": 0.3600944510035419, "acc_norm_stderr": 0.01650368672044007 } }
{ "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }
{ "model_name": "currybab/gemma-2b-ko-dev-pb", "model_sha": "5c1619d624d9ef6889ff86d4c59fccea132e62de", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null }
{ "harness|ko_arc_challenge|25": { "acc": 0.2960750853242321, "acc_stderr": 0.013340916085246261, "acc_norm": 0.363481228668942, "acc_norm_stderr": 0.014056207319068283 }, "harness|ko_hellaswag|10": { "acc": 0.36068512248556067, "acc_stderr": 0.0047921790525834425, "acc_norm": 0.46086436964748057, "acc_norm_stderr": 0.004974473255391264 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.21637426900584794, "acc_stderr": 0.031581495393387324, "acc_norm": 0.21637426900584794, "acc_norm_stderr": 0.031581495393387324 }, "harness|ko_mmlu_management|5": { "acc": 0.22330097087378642, "acc_stderr": 0.04123553189891431, "acc_norm": 0.22330097087378642, "acc_norm_stderr": 0.04123553189891431 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.27586206896551724, "acc_stderr": 0.01598281477469563, "acc_norm": 0.27586206896551724, "acc_norm_stderr": 0.01598281477469563 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.3111111111111111, "acc_stderr": 0.03999262876617723, "acc_norm": 0.3111111111111111, "acc_norm_stderr": 0.03999262876617723 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.251063829787234, "acc_stderr": 0.02834696377716245, "acc_norm": 0.251063829787234, "acc_norm_stderr": 0.02834696377716245 }, "harness|ko_mmlu_virology|5": { "acc": 0.25903614457831325, "acc_stderr": 0.03410646614071857, "acc_norm": 0.25903614457831325, "acc_norm_stderr": 0.03410646614071857 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.3215434083601286, "acc_stderr": 0.026527724079528872, "acc_norm": 0.3215434083601286, "acc_norm_stderr": 0.026527724079528872 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.22869955156950672, "acc_stderr": 0.028188240046929203, "acc_norm": 0.22869955156950672, "acc_norm_stderr": 0.028188240046929203 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.3282442748091603, "acc_stderr": 0.041184385658062976, "acc_norm": 0.3282442748091603, "acc_norm_stderr": 0.041184385658062976 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.29, "acc_stderr": 0.04560480215720684, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720684 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.24242424242424243, "acc_stderr": 0.030532892233932026, "acc_norm": 0.24242424242424243, "acc_norm_stderr": 0.030532892233932026 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.32413793103448274, "acc_stderr": 0.03900432069185555, "acc_norm": 0.32413793103448274, "acc_norm_stderr": 0.03900432069185555 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.20588235294117646, "acc_stderr": 0.04023382273617748, "acc_norm": 0.20588235294117646, "acc_norm_stderr": 0.04023382273617748 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.23109243697478993, "acc_stderr": 0.027381406927868966, "acc_norm": 0.23109243697478993, "acc_norm_stderr": 0.027381406927868966 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.26666666666666666, "acc_stderr": 0.022421273612923707, "acc_norm": 0.26666666666666666, "acc_norm_stderr": 0.022421273612923707 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.23, "acc_stderr": 0.04229525846816506, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816506 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.33, "acc_stderr": 0.04725815626252604, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252604 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.16666666666666666, "acc_stderr": 0.036028141763926456, "acc_norm": 0.16666666666666666, 
"acc_norm_stderr": 0.036028141763926456 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.2512315270935961, "acc_stderr": 0.030516530732694436, "acc_norm": 0.2512315270935961, "acc_norm_stderr": 0.030516530732694436 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.27741935483870966, "acc_stderr": 0.025470196835900055, "acc_norm": 0.27741935483870966, "acc_norm_stderr": 0.025470196835900055 }, "harness|ko_mmlu_marketing|5": { "acc": 0.2905982905982906, "acc_stderr": 0.029745048572674057, "acc_norm": 0.2905982905982906, "acc_norm_stderr": 0.029745048572674057 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.19622641509433963, "acc_stderr": 0.024442388131100817, "acc_norm": 0.19622641509433963, "acc_norm_stderr": 0.024442388131100817 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.2545454545454545, "acc_stderr": 0.04172343038705383, "acc_norm": 0.2545454545454545, "acc_norm_stderr": 0.04172343038705383 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.25925925925925924, "acc_stderr": 0.026719240783712166, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.026719240783712166 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.2781456953642384, "acc_stderr": 0.03658603262763743, "acc_norm": 0.2781456953642384, "acc_norm_stderr": 0.03658603262763743 }, "harness|ko_mmlu_sociology|5": { "acc": 0.31343283582089554, "acc_stderr": 0.03280188205348645, "acc_norm": 0.31343283582089554, "acc_norm_stderr": 0.03280188205348645 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.2023121387283237, "acc_stderr": 0.030631145539198823, "acc_norm": 0.2023121387283237, "acc_norm_stderr": 0.030631145539198823 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.2830687830687831, "acc_stderr": 0.023201392938194978, "acc_norm": 0.2830687830687831, "acc_norm_stderr": 0.023201392938194978 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.2361111111111111, "acc_stderr": 0.03551446610810826, "acc_norm": 0.2361111111111111, "acc_norm_stderr": 0.03551446610810826 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.27, "acc_stderr": 0.0446196043338474, "acc_norm": 0.27, "acc_norm_stderr": 0.0446196043338474 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.27, "acc_stderr": 0.04461960433384741, "acc_norm": 0.27, "acc_norm_stderr": 0.04461960433384741 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.30346820809248554, "acc_stderr": 0.024752411960917202, "acc_norm": 0.30346820809248554, "acc_norm_stderr": 0.024752411960917202 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.24539877300613497, "acc_stderr": 0.03380939813943354, "acc_norm": 0.24539877300613497, "acc_norm_stderr": 0.03380939813943354 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.2932098765432099, "acc_stderr": 0.025329888171900936, "acc_norm": 0.2932098765432099, "acc_norm_stderr": 0.025329888171900936 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.27, "acc_stderr": 0.0446196043338474, "acc_norm": 0.27, "acc_norm_stderr": 0.0446196043338474 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.25906735751295334, "acc_stderr": 0.031618779179354094, "acc_norm": 0.25906735751295334, "acc_norm_stderr": 0.031618779179354094 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.21929824561403508, "acc_stderr": 0.03892431106518754, "acc_norm": 0.21929824561403508, "acc_norm_stderr": 0.03892431106518754 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.26972477064220185, "acc_stderr": 0.01902848671111544, "acc_norm": 0.26972477064220185, "acc_norm_stderr": 0.01902848671111544 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.15873015873015872, "acc_stderr": 0.03268454013011743, "acc_norm": 0.15873015873015872, "acc_norm_stderr": 0.03268454013011743 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.29411764705882354, "acc_stderr": 0.026090162504279042, "acc_norm": 0.29411764705882354, "acc_norm_stderr": 0.026090162504279042 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.28, "acc_stderr": 0.045126085985421276, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421276 }, "harness|ko_mmlu_international_law|5": { "acc": 0.2975206611570248, "acc_stderr": 0.04173349148083497, "acc_norm": 0.2975206611570248, "acc_norm_stderr": 0.04173349148083497 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.2631578947368421, "acc_stderr": 0.03583496176361064, "acc_norm": 0.2631578947368421, "acc_norm_stderr": 0.03583496176361064 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.21895424836601307, "acc_stderr": 0.016729937565537523, "acc_norm": 0.21895424836601307, "acc_norm_stderr": 0.016729937565537523 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.22695035460992907, "acc_stderr": 0.024987106365642976, "acc_norm": 0.22695035460992907, "acc_norm_stderr": 0.024987106365642976 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.23214285714285715, "acc_stderr": 0.04007341809755807, "acc_norm": 0.23214285714285715, "acc_norm_stderr": 0.04007341809755807 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.2037037037037037, "acc_stderr": 0.027467401804057986, "acc_norm": 0.2037037037037037, "acc_norm_stderr": 0.027467401804057986 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.24581005586592178, "acc_stderr": 0.014400296429225608, "acc_norm": 0.24581005586592178, "acc_norm_stderr": 0.014400296429225608 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.22, "acc_stderr": 0.04163331998932269, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932269 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.29411764705882354, "acc_stderr": 0.027678468642144693, "acc_norm": 0.29411764705882354, "acc_norm_stderr": 0.027678468642144693 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.22857142857142856, "acc_stderr": 0.02688214492230774, "acc_norm": 0.22857142857142856, "acc_norm_stderr": 0.02688214492230774 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.27848101265822783, "acc_stderr": 0.029178682304842555, "acc_norm": 0.27848101265822783, "acc_norm_stderr": 0.029178682304842555 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.24445893089960888, "acc_stderr": 0.010976425013113897, "acc_norm": 0.24445893089960888, "acc_norm_stderr": 0.010976425013113897 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.2549019607843137, "acc_stderr": 0.03058759135160425, "acc_norm": 0.2549019607843137, "acc_norm_stderr": 0.03058759135160425 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.2545454545454545, "acc_stderr": 0.0340150671524904, "acc_norm": 0.2545454545454545, "acc_norm_stderr": 0.0340150671524904 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.2484700122399021, "mc1_stderr": 0.015127427096520705, "mc2": 0.4016607777606808, "mc2_stderr": 0.014845926809175847 }, "harness|ko_commongen_v2|2": { "acc": 0.2644628099173554, "acc_stderr": 0.015163499477892408, "acc_norm": 0.33884297520661155, "acc_norm_stderr": 0.016272952997019124 } }
{ "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }
{ "model_name": "gemmathon/gemma-2b-ko-dev-pbc432", "model_sha": "35d87b5b339736c8754526c8507ba57a512da812", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null }
{ "harness|ko_arc_challenge|25": { "acc": 0.34215017064846415, "acc_stderr": 0.01386415215917728, "acc_norm": 0.4052901023890785, "acc_norm_stderr": 0.01434686906022933 }, "harness|ko_hellaswag|10": { "acc": 0.3686516630153356, "acc_stderr": 0.004814532642574656, "acc_norm": 0.4689304919338777, "acc_norm_stderr": 0.00498013867916104 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.18128654970760233, "acc_stderr": 0.029547741687640024, "acc_norm": 0.18128654970760233, "acc_norm_stderr": 0.029547741687640024 }, "harness|ko_mmlu_management|5": { "acc": 0.3592233009708738, "acc_stderr": 0.04750458399041693, "acc_norm": 0.3592233009708738, "acc_norm_stderr": 0.04750458399041693 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.3001277139208174, "acc_stderr": 0.016389249691317432, "acc_norm": 0.3001277139208174, "acc_norm_stderr": 0.016389249691317432 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.34074074074074073, "acc_stderr": 0.040943762699967946, "acc_norm": 0.34074074074074073, "acc_norm_stderr": 0.040943762699967946 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.24, "acc_stderr": 0.04292346959909284, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909284 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.25957446808510637, "acc_stderr": 0.02865917937429232, "acc_norm": 0.25957446808510637, "acc_norm_stderr": 0.02865917937429232 }, "harness|ko_mmlu_virology|5": { "acc": 0.30120481927710846, "acc_stderr": 0.03571609230053481, "acc_norm": 0.30120481927710846, "acc_norm_stderr": 0.03571609230053481 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.33762057877813506, "acc_stderr": 0.026858825879488544, "acc_norm": 0.33762057877813506, "acc_norm_stderr": 0.026858825879488544 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.21076233183856502, "acc_stderr": 0.027373095500540193, "acc_norm": 0.21076233183856502, "acc_norm_stderr": 0.027373095500540193 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.25190839694656486, "acc_stderr": 0.03807387116306085, "acc_norm": 0.25190839694656486, "acc_norm_stderr": 0.03807387116306085 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.27, "acc_stderr": 0.044619604333847394, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.32323232323232326, "acc_stderr": 0.03332299921070643, "acc_norm": 0.32323232323232326, "acc_norm_stderr": 0.03332299921070643 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.2896551724137931, "acc_stderr": 0.03780019230438015, "acc_norm": 0.2896551724137931, "acc_norm_stderr": 0.03780019230438015 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.14705882352941177, "acc_stderr": 0.035240689515674495, "acc_norm": 0.14705882352941177, "acc_norm_stderr": 0.035240689515674495 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.2857142857142857, "acc_stderr": 0.029344572500634335, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.029344572500634335 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.31025641025641026, "acc_stderr": 0.023454674889404288, "acc_norm": 0.31025641025641026, "acc_norm_stderr": 0.023454674889404288 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.24074074074074073, "acc_stderr": 0.04133119440243839, "acc_norm": 0.24074074074074073, 
"acc_norm_stderr": 0.04133119440243839 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.2561576354679803, "acc_stderr": 0.030712730070982592, "acc_norm": 0.2561576354679803, "acc_norm_stderr": 0.030712730070982592 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.3161290322580645, "acc_stderr": 0.026450874489042757, "acc_norm": 0.3161290322580645, "acc_norm_stderr": 0.026450874489042757 }, "harness|ko_mmlu_marketing|5": { "acc": 0.2777777777777778, "acc_stderr": 0.029343114798094476, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.029343114798094476 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.22641509433962265, "acc_stderr": 0.025757559893106727, "acc_norm": 0.22641509433962265, "acc_norm_stderr": 0.025757559893106727 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.3090909090909091, "acc_stderr": 0.044262946482000985, "acc_norm": 0.3090909090909091, "acc_norm_stderr": 0.044262946482000985 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.26296296296296295, "acc_stderr": 0.026842057873833706, "acc_norm": 0.26296296296296295, "acc_norm_stderr": 0.026842057873833706 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.31125827814569534, "acc_stderr": 0.03780445850526733, "acc_norm": 0.31125827814569534, "acc_norm_stderr": 0.03780445850526733 }, "harness|ko_mmlu_sociology|5": { "acc": 0.36318407960199006, "acc_stderr": 0.034005985055990146, "acc_norm": 0.36318407960199006, "acc_norm_stderr": 0.034005985055990146 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.26011560693641617, "acc_stderr": 0.03345036916788991, "acc_norm": 0.26011560693641617, "acc_norm_stderr": 0.03345036916788991 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.2751322751322751, "acc_stderr": 0.02300008685906865, "acc_norm": 0.2751322751322751, "acc_norm_stderr": 0.02300008685906865 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.2847222222222222, "acc_stderr": 0.037738099906869355, "acc_norm": 0.2847222222222222, "acc_norm_stderr": 0.037738099906869355 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.2832369942196532, "acc_stderr": 0.02425790170532338, "acc_norm": 0.2832369942196532, "acc_norm_stderr": 0.02425790170532338 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.2331288343558282, "acc_stderr": 0.033220157957767414, "acc_norm": 0.2331288343558282, "acc_norm_stderr": 0.033220157957767414 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.3425925925925926, "acc_stderr": 0.02640614597362567, "acc_norm": 0.3425925925925926, "acc_norm_stderr": 0.02640614597362567 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.28, "acc_stderr": 0.04512608598542129, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542129 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.24352331606217617, "acc_stderr": 0.03097543638684543, "acc_norm": 0.24352331606217617, "acc_norm_stderr": 0.03097543638684543 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.19298245614035087, "acc_stderr": 0.03712454853721368, "acc_norm": 0.19298245614035087, "acc_norm_stderr": 0.03712454853721368 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.3229357798165138, "acc_stderr": 0.020048115923415325, "acc_norm": 0.3229357798165138, "acc_norm_stderr": 0.020048115923415325 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.21428571428571427, "acc_stderr": 0.03670066451047181, "acc_norm": 0.21428571428571427, "acc_norm_stderr": 0.03670066451047181 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.3366013071895425, "acc_stderr": 0.027057974624494382, "acc_norm": 0.3366013071895425, "acc_norm_stderr": 0.027057974624494382 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|ko_mmlu_international_law|5": { "acc": 0.256198347107438, "acc_stderr": 0.03984979653302871, "acc_norm": 0.256198347107438, "acc_norm_stderr": 0.03984979653302871 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.34210526315789475, "acc_stderr": 0.03860731599316092, "acc_norm": 0.34210526315789475, "acc_norm_stderr": 0.03860731599316092 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.24183006535947713, "acc_stderr": 0.017322789207784326, "acc_norm": 0.24183006535947713, "acc_norm_stderr": 0.017322789207784326 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.2198581560283688, "acc_stderr": 0.024706141070705477, "acc_norm": 0.2198581560283688, "acc_norm_stderr": 0.024706141070705477 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.24107142857142858, "acc_stderr": 0.04059867246952687, "acc_norm": 0.24107142857142858, "acc_norm_stderr": 0.04059867246952687 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.37962962962962965, "acc_stderr": 0.03309682581119035, "acc_norm": 0.37962962962962965, "acc_norm_stderr": 0.03309682581119035 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.23575418994413408, "acc_stderr": 0.014196375686290803, "acc_norm": 0.23575418994413408, "acc_norm_stderr": 0.014196375686290803 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.33, "acc_stderr": 0.04725815626252605, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252605 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.40808823529411764, "acc_stderr": 0.029855261393483927, "acc_norm": 0.40808823529411764, "acc_norm_stderr": 0.029855261393483927 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.24081632653061225, "acc_stderr": 0.027372942201788174, "acc_norm": 0.24081632653061225, "acc_norm_stderr": 0.027372942201788174 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.23628691983122363, "acc_stderr": 0.027652153144159263, "acc_norm": 0.23628691983122363, "acc_norm_stderr": 0.027652153144159263 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.2288135593220339, "acc_stderr": 0.010728759090375505, "acc_norm": 0.2288135593220339, "acc_norm_stderr": 0.010728759090375505 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.20098039215686275, "acc_stderr": 0.028125972265654373, "acc_norm": 0.20098039215686275, "acc_norm_stderr": 0.028125972265654373 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.2787878787878788, "acc_stderr": 0.035014387062967806, "acc_norm": 0.2787878787878788, "acc_norm_stderr": 0.035014387062967806 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.26193390452876375, "mc1_stderr": 0.015392118805015006, "mc2": 0.4222174184184925, "mc2_stderr": 0.015149588490094387 }, "harness|ko_commongen_v2|2": { "acc": 0.29043683589138136, "acc_stderr": 0.01560760256981463, "acc_norm": 0.3730814639905549, "acc_norm_stderr": 0.01662731827513744 } }
{ "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }
{ "model_name": "gemmathon/gemma-2b-ko-dev-pbmt192", "model_sha": "856bd740a9c014e7dfc7233dbe6b9083f44ac8a5", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null }
