{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.3455631399317406,
"acc_stderr": 0.013896938461145678,
"acc_norm": 0.3839590443686007,
"acc_norm_stderr": 0.01421244498065189
},
"harness|ko_hellaswag|10": {
"acc": 0.3975303724357698,
"acc_stderr": 0.004883871774350598,
"acc_norm": 0.5247958573989245,
"acc_norm_stderr": 0.004983641854351152
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.3684210526315789,
"acc_stderr": 0.036996580176568775,
"acc_norm": 0.3684210526315789,
"acc_norm_stderr": 0.036996580176568775
},
"harness|ko_mmlu_management|5": {
"acc": 0.32038834951456313,
"acc_stderr": 0.0462028408228004,
"acc_norm": 0.32038834951456313,
"acc_norm_stderr": 0.0462028408228004
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.3831417624521073,
"acc_stderr": 0.01738477419488563,
"acc_norm": 0.3831417624521073,
"acc_norm_stderr": 0.01738477419488563
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.32592592592592595,
"acc_stderr": 0.040491220417025055,
"acc_norm": 0.32592592592592595,
"acc_norm_stderr": 0.040491220417025055
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.28,
"acc_stderr": 0.045126085985421255,
"acc_norm": 0.28,
"acc_norm_stderr": 0.045126085985421255
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.28085106382978725,
"acc_stderr": 0.02937917046412482,
"acc_norm": 0.28085106382978725,
"acc_norm_stderr": 0.02937917046412482
},
"harness|ko_mmlu_virology|5": {
"acc": 0.3795180722891566,
"acc_stderr": 0.03777798822748017,
"acc_norm": 0.3795180722891566,
"acc_norm_stderr": 0.03777798822748017
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.35691318327974275,
"acc_stderr": 0.027210420375934012,
"acc_norm": 0.35691318327974275,
"acc_norm_stderr": 0.027210420375934012
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.4170403587443946,
"acc_stderr": 0.03309266936071721,
"acc_norm": 0.4170403587443946,
"acc_norm_stderr": 0.03309266936071721
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.4198473282442748,
"acc_stderr": 0.043285772152629715,
"acc_norm": 0.4198473282442748,
"acc_norm_stderr": 0.043285772152629715
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.28,
"acc_stderr": 0.04512608598542127,
"acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542127
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.35858585858585856,
"acc_stderr": 0.034169036403915214,
"acc_norm": 0.35858585858585856,
"acc_norm_stderr": 0.034169036403915214
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.25517241379310346,
"acc_stderr": 0.03632984052707842,
"acc_norm": 0.25517241379310346,
"acc_norm_stderr": 0.03632984052707842
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.1568627450980392,
"acc_stderr": 0.03618664819936245,
"acc_norm": 0.1568627450980392,
"acc_norm_stderr": 0.03618664819936245
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.25630252100840334,
"acc_stderr": 0.028359620870533953,
"acc_norm": 0.25630252100840334,
"acc_norm_stderr": 0.028359620870533953
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.24871794871794872,
"acc_stderr": 0.021916957709213803,
"acc_norm": 0.24871794871794872,
"acc_norm_stderr": 0.021916957709213803
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.36,
"acc_stderr": 0.048241815132442176,
"acc_norm": 0.36,
"acc_norm_stderr": 0.048241815132442176
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252605,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252605
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.37037037037037035,
"acc_stderr": 0.04668408033024932,
"acc_norm": 0.37037037037037035,
"acc_norm_stderr": 0.04668408033024932
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.23645320197044334,
"acc_stderr": 0.029896114291733545,
"acc_norm": 0.23645320197044334,
"acc_norm_stderr": 0.029896114291733545
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.3161290322580645,
"acc_stderr": 0.026450874489042767,
"acc_norm": 0.3161290322580645,
"acc_norm_stderr": 0.026450874489042767
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.4358974358974359,
"acc_stderr": 0.03248577511578401,
"acc_norm": 0.4358974358974359,
"acc_norm_stderr": 0.03248577511578401
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.30943396226415093,
"acc_stderr": 0.028450154794118627,
"acc_norm": 0.30943396226415093,
"acc_norm_stderr": 0.028450154794118627
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.37272727272727274,
"acc_stderr": 0.04631381319425463,
"acc_norm": 0.37272727272727274,
"acc_norm_stderr": 0.04631381319425463
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.25555555555555554,
"acc_stderr": 0.026593939101844072,
"acc_norm": 0.25555555555555554,
"acc_norm_stderr": 0.026593939101844072
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.23841059602649006,
"acc_stderr": 0.0347918557259966,
"acc_norm": 0.23841059602649006,
"acc_norm_stderr": 0.0347918557259966
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.3283582089552239,
"acc_stderr": 0.033206858897443244,
"acc_norm": 0.3283582089552239,
"acc_norm_stderr": 0.033206858897443244
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.23121387283236994,
"acc_stderr": 0.0321473730202947,
"acc_norm": 0.23121387283236994,
"acc_norm_stderr": 0.0321473730202947
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.23809523809523808,
"acc_stderr": 0.021935878081184756,
"acc_norm": 0.23809523809523808,
"acc_norm_stderr": 0.021935878081184756
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.2777777777777778,
"acc_stderr": 0.03745554791462457,
"acc_norm": 0.2777777777777778,
"acc_norm_stderr": 0.03745554791462457
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.21,
"acc_stderr": 0.040936018074033256,
"acc_norm": 0.21,
"acc_norm_stderr": 0.040936018074033256
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.37,
"acc_stderr": 0.04852365870939099,
"acc_norm": 0.37,
"acc_norm_stderr": 0.04852365870939099
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.33236994219653176,
"acc_stderr": 0.025361168749688225,
"acc_norm": 0.33236994219653176,
"acc_norm_stderr": 0.025361168749688225
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.25766871165644173,
"acc_stderr": 0.03436150827846917,
"acc_norm": 0.25766871165644173,
"acc_norm_stderr": 0.03436150827846917
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.33641975308641975,
"acc_stderr": 0.026289734945952926,
"acc_norm": 0.33641975308641975,
"acc_norm_stderr": 0.026289734945952926
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.28,
"acc_stderr": 0.04512608598542129,
"acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542129
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.32642487046632124,
"acc_stderr": 0.033840286211432945,
"acc_norm": 0.32642487046632124,
"acc_norm_stderr": 0.033840286211432945
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.30701754385964913,
"acc_stderr": 0.0433913832257986,
"acc_norm": 0.30701754385964913,
"acc_norm_stderr": 0.0433913832257986
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.3174311926605505,
"acc_stderr": 0.019957152198460497,
"acc_norm": 0.3174311926605505,
"acc_norm_stderr": 0.019957152198460497
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.1746031746031746,
"acc_stderr": 0.03395490020856111,
"acc_norm": 0.1746031746031746,
"acc_norm_stderr": 0.03395490020856111
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.35294117647058826,
"acc_stderr": 0.02736359328468495,
"acc_norm": 0.35294117647058826,
"acc_norm_stderr": 0.02736359328468495
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.25,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.36363636363636365,
"acc_stderr": 0.043913262867240704,
"acc_norm": 0.36363636363636365,
"acc_norm_stderr": 0.043913262867240704
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.3026315789473684,
"acc_stderr": 0.037385206761196686,
"acc_norm": 0.3026315789473684,
"acc_norm_stderr": 0.037385206761196686
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.2679738562091503,
"acc_stderr": 0.017917974069594726,
"acc_norm": 0.2679738562091503,
"acc_norm_stderr": 0.017917974069594726
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.2624113475177305,
"acc_stderr": 0.02624492034984301,
"acc_norm": 0.2624113475177305,
"acc_norm_stderr": 0.02624492034984301
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.33035714285714285,
"acc_stderr": 0.04464285714285712,
"acc_norm": 0.33035714285714285,
"acc_norm_stderr": 0.04464285714285712
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.24074074074074073,
"acc_stderr": 0.029157522184605586,
"acc_norm": 0.24074074074074073,
"acc_norm_stderr": 0.029157522184605586
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.2424581005586592,
"acc_stderr": 0.01433352205921789,
"acc_norm": 0.2424581005586592,
"acc_norm_stderr": 0.01433352205921789
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.32,
"acc_stderr": 0.04688261722621504,
"acc_norm": 0.32,
"acc_norm_stderr": 0.04688261722621504
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.21,
"acc_stderr": 0.040936018074033256,
"acc_norm": 0.21,
"acc_norm_stderr": 0.040936018074033256
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.35294117647058826,
"acc_stderr": 0.0290294228156814,
"acc_norm": 0.35294117647058826,
"acc_norm_stderr": 0.0290294228156814
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.27755102040816326,
"acc_stderr": 0.028666857790274648,
"acc_norm": 0.27755102040816326,
"acc_norm_stderr": 0.028666857790274648
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.35864978902953587,
"acc_stderr": 0.031219569445301847,
"acc_norm": 0.35864978902953587,
"acc_norm_stderr": 0.031219569445301847
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.27249022164276404,
"acc_stderr": 0.01137165829431153,
"acc_norm": 0.27249022164276404,
"acc_norm_stderr": 0.01137165829431153
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.2696078431372549,
"acc_stderr": 0.031145570659486782,
"acc_norm": 0.2696078431372549,
"acc_norm_stderr": 0.031145570659486782
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.3090909090909091,
"acc_stderr": 0.036085410115739666,
"acc_norm": 0.3090909090909091,
"acc_norm_stderr": 0.036085410115739666
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.24969400244798043,
"mc1_stderr": 0.015152286907148125,
"mc2": 0.39805148377575406,
"mc2_stderr": 0.015027401787198838
},
"harness|ko_commongen_v2|2": {
"acc": 0.392018779342723,
"acc_stderr": 0.016735309112043194,
"acc_norm": 0.46830985915492956,
"acc_norm_stderr": 0.017105318850828437
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "42MARU/llama-2-ko-7b-instruct",
"model_sha": "3c590472282b5de4c76d846153db5f41b82c1b62",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}