results/ENERGY-DRINK-LOVE/leaderboard_inst_v1.3_Open-Hermes_LDCC-SOLAR-10.7B_SFT/result_2024-03-04 12:34:24.json
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.42662116040955633,
            "acc_stderr": 0.014453185592920293,
            "acc_norm": 0.4761092150170648,
            "acc_norm_stderr": 0.014594701798071654
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.44991037641904,
            "acc_stderr": 0.0049646798459184365,
            "acc_norm": 0.6059549890460068,
            "acc_norm_stderr": 0.004876459434619797
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.6257309941520468,
            "acc_stderr": 0.037116011853894806,
            "acc_norm": 0.6257309941520468,
            "acc_norm_stderr": 0.037116011853894806
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.6116504854368932,
            "acc_stderr": 0.04825729337356389,
            "acc_norm": 0.6116504854368932,
            "acc_norm_stderr": 0.04825729337356389
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.6717752234993615,
            "acc_stderr": 0.01679168564019289,
            "acc_norm": 0.6717752234993615,
            "acc_norm_stderr": 0.01679168564019289
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.4074074074074074,
            "acc_stderr": 0.042446332383532306,
            "acc_norm": 0.4074074074074074,
            "acc_norm_stderr": 0.042446332383532306
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.26,
            "acc_stderr": 0.04408440022768077,
            "acc_norm": 0.26,
            "acc_norm_stderr": 0.04408440022768077
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.46382978723404256,
            "acc_stderr": 0.032600385118357715,
            "acc_norm": 0.46382978723404256,
            "acc_norm_stderr": 0.032600385118357715
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.5,
            "acc_stderr": 0.03892494720807614,
            "acc_norm": 0.5,
            "acc_norm_stderr": 0.03892494720807614
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.5819935691318328,
            "acc_stderr": 0.028013651891995072,
            "acc_norm": 0.5819935691318328,
            "acc_norm_stderr": 0.028013651891995072
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.5739910313901345,
            "acc_stderr": 0.033188332862172806,
            "acc_norm": 0.5739910313901345,
            "acc_norm_stderr": 0.033188332862172806
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.5572519083969466,
            "acc_stderr": 0.04356447202665069,
            "acc_norm": 0.5572519083969466,
            "acc_norm_stderr": 0.04356447202665069
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.42,
            "acc_stderr": 0.049604496374885836,
            "acc_norm": 0.42,
            "acc_norm_stderr": 0.049604496374885836
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.702020202020202,
            "acc_stderr": 0.032586303838365555,
            "acc_norm": 0.702020202020202,
            "acc_norm_stderr": 0.032586303838365555
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.4482758620689655,
            "acc_stderr": 0.04144311810878151,
            "acc_norm": 0.4482758620689655,
            "acc_norm_stderr": 0.04144311810878151
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.2549019607843137,
            "acc_stderr": 0.043364327079931785,
            "acc_norm": 0.2549019607843137,
            "acc_norm_stderr": 0.043364327079931785
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.5588235294117647,
            "acc_stderr": 0.03225294232399639,
            "acc_norm": 0.5588235294117647,
            "acc_norm_stderr": 0.03225294232399639
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.5230769230769231,
            "acc_stderr": 0.02532399086173626,
            "acc_norm": 0.5230769230769231,
            "acc_norm_stderr": 0.02532399086173626
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.58,
            "acc_stderr": 0.04960449637488583,
            "acc_norm": 0.58,
            "acc_norm_stderr": 0.04960449637488583
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.5925925925925926,
            "acc_stderr": 0.047500773411999854,
            "acc_norm": 0.5925925925925926,
            "acc_norm_stderr": 0.047500773411999854
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.43349753694581283,
            "acc_stderr": 0.03486731727419872,
            "acc_norm": 0.43349753694581283,
            "acc_norm_stderr": 0.03486731727419872
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.5806451612903226,
            "acc_stderr": 0.028071588901091828,
            "acc_norm": 0.5806451612903226,
            "acc_norm_stderr": 0.028071588901091828
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.8034188034188035,
            "acc_stderr": 0.026035386098951292,
            "acc_norm": 0.8034188034188035,
            "acc_norm_stderr": 0.026035386098951292
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.5056603773584906,
            "acc_stderr": 0.030770900763851295,
            "acc_norm": 0.5056603773584906,
            "acc_norm_stderr": 0.030770900763851295
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.6454545454545455,
            "acc_stderr": 0.04582004841505417,
            "acc_norm": 0.6454545454545455,
            "acc_norm_stderr": 0.04582004841505417
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.35185185185185186,
            "acc_stderr": 0.029116617606083032,
            "acc_norm": 0.35185185185185186,
            "acc_norm_stderr": 0.029116617606083032
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.33774834437086093,
            "acc_stderr": 0.038615575462551684,
            "acc_norm": 0.33774834437086093,
            "acc_norm_stderr": 0.038615575462551684
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.6865671641791045,
            "acc_stderr": 0.03280188205348642,
            "acc_norm": 0.6865671641791045,
            "acc_norm_stderr": 0.03280188205348642
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.44508670520231214,
            "acc_stderr": 0.03789401760283648,
            "acc_norm": 0.44508670520231214,
            "acc_norm_stderr": 0.03789401760283648
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.373015873015873,
            "acc_stderr": 0.02490699045899257,
            "acc_norm": 0.373015873015873,
            "acc_norm_stderr": 0.02490699045899257
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.4722222222222222,
            "acc_stderr": 0.04174752578923185,
            "acc_norm": 0.4722222222222222,
            "acc_norm_stderr": 0.04174752578923185
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.37,
            "acc_stderr": 0.048523658709391,
            "acc_norm": 0.37,
            "acc_norm_stderr": 0.048523658709391
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.75,
            "acc_stderr": 0.04351941398892446,
            "acc_norm": 0.75,
            "acc_norm_stderr": 0.04351941398892446
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.5664739884393064,
            "acc_stderr": 0.02668013476167922,
            "acc_norm": 0.5664739884393064,
            "acc_norm_stderr": 0.02668013476167922
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.4723926380368098,
            "acc_stderr": 0.0392237829061099,
            "acc_norm": 0.4723926380368098,
            "acc_norm_stderr": 0.0392237829061099
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.6018518518518519,
            "acc_stderr": 0.027237415094592484,
            "acc_norm": 0.6018518518518519,
            "acc_norm_stderr": 0.027237415094592484
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.37,
            "acc_stderr": 0.04852365870939099,
            "acc_norm": 0.37,
            "acc_norm_stderr": 0.04852365870939099
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.6839378238341969,
            "acc_stderr": 0.033553973696861736,
            "acc_norm": 0.6839378238341969,
            "acc_norm_stderr": 0.033553973696861736
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.4298245614035088,
            "acc_stderr": 0.04657047260594964,
            "acc_norm": 0.4298245614035088,
            "acc_norm_stderr": 0.04657047260594964
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.618348623853211,
            "acc_stderr": 0.0208281485170226,
            "acc_norm": 0.618348623853211,
            "acc_norm_stderr": 0.0208281485170226
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.3253968253968254,
            "acc_stderr": 0.041905964388711366,
            "acc_norm": 0.3253968253968254,
            "acc_norm_stderr": 0.041905964388711366
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.6111111111111112,
            "acc_stderr": 0.027914055510468,
            "acc_norm": 0.6111111111111112,
            "acc_norm_stderr": 0.027914055510468
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.51,
            "acc_stderr": 0.05024183937956914,
            "acc_norm": 0.51,
            "acc_norm_stderr": 0.05024183937956914
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.7355371900826446,
            "acc_stderr": 0.04026187527591207,
            "acc_norm": 0.7355371900826446,
            "acc_norm_stderr": 0.04026187527591207
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.5723684210526315,
            "acc_stderr": 0.04026097083296563,
            "acc_norm": 0.5723684210526315,
            "acc_norm_stderr": 0.04026097083296563
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.4738562091503268,
            "acc_stderr": 0.020200164564804588,
            "acc_norm": 0.4738562091503268,
            "acc_norm_stderr": 0.020200164564804588
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.36879432624113473,
            "acc_stderr": 0.028782227561347243,
            "acc_norm": 0.36879432624113473,
            "acc_norm_stderr": 0.028782227561347243
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.4017857142857143,
            "acc_stderr": 0.04653333146973646,
            "acc_norm": 0.4017857142857143,
            "acc_norm_stderr": 0.04653333146973646
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.46296296296296297,
            "acc_stderr": 0.03400603625538272,
            "acc_norm": 0.46296296296296297,
            "acc_norm_stderr": 0.03400603625538272
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2022346368715084,
            "acc_stderr": 0.013433729483320986,
            "acc_norm": 0.2022346368715084,
            "acc_norm_stderr": 0.013433729483320986
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.39,
            "acc_stderr": 0.04902071300001974,
            "acc_norm": 0.39,
            "acc_norm_stderr": 0.04902071300001974
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.63,
            "acc_stderr": 0.04852365870939099,
            "acc_norm": 0.63,
            "acc_norm_stderr": 0.04852365870939099
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.5036764705882353,
            "acc_stderr": 0.0303720158854282,
            "acc_norm": 0.5036764705882353,
            "acc_norm_stderr": 0.0303720158854282
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.6,
            "acc_stderr": 0.031362502409358936,
            "acc_norm": 0.6,
            "acc_norm_stderr": 0.031362502409358936
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.6962025316455697,
            "acc_stderr": 0.029936696387138594,
            "acc_norm": 0.6962025316455697,
            "acc_norm_stderr": 0.029936696387138594
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.37809647979139505,
            "acc_stderr": 0.012384878406798095,
            "acc_norm": 0.37809647979139505,
            "acc_norm_stderr": 0.012384878406798095
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.6029411764705882,
            "acc_stderr": 0.03434131164719129,
            "acc_norm": 0.6029411764705882,
            "acc_norm_stderr": 0.03434131164719129
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.6545454545454545,
            "acc_stderr": 0.037131580674819135,
            "acc_norm": 0.6545454545454545,
            "acc_norm_stderr": 0.037131580674819135
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2692778457772338,
            "mc1_stderr": 0.015528566637087298,
            "mc2": 0.40370494061094203,
            "mc2_stderr": 0.014912269705733993
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.39315230224321135,
            "acc_stderr": 0.01679326280128708,
            "acc_norm": 0.4498229043683589,
            "acc_norm_stderr": 0.01710357334382571
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "ENERGY-DRINK-LOVE/leaderboard_inst_v1.3_Open-Hermes_LDCC-SOLAR-10.7B_SFT",
        "model_sha": "8b0d7ec8189c550741754e5a0c6fb830f43e0335",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}
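
Each entry in "results" is keyed as harness|<task>|<num_fewshot>. Below is a minimal sketch of how one might load this file and summarize it, assuming the JSON above is saved locally under the filename from the path header; the macro-average over ko_mmlu subtasks and the focus on acc_norm are illustrative choices, not necessarily the leaderboard's official aggregation.

```python
import json

# Path is the filename from the header above; adjust to wherever the file lives.
with open("result_2024-03-04 12:34:24.json", encoding="utf-8") as f:
    data = json.load(f)

results = data["results"]

# Collect acc_norm for every ko_mmlu subtask (keys look like
# "harness|ko_mmlu_<subject>|5") and take an unweighted macro-average.
mmlu_scores = [
    v["acc_norm"]
    for k, v in results.items()
    if k.startswith("harness|ko_mmlu_")
]
print(f"ko_mmlu subtasks: {len(mmlu_scores)}")
print(f"ko_mmlu macro-average acc_norm: {sum(mmlu_scores) / len(mmlu_scores):.4f}")

# The remaining headline metrics use task-specific keys: TruthfulQA reports
# mc1/mc2 rather than acc/acc_norm.
print("ko_arc_challenge acc_norm:", results["harness|ko_arc_challenge|25"]["acc_norm"])
print("ko_hellaswag acc_norm:", results["harness|ko_hellaswag|10"]["acc_norm"])
print("ko_truthfulqa mc2:", results["harness|ko_truthfulqa_mc|0"]["mc2"])
print("ko_commongen_v2 acc_norm:", results["harness|ko_commongen_v2|2"]["acc_norm"])
```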