{
"results": {
"hendrycksTest-abstract_algebra": {
"acc": 0.29,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.29,
"acc_norm_stderr": 0.045604802157206845
},
"hendrycksTest-anatomy": {
"acc": 0.26666666666666666,
"acc_stderr": 0.038201699145179055,
"acc_norm": 0.26666666666666666,
"acc_norm_stderr": 0.038201699145179055
},
"hendrycksTest-astronomy": {
"acc": 0.27631578947368424,
"acc_stderr": 0.03639057569952924,
"acc_norm": 0.27631578947368424,
"acc_norm_stderr": 0.03639057569952924
},
"hendrycksTest-business_ethics": {
"acc": 0.32,
"acc_stderr": 0.046882617226215034,
"acc_norm": 0.32,
"acc_norm_stderr": 0.046882617226215034
},
"hendrycksTest-clinical_knowledge": {
"acc": 0.26037735849056604,
"acc_stderr": 0.027008766090708104,
"acc_norm": 0.26037735849056604,
"acc_norm_stderr": 0.027008766090708104
},
"hendrycksTest-college_biology": {
"acc": 0.2986111111111111,
"acc_stderr": 0.03827052357950756,
"acc_norm": 0.2986111111111111,
"acc_norm_stderr": 0.03827052357950756
},
"hendrycksTest-college_chemistry": {
"acc": 0.22,
"acc_stderr": 0.0416333199893227,
"acc_norm": 0.22,
"acc_norm_stderr": 0.0416333199893227
},
"hendrycksTest-college_computer_science": {
"acc": 0.38,
"acc_stderr": 0.04878317312145631,
"acc_norm": 0.38,
"acc_norm_stderr": 0.04878317312145631
},
"hendrycksTest-college_mathematics": {
"acc": 0.26,
"acc_stderr": 0.04408440022768078,
"acc_norm": 0.26,
"acc_norm_stderr": 0.04408440022768078
},
"hendrycksTest-college_medicine": {
"acc": 0.2774566473988439,
"acc_stderr": 0.03414014007044036,
"acc_norm": 0.2774566473988439,
"acc_norm_stderr": 0.03414014007044036
},
"hendrycksTest-college_physics": {
"acc": 0.19607843137254902,
"acc_stderr": 0.03950581861179963,
"acc_norm": 0.19607843137254902,
"acc_norm_stderr": 0.03950581861179963
},
"hendrycksTest-computer_security": {
"acc": 0.38,
"acc_stderr": 0.04878317312145633,
"acc_norm": 0.38,
"acc_norm_stderr": 0.04878317312145633
},
"hendrycksTest-conceptual_physics": {
"acc": 0.23829787234042554,
"acc_stderr": 0.027851252973889778,
"acc_norm": 0.23829787234042554,
"acc_norm_stderr": 0.027851252973889778
},
"hendrycksTest-econometrics": {
"acc": 0.2807017543859649,
"acc_stderr": 0.042270544512322,
"acc_norm": 0.2807017543859649,
"acc_norm_stderr": 0.042270544512322
},
"hendrycksTest-electrical_engineering": {
"acc": 0.3103448275862069,
"acc_stderr": 0.03855289616378948,
"acc_norm": 0.3103448275862069,
"acc_norm_stderr": 0.03855289616378948
},
"hendrycksTest-elementary_mathematics": {
"acc": 0.2619047619047619,
"acc_stderr": 0.022644212615525214,
"acc_norm": 0.2619047619047619,
"acc_norm_stderr": 0.022644212615525214
},
"hendrycksTest-formal_logic": {
"acc": 0.29365079365079366,
"acc_stderr": 0.040735243221471276,
"acc_norm": 0.29365079365079366,
"acc_norm_stderr": 0.040735243221471276
},
"hendrycksTest-global_facts": {
"acc": 0.33,
"acc_stderr": 0.047258156262526045,
"acc_norm": 0.33,
"acc_norm_stderr": 0.047258156262526045
},
"hendrycksTest-high_school_biology": {
"acc": 0.24193548387096775,
"acc_stderr": 0.024362599693031086,
"acc_norm": 0.24193548387096775,
"acc_norm_stderr": 0.024362599693031086
},
"hendrycksTest-high_school_chemistry": {
"acc": 0.15763546798029557,
"acc_stderr": 0.025639014131172404,
"acc_norm": 0.15763546798029557,
"acc_norm_stderr": 0.025639014131172404
},
"hendrycksTest-high_school_computer_science": {
"acc": 0.28,
"acc_stderr": 0.045126085985421276,
"acc_norm": 0.28,
"acc_norm_stderr": 0.045126085985421276
},
"hendrycksTest-high_school_european_history": {
"acc": 0.2909090909090909,
"acc_stderr": 0.03546563019624335,
"acc_norm": 0.2909090909090909,
"acc_norm_stderr": 0.03546563019624335
},
"hendrycksTest-high_school_geography": {
"acc": 0.2222222222222222,
"acc_stderr": 0.02962022787479047,
"acc_norm": 0.2222222222222222,
"acc_norm_stderr": 0.02962022787479047
},
"hendrycksTest-high_school_government_and_politics": {
"acc": 0.21243523316062177,
"acc_stderr": 0.029519282616817234,
"acc_norm": 0.21243523316062177,
"acc_norm_stderr": 0.029519282616817234
},
"hendrycksTest-high_school_macroeconomics": {
"acc": 0.24358974358974358,
"acc_stderr": 0.02176373368417393,
"acc_norm": 0.24358974358974358,
"acc_norm_stderr": 0.02176373368417393
},
"hendrycksTest-high_school_mathematics": {
"acc": 0.25555555555555554,
"acc_stderr": 0.02659393910184407,
"acc_norm": 0.25555555555555554,
"acc_norm_stderr": 0.02659393910184407
},
"hendrycksTest-high_school_microeconomics": {
"acc": 0.226890756302521,
"acc_stderr": 0.027205371538279496,
"acc_norm": 0.226890756302521,
"acc_norm_stderr": 0.027205371538279496
},
"hendrycksTest-high_school_physics": {
"acc": 0.2913907284768212,
"acc_stderr": 0.037101857261199946,
"acc_norm": 0.2913907284768212,
"acc_norm_stderr": 0.037101857261199946
},
"hendrycksTest-high_school_psychology": {
"acc": 0.22201834862385322,
"acc_stderr": 0.01781884956479661,
"acc_norm": 0.22201834862385322,
"acc_norm_stderr": 0.01781884956479661
},
"hendrycksTest-high_school_statistics": {
"acc": 0.2638888888888889,
"acc_stderr": 0.030058202704309846,
"acc_norm": 0.2638888888888889,
"acc_norm_stderr": 0.030058202704309846
},
"hendrycksTest-high_school_us_history": {
"acc": 0.29901960784313725,
"acc_stderr": 0.03213325717373616,
"acc_norm": 0.29901960784313725,
"acc_norm_stderr": 0.03213325717373616
},
"hendrycksTest-high_school_world_history": {
"acc": 0.29535864978902954,
"acc_stderr": 0.029696338713422886,
"acc_norm": 0.29535864978902954,
"acc_norm_stderr": 0.029696338713422886
},
"hendrycksTest-human_aging": {
"acc": 0.27802690582959644,
"acc_stderr": 0.030069584874494033,
"acc_norm": 0.27802690582959644,
"acc_norm_stderr": 0.030069584874494033
},
"hendrycksTest-human_sexuality": {
"acc": 0.29770992366412213,
"acc_stderr": 0.04010358942462203,
"acc_norm": 0.29770992366412213,
"acc_norm_stderr": 0.04010358942462203
},
"hendrycksTest-international_law": {
"acc": 0.2892561983471074,
"acc_stderr": 0.04139112727635464,
"acc_norm": 0.2892561983471074,
"acc_norm_stderr": 0.04139112727635464
},
"hendrycksTest-jurisprudence": {
"acc": 0.28703703703703703,
"acc_stderr": 0.043733130409147614,
"acc_norm": 0.28703703703703703,
"acc_norm_stderr": 0.043733130409147614
},
"hendrycksTest-logical_fallacies": {
"acc": 0.22699386503067484,
"acc_stderr": 0.032910995786157686,
"acc_norm": 0.22699386503067484,
"acc_norm_stderr": 0.032910995786157686
},
"hendrycksTest-machine_learning": {
"acc": 0.2767857142857143,
"acc_stderr": 0.04246624336697625,
"acc_norm": 0.2767857142857143,
"acc_norm_stderr": 0.04246624336697625
},
"hendrycksTest-management": {
"acc": 0.18446601941747573,
"acc_stderr": 0.03840423627288276,
"acc_norm": 0.18446601941747573,
"acc_norm_stderr": 0.03840423627288276
},
"hendrycksTest-marketing": {
"acc": 0.2948717948717949,
"acc_stderr": 0.029872577708891148,
"acc_norm": 0.2948717948717949,
"acc_norm_stderr": 0.029872577708891148
},
"hendrycksTest-medical_genetics": {
"acc": 0.37,
"acc_stderr": 0.04852365870939099,
"acc_norm": 0.37,
"acc_norm_stderr": 0.04852365870939099
},
"hendrycksTest-miscellaneous": {
"acc": 0.30779054916985954,
"acc_stderr": 0.016506045045155637,
"acc_norm": 0.30779054916985954,
"acc_norm_stderr": 0.016506045045155637
},
"hendrycksTest-moral_disputes": {
"acc": 0.30346820809248554,
"acc_stderr": 0.024752411960917202,
"acc_norm": 0.30346820809248554,
"acc_norm_stderr": 0.024752411960917202
},
"hendrycksTest-moral_scenarios": {
"acc": 0.24916201117318434,
"acc_stderr": 0.014465893829859923,
"acc_norm": 0.24916201117318434,
"acc_norm_stderr": 0.014465893829859923
},
"hendrycksTest-nutrition": {
"acc": 0.2549019607843137,
"acc_stderr": 0.02495418432487991,
"acc_norm": 0.2549019607843137,
"acc_norm_stderr": 0.02495418432487991
},
"hendrycksTest-philosophy": {
"acc": 0.34726688102893893,
"acc_stderr": 0.027040745502307336,
"acc_norm": 0.34726688102893893,
"acc_norm_stderr": 0.027040745502307336
},
"hendrycksTest-prehistory": {
"acc": 0.29012345679012347,
"acc_stderr": 0.025251173936495012,
"acc_norm": 0.29012345679012347,
"acc_norm_stderr": 0.025251173936495012
},
"hendrycksTest-professional_accounting": {
"acc": 0.30141843971631205,
"acc_stderr": 0.02737412888263115,
"acc_norm": 0.30141843971631205,
"acc_norm_stderr": 0.02737412888263115
},
"hendrycksTest-professional_law": {
"acc": 0.2816166883963494,
"acc_stderr": 0.011487783272786696,
"acc_norm": 0.2816166883963494,
"acc_norm_stderr": 0.011487783272786696
},
"hendrycksTest-professional_medicine": {
"acc": 0.1948529411764706,
"acc_stderr": 0.024060599423487428,
"acc_norm": 0.1948529411764706,
"acc_norm_stderr": 0.024060599423487428
},
"hendrycksTest-professional_psychology": {
"acc": 0.30718954248366015,
"acc_stderr": 0.018663359671463653,
"acc_norm": 0.30718954248366015,
"acc_norm_stderr": 0.018663359671463653
},
"hendrycksTest-public_relations": {
"acc": 0.2727272727272727,
"acc_stderr": 0.04265792110940588,
"acc_norm": 0.2727272727272727,
"acc_norm_stderr": 0.04265792110940588
},
"hendrycksTest-security_studies": {
"acc": 0.2,
"acc_stderr": 0.025607375986579153,
"acc_norm": 0.2,
"acc_norm_stderr": 0.025607375986579153
},
"hendrycksTest-sociology": {
"acc": 0.24875621890547264,
"acc_stderr": 0.030567675938916707,
"acc_norm": 0.24875621890547264,
"acc_norm_stderr": 0.030567675938916707
},
"hendrycksTest-us_foreign_policy": {
"acc": 0.32,
"acc_stderr": 0.04688261722621504,
"acc_norm": 0.32,
"acc_norm_stderr": 0.04688261722621504
},
"hendrycksTest-virology": {
"acc": 0.2710843373493976,
"acc_stderr": 0.03460579907553027,
"acc_norm": 0.2710843373493976,
"acc_norm_stderr": 0.03460579907553027
},
"hendrycksTest-world_religions": {
"acc": 0.3742690058479532,
"acc_stderr": 0.03711601185389481,
"acc_norm": 0.3742690058479532,
"acc_norm_stderr": 0.03711601185389481
}
},
"versions": {
"hendrycksTest-abstract_algebra": 1,
"hendrycksTest-anatomy": 1,
"hendrycksTest-astronomy": 1,
"hendrycksTest-business_ethics": 1,
"hendrycksTest-clinical_knowledge": 1,
"hendrycksTest-college_biology": 1,
"hendrycksTest-college_chemistry": 1,
"hendrycksTest-college_computer_science": 1,
"hendrycksTest-college_mathematics": 1,
"hendrycksTest-college_medicine": 1,
"hendrycksTest-college_physics": 1,
"hendrycksTest-computer_security": 1,
"hendrycksTest-conceptual_physics": 1,
"hendrycksTest-econometrics": 1,
"hendrycksTest-electrical_engineering": 1,
"hendrycksTest-elementary_mathematics": 1,
"hendrycksTest-formal_logic": 1,
"hendrycksTest-global_facts": 1,
"hendrycksTest-high_school_biology": 1,
"hendrycksTest-high_school_chemistry": 1,
"hendrycksTest-high_school_computer_science": 1,
"hendrycksTest-high_school_european_history": 1,
"hendrycksTest-high_school_geography": 1,
"hendrycksTest-high_school_government_and_politics": 1,
"hendrycksTest-high_school_macroeconomics": 1,
"hendrycksTest-high_school_mathematics": 1,
"hendrycksTest-high_school_microeconomics": 1,
"hendrycksTest-high_school_physics": 1,
"hendrycksTest-high_school_psychology": 1,
"hendrycksTest-high_school_statistics": 1,
"hendrycksTest-high_school_us_history": 1,
"hendrycksTest-high_school_world_history": 1,
"hendrycksTest-human_aging": 1,
"hendrycksTest-human_sexuality": 1,
"hendrycksTest-international_law": 1,
"hendrycksTest-jurisprudence": 1,
"hendrycksTest-logical_fallacies": 1,
"hendrycksTest-machine_learning": 1,
"hendrycksTest-management": 1,
"hendrycksTest-marketing": 1,
"hendrycksTest-medical_genetics": 1,
"hendrycksTest-miscellaneous": 1,
"hendrycksTest-moral_disputes": 1,
"hendrycksTest-moral_scenarios": 1,
"hendrycksTest-nutrition": 1,
"hendrycksTest-philosophy": 1,
"hendrycksTest-prehistory": 1,
"hendrycksTest-professional_accounting": 1,
"hendrycksTest-professional_law": 1,
"hendrycksTest-professional_medicine": 1,
"hendrycksTest-professional_psychology": 1,
"hendrycksTest-public_relations": 1,
"hendrycksTest-security_studies": 1,
"hendrycksTest-sociology": 1,
"hendrycksTest-us_foreign_policy": 1,
"hendrycksTest-virology": 1,
"hendrycksTest-world_religions": 1
},
"config": {
"model": "hf-causal",
"model_args": "pretrained=./workdir_7b_16mix/ckpt_359",
"num_fewshot": 5,
"batch_size": "1",
"batch_sizes": [],
"device": null,
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
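
Below is a minimal sketch of how one might aggregate these per-subtask scores into a single MMLU number, assuming the file above is saved as eval_mmlu.json and that an unweighted (macro) mean over the 57 hendrycksTest subtasks is the desired summary; lm-evaluation-harness does not write any aggregate into this file itself, so both the filename and the averaging scheme are assumptions.

import json

# Load the raw lm-evaluation-harness output (filename is an assumption).
with open("eval_mmlu.json") as f:
    data = json.load(f)

tasks = data["results"]

# Unweighted (macro) average of per-subtask accuracy. A question-weighted
# (micro) average would need the per-task question counts, which this
# file does not record.
macro_acc = sum(t["acc"] for t in tasks.values()) / len(tasks)
print(f"{len(tasks)} subtasks, macro-avg acc = {macro_acc:.4f}")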