{
  "config_general": {
    "lighteval_sha": "1.4",
    "num_few_shot_default": null,
    "num_fewshot_seeds": null,
    "override_batch_size": null,
    "max_samples": null,
    "job_id": -1,
    "start_time": null,
    "end_time": "2024-04-28-13-06-41",
    "total_evaluation_time_secondes": "",
    "model_name": "Qwen/Qwen1.5-7B-Chat-GPTQ-Int4",
    "model_sha": "",
    "model_dtype": "4bit",
    "model_size": 5.86,
    "model_params": 6.54,
    "quant_type": "GPTQ",
    "precision": "4bit"
  },
  "results": {
    "harness|piqa|0": {
      "acc,none": 0.735038084874864,
      "acc_stderr,none": 0.01029655799331606,
      "acc_norm,none": 0.7453754080522307,
      "acc_norm_stderr,none": 0.010164432237060476,
      "alias": "piqa"
    },
    "harness|openbookqa|0": {
      "acc,none": 0.332,
      "acc_stderr,none": 0.021081766571222852,
      "acc_norm,none": 0.416,
      "acc_norm_stderr,none": 0.022064943313928876,
      "alias": "openbookqa"
    },
    "harness|arc:easy|0": {
      "acc,none": 0.6687710437710438,
      "acc_stderr,none": 0.009657641311350919,
      "acc_norm,none": 0.6056397306397306,
      "acc_norm_stderr,none": 0.010028176038393004,
      "alias": "arc_easy"
    },
    "harness|mmlu|0": {
      "acc,none": 0.5904429568437545,
      "acc_stderr,none": 0.003957317221865859,
      "alias": "mmlu"
    },
    "harness|mmlu_humanities|0": {
      "alias": " - humanities",
      "acc,none": 0.5377258235919234,
      "acc_stderr,none": 0.0068626131961731455
    },
    "harness|mmlu_formal_logic|0": {
      "alias": " - formal_logic",
      "acc,none": 0.4523809523809524,
      "acc_stderr,none": 0.044518079590553275
    },
    "harness|mmlu_high_school_european_history|0": {
      "alias": " - high_school_european_history",
      "acc,none": 0.7696969696969697,
      "acc_stderr,none": 0.0328766675860349
    },
    "harness|mmlu_high_school_us_history|0": {
      "alias": " - high_school_us_history",
      "acc,none": 0.7549019607843137,
      "acc_stderr,none": 0.03019028245350194
    },
    "harness|mmlu_high_school_world_history|0": {
      "alias": " - high_school_world_history",
      "acc,none": 0.7510548523206751,
      "acc_stderr,none": 0.028146970599422644
    },
    "harness|mmlu_international_law|0": {
      "alias": " - international_law",
      "acc,none": 0.768595041322314,
      "acc_stderr,none": 0.038498560987940904
    },
    "harness|mmlu_jurisprudence|0": {
      "alias": " - jurisprudence",
      "acc,none": 0.7685185185185185,
      "acc_stderr,none": 0.04077494709252627
    },
    "harness|mmlu_logical_fallacies|0": {
      "alias": " - logical_fallacies",
      "acc,none": 0.656441717791411,
      "acc_stderr,none": 0.03731133519673893
    },
    "harness|mmlu_moral_disputes|0": {
      "alias": " - moral_disputes",
      "acc,none": 0.653179190751445,
      "acc_stderr,none": 0.02562472399403046
    },
    "harness|mmlu_moral_scenarios|0": {
      "alias": " - moral_scenarios",
      "acc,none": 0.29720670391061454,
      "acc_stderr,none": 0.015285313353641592
    },
    "harness|mmlu_philosophy|0": {
      "alias": " - philosophy",
      "acc,none": 0.6591639871382636,
      "acc_stderr,none": 0.026920841260776155
    },
    "harness|mmlu_prehistory|0": {
      "alias": " - prehistory",
      "acc,none": 0.6481481481481481,
      "acc_stderr,none": 0.026571483480719967
    },
    "harness|mmlu_professional_law|0": {
      "alias": " - professional_law",
      "acc,none": 0.4517601043024772,
      "acc_stderr,none": 0.012710662233660247
    },
    "harness|mmlu_world_religions|0": {
      "alias": " - world_religions",
      "acc,none": 0.7660818713450293,
      "acc_stderr,none": 0.03246721765117826
    },
    "harness|mmlu_other|0": {
      "alias": " - other",
      "acc,none": 0.6501448342452526,
      "acc_stderr,none": 0.008309193510443607
    },
    "harness|mmlu_business_ethics|0": {
      "alias": " - business_ethics",
      "acc,none": 0.65,
      "acc_stderr,none": 0.0479372485441102
    },
    "harness|mmlu_clinical_knowledge|0": {
      "alias": " - clinical_knowledge",
      "acc,none": 0.6830188679245283,
      "acc_stderr,none": 0.028637235639800904
    },
    "harness|mmlu_college_medicine|0": {
      "alias": " - college_medicine",
      "acc,none": 0.5606936416184971,
      "acc_stderr,none": 0.037842719328874674
    },
    "harness|mmlu_global_facts|0": {
      "alias": " - global_facts",
      "acc,none": 0.4,
      "acc_stderr,none": 0.049236596391733084
    },
    "harness|mmlu_human_aging|0": {
      "alias": " - human_aging",
      "acc,none": 0.6098654708520179,
      "acc_stderr,none": 0.03273766725459157
    },
    "harness|mmlu_management|0": {
      "alias": " - management",
      "acc,none": 0.7281553398058253,
      "acc_stderr,none": 0.044052680241409216
    },
    "harness|mmlu_marketing|0": {
      "alias": " - marketing",
      "acc,none": 0.8333333333333334,
      "acc_stderr,none": 0.024414947304543674
    },
    "harness|mmlu_medical_genetics|0": {
      "alias": " - medical_genetics",
      "acc,none": 0.69,
      "acc_stderr,none": 0.04648231987117316
    },
    "harness|mmlu_miscellaneous|0": {
      "alias": " - miscellaneous",
      "acc,none": 0.7509578544061303,
      "acc_stderr,none": 0.015464676163395972
    },
    "harness|mmlu_nutrition|0": {
      "alias": " - nutrition",
      "acc,none": 0.6633986928104575,
      "acc_stderr,none": 0.02705797462449438
    },
    "harness|mmlu_professional_accounting|0": {
      "alias": " - professional_accounting",
      "acc,none": 0.4432624113475177,
      "acc_stderr,none": 0.029634838473766006
    },
    "harness|mmlu_professional_medicine|0": {
      "alias": " - professional_medicine",
      "acc,none": 0.6213235294117647,
      "acc_stderr,none": 0.02946513363977613
    },
    "harness|mmlu_virology|0": {
      "alias": " - virology",
      "acc,none": 0.463855421686747,
      "acc_stderr,none": 0.03882310850890594
    },
    "harness|mmlu_social_sciences|0": {
      "alias": " - social_sciences",
      "acc,none": 0.6821579460513487,
      "acc_stderr,none": 0.008189081115990158
    },
    "harness|mmlu_econometrics|0": {
      "alias": " - econometrics",
      "acc,none": 0.49122807017543857,
      "acc_stderr,none": 0.04702880432049615
    },
    "harness|mmlu_high_school_geography|0": {
      "alias": " - high_school_geography",
      "acc,none": 0.7777777777777778,
      "acc_stderr,none": 0.02962022787479048
    },
    "harness|mmlu_high_school_government_and_politics|0": {
      "alias": " - high_school_government_and_politics",
      "acc,none": 0.7772020725388601,
      "acc_stderr,none": 0.030031147977641538
    },
    "harness|mmlu_high_school_macroeconomics|0": {
      "alias": " - high_school_macroeconomics",
      "acc,none": 0.5974358974358974,
      "acc_stderr,none": 0.024864995159767752
    },
    "harness|mmlu_high_school_microeconomics|0": {
      "alias": " - high_school_microeconomics",
      "acc,none": 0.6428571428571429,
      "acc_stderr,none": 0.031124619309328177
    },
    "harness|mmlu_high_school_psychology|0": {
      "alias": " - high_school_psychology",
      "acc,none": 0.8128440366972477,
      "acc_stderr,none": 0.016722684526200148
    },
    "harness|mmlu_human_sexuality|0": {
      "alias": " - human_sexuality",
      "acc,none": 0.6870229007633588,
      "acc_stderr,none": 0.04066962905677697
    },
    "harness|mmlu_professional_psychology|0": {
      "alias": " - professional_psychology",
      "acc,none": 0.553921568627451,
      "acc_stderr,none": 0.020109864547181357
    },
    "harness|mmlu_public_relations|0": {
      "alias": " - public_relations",
      "acc,none": 0.6272727272727273,
      "acc_stderr,none": 0.04631381319425464
    },
    "harness|mmlu_security_studies|0": {
      "alias": " - security_studies",
      "acc,none": 0.7020408163265306,
      "acc_stderr,none": 0.02927956741106567
    },
    "harness|mmlu_sociology|0": {
      "alias": " - sociology",
      "acc,none": 0.7910447761194029,
      "acc_stderr,none": 0.028748298931728655
    },
    "harness|mmlu_us_foreign_policy|0": {
      "alias": " - us_foreign_policy",
      "acc,none": 0.81,
      "acc_stderr,none": 0.039427724440366234
    },
    "harness|mmlu_stem|0": {
      "alias": " - stem",
      "acc,none": 0.5207738661592134,
      "acc_stderr,none": 0.008650288386489482
    },
    "harness|mmlu_abstract_algebra|0": {
      "alias": " - abstract_algebra",
      "acc,none": 0.41,
      "acc_stderr,none": 0.04943110704237101
    },
    "harness|mmlu_anatomy|0": {
      "alias": " - anatomy",
      "acc,none": 0.5777777777777777,
      "acc_stderr,none": 0.04266763404099582
    },
    "harness|mmlu_astronomy|0": {
      "alias": " - astronomy",
      "acc,none": 0.631578947368421,
      "acc_stderr,none": 0.039255233810529325
    },
    "harness|mmlu_college_biology|0": {
      "alias": " - college_biology",
      "acc,none": 0.6527777777777778,
      "acc_stderr,none": 0.039812405437178615
    },
    "harness|mmlu_college_chemistry|0": {
      "alias": " - college_chemistry",
      "acc,none": 0.42,
      "acc_stderr,none": 0.049604496374885836
    },
    "harness|mmlu_college_computer_science|0": {
      "alias": " - college_computer_science",
      "acc,none": 0.58,
      "acc_stderr,none": 0.049604496374885836
    },
    "harness|mmlu_college_mathematics|0": {
      "alias": " - college_mathematics",
      "acc,none": 0.33,
      "acc_stderr,none": 0.04725815626252603
    },
    "harness|mmlu_college_physics|0": {
      "alias": " - college_physics",
      "acc,none": 0.4019607843137255,
      "acc_stderr,none": 0.04878608714466997
    },
    "harness|mmlu_computer_security|0": {
      "alias": " - computer_security",
      "acc,none": 0.74,
      "acc_stderr,none": 0.0440844002276808
    },
    "harness|mmlu_conceptual_physics|0": {
      "alias": " - conceptual_physics",
      "acc,none": 0.5361702127659574,
      "acc_stderr,none": 0.03260038511835771
    },
    "harness|mmlu_electrical_engineering|0": {
      "alias": " - electrical_engineering",
      "acc,none": 0.5310344827586206,
      "acc_stderr,none": 0.04158632762097828
    },
    "harness|mmlu_elementary_mathematics|0": {
      "alias": " - elementary_mathematics",
      "acc,none": 0.4603174603174603,
      "acc_stderr,none": 0.025670080636909193
    },
    "harness|mmlu_high_school_biology|0": {
      "alias": " - high_school_biology",
      "acc,none": 0.7193548387096774,
      "acc_stderr,none": 0.0255606047210229
    },
    "harness|mmlu_high_school_chemistry|0": {
      "alias": " - high_school_chemistry",
      "acc,none": 0.5320197044334976,
      "acc_stderr,none": 0.03510766597959217
    },
    "harness|mmlu_high_school_computer_science|0": {
      "alias": " - high_school_computer_science",
      "acc,none": 0.69,
      "acc_stderr,none": 0.04648231987117316
    },
    "harness|mmlu_high_school_mathematics|0": {
      "alias": " - high_school_mathematics",
      "acc,none": 0.34444444444444444,
      "acc_stderr,none": 0.02897264888484427
    },
    "harness|mmlu_high_school_physics|0": {
      "alias": " - high_school_physics",
      "acc,none": 0.37748344370860926,
      "acc_stderr,none": 0.03958027231121569
    },
    "harness|mmlu_high_school_statistics|0": {
      "alias": " - high_school_statistics",
      "acc,none": 0.5324074074074074,
      "acc_stderr,none": 0.03402801581358966
    },
    "harness|mmlu_machine_learning|0": {
      "alias": " - machine_learning",
      "acc,none": 0.38392857142857145,
      "acc_stderr,none": 0.04616143075028546
    },
    "harness|winogrande|0": {
      "acc,none": 0.6432517758484609,
      "acc_stderr,none": 0.01346339395802872,
      "alias": "winogrande"
    },
    "harness|truthfulqa:mc2|0": {
      "acc,none": 0.541346036842384,
      "acc_stderr,none": 0.01584861213215754,
      "alias": "truthfulqa_mc2"
    },
    "harness|arc:challenge|0": {
      "acc,none": 0.41638225255972694,
      "acc_stderr,none": 0.01440561827943617,
      "acc_norm,none": 0.439419795221843,
      "acc_norm_stderr,none": 0.014503747823580123,
      "alias": "arc_challenge"
    },
    "harness|boolq|0": {
      "acc,none": 0.8403669724770643,
      "acc_stderr,none": 0.006406021659710515,
      "alias": "boolq"
    },
    "harness|lambada:openai|0": {
      "perplexity,none": 6.125395895964018,
      "perplexity_stderr,none": 0.21476426826907466,
      "acc,none": 0.5940228992819717,
      "acc_stderr,none": 0.006841706431619957,
      "alias": "lambada_openai"
    },
    "harness|truthfulqa:mc1|0": {
      "acc,none": 0.3806609547123623,
      "acc_stderr,none": 0.016997627871907915,
      "alias": "truthfulqa_mc1"
    },
    "harness|hellaswag|0": {
      "acc,none": 0.578868751244772,
      "acc_stderr,none": 0.004927314729433566,
      "acc_norm,none": 0.7615016928898626,
      "acc_norm_stderr,none": 0.0042529434040930484,
      "alias": "hellaswag"
    }
  },
  "task_info": {
    "model": "Qwen/Qwen1.5-7B-Chat-GPTQ-Int4",
    "revision": "main",
    "private": false,
    "params": 8.456,
    "architectures": "Qwen2ForCausalLM",
    "quant_type": "GPTQ",
    "precision": "4bit",
    "model_params": 16.912,
    "model_size": 8.456,
    "weight_dtype": "int4",
    "compute_dtype": "float16",
    "gguf_ftype": "*Q4_0.gguf",
    "hardware": "gpu",
    "status": "Pending",
    "submitted_time": "2024-04-27T08:09:37Z",
    "model_type": "quantization",
    "job_id": -1,
    "job_start_time": null,
    "scripts": "ITREX"
  },
  "quantization_config": {
    "batch_size": 1,
    "bits": 4,
    "block_name_to_quantize": null,
    "cache_block_outputs": true,
    "damp_percent": 0.01,
    "dataset": null,
    "desc_act": false,
    "exllama_config": {
      "version": 1
    },
    "group_size": 128,
    "max_input_length": null,
    "model_seqlen": null,
    "module_name_preceding_first_block": null,
    "modules_in_block_to_quantize": null,
    "pad_token_id": null,
    "quant_method": "gptq",
    "sym": true,
    "tokenizer": null,
    "true_sequential": true,
    "use_cuda_fp16": false,
    "use_exllama": true
  },
  "versions": {
    "harness|piqa|0": 1.0,
    "harness|openbookqa|0": 1.0,
    "harness|arc:easy|0": 1.0,
    "harness|mmlu|0": null,
    "harness|mmlu_humanities|0": null,
    "harness|mmlu_formal_logic|0": 0.0,
    "harness|mmlu_high_school_european_history|0": 0.0,
    "harness|mmlu_high_school_us_history|0": 0.0,
    "harness|mmlu_high_school_world_history|0": 0.0,
    "harness|mmlu_international_law|0": 0.0,
    "harness|mmlu_jurisprudence|0": 0.0,
    "harness|mmlu_logical_fallacies|0": 0.0,
    "harness|mmlu_moral_disputes|0": 0.0,
    "harness|mmlu_moral_scenarios|0": 0.0,
    "harness|mmlu_philosophy|0": 0.0,
    "harness|mmlu_prehistory|0": 0.0,
    "harness|mmlu_professional_law|0": 0.0,
    "harness|mmlu_world_religions|0": 0.0,
    "harness|mmlu_other|0": null,
    "harness|mmlu_business_ethics|0": 0.0,
    "harness|mmlu_clinical_knowledge|0": 0.0,
    "harness|mmlu_college_medicine|0": 0.0,
    "harness|mmlu_global_facts|0": 0.0,
    "harness|mmlu_human_aging|0": 0.0,
    "harness|mmlu_management|0": 0.0,
    "harness|mmlu_marketing|0": 0.0,
    "harness|mmlu_medical_genetics|0": 0.0,
    "harness|mmlu_miscellaneous|0": 0.0,
    "harness|mmlu_nutrition|0": 0.0,
    "harness|mmlu_professional_accounting|0": 0.0,
    "harness|mmlu_professional_medicine|0": 0.0,
    "harness|mmlu_virology|0": 0.0,
    "harness|mmlu_social_sciences|0": null,
    "harness|mmlu_econometrics|0": 0.0,
    "harness|mmlu_high_school_geography|0": 0.0,
    "harness|mmlu_high_school_government_and_politics|0": 0.0,
    "harness|mmlu_high_school_macroeconomics|0": 0.0,
    "harness|mmlu_high_school_microeconomics|0": 0.0,
    "harness|mmlu_high_school_psychology|0": 0.0,
    "harness|mmlu_human_sexuality|0": 0.0,
    "harness|mmlu_professional_psychology|0": 0.0,
    "harness|mmlu_public_relations|0": 0.0,
    "harness|mmlu_security_studies|0": 0.0,
    "harness|mmlu_sociology|0": 0.0,
    "harness|mmlu_us_foreign_policy|0": 0.0,
    "harness|mmlu_stem|0": null,
    "harness|mmlu_abstract_algebra|0": 0.0,
    "harness|mmlu_anatomy|0": 0.0,
    "harness|mmlu_astronomy|0": 0.0,
    "harness|mmlu_college_biology|0": 0.0,
    "harness|mmlu_college_chemistry|0": 0.0,
    "harness|mmlu_college_computer_science|0": 0.0,
    "harness|mmlu_college_mathematics|0": 0.0,
    "harness|mmlu_college_physics|0": 0.0,
    "harness|mmlu_computer_security|0": 0.0,
    "harness|mmlu_conceptual_physics|0": 0.0,
    "harness|mmlu_electrical_engineering|0": 0.0,
    "harness|mmlu_elementary_mathematics|0": 0.0,
    "harness|mmlu_high_school_biology|0": 0.0,
    "harness|mmlu_high_school_chemistry|0": 0.0,
    "harness|mmlu_high_school_computer_science|0": 0.0,
    "harness|mmlu_high_school_mathematics|0": 0.0,
    "harness|mmlu_high_school_physics|0": 0.0,
    "harness|mmlu_high_school_statistics|0": 0.0,
    "harness|mmlu_machine_learning|0": 0.0,
    "harness|winogrande|0": 1.0,
    "harness|truthfulqa:mc2|0": 2.0,
    "harness|arc:challenge|0": 1.0,
    "harness|boolq|0": 2.0,
    "harness|lambada:openai|0": 1.0,
    "harness|truthfulqa:mc1|0": 2.0,
    "harness|hellaswag|0": 1.0
  },
  "n-shot": {
    "arc_challenge": 0,
    "arc_easy": 0,
    "boolq": 0,
    "hellaswag": 0,
    "lambada_openai": 0,
    "mmlu": 0,
    "mmlu_abstract_algebra": 0,
    "mmlu_anatomy": 0,
    "mmlu_astronomy": 0,
    "mmlu_business_ethics": 0,
    "mmlu_clinical_knowledge": 0,
    "mmlu_college_biology": 0,
    "mmlu_college_chemistry": 0,
    "mmlu_college_computer_science": 0,
    "mmlu_college_mathematics": 0,
    "mmlu_college_medicine": 0,
    "mmlu_college_physics": 0,
    "mmlu_computer_security": 0,
    "mmlu_conceptual_physics": 0,
    "mmlu_econometrics": 0,
    "mmlu_electrical_engineering": 0,
    "mmlu_elementary_mathematics": 0,
    "mmlu_formal_logic": 0,
    "mmlu_global_facts": 0,
    "mmlu_high_school_biology": 0,
    "mmlu_high_school_chemistry": 0,
    "mmlu_high_school_computer_science": 0,
    "mmlu_high_school_european_history": 0,
    "mmlu_high_school_geography": 0,
    "mmlu_high_school_government_and_politics": 0,
    "mmlu_high_school_macroeconomics": 0,
    "mmlu_high_school_mathematics": 0,
    "mmlu_high_school_microeconomics": 0,
    "mmlu_high_school_physics": 0,
    "mmlu_high_school_psychology": 0,
    "mmlu_high_school_statistics": 0,
    "mmlu_high_school_us_history": 0,
    "mmlu_high_school_world_history": 0,
    "mmlu_human_aging": 0,
    "mmlu_human_sexuality": 0,
    "mmlu_humanities": 0,
    "mmlu_international_law": 0,
    "mmlu_jurisprudence": 0,
    "mmlu_logical_fallacies": 0,
    "mmlu_machine_learning": 0,
    "mmlu_management": 0,
    "mmlu_marketing": 0,
    "mmlu_medical_genetics": 0,
    "mmlu_miscellaneous": 0,
    "mmlu_moral_disputes": 0,
    "mmlu_moral_scenarios": 0,
    "mmlu_nutrition": 0,
    "mmlu_other": 0,
    "mmlu_philosophy": 0,
    "mmlu_prehistory": 0,
    "mmlu_professional_accounting": 0,
    "mmlu_professional_law": 0,
    "mmlu_professional_medicine": 0,
    "mmlu_professional_psychology": 0,
    "mmlu_public_relations": 0,
    "mmlu_security_studies": 0,
    "mmlu_social_sciences": 0,
    "mmlu_sociology": 0,
    "mmlu_stem": 0,
    "mmlu_us_foreign_policy": 0,
    "mmlu_virology": 0,
    "mmlu_world_religions": 0,
    "openbookqa": 0,
    "piqa": 0,
    "truthfulqa_mc1": 0,
    "truthfulqa_mc2": 0,
    "winogrande": 0
  },
  "date": 1714275925.6022973,
  "config": {
    "model": "hf",
    "model_args": "pretrained=Qwen/Qwen1.5-7B-Chat-GPTQ-Int4,dtype=float16,_commit_hash=main",
    "batch_size": 2,
    "batch_sizes": [],
    "device": "cuda",
    "use_cache": null,
    "limit": null,
    "bootstrap_iters": 100000,
    "gen_kwargs": null
  }
}