olmoe-dclm-metrics / heavy-OLMoE-7B-A1B-main.json
{
  "name": "heavy",
  "uuid": "83bb37f6-ddd0-43d8-894e-446d345ff7c1",
  "model": "/net/nfs.cirrascale/allennlp/davidw/checkpoints/moe-release/OLMoE-7B-A1B/main",
  "creation_date": "2024_08_06-17_10_22",
  "eval_metrics": {
    "icl": {
      "mmlu_zeroshot": 0.4163723833728255,
      "hellaswag_zeroshot": 0.7599083781242371,
      "jeopardy": 0.4884525716304779,
      "triviaqa_sm_sub": 0.5419999957084656,
      "gsm8k_cot": 0.06444276124238968,
      "agi_eval_sat_math_cot": 0.05454545468091965,
      "aqua_cot": 0.02448979578912258,
      "svamp_cot": 0.30000001192092896,
      "bigbench_qa_wikidata": 0.6824467182159424,
      "arc_easy": 0.7714646458625793,
      "arc_challenge": 0.5332764387130737,
      "mmlu_fewshot": 0.5218371202548345,
      "bigbench_misconceptions": 0.5981734991073608,
      "copa": 0.8100000023841858,
      "siqa": 0.7154554724693298,
      "commonsense_qa": 0.6723996996879578,
      "piqa": 0.8133841156959534,
      "openbook_qa": 0.414000004529953,
      "bigbench_novel_concepts": 0.625,
      "bigbench_strange_stories": 0.6666666865348816,
      "bigbench_strategy_qa": 0.5622542500495911,
      "lambada_openai": 0.7269551753997803,
      "hellaswag": 0.776239812374115,
      "winograd": 0.8278388381004333,
      "winogrande": 0.6803472638130188,
      "bigbench_conlang_translation": 0.03658536449074745,
      "bigbench_language_identification": 0.3192000091075897,
      "bigbench_conceptual_combinations": 0.5145630836486816,
      "bigbench_elementary_math_qa": 0.26218554377555847,
      "bigbench_dyck_languages": 0.19300000369548798,
      "agi_eval_lsat_ar": 0.24347825348377228,
      "bigbench_cs_algorithms": 0.47121211886405945,
      "bigbench_logical_deduction": 0.26600000262260437,
      "bigbench_operators": 0.3619047701358795,
      "bigbench_repeat_copy_logic": 0.15625,
      "simple_arithmetic_nospaces": 0.17599999904632568,
      "simple_arithmetic_withspaces": 0.19499999284744263,
      "math_qa": 0.26449882984161377,
      "logi_qa": 0.3486943244934082,
      "pubmed_qa_labeled": 0.5609999895095825,
      "squad": 0.5288552641868591,
      "agi_eval_lsat_rc": 0.4738805890083313,
      "agi_eval_lsat_lr": 0.4019607901573181,
      "coqa": 0.43692848086357117,
      "bigbench_understanding_fables": 0.47089946269989014,
      "boolq": 0.7333333492279053,
      "agi_eval_sat_en": 0.553398072719574,
      "winogender_mc_female": 0.5,
      "winogender_mc_male": 0.550000011920929,
      "enterprise_pii_classification": 0.523122251033783,
      "bbq": 0.4975668625398116,
      "gpqa_main": 0.2477678507566452,
      "gpqa_diamond": 0.2222222238779068
    }
  },
  "missing tasks": "[]",
  "aggregated_task_categories_centered": {
    "commonsense reasoning": 0.4523028646368181,
    "language understanding": 0.4706549446387432,
    "reading comprehension": 0.39537327579761805,
    "safety": 0.03534456274726175,
    "symbolic problem solving": 0.15998990932590068,
    "world knowledge": 0.3526500500886761
  },
  "aggregated_centered_results": 0.31347879381791993,
  "aggregated_results": 0.4633482752871252,
  "rw_small": 0.7147216796875,
  "rw_small_centered": 0.5053010230873064,
  "95%_CI_above": 0.5612098809380036,
  "95%_CI_above_centered": 0.41586115306825494,
  "99%_CI_above": 0.5724260063275047,
  "99%_CI_above_centered": 0.45794212339228574,
  "low_variance_datasets": 0.5636761779134924,
  "low_variance_datasets_centered": 0.462566793680692
}
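
For convenience, a minimal Python sketch for reading this metrics file follows; the local filename heavy-OLMoE-7B-A1B-main.json is an assumption (adjust to wherever the file is saved), and the key names match the JSON above.

import json

# Assumed local copy of this file; adjust the path as needed.
PATH = "heavy-OLMoE-7B-A1B-main.json"

with open(PATH) as f:
    metrics = json.load(f)

# Per-task in-context-learning scores live under eval_metrics -> icl.
icl = metrics["eval_metrics"]["icl"]
print(f"mmlu_fewshot: {icl['mmlu_fewshot']:.4f}")
print(f"hellaswag:    {icl['hellaswag']:.4f}")

# Top-level aggregates.
print(f"aggregated_results:          {metrics['aggregated_results']:.4f}")
print(f"aggregated_centered_results: {metrics['aggregated_centered_results']:.4f}")

# Centered averages per task category.
for category, score in metrics["aggregated_task_categories_centered"].items():
    print(f"{category:>26}: {score:.4f}")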