olmoe-dclm-metrics / heavy-OLMoE-7B-A1B-step1220000-tokens5117B.json
{
  "name": "heavy",
  "uuid": "70f3f719-28b5-46f5-b58e-bd89765d1e40",
  "model": "/net/nfs.cirrascale/allennlp/davidw/checkpoints/moe-release/OLMoE-7B-A1B/step1220000-tokens5117B",
  "creation_date": "2024_08_06-17_05_21",
  "eval_metrics": {
    "icl": {
      "mmlu_zeroshot": 0.4273025756865217,
      "hellaswag_zeroshot": 0.7597092390060425,
      "jeopardy": 0.4871806979179382,
      "triviaqa_sm_sub": 0.5303333401679993,
      "gsm8k_cot": 0.07354056090116501,
      "agi_eval_sat_math_cot": 0.040909089148044586,
      "aqua_cot": 0.02857142873108387,
      "svamp_cot": 0.2800000011920929,
      "bigbench_qa_wikidata": 0.6884503960609436,
      "arc_easy": 0.7845118045806885,
      "arc_challenge": 0.5341296792030334,
      "mmlu_fewshot": 0.5194499257363772,
      "bigbench_misconceptions": 0.5525113940238953,
      "copa": 0.800000011920929,
      "siqa": 0.707267165184021,
      "commonsense_qa": 0.6699426770210266,
      "piqa": 0.8117519021034241,
      "openbook_qa": 0.4399999976158142,
      "bigbench_novel_concepts": 0.625,
      "bigbench_strange_stories": 0.6839080452919006,
      "bigbench_strategy_qa": 0.5810397267341614,
      "lambada_openai": 0.7219095826148987,
      "hellaswag": 0.7750447988510132,
      "winograd": 0.831501841545105,
      "winogrande": 0.6850828528404236,
      "bigbench_conlang_translation": 0.060975611209869385,
      "bigbench_language_identification": 0.34049999713897705,
      "bigbench_conceptual_combinations": 0.5048543810844421,
      "bigbench_elementary_math_qa": 0.27025681734085083,
      "bigbench_dyck_languages": 0.1589999943971634,
      "agi_eval_lsat_ar": 0.2652173936367035,
      "bigbench_cs_algorithms": 0.5015151500701904,
      "bigbench_logical_deduction": 0.25333333015441895,
      "bigbench_operators": 0.34285715222358704,
      "bigbench_repeat_copy_logic": 0.15625,
      "simple_arithmetic_nospaces": 0.1809999942779541,
      "simple_arithmetic_withspaces": 0.20600000023841858,
      "math_qa": 0.2708682417869568,
      "logi_qa": 0.3425499200820923,
      "pubmed_qa_labeled": 0.4659999907016754,
      "squad": 0.5244086980819702,
      "agi_eval_lsat_rc": 0.43656715750694275,
      "agi_eval_lsat_lr": 0.386274516582489,
      "coqa": 0.4436928331851959,
      "bigbench_understanding_fables": 0.4444444477558136,
      "boolq": 0.7281345725059509,
      "agi_eval_sat_en": 0.5485436916351318,
      "winogender_mc_female": 0.46666666865348816,
      "winogender_mc_male": 0.5833333134651184,
      "enterprise_pii_classification": 0.5372606515884399,
      "bbq": 0.48323566534302453,
      "gpqa_main": 0.2232142835855484,
      "gpqa_diamond": 0.21212121844291687
    }
  },
  "missing tasks": "[]",
  "aggregated_task_categories_centered": {
    "commonsense reasoning": 0.46081640452671535,
    "language understanding": 0.4762512398893946,
    "reading comprehension": 0.36885401178478144,
    "safety": 0.03524814952503552,
    "symbolic problem solving": 0.15957477013304083,
    "world knowledge": 0.34119598718414534
  },
  "aggregated_centered_results": 0.30931975984045923,
  "aggregated_results": 0.4599646118255447,
  "rw_small": 0.7152613600095113,
  "rw_small_centered": 0.5043107818441781,
  "95%_CI_above": 0.5589368432469957,
  "95%_CI_above_centered": 0.41426754302993024,
  "99%_CI_above": 0.5701544312031374,
  "99%_CI_above_centered": 0.45654352726393493,
  "low_variance_datasets": 0.5659450578418646,
  "low_variance_datasets_centered": 0.46520260353587645
}
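
A minimal sketch of how one might read this record with Python's standard json module and print the aggregate scores; the filename is taken from the header above, and the local path is an assumption about where the file has been downloaded.

# Minimal sketch: load the metrics record and print the aggregate and
# per-category scores. Adjust the path to wherever the file is stored locally.
import json

with open("heavy-OLMoE-7B-A1B-step1220000-tokens5117B.json") as f:
    record = json.load(f)

print(record["model"])
print("aggregated_results:", record["aggregated_results"])
for category, score in record["aggregated_task_categories_centered"].items():
    print(f"{category}: {score:.4f}")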