{
  "name": "heavy",
  "uuid": "72557004-cff2-408c-b3e2-6a004215f09d",
  "model": "/net/nfs.cirrascale/allennlp/davidw/checkpoints/moe-release/jetmoe-8b/main",
  "creation_date": "2024_08_06-16_55_05",
  "eval_metrics": {
    "icl": {
      "mmlu_zeroshot": 0.4326617845840621,
      "hellaswag_zeroshot": 0.7868950366973877,
      "jeopardy": 0.45192375779151917,
      "triviaqa_sm_sub": 0.4986666738986969,
      "gsm8k_cot": 0.3025018870830536,
      "agi_eval_sat_math_cot": 0.11363636702299118,
      "aqua_cot": 0.05306122452020645,
      "svamp_cot": 0.4466666579246521,
      "bigbench_qa_wikidata": 0.7317061424255371,
      "arc_easy": 0.7356902360916138,
      "arc_challenge": 0.4530716836452484,
      "mmlu_fewshot": 0.4966564126182021,
      "bigbench_misconceptions": 0.5707762837409973,
      "copa": 0.8399999737739563,
      "siqa": 0.8162742853164673,
      "commonsense_qa": 0.7911548018455505,
      "piqa": 0.8084874749183655,
      "openbook_qa": 0.3959999978542328,
      "bigbench_novel_concepts": 0.53125,
      "bigbench_strange_stories": 0.6781609058380127,
      "bigbench_strategy_qa": 0.5801659822463989,
      "lambada_openai": 0.6976518630981445,
      "hellaswag": 0.793367862701416,
      "winograd": 0.8388278484344482,
      "winogrande": 0.6835043430328369,
      "bigbench_conlang_translation": 0.09146341681480408,
      "bigbench_language_identification": 0.336899995803833,
      "bigbench_conceptual_combinations": 0.553398072719574,
      "bigbench_elementary_math_qa": 0.2795073390007019,
      "bigbench_dyck_languages": 0.3190000057220459,
      "agi_eval_lsat_ar": 0.2869565188884735,
      "bigbench_cs_algorithms": 0.5098484754562378,
      "bigbench_logical_deduction": 0.3033333420753479,
      "bigbench_operators": 0.538095235824585,
      "bigbench_repeat_copy_logic": 0.21875,
      "simple_arithmetic_nospaces": 0.3070000112056732,
      "simple_arithmetic_withspaces": 0.3100000023841858,
      "math_qa": 0.28226616978645325,
      "logi_qa": 0.33794161677360535,
      "pubmed_qa_labeled": 0.6430000066757202,
      "squad": 0.2401135265827179,
      "agi_eval_lsat_rc": 0.46268656849861145,
      "agi_eval_lsat_lr": 0.3450980484485626,
      "coqa": 0.43893274664878845,
      "bigbench_understanding_fables": 0.41798943281173706,
      "boolq": 0.8223241567611694,
      "agi_eval_sat_en": 0.6213592290878296,
      "winogender_mc_female": 0.5,
      "winogender_mc_male": 0.5833333134651184,
      "enterprise_pii_classification": 0.6017673015594482,
      "bbq": 0.5354551727121527,
      "gpqa_main": 0.2566964328289032,
      "gpqa_diamond": 0.2222222238779068
    }
  },
  "missing tasks": "[]",
  "aggregated_task_categories_centered": {
    "commonsense reasoning": 0.4857313116330063,
    "language understanding": 0.4936472507617851,
    "reading comprehension": 0.3914801840458,
    "safety": 0.11027789386835968,
    "symbolic problem solving": 0.24344731914942666,
    "world knowledge": 0.3286514173125663
  },
  "aggregated_centered_results": 0.3461548208936837,
  "aggregated_results": 0.4885697707456261,
  "rw_small": 0.7202475716670355,
  "rw_small_centered": 0.5356083167226692,
  "95%_CI_above": 0.5773314228886133,
  "95%_CI_above_centered": 0.44323355322011243,
  "99%_CI_above": 0.5924812607143236,
  "99%_CI_above_centered": 0.48989709603090853,
  "low_variance_datasets": 0.5781455310908231,
  "low_variance_datasets_centered": 0.4859825293638729
}