task,metric,value,err,version
bigbench_analogies,multiple_choice_grade,0.27692307692307694,0.039398253452664705,0
bigbench_arithmetic_1_digit_addition,multiple_choice_grade,0.28,0.04512608598542127,0
bigbench_arithmetic_1_digit_division,multiple_choice_grade,0.5217391304347826,0.10649955403405122,0
bigbench_arithmetic_1_digit_multiplication,multiple_choice_grade,0.16,0.03684529491774709,0
bigbench_arithmetic_1_digit_subtraction,multiple_choice_grade,0.38,0.04878317312145632,0
bigbench_arithmetic_2_digit_addition,multiple_choice_grade,0.09,0.028762349126466153,0
bigbench_arithmetic_2_digit_division,multiple_choice_grade,0.37,0.04852365870939098,0
bigbench_arithmetic_2_digit_multiplication,multiple_choice_grade,0.02,0.01407052941362896,0
bigbench_arithmetic_2_digit_subtraction,multiple_choice_grade,0.22,0.0416333199893227,0
bigbench_arithmetic_3_digit_addition,multiple_choice_grade,0.04,0.01969463855669323,0
bigbench_arithmetic_3_digit_division,multiple_choice_grade,0.2,0.04020151261036846,0
bigbench_arithmetic_3_digit_multiplication,multiple_choice_grade,0.05,0.02190429135575908,0
bigbench_arithmetic_3_digit_subtraction,multiple_choice_grade,0.23,0.04229525846816506,0
bigbench_arithmetic_4_digit_addition,multiple_choice_grade,0.15,0.035887028128263714,0
bigbench_arithmetic_4_digit_division,multiple_choice_grade,0.15,0.03588702812826371,0
bigbench_arithmetic_4_digit_multiplication,multiple_choice_grade,0.07,0.025643239997624294,0
bigbench_arithmetic_4_digit_subtraction,multiple_choice_grade,0.13,0.03379976689896308,0
bigbench_arithmetic_5_digit_addition,multiple_choice_grade,0.06,0.023868325657594197,0
bigbench_arithmetic_5_digit_division,multiple_choice_grade,0.08,0.027265992434429086,0
bigbench_arithmetic_5_digit_multiplication,multiple_choice_grade,0.06,0.023868325657594183,0
bigbench_arithmetic_5_digit_subtraction,multiple_choice_grade,0.16,0.03684529491774708,0
bigbench_cause_and_effect_one_sentence,multiple_choice_grade,0.5098039215686274,0.07069708383262727,0
bigbench_cause_and_effect_one_sentence_no_prompt,multiple_choice_grade,0.6862745098039216,0.06562039423796669,0
bigbench_cause_and_effect_two_sentences,multiple_choice_grade,0.43137254901960786,0.07004145529212454,0
bigbench_emotions,multiple_choice_grade,0.2125,0.03244189290245472,0
bigbench_empirical_judgments,multiple_choice_grade,0.30303030303030304,0.04642339954443119,0
bigbench_general_knowledge,multiple_choice_grade,0.2571428571428571,0.052615698346701524,0
bigbench_hhh_alignment_harmless,multiple_choice_grade,0.41379310344827586,0.06523484847771846,0
bigbench_hhh_alignment_helpful,multiple_choice_grade,0.288135593220339,0.059467967781548406,0
bigbench_hhh_alignment_honest,multiple_choice_grade,0.3220338983050847,0.06135370413564329,0
bigbench_hhh_alignment_other,multiple_choice_grade,0.5813953488372093,0.07612251984976479,0
bigbench_intent_recognition,multiple_choice_grade,0.1416184971098266,0.013263591635637853,0
bigbench_misconceptions,multiple_choice_grade,0.4925373134328358,0.04335066912520505,0
bigbench_paraphrase,multiple_choice_grade,0.46,0.03533045720097816,0
bigbench_sentence_ambiguity,multiple_choice_grade,0.5,0.06509445549041193,0
bigbench_similarities_abstraction,multiple_choice_grade,0.5526315789473685,0.05741427428755636,0