Add results for 2023-11-22 14:04:32
wons/mistral-7B-test-v0.1/result_2023-11-22 14:04:32.json
ADDED
@@ -0,0 +1,81 @@
+{
+    "results": {
+        "ko_eqbench": {
+            "alias": " - ko_eqbench",
+            "eqbench,none": 17.326817173363374,
+            "eqbench_stderr,none": 2.8909857883650805,
+            "percent_parseable,none": 87.13450292397661,
+            "percent_parseable_stderr,none": 2.567934272327684
+        },
+        "ko_gpqa_diamond_zeroshot": {
+            "alias": " - ko_gpqa_diamond_zeroshot",
+            "acc_norm,none": 0.22727272727272727,
+            "acc_norm_stderr,none": 0.02985751567338641
+        },
+        "ko_gsm8k": {
+            "alias": " - ko_gsm8k",
+            "exact_match,strict-match": 0.04245640636846096,
+            "exact_match_stderr,strict-match": 0.005553837749990043,
+            "exact_match,flexible-extract": 0.23275208491281274,
+            "exact_match_stderr,flexible-extract": 0.011640106217202961
+        },
+        "ko_ifeval": {
+            "alias": " - ko_ifeval",
+            "prompt_level_strict_acc,none": 0.2793522267206478,
+            "prompt_level_strict_acc_stderr,none": 0.020207571941891923,
+            "inst_level_strict_acc,none": 0.37091988130563797,
+            "inst_level_strict_acc_stderr,none": "N/A",
+            "prompt_level_loose_acc,none": 0.29554655870445345,
+            "prompt_level_loose_acc_stderr,none": 0.020550180523748192,
+            "inst_level_loose_acc,none": 0.39020771513353114,
+            "inst_level_loose_acc_stderr,none": "N/A"
+        },
+        "ko_winogrande": {
+            "alias": " - ko_winogrande",
+            "acc,none": 0.580110497237569,
+            "acc_stderr,none": 0.013870943986310391
+        },
+        "kornat_common": {
+            "alias": " - kornat_common",
+            "acc_norm,none": 0.18375499334221038,
+            "acc_norm_stderr,none": 0.004996904248291179
+        },
+        "kornat_harmless": {
+            "alias": " - kornat_harmless",
+            "acc_norm,none": 0.6211333333333496,
+            "acc_norm_stderr,none": 0.002098094527067178
+        },
+        "kornat_helpful": {
+            "alias": " - kornat_helpful",
+            "acc_norm,none": 0.4835,
+            "acc_norm_stderr,none": 0.007036592344018987
+        },
+        "kornat_social": {
+            "alias": " - kornat_social",
+            "A-SVA,none": 0.45446788840763486,
+            "A-SVA_stderr,none": 0.003924616035569883
+        }
+    },
+    "versions": {
+        "all": 2,
+        "ko_eqbench": 2,
+        "ko_gpqa_diamond_zeroshot": 2,
+        "ko_gsm8k": 2,
+        "ko_ifeval": 2,
+        "ko_winogrande": 2,
+        "kornat_common": 2,
+        "kornat_harmless": 2,
+        "kornat_helpful": 2,
+        "kornat_social": 2
+    },
+    "config_general": {
+        "model_name": "wons/mistral-7B-test-v0.1",
+        "model_sha": "ad71e129d126732f7c2f08bdbf88cdfab5866e45",
+        "model_dtype": "torch.float16",
+        "lighteval_sha": "",
+        "num_few_shot_default": 0,
+        "num_fewshot_seeds": 1,
+        "override_batch_size": 1,
+        "max_samples": null
+    }
+}
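The added file is plain JSON, so it can be inspected without any special tooling. Below is a minimal sketch (not part of this commit) of how one might load it and list the per-task metrics with Python's standard json module; the local path is an assumption and should point at wherever this repository is checked out.

```python
import json

# Assumed local path to the file added in this commit; adjust as needed.
path = "wons/mistral-7B-test-v0.1/result_2023-11-22 14:04:32.json"

with open(path, encoding="utf-8") as f:
    data = json.load(f)

print("model:", data["config_general"]["model_name"])

# Print every reported metric for each task, skipping the display alias.
for task, metrics in data["results"].items():
    for key, value in metrics.items():
        if key == "alias":
            continue
        print(f"{task:30s} {key:45s} {value}")
```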