choco_9966 committed
Commit fdebfff • 1 Parent(s): db8342c

remove all results
This view is limited to 50 files because it contains too many changes. See raw diff.
- .gitattributes +0 -55
- QuantumIntelligence/QI-mistral-7B-slerp/result_2024-04-11 04:45:52.json +0 -444
- QuantumIntelligence/QI-neural-chat-7B-ko-DPO/result_2024-04-11 23:50:16.json +0 -444
- Qwen/Qwen1.5-14B-Chat/result_2024-05-13 18:15:27.json +0 -444
- Qwen/Qwen1.5-14B/result_2024-05-13 18:15:41.json +0 -444
- Qwen/Qwen2-7B-Instruct/result_2024-06-06 17:19:27.json +0 -444
- Qwen/Qwen2-7B/result_2024-06-06 17:19:35.json +0 -444
- RLHFlow/LLaMA3-iterative-DPO-final/result_2024-06-05 14:46:44.json +0 -444
- Raphael21/Raphael21-SOLAR-10.7B/result_2024-02-26 11:01:49.json +0 -444
- RubielLabarta/LogoS-7Bx2-MoE-13B-v0.2/result_2024-07-03 09:55:23.json +0 -444
- SJ-Donald/SJ-SOLAR-10.7b-DPO/result_2024-01-25 00:56:50.json +0 -444
- SJ-Donald/SOLAR-10.7B-slerp/result_2024-01-11 05:42:26.json +0 -444
- SJ-Donald/llama3-passthrough-chat/result_2024-05-17 07:48:22.json +0 -444
- SJ-Donald/llama3-passthrough/result_2024-05-16 12:56:07.json +0 -444
- SKYEEEE/llama3-Ko-3-8B-finetuned_ver2/result_2024-07-14 03:16:04.json +0 -444
- SKYEEEE/llama3-Ko-3-8B-finetuned_ver2/result_2024-07-14 03:35:13.json +0 -444
- SakanaAI/DiscoPOP-zephyr-7b-gemma/result_2024-06-17 05:07:55.json +0 -444
- Saxo/yunsung-llama-2-koen-13b-linkbricks-sft-basic-v1/result_2024-03-12 18:32:39.json +0 -444
- SeaLLMs/SeaLLM-7B-v2.5/result_2024-05-13 17:46:50.json +0 -444
- SeaLLMs/SeaLLM-7B-v2/result_2024-08-06 11:14:54.json +0 -444
- SeaLLMs/SeaLLMs-v3-7B-Chat/result_2024-07-28 03:06:24.json +0 -444
- SkyOrbis/SKY-Ko-Llama3-8B-lora/result_2024-06-23 07:48:25.json +0 -444
- SkyOrbis/SKY-Ko-Solar-10.7B-lora/result_2024-06-23 08:12:36.json +0 -444
- StatPan/SinGung7B-DPO-v0.1-12600c/result_2024-01-06 08:57:13.json +0 -444
- StatPan/all-you-need-is/result_2024-01-04 01:51:48.json +0 -444
- StatPan/mistral7b-bartending-recipe-v1/result_2023-12-29 07:50:46.json +0 -444
- StatPan/singung-dpo-v0.1-2200/result_2023-12-26 13:29:32.json +0 -444
- SuperPowerMz/SON_Mistral-7B-QLoRA-Peft/result_2024-04-17 02:06:08.json +0 -444
- Surromind/Solar_v0.1/result_2024-03-28 23:58:36.json +0 -444
- Surromind/gemma-2b-v0.1/result_2024-03-29 02:10:12.json +0 -444
- T3Q-LLM/T3Q-LLM-sft1.0-dpo1.0/result_2024-04-17 12:40:55.json +0 -444
- T3Q-LLM/T3Q-LLM-solar10.8-sft-v1.0/result_2024-04-16 10:11:27.json +0 -444
- T3Q-LLM/T3Q-LLM1-CV-v1.0/result_2024-05-07 12:24:30.json +0 -444
- T3Q-LLM/T3Q-LLM1-CV-v2.0/result_2024-05-09 22:59:00.json +0 -444
- T3Q-LLM/T3Q-LLM1-v2.0/result_2024-05-02 12:31:10.json +0 -444
- T3Q-LLM/T3Q-LLM2-FP-v1.0/result_2024-05-08 00:08:39.json +0 -444
- T3Q-LLM/T3Q-LLM2-FP-v2.0/result_2024-05-12 04:44:25.json +0 -444
- T3Q-LLM/T3Q-LLM3-NC-v1.0/result_2024-05-09 08:08:07.json +0 -444
- TIGER-Lab/MAmmoTH2-7B-Plus/result_2024-05-13 17:04:51.json +0 -444
- TIGER-Lab/MAmmoTH2-8B-Plus/result_2024-05-13 17:04:55.json +0 -444
- Taekyoon/llama2-org-koen-7b/result_2023-11-17 08:17:26.json +0 -444
- TeamUNIVA/Komodo_6B_v1.0.0/result_2024-01-30 12:17:31.json +0 -444
- TeamUNIVA/Komodo_6B_v2.0.0/result_2024-02-09 17:20:47.json +0 -444
- TeamUNIVA/Komodo_6B_v3.0.0/result_2024-03-04 11:29:12.json +0 -444
- TeamUNIVA/Komodo_7B_v1.0.0/result_2024-01-30 12:16:24.json +0 -444
- TeamUNIVA/Komodo_7B_v1.0.1/result_2024-01-30 12:16:51.json +0 -444
- The-matt/llama2_ko-7b_distinctive-snowflake-182_1060/result_2023-11-20 01:48:08.json +0 -444
- The-matt/llama2_ko-7b_sandy-fire-170_1530/result_2023-11-13 07:13:52.json +0 -444
- The-matt/llama2_ko-7b_stilted-lion-205_1530/result_2023-11-23 01:33:10.json +0 -444
- TheBloke/Llama-2-13B-fp16/result_2023-09-27 04:58:32.json +0 -444
.gitattributes
DELETED
@@ -1,55 +0,0 @@
-*.7z filter=lfs diff=lfs merge=lfs -text
-*.arrow filter=lfs diff=lfs merge=lfs -text
-*.bin filter=lfs diff=lfs merge=lfs -text
-*.bz2 filter=lfs diff=lfs merge=lfs -text
-*.ckpt filter=lfs diff=lfs merge=lfs -text
-*.ftz filter=lfs diff=lfs merge=lfs -text
-*.gz filter=lfs diff=lfs merge=lfs -text
-*.h5 filter=lfs diff=lfs merge=lfs -text
-*.joblib filter=lfs diff=lfs merge=lfs -text
-*.lfs.* filter=lfs diff=lfs merge=lfs -text
-*.lz4 filter=lfs diff=lfs merge=lfs -text
-*.mlmodel filter=lfs diff=lfs merge=lfs -text
-*.model filter=lfs diff=lfs merge=lfs -text
-*.msgpack filter=lfs diff=lfs merge=lfs -text
-*.npy filter=lfs diff=lfs merge=lfs -text
-*.npz filter=lfs diff=lfs merge=lfs -text
-*.onnx filter=lfs diff=lfs merge=lfs -text
-*.ot filter=lfs diff=lfs merge=lfs -text
-*.parquet filter=lfs diff=lfs merge=lfs -text
-*.pb filter=lfs diff=lfs merge=lfs -text
-*.pickle filter=lfs diff=lfs merge=lfs -text
-*.pkl filter=lfs diff=lfs merge=lfs -text
-*.pt filter=lfs diff=lfs merge=lfs -text
-*.pth filter=lfs diff=lfs merge=lfs -text
-*.rar filter=lfs diff=lfs merge=lfs -text
-*.safetensors filter=lfs diff=lfs merge=lfs -text
-saved_model/**/* filter=lfs diff=lfs merge=lfs -text
-*.tar.* filter=lfs diff=lfs merge=lfs -text
-*.tar filter=lfs diff=lfs merge=lfs -text
-*.tflite filter=lfs diff=lfs merge=lfs -text
-*.tgz filter=lfs diff=lfs merge=lfs -text
-*.wasm filter=lfs diff=lfs merge=lfs -text
-*.xz filter=lfs diff=lfs merge=lfs -text
-*.zip filter=lfs diff=lfs merge=lfs -text
-*.zst filter=lfs diff=lfs merge=lfs -text
-*tfevents* filter=lfs diff=lfs merge=lfs -text
-# Audio files - uncompressed
-*.pcm filter=lfs diff=lfs merge=lfs -text
-*.sam filter=lfs diff=lfs merge=lfs -text
-*.raw filter=lfs diff=lfs merge=lfs -text
-# Audio files - compressed
-*.aac filter=lfs diff=lfs merge=lfs -text
-*.flac filter=lfs diff=lfs merge=lfs -text
-*.mp3 filter=lfs diff=lfs merge=lfs -text
-*.ogg filter=lfs diff=lfs merge=lfs -text
-*.wav filter=lfs diff=lfs merge=lfs -text
-# Image files - uncompressed
-*.bmp filter=lfs diff=lfs merge=lfs -text
-*.gif filter=lfs diff=lfs merge=lfs -text
-*.png filter=lfs diff=lfs merge=lfs -text
-*.tiff filter=lfs diff=lfs merge=lfs -text
-# Image files - compressed
-*.jpg filter=lfs diff=lfs merge=lfs -text
-*.jpeg filter=lfs diff=lfs merge=lfs -text
-*.webp filter=lfs diff=lfs merge=lfs -text
QuantumIntelligence/QI-mistral-7B-slerp/result_2024-04-11 04:45:52.json
DELETED
@@ -1,444 +0,0 @@
-{
-    "results": {
-        "harness|ko_arc_challenge|25": {
-            "acc": 0.363481228668942,
-            "acc_stderr": 0.014056207319068285,
-            "acc_norm": 0.41552901023890787,
-            "acc_norm_stderr": 0.014401366641216391
-        },
-        "harness|ko_hellaswag|10": {
-            "acc": 0.39145588528181635,
-            "acc_stderr": 0.004870785036708288,
-            "acc_norm": 0.5085640310695081,
-            "acc_norm_stderr": 0.004989049430391295
-        },
-        "harness|ko_mmlu_world_religions|5": {
-            "acc": 0.49122807017543857,
-            "acc_stderr": 0.038342347441649924,
-            "acc_norm": 0.49122807017543857,
-            "acc_norm_stderr": 0.038342347441649924
-        },
-        "harness|ko_mmlu_management|5": {
-            "acc": 0.5825242718446602,
-            "acc_stderr": 0.048828405482122375,
-            "acc_norm": 0.5825242718446602,
-            "acc_norm_stderr": 0.048828405482122375
-        },
-        "harness|ko_mmlu_miscellaneous|5": {
-            "acc": 0.49169859514687103,
-            "acc_stderr": 0.017877498991072008,
-            "acc_norm": 0.49169859514687103,
-            "acc_norm_stderr": 0.017877498991072008
-        },
-        "harness|ko_mmlu_anatomy|5": {
-            "acc": 0.4074074074074074,
-            "acc_stderr": 0.042446332383532286,
-            "acc_norm": 0.4074074074074074,
-            "acc_norm_stderr": 0.042446332383532286
-        },
-        "harness|ko_mmlu_abstract_algebra|5": {
-            "acc": 0.3,
-            "acc_stderr": 0.046056618647183814,
-            "acc_norm": 0.3,
-            "acc_norm_stderr": 0.046056618647183814
-        },
-        "harness|ko_mmlu_conceptual_physics|5": {
-            "acc": 0.39574468085106385,
-            "acc_stderr": 0.03196758697835362,
-            "acc_norm": 0.39574468085106385,
-            "acc_norm_stderr": 0.03196758697835362
-        },
-        "harness|ko_mmlu_virology|5": {
-            "acc": 0.4036144578313253,
-            "acc_stderr": 0.03819486140758397,
-            "acc_norm": 0.4036144578313253,
-            "acc_norm_stderr": 0.03819486140758397
-        },
-        "harness|ko_mmlu_philosophy|5": {
-            "acc": 0.4662379421221865,
-            "acc_stderr": 0.02833327710956279,
-            "acc_norm": 0.4662379421221865,
-            "acc_norm_stderr": 0.02833327710956279
-        },
-        "harness|ko_mmlu_human_aging|5": {
-            "acc": 0.4663677130044843,
-            "acc_stderr": 0.033481800170603065,
-            "acc_norm": 0.4663677130044843,
-            "acc_norm_stderr": 0.033481800170603065
-        },
-        "harness|ko_mmlu_human_sexuality|5": {
-            "acc": 0.4351145038167939,
-            "acc_stderr": 0.04348208051644858,
-            "acc_norm": 0.4351145038167939,
-            "acc_norm_stderr": 0.04348208051644858
-        },
-        "harness|ko_mmlu_medical_genetics|5": {
-            "acc": 0.37,
-            "acc_stderr": 0.04852365870939099,
-            "acc_norm": 0.37,
-            "acc_norm_stderr": 0.04852365870939099
-        },
-        "harness|ko_mmlu_high_school_geography|5": {
-            "acc": 0.5858585858585859,
-            "acc_stderr": 0.03509438348879629,
-            "acc_norm": 0.5858585858585859,
-            "acc_norm_stderr": 0.03509438348879629
-        },
-        "harness|ko_mmlu_electrical_engineering|5": {
-            "acc": 0.45517241379310347,
-            "acc_stderr": 0.04149886942192117,
-            "acc_norm": 0.45517241379310347,
-            "acc_norm_stderr": 0.04149886942192117
-        },
-        "harness|ko_mmlu_college_physics|5": {
-            "acc": 0.27450980392156865,
-            "acc_stderr": 0.044405219061793275,
-            "acc_norm": 0.27450980392156865,
-            "acc_norm_stderr": 0.044405219061793275
-        },
-        "harness|ko_mmlu_high_school_microeconomics|5": {
-            "acc": 0.5084033613445378,
-            "acc_stderr": 0.0324739027656967,
-            "acc_norm": 0.5084033613445378,
-            "acc_norm_stderr": 0.0324739027656967
-        },
-        "harness|ko_mmlu_high_school_macroeconomics|5": {
-            "acc": 0.5076923076923077,
-            "acc_stderr": 0.025348006031534743,
-            "acc_norm": 0.5076923076923077,
-            "acc_norm_stderr": 0.025348006031534743
-        },
-        "harness|ko_mmlu_computer_security|5": {
-            "acc": 0.6,
-            "acc_stderr": 0.049236596391733084,
-            "acc_norm": 0.6,
-            "acc_norm_stderr": 0.049236596391733084
-        },
-        "harness|ko_mmlu_global_facts|5": {
-            "acc": 0.3,
-            "acc_stderr": 0.046056618647183814,
-            "acc_norm": 0.3,
-            "acc_norm_stderr": 0.046056618647183814
-        },
-        "harness|ko_mmlu_jurisprudence|5": {
-            "acc": 0.5462962962962963,
-            "acc_stderr": 0.04812917324536823,
-            "acc_norm": 0.5462962962962963,
-            "acc_norm_stderr": 0.04812917324536823
-        },
-        "harness|ko_mmlu_high_school_chemistry|5": {
-            "acc": 0.39901477832512317,
-            "acc_stderr": 0.03445487686264715,
-            "acc_norm": 0.39901477832512317,
-            "acc_norm_stderr": 0.03445487686264715
-        },
-        "harness|ko_mmlu_high_school_biology|5": {
-            "acc": 0.4096774193548387,
-            "acc_stderr": 0.027976054915347354,
-            "acc_norm": 0.4096774193548387,
-            "acc_norm_stderr": 0.027976054915347354
-        },
-        "harness|ko_mmlu_marketing|5": {
-            "acc": 0.7393162393162394,
-            "acc_stderr": 0.02876034895652341,
-            "acc_norm": 0.7393162393162394,
-            "acc_norm_stderr": 0.02876034895652341
-        },
-        "harness|ko_mmlu_clinical_knowledge|5": {
-            "acc": 0.44528301886792454,
-            "acc_stderr": 0.030588052974270658,
-            "acc_norm": 0.44528301886792454,
-            "acc_norm_stderr": 0.030588052974270658
-        },
-        "harness|ko_mmlu_public_relations|5": {
-            "acc": 0.509090909090909,
-            "acc_stderr": 0.0478833976870286,
-            "acc_norm": 0.509090909090909,
-            "acc_norm_stderr": 0.0478833976870286
-        },
-        "harness|ko_mmlu_high_school_mathematics|5": {
-            "acc": 0.3037037037037037,
-            "acc_stderr": 0.02803792996911499,
-            "acc_norm": 0.3037037037037037,
-            "acc_norm_stderr": 0.02803792996911499
-        },
-        "harness|ko_mmlu_high_school_physics|5": {
-            "acc": 0.25165562913907286,
-            "acc_stderr": 0.03543304234389985,
-            "acc_norm": 0.25165562913907286,
-            "acc_norm_stderr": 0.03543304234389985
-        },
-        "harness|ko_mmlu_sociology|5": {
-            "acc": 0.6019900497512438,
-            "acc_stderr": 0.034611994290400135,
-            "acc_norm": 0.6019900497512438,
-            "acc_norm_stderr": 0.034611994290400135
-        },
-        "harness|ko_mmlu_college_medicine|5": {
-            "acc": 0.3815028901734104,
-            "acc_stderr": 0.03703851193099521,
-            "acc_norm": 0.3815028901734104,
-            "acc_norm_stderr": 0.03703851193099521
-        },
-        "harness|ko_mmlu_elementary_mathematics|5": {
-            "acc": 0.3968253968253968,
-            "acc_stderr": 0.02519710107424649,
-            "acc_norm": 0.3968253968253968,
-            "acc_norm_stderr": 0.02519710107424649
-        },
-        "harness|ko_mmlu_college_biology|5": {
-            "acc": 0.3472222222222222,
-            "acc_stderr": 0.03981240543717861,
-            "acc_norm": 0.3472222222222222,
-            "acc_norm_stderr": 0.03981240543717861
-        },
-        "harness|ko_mmlu_college_chemistry|5": {
-            "acc": 0.31,
-            "acc_stderr": 0.04648231987117316,
-            "acc_norm": 0.31,
-            "acc_norm_stderr": 0.04648231987117316
-        },
-        "harness|ko_mmlu_us_foreign_policy|5": {
-            "acc": 0.51,
-            "acc_stderr": 0.050241839379569095,
-            "acc_norm": 0.51,
-            "acc_norm_stderr": 0.050241839379569095
-        },
-        "harness|ko_mmlu_moral_disputes|5": {
-            "acc": 0.5115606936416185,
-            "acc_stderr": 0.02691189868637792,
-            "acc_norm": 0.5115606936416185,
-            "acc_norm_stderr": 0.02691189868637792
-        },
-        "harness|ko_mmlu_logical_fallacies|5": {
-            "acc": 0.5337423312883436,
-            "acc_stderr": 0.039194155450484096,
-            "acc_norm": 0.5337423312883436,
-            "acc_norm_stderr": 0.039194155450484096
-        },
-        "harness|ko_mmlu_prehistory|5": {
-            "acc": 0.45987654320987653,
-            "acc_stderr": 0.02773102275353928,
-            "acc_norm": 0.45987654320987653,
-            "acc_norm_stderr": 0.02773102275353928
-        },
-        "harness|ko_mmlu_college_mathematics|5": {
-            "acc": 0.36,
-            "acc_stderr": 0.048241815132442176,
-            "acc_norm": 0.36,
-            "acc_norm_stderr": 0.048241815132442176
-        },
-        "harness|ko_mmlu_high_school_government_and_politics|5": {
-            "acc": 0.5077720207253886,
-            "acc_stderr": 0.03608003225569654,
-            "acc_norm": 0.5077720207253886,
-            "acc_norm_stderr": 0.03608003225569654
-        },
-        "harness|ko_mmlu_econometrics|5": {
-            "acc": 0.2894736842105263,
-            "acc_stderr": 0.04266339443159394,
-            "acc_norm": 0.2894736842105263,
-            "acc_norm_stderr": 0.04266339443159394
-        },
-        "harness|ko_mmlu_high_school_psychology|5": {
-            "acc": 0.5321100917431193,
-            "acc_stderr": 0.021393071222680804,
-            "acc_norm": 0.5321100917431193,
-            "acc_norm_stderr": 0.021393071222680804
-        },
-        "harness|ko_mmlu_formal_logic|5": {
-            "acc": 0.4126984126984127,
-            "acc_stderr": 0.04403438954768177,
-            "acc_norm": 0.4126984126984127,
-            "acc_norm_stderr": 0.04403438954768177
-        },
-        "harness|ko_mmlu_nutrition|5": {
-            "acc": 0.4934640522875817,
-            "acc_stderr": 0.028627470550556054,
-            "acc_norm": 0.4934640522875817,
-            "acc_norm_stderr": 0.028627470550556054
-        },
-        "harness|ko_mmlu_business_ethics|5": {
-            "acc": 0.51,
-            "acc_stderr": 0.05024183937956912,
-            "acc_norm": 0.51,
-            "acc_norm_stderr": 0.05024183937956912
-        },
-        "harness|ko_mmlu_international_law|5": {
-            "acc": 0.6446280991735537,
-            "acc_stderr": 0.0436923632657398,
-            "acc_norm": 0.6446280991735537,
-            "acc_norm_stderr": 0.0436923632657398
-        },
-        "harness|ko_mmlu_astronomy|5": {
-            "acc": 0.4473684210526316,
-            "acc_stderr": 0.04046336883978251,
-            "acc_norm": 0.4473684210526316,
-            "acc_norm_stderr": 0.04046336883978251
-        },
-        "harness|ko_mmlu_professional_psychology|5": {
-            "acc": 0.40032679738562094,
-            "acc_stderr": 0.01982184368827176,
-            "acc_norm": 0.40032679738562094,
-            "acc_norm_stderr": 0.01982184368827176
-        },
-        "harness|ko_mmlu_professional_accounting|5": {
-            "acc": 0.3829787234042553,
-            "acc_stderr": 0.028999080904806178,
-            "acc_norm": 0.3829787234042553,
-            "acc_norm_stderr": 0.028999080904806178
-        },
-        "harness|ko_mmlu_machine_learning|5": {
-            "acc": 0.4375,
-            "acc_stderr": 0.04708567521880525,
-            "acc_norm": 0.4375,
-            "acc_norm_stderr": 0.04708567521880525
-        },
-        "harness|ko_mmlu_high_school_statistics|5": {
-            "acc": 0.39351851851851855,
-            "acc_stderr": 0.03331747876370312,
-            "acc_norm": 0.39351851851851855,
-            "acc_norm_stderr": 0.03331747876370312
-        },
-        "harness|ko_mmlu_moral_scenarios|5": {
-            "acc": 0.25251396648044694,
-            "acc_stderr": 0.014530330201468641,
-            "acc_norm": 0.25251396648044694,
-            "acc_norm_stderr": 0.014530330201468641
-        },
-        "harness|ko_mmlu_college_computer_science|5": {
-            "acc": 0.36,
-            "acc_stderr": 0.04824181513244218,
-            "acc_norm": 0.36,
-            "acc_norm_stderr": 0.04824181513244218
-        },
-        "harness|ko_mmlu_high_school_computer_science|5": {
-            "acc": 0.61,
-            "acc_stderr": 0.04902071300001974,
-            "acc_norm": 0.61,
-            "acc_norm_stderr": 0.04902071300001974
-        },
-        "harness|ko_mmlu_professional_medicine|5": {
-            "acc": 0.3161764705882353,
-            "acc_stderr": 0.028245687391462913,
-            "acc_norm": 0.3161764705882353,
-            "acc_norm_stderr": 0.028245687391462913
-        },
-        "harness|ko_mmlu_security_studies|5": {
-            "acc": 0.5673469387755102,
-            "acc_stderr": 0.03171752824062665,
-            "acc_norm": 0.5673469387755102,
-            "acc_norm_stderr": 0.03171752824062665
-        },
-        "harness|ko_mmlu_high_school_world_history|5": {
-            "acc": 0.5991561181434599,
-            "acc_stderr": 0.03190080389473236,
-            "acc_norm": 0.5991561181434599,
-            "acc_norm_stderr": 0.03190080389473236
-        },
-        "harness|ko_mmlu_professional_law|5": {
-            "acc": 0.3324641460234681,
-            "acc_stderr": 0.012032022332260521,
-            "acc_norm": 0.3324641460234681,
-            "acc_norm_stderr": 0.012032022332260521
-        },
-        "harness|ko_mmlu_high_school_us_history|5": {
-            "acc": 0.4803921568627451,
-            "acc_stderr": 0.03506612560524866,
-            "acc_norm": 0.4803921568627451,
-            "acc_norm_stderr": 0.03506612560524866
-        },
-        "harness|ko_mmlu_high_school_european_history|5": {
-            "acc": 0.509090909090909,
-            "acc_stderr": 0.03903698647748441,
-            "acc_norm": 0.509090909090909,
-            "acc_norm_stderr": 0.03903698647748441
-        },
-        "harness|ko_truthfulqa_mc|0": {
-            "mc1": 0.35128518971848227,
-            "mc1_stderr": 0.0167113581635444,
-            "mc2": 0.532589840184244,
-            "mc2_stderr": 0.016038282113032338
-        },
-        "harness|ko_commongen_v2|2": {
-            "acc": 0.4380165289256198,
-            "acc_stderr": 0.01705775370216029,
-            "acc_norm": 0.45336481700118064,
-            "acc_norm_stderr": 0.01711541822522687
-        }
-    },
-    "versions": {
-        "all": 0,
-        "harness|ko_arc_challenge|25": 0,
-        "harness|ko_hellaswag|10": 0,
-        "harness|ko_mmlu_world_religions|5": 1,
-        "harness|ko_mmlu_management|5": 1,
-        "harness|ko_mmlu_miscellaneous|5": 1,
-        "harness|ko_mmlu_anatomy|5": 1,
-        "harness|ko_mmlu_abstract_algebra|5": 1,
-        "harness|ko_mmlu_conceptual_physics|5": 1,
-        "harness|ko_mmlu_virology|5": 1,
-        "harness|ko_mmlu_philosophy|5": 1,
-        "harness|ko_mmlu_human_aging|5": 1,
-        "harness|ko_mmlu_human_sexuality|5": 1,
-        "harness|ko_mmlu_medical_genetics|5": 1,
-        "harness|ko_mmlu_high_school_geography|5": 1,
-        "harness|ko_mmlu_electrical_engineering|5": 1,
-        "harness|ko_mmlu_college_physics|5": 1,
-        "harness|ko_mmlu_high_school_microeconomics|5": 1,
-        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
-        "harness|ko_mmlu_computer_security|5": 1,
-        "harness|ko_mmlu_global_facts|5": 1,
-        "harness|ko_mmlu_jurisprudence|5": 1,
-        "harness|ko_mmlu_high_school_chemistry|5": 1,
-        "harness|ko_mmlu_high_school_biology|5": 1,
-        "harness|ko_mmlu_marketing|5": 1,
-        "harness|ko_mmlu_clinical_knowledge|5": 1,
-        "harness|ko_mmlu_public_relations|5": 1,
-        "harness|ko_mmlu_high_school_mathematics|5": 1,
-        "harness|ko_mmlu_high_school_physics|5": 1,
-        "harness|ko_mmlu_sociology|5": 1,
-        "harness|ko_mmlu_college_medicine|5": 1,
-        "harness|ko_mmlu_elementary_mathematics|5": 1,
-        "harness|ko_mmlu_college_biology|5": 1,
-        "harness|ko_mmlu_college_chemistry|5": 1,
-        "harness|ko_mmlu_us_foreign_policy|5": 1,
-        "harness|ko_mmlu_moral_disputes|5": 1,
-        "harness|ko_mmlu_logical_fallacies|5": 1,
-        "harness|ko_mmlu_prehistory|5": 1,
-        "harness|ko_mmlu_college_mathematics|5": 1,
-        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
-        "harness|ko_mmlu_econometrics|5": 1,
-        "harness|ko_mmlu_high_school_psychology|5": 1,
-        "harness|ko_mmlu_formal_logic|5": 1,
-        "harness|ko_mmlu_nutrition|5": 1,
-        "harness|ko_mmlu_business_ethics|5": 1,
-        "harness|ko_mmlu_international_law|5": 1,
-        "harness|ko_mmlu_astronomy|5": 1,
-        "harness|ko_mmlu_professional_psychology|5": 1,
-        "harness|ko_mmlu_professional_accounting|5": 1,
-        "harness|ko_mmlu_machine_learning|5": 1,
-        "harness|ko_mmlu_high_school_statistics|5": 1,
-        "harness|ko_mmlu_moral_scenarios|5": 1,
-        "harness|ko_mmlu_college_computer_science|5": 1,
-        "harness|ko_mmlu_high_school_computer_science|5": 1,
-        "harness|ko_mmlu_professional_medicine|5": 1,
-        "harness|ko_mmlu_security_studies|5": 1,
-        "harness|ko_mmlu_high_school_world_history|5": 1,
-        "harness|ko_mmlu_professional_law|5": 1,
-        "harness|ko_mmlu_high_school_us_history|5": 1,
-        "harness|ko_mmlu_high_school_european_history|5": 1,
-        "harness|ko_truthfulqa_mc|0": 0,
-        "harness|ko_commongen_v2|2": 1
-    },
-    "config_general": {
-        "model_name": "QuantumIntelligence/QI-mistral-7B-slerp",
-        "model_sha": "1a9a8379a7651644dbd6c9ae99dfba6ae8aeb4e0",
-        "model_dtype": "torch.float16",
-        "lighteval_sha": "",
-        "num_few_shot_default": 0,
-        "num_fewshot_seeds": 1,
-        "override_batch_size": 1,
-        "max_samples": null
-    }
-}
QuantumIntelligence/QI-neural-chat-7B-ko-DPO/result_2024-04-11 23:50:16.json
DELETED
@@ -1,444 +0,0 @@
-{
-    "results": {
-        "harness|ko_arc_challenge|25": {
-            "acc": 0.3430034129692833,
-            "acc_stderr": 0.01387242322371817,
-            "acc_norm": 0.3822525597269625,
-            "acc_norm_stderr": 0.014200454049979293
-        },
-        "harness|ko_hellaswag|10": {
-            "acc": 0.36227843059151565,
-            "acc_stderr": 0.004796763521045227,
-            "acc_norm": 0.4691296554471221,
-            "acc_norm_stderr": 0.004980262025472491
-        },
-        "harness|ko_mmlu_world_religions|5": {
-            "acc": 0.45614035087719296,
-            "acc_stderr": 0.03820042586602967,
-            "acc_norm": 0.45614035087719296,
-            "acc_norm_stderr": 0.03820042586602967
-        },
-        "harness|ko_mmlu_management|5": {
-            "acc": 0.5825242718446602,
-            "acc_stderr": 0.048828405482122375,
-            "acc_norm": 0.5825242718446602,
-            "acc_norm_stderr": 0.048828405482122375
-        },
-        "harness|ko_mmlu_miscellaneous|5": {
-            "acc": 0.4508301404853129,
-            "acc_stderr": 0.017793297572699054,
-            "acc_norm": 0.4508301404853129,
-            "acc_norm_stderr": 0.017793297572699054
-        },
-        "harness|ko_mmlu_anatomy|5": {
-            "acc": 0.37777777777777777,
-            "acc_stderr": 0.04188307537595853,
-            "acc_norm": 0.37777777777777777,
-            "acc_norm_stderr": 0.04188307537595853
-        },
-        "harness|ko_mmlu_abstract_algebra|5": {
-            "acc": 0.24,
-            "acc_stderr": 0.042923469599092816,
-            "acc_norm": 0.24,
-            "acc_norm_stderr": 0.042923469599092816
-        },
-        "harness|ko_mmlu_conceptual_physics|5": {
-            "acc": 0.34893617021276596,
-            "acc_stderr": 0.031158522131357787,
-            "acc_norm": 0.34893617021276596,
-            "acc_norm_stderr": 0.031158522131357787
-        },
-        "harness|ko_mmlu_virology|5": {
-            "acc": 0.3614457831325301,
-            "acc_stderr": 0.03740059382029319,
-            "acc_norm": 0.3614457831325301,
-            "acc_norm_stderr": 0.03740059382029319
-        },
-        "harness|ko_mmlu_philosophy|5": {
-            "acc": 0.42765273311897106,
-            "acc_stderr": 0.02809924077580956,
-            "acc_norm": 0.42765273311897106,
-            "acc_norm_stderr": 0.02809924077580956
-        },
-        "harness|ko_mmlu_human_aging|5": {
-            "acc": 0.36771300448430494,
-            "acc_stderr": 0.03236198350928276,
-            "acc_norm": 0.36771300448430494,
-            "acc_norm_stderr": 0.03236198350928276
-        },
-        "harness|ko_mmlu_human_sexuality|5": {
-            "acc": 0.40458015267175573,
-            "acc_stderr": 0.043046937953806645,
-            "acc_norm": 0.40458015267175573,
-            "acc_norm_stderr": 0.043046937953806645
-        },
-        "harness|ko_mmlu_medical_genetics|5": {
-            "acc": 0.4,
-            "acc_stderr": 0.049236596391733084,
-            "acc_norm": 0.4,
-            "acc_norm_stderr": 0.049236596391733084
-        },
-        "harness|ko_mmlu_high_school_geography|5": {
-            "acc": 0.494949494949495,
-            "acc_stderr": 0.035621707606254015,
-            "acc_norm": 0.494949494949495,
-            "acc_norm_stderr": 0.035621707606254015
-        },
-        "harness|ko_mmlu_electrical_engineering|5": {
-            "acc": 0.4413793103448276,
-            "acc_stderr": 0.04137931034482758,
-            "acc_norm": 0.4413793103448276,
-            "acc_norm_stderr": 0.04137931034482758
-        },
-        "harness|ko_mmlu_college_physics|5": {
-            "acc": 0.23529411764705882,
-            "acc_stderr": 0.04220773659171453,
-            "acc_norm": 0.23529411764705882,
-            "acc_norm_stderr": 0.04220773659171453
-        },
-        "harness|ko_mmlu_high_school_microeconomics|5": {
-            "acc": 0.5252100840336135,
-            "acc_stderr": 0.03243718055137411,
-            "acc_norm": 0.5252100840336135,
-            "acc_norm_stderr": 0.03243718055137411
-        },
-        "harness|ko_mmlu_high_school_macroeconomics|5": {
-            "acc": 0.45897435897435895,
-            "acc_stderr": 0.025265525491284295,
-            "acc_norm": 0.45897435897435895,
-            "acc_norm_stderr": 0.025265525491284295
-        },
-        "harness|ko_mmlu_computer_security|5": {
-            "acc": 0.54,
-            "acc_stderr": 0.05009082659620332,
-            "acc_norm": 0.54,
-            "acc_norm_stderr": 0.05009082659620332
-        },
-        "harness|ko_mmlu_global_facts|5": {
-            "acc": 0.29,
-            "acc_stderr": 0.04560480215720684,
-            "acc_norm": 0.29,
-            "acc_norm_stderr": 0.04560480215720684
-        },
-        "harness|ko_mmlu_jurisprudence|5": {
-            "acc": 0.5092592592592593,
-            "acc_stderr": 0.04832853553437056,
-            "acc_norm": 0.5092592592592593,
-            "acc_norm_stderr": 0.04832853553437056
-        },
-        "harness|ko_mmlu_high_school_chemistry|5": {
-            "acc": 0.31527093596059114,
-            "acc_stderr": 0.03269080871970186,
-            "acc_norm": 0.31527093596059114,
-            "acc_norm_stderr": 0.03269080871970186
-        },
-        "harness|ko_mmlu_high_school_biology|5": {
-            "acc": 0.4161290322580645,
-            "acc_stderr": 0.028040981380761543,
-            "acc_norm": 0.4161290322580645,
-            "acc_norm_stderr": 0.028040981380761543
-        },
-        "harness|ko_mmlu_marketing|5": {
-            "acc": 0.7435897435897436,
-            "acc_stderr": 0.02860595370200424,
-            "acc_norm": 0.7435897435897436,
-            "acc_norm_stderr": 0.02860595370200424
-        },
-        "harness|ko_mmlu_clinical_knowledge|5": {
-            "acc": 0.44528301886792454,
-            "acc_stderr": 0.030588052974270655,
-            "acc_norm": 0.44528301886792454,
-            "acc_norm_stderr": 0.030588052974270655
-        },
-        "harness|ko_mmlu_public_relations|5": {
-            "acc": 0.5181818181818182,
-            "acc_stderr": 0.04785964010794916,
-            "acc_norm": 0.5181818181818182,
-            "acc_norm_stderr": 0.04785964010794916
-        },
-        "harness|ko_mmlu_high_school_mathematics|5": {
-            "acc": 0.3111111111111111,
-            "acc_stderr": 0.02822644674968352,
-            "acc_norm": 0.3111111111111111,
-            "acc_norm_stderr": 0.02822644674968352
-        },
-        "harness|ko_mmlu_high_school_physics|5": {
-            "acc": 0.3841059602649007,
-            "acc_stderr": 0.03971301814719197,
-            "acc_norm": 0.3841059602649007,
-            "acc_norm_stderr": 0.03971301814719197
-        },
-        "harness|ko_mmlu_sociology|5": {
-            "acc": 0.5671641791044776,
-            "acc_stderr": 0.0350349092367328,
-            "acc_norm": 0.5671641791044776,
-            "acc_norm_stderr": 0.0350349092367328
-        },
-        "harness|ko_mmlu_college_medicine|5": {
-            "acc": 0.42196531791907516,
-            "acc_stderr": 0.037657466938651483,
-            "acc_norm": 0.42196531791907516,
-            "acc_norm_stderr": 0.037657466938651483
-        },
-        "harness|ko_mmlu_elementary_mathematics|5": {
-            "acc": 0.35714285714285715,
-            "acc_stderr": 0.024677862841332783,
-            "acc_norm": 0.35714285714285715,
-            "acc_norm_stderr": 0.024677862841332783
-        },
-        "harness|ko_mmlu_college_biology|5": {
-            "acc": 0.3680555555555556,
-            "acc_stderr": 0.040329990539607195,
-            "acc_norm": 0.3680555555555556,
-            "acc_norm_stderr": 0.040329990539607195
-        },
-        "harness|ko_mmlu_college_chemistry|5": {
-            "acc": 0.33,
-            "acc_stderr": 0.04725815626252604,
-            "acc_norm": 0.33,
-            "acc_norm_stderr": 0.04725815626252604
-        },
-        "harness|ko_mmlu_us_foreign_policy|5": {
-            "acc": 0.49,
-            "acc_stderr": 0.05024183937956913,
-            "acc_norm": 0.49,
-            "acc_norm_stderr": 0.05024183937956913
-        },
-        "harness|ko_mmlu_moral_disputes|5": {
-            "acc": 0.476878612716763,
-            "acc_stderr": 0.026890297881303125,
-            "acc_norm": 0.476878612716763,
-            "acc_norm_stderr": 0.026890297881303125
-        },
-        "harness|ko_mmlu_logical_fallacies|5": {
-            "acc": 0.4601226993865031,
-            "acc_stderr": 0.03915857291436971,
-            "acc_norm": 0.4601226993865031,
-            "acc_norm_stderr": 0.03915857291436971
-        },
-        "harness|ko_mmlu_prehistory|5": {
-            "acc": 0.4537037037037037,
-            "acc_stderr": 0.027701228468542602,
-            "acc_norm": 0.4537037037037037,
-            "acc_norm_stderr": 0.027701228468542602
-        },
-        "harness|ko_mmlu_college_mathematics|5": {
-            "acc": 0.28,
-            "acc_stderr": 0.04512608598542128,
-            "acc_norm": 0.28,
-            "acc_norm_stderr": 0.04512608598542128
-        },
-        "harness|ko_mmlu_high_school_government_and_politics|5": {
-            "acc": 0.49222797927461137,
-            "acc_stderr": 0.03608003225569654,
-            "acc_norm": 0.49222797927461137,
-            "acc_norm_stderr": 0.03608003225569654
-        },
-        "harness|ko_mmlu_econometrics|5": {
-            "acc": 0.2894736842105263,
-            "acc_stderr": 0.04266339443159394,
-            "acc_norm": 0.2894736842105263,
-            "acc_norm_stderr": 0.04266339443159394
-        },
-        "harness|ko_mmlu_high_school_psychology|5": {
-            "acc": 0.5045871559633027,
-            "acc_stderr": 0.02143642095552942,
-            "acc_norm": 0.5045871559633027,
-            "acc_norm_stderr": 0.02143642095552942
-        },
-        "harness|ko_mmlu_formal_logic|5": {
-            "acc": 0.3888888888888889,
-            "acc_stderr": 0.04360314860077459,
-            "acc_norm": 0.3888888888888889,
-            "acc_norm_stderr": 0.04360314860077459
-        },
-        "harness|ko_mmlu_nutrition|5": {
-            "acc": 0.4738562091503268,
-            "acc_stderr": 0.028590752958852394,
-            "acc_norm": 0.4738562091503268,
-            "acc_norm_stderr": 0.028590752958852394
-        },
-        "harness|ko_mmlu_business_ethics|5": {
-            "acc": 0.49,
-            "acc_stderr": 0.05024183937956911,
-            "acc_norm": 0.49,
-            "acc_norm_stderr": 0.05024183937956911
-        },
-        "harness|ko_mmlu_international_law|5": {
-            "acc": 0.5950413223140496,
-            "acc_stderr": 0.04481137755942469,
-            "acc_norm": 0.5950413223140496,
-            "acc_norm_stderr": 0.04481137755942469
-        },
-        "harness|ko_mmlu_astronomy|5": {
-            "acc": 0.4276315789473684,
-            "acc_stderr": 0.040260970832965565,
-            "acc_norm": 0.4276315789473684,
-            "acc_norm_stderr": 0.040260970832965565
-        },
-        "harness|ko_mmlu_professional_psychology|5": {
-            "acc": 0.3562091503267974,
-            "acc_stderr": 0.0193733324207245,
-            "acc_norm": 0.3562091503267974,
-            "acc_norm_stderr": 0.0193733324207245
-        },
-        "harness|ko_mmlu_professional_accounting|5": {
-            "acc": 0.30851063829787234,
-            "acc_stderr": 0.027553366165101376,
-            "acc_norm": 0.30851063829787234,
-            "acc_norm_stderr": 0.027553366165101376
-        },
-        "harness|ko_mmlu_machine_learning|5": {
-            "acc": 0.3482142857142857,
-            "acc_stderr": 0.045218299028335865,
-            "acc_norm": 0.3482142857142857,
-            "acc_norm_stderr": 0.045218299028335865
-        },
-        "harness|ko_mmlu_high_school_statistics|5": {
-            "acc": 0.39814814814814814,
-            "acc_stderr": 0.033384734032074016,
-            "acc_norm": 0.39814814814814814,
-            "acc_norm_stderr": 0.033384734032074016
-        },
-        "harness|ko_mmlu_moral_scenarios|5": {
-            "acc": 0.3340782122905028,
-            "acc_stderr": 0.015774911422381632,
-            "acc_norm": 0.3340782122905028,
-            "acc_norm_stderr": 0.015774911422381632
-        },
-        "harness|ko_mmlu_college_computer_science|5": {
-            "acc": 0.41,
-            "acc_stderr": 0.049431107042371025,
-            "acc_norm": 0.41,
-            "acc_norm_stderr": 0.049431107042371025
-        },
-        "harness|ko_mmlu_high_school_computer_science|5": {
-            "acc": 0.63,
-            "acc_stderr": 0.048523658709391,
-            "acc_norm": 0.63,
-            "acc_norm_stderr": 0.048523658709391
-        },
-        "harness|ko_mmlu_professional_medicine|5": {
-            "acc": 0.36764705882352944,
-            "acc_stderr": 0.02928941340940319,
-            "acc_norm": 0.36764705882352944,
-            "acc_norm_stderr": 0.02928941340940319
-        },
-        "harness|ko_mmlu_security_studies|5": {
-            "acc": 0.5510204081632653,
-            "acc_stderr": 0.03184213866687578,
-            "acc_norm": 0.5510204081632653,
-            "acc_norm_stderr": 0.03184213866687578
-        },
-        "harness|ko_mmlu_high_school_world_history|5": {
-            "acc": 0.4978902953586498,
-            "acc_stderr": 0.032546938018020076,
-            "acc_norm": 0.4978902953586498,
-            "acc_norm_stderr": 0.032546938018020076
-        },
-        "harness|ko_mmlu_professional_law|5": {
-            "acc": 0.3161668839634941,
-            "acc_stderr": 0.01187578089438658,
-            "acc_norm": 0.3161668839634941,
-            "acc_norm_stderr": 0.01187578089438658
-        },
-        "harness|ko_mmlu_high_school_us_history|5": {
-            "acc": 0.44607843137254904,
-            "acc_stderr": 0.03488845451304974,
-            "acc_norm": 0.44607843137254904,
-            "acc_norm_stderr": 0.03488845451304974
-        },
-        "harness|ko_mmlu_high_school_european_history|5": {
-            "acc": 0.3939393939393939,
-            "acc_stderr": 0.03815494308688929,
-            "acc_norm": 0.3939393939393939,
-            "acc_norm_stderr": 0.03815494308688929
-        },
-        "harness|ko_truthfulqa_mc|0": {
-            "mc1": 0.3623011015911873,
-            "mc1_stderr": 0.01682664689726226,
-            "mc2": 0.5291842386279259,
-            "mc2_stderr": 0.01594543639067517
-        },
-        "harness|ko_commongen_v2|2": {
-            "acc": 0.3400236127508855,
-            "acc_stderr": 0.01628671722073768,
-            "acc_norm": 0.3565525383707202,
-            "acc_norm_stderr": 0.016467706981527448
-        }
-    },
-    "versions": {
-        "all": 0,
-        "harness|ko_arc_challenge|25": 0,
-        "harness|ko_hellaswag|10": 0,
-        "harness|ko_mmlu_world_religions|5": 1,
-        "harness|ko_mmlu_management|5": 1,
-        "harness|ko_mmlu_miscellaneous|5": 1,
-        "harness|ko_mmlu_anatomy|5": 1,
-        "harness|ko_mmlu_abstract_algebra|5": 1,
-        "harness|ko_mmlu_conceptual_physics|5": 1,
-        "harness|ko_mmlu_virology|5": 1,
-        "harness|ko_mmlu_philosophy|5": 1,
-        "harness|ko_mmlu_human_aging|5": 1,
-        "harness|ko_mmlu_human_sexuality|5": 1,
-        "harness|ko_mmlu_medical_genetics|5": 1,
-        "harness|ko_mmlu_high_school_geography|5": 1,
-        "harness|ko_mmlu_electrical_engineering|5": 1,
-        "harness|ko_mmlu_college_physics|5": 1,
-        "harness|ko_mmlu_high_school_microeconomics|5": 1,
-        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
-        "harness|ko_mmlu_computer_security|5": 1,
-        "harness|ko_mmlu_global_facts|5": 1,
-        "harness|ko_mmlu_jurisprudence|5": 1,
-        "harness|ko_mmlu_high_school_chemistry|5": 1,
-        "harness|ko_mmlu_high_school_biology|5": 1,
-        "harness|ko_mmlu_marketing|5": 1,
-        "harness|ko_mmlu_clinical_knowledge|5": 1,
-        "harness|ko_mmlu_public_relations|5": 1,
-        "harness|ko_mmlu_high_school_mathematics|5": 1,
-        "harness|ko_mmlu_high_school_physics|5": 1,
-        "harness|ko_mmlu_sociology|5": 1,
-        "harness|ko_mmlu_college_medicine|5": 1,
-        "harness|ko_mmlu_elementary_mathematics|5": 1,
-        "harness|ko_mmlu_college_biology|5": 1,
-        "harness|ko_mmlu_college_chemistry|5": 1,
-        "harness|ko_mmlu_us_foreign_policy|5": 1,
-        "harness|ko_mmlu_moral_disputes|5": 1,
-        "harness|ko_mmlu_logical_fallacies|5": 1,
-        "harness|ko_mmlu_prehistory|5": 1,
-        "harness|ko_mmlu_college_mathematics|5": 1,
-        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
-        "harness|ko_mmlu_econometrics|5": 1,
-        "harness|ko_mmlu_high_school_psychology|5": 1,
-        "harness|ko_mmlu_formal_logic|5": 1,
-        "harness|ko_mmlu_nutrition|5": 1,
-        "harness|ko_mmlu_business_ethics|5": 1,
-        "harness|ko_mmlu_international_law|5": 1,
-        "harness|ko_mmlu_astronomy|5": 1,
-        "harness|ko_mmlu_professional_psychology|5": 1,
-        "harness|ko_mmlu_professional_accounting|5": 1,
-        "harness|ko_mmlu_machine_learning|5": 1,
-        "harness|ko_mmlu_high_school_statistics|5": 1,
-        "harness|ko_mmlu_moral_scenarios|5": 1,
-        "harness|ko_mmlu_college_computer_science|5": 1,
-        "harness|ko_mmlu_high_school_computer_science|5": 1,
-        "harness|ko_mmlu_professional_medicine|5": 1,
-        "harness|ko_mmlu_security_studies|5": 1,
-        "harness|ko_mmlu_high_school_world_history|5": 1,
-        "harness|ko_mmlu_professional_law|5": 1,
-        "harness|ko_mmlu_high_school_us_history|5": 1,
-        "harness|ko_mmlu_high_school_european_history|5": 1,
-        "harness|ko_truthfulqa_mc|0": 0,
-        "harness|ko_commongen_v2|2": 1
-    },
-    "config_general": {
-        "model_name": "QuantumIntelligence/QI-neural-chat-7B-ko-DPO",
-        "model_sha": "f3fb87056e1937507f8c343da4ace31a1fbaaf68",
-        "model_dtype": "torch.float16",
-        "lighteval_sha": "",
-        "num_few_shot_default": 0,
-        "num_fewshot_seeds": 1,
-        "override_batch_size": 1,
-        "max_samples": null
-    }
-}
Qwen/Qwen1.5-14B-Chat/result_2024-05-13 18:15:27.json
DELETED
@@ -1,444 +0,0 @@
-{
-    "results": {
-        "harness|ko_arc_challenge|25": {
-            "acc": 0.37457337883959047,
-            "acc_stderr": 0.014144193471893452,
-            "acc_norm": 0.41638225255972694,
-            "acc_norm_stderr": 0.014405618279436172
-        },
-        "harness|ko_hellaswag|10": {
-            "acc": 0.38707428799044014,
-            "acc_stderr": 0.004860854240821965,
-            "acc_norm": 0.5027882891854212,
-            "acc_norm_stderr": 0.004989703824167097
-        },
-        "harness|ko_mmlu_world_religions|5": {
-            "acc": 0.6257309941520468,
-            "acc_stderr": 0.03711601185389481,
-            "acc_norm": 0.6257309941520468,
-            "acc_norm_stderr": 0.03711601185389481
-        },
-        "harness|ko_mmlu_management|5": {
-            "acc": 0.6796116504854369,
-            "acc_stderr": 0.04620284082280041,
-            "acc_norm": 0.6796116504854369,
-            "acc_norm_stderr": 0.04620284082280041
-        },
-        "harness|ko_mmlu_miscellaneous|5": {
-            "acc": 0.6130268199233716,
-            "acc_stderr": 0.017417138059440125,
-            "acc_norm": 0.6130268199233716,
-            "acc_norm_stderr": 0.017417138059440125
-        },
-        "harness|ko_mmlu_anatomy|5": {
-            "acc": 0.34814814814814815,
-            "acc_stderr": 0.041153246103369526,
-            "acc_norm": 0.34814814814814815,
-            "acc_norm_stderr": 0.041153246103369526
-        },
-        "harness|ko_mmlu_abstract_algebra|5": {
-            "acc": 0.23,
-            "acc_stderr": 0.04229525846816506,
-            "acc_norm": 0.23,
-            "acc_norm_stderr": 0.04229525846816506
-        },
-        "harness|ko_mmlu_conceptual_physics|5": {
-            "acc": 0.5404255319148936,
-            "acc_stderr": 0.032579014820998335,
-            "acc_norm": 0.5404255319148936,
-            "acc_norm_stderr": 0.032579014820998335
-        },
-        "harness|ko_mmlu_virology|5": {
-            "acc": 0.4036144578313253,
-            "acc_stderr": 0.03819486140758396,
-            "acc_norm": 0.4036144578313253,
-            "acc_norm_stderr": 0.03819486140758396
-        },
-        "harness|ko_mmlu_philosophy|5": {
-            "acc": 0.5594855305466238,
-            "acc_stderr": 0.028196400574197422,
-            "acc_norm": 0.5594855305466238,
-            "acc_norm_stderr": 0.028196400574197422
-        },
-        "harness|ko_mmlu_human_aging|5": {
-            "acc": 0.515695067264574,
-            "acc_stderr": 0.0335412657542081,
-            "acc_norm": 0.515695067264574,
-            "acc_norm_stderr": 0.0335412657542081
-        },
-        "harness|ko_mmlu_human_sexuality|5": {
-            "acc": 0.5801526717557252,
-            "acc_stderr": 0.04328577215262971,
-            "acc_norm": 0.5801526717557252,
-            "acc_norm_stderr": 0.04328577215262971
-        },
-        "harness|ko_mmlu_medical_genetics|5": {
-            "acc": 0.43,
-            "acc_stderr": 0.049756985195624284,
-            "acc_norm": 0.43,
-            "acc_norm_stderr": 0.049756985195624284
-        },
-        "harness|ko_mmlu_high_school_geography|5": {
-            "acc": 0.702020202020202,
-            "acc_stderr": 0.03258630383836556,
-            "acc_norm": 0.702020202020202,
-            "acc_norm_stderr": 0.03258630383836556
-        },
-        "harness|ko_mmlu_electrical_engineering|5": {
-            "acc": 0.5517241379310345,
-            "acc_stderr": 0.04144311810878152,
-            "acc_norm": 0.5517241379310345,
-            "acc_norm_stderr": 0.04144311810878152
-        },
-        "harness|ko_mmlu_college_physics|5": {
-            "acc": 0.37254901960784315,
-            "acc_stderr": 0.04810840148082636,
-            "acc_norm": 0.37254901960784315,
-            "acc_norm_stderr": 0.04810840148082636
-        },
-        "harness|ko_mmlu_high_school_microeconomics|5": {
-            "acc": 0.5798319327731093,
-            "acc_stderr": 0.032061837832361516,
-            "acc_norm": 0.5798319327731093,
-            "acc_norm_stderr": 0.032061837832361516
-        },
-        "harness|ko_mmlu_high_school_macroeconomics|5": {
-            "acc": 0.5717948717948718,
-            "acc_stderr": 0.025088301454694824,
-            "acc_norm": 0.5717948717948718,
-            "acc_norm_stderr": 0.025088301454694824
-        },
-        "harness|ko_mmlu_computer_security|5": {
-            "acc": 0.63,
-            "acc_stderr": 0.048523658709390974,
-            "acc_norm": 0.63,
-            "acc_norm_stderr": 0.048523658709390974
-        },
-        "harness|ko_mmlu_global_facts|5": {
-            "acc": 0.44,
-            "acc_stderr": 0.04988876515698589,
-            "acc_norm": 0.44,
-            "acc_norm_stderr": 0.04988876515698589
-        },
-        "harness|ko_mmlu_jurisprudence|5": {
-            "acc": 0.6388888888888888,
-            "acc_stderr": 0.04643454608906275,
-            "acc_norm": 0.6388888888888888,
-            "acc_norm_stderr": 0.04643454608906275
-        },
-        "harness|ko_mmlu_high_school_chemistry|5": {
-            "acc": 0.41379310344827586,
-            "acc_stderr": 0.03465304488406795,
-            "acc_norm": 0.41379310344827586,
-            "acc_norm_stderr": 0.03465304488406795
-        },
-        "harness|ko_mmlu_high_school_biology|5": {
-            "acc": 0.567741935483871,
-            "acc_stderr": 0.02818173972001941,
-            "acc_norm": 0.567741935483871,
-            "acc_norm_stderr": 0.02818173972001941
-        },
-        "harness|ko_mmlu_marketing|5": {
-            "acc": 0.7948717948717948,
-            "acc_stderr": 0.026453508054040332,
-            "acc_norm": 0.7948717948717948,
-            "acc_norm_stderr": 0.026453508054040332
-        },
-        "harness|ko_mmlu_clinical_knowledge|5": {
-            "acc": 0.5886792452830188,
-            "acc_stderr": 0.030285009259009787,
-            "acc_norm": 0.5886792452830188,
-            "acc_norm_stderr": 0.030285009259009787
-        },
-        "harness|ko_mmlu_public_relations|5": {
-            "acc": 0.6363636363636364,
-            "acc_stderr": 0.04607582090719976,
-            "acc_norm": 0.6363636363636364,
-            "acc_norm_stderr": 0.04607582090719976
-        },
-        "harness|ko_mmlu_high_school_mathematics|5": {
-            "acc": 0.37407407407407406,
-            "acc_stderr": 0.029502861128955293,
-            "acc_norm": 0.37407407407407406,
-            "acc_norm_stderr": 0.029502861128955293
-        },
-        "harness|ko_mmlu_high_school_physics|5": {
-            "acc": 0.3708609271523179,
-            "acc_stderr": 0.039439666991836285,
-            "acc_norm": 0.3708609271523179,
-            "acc_norm_stderr": 0.039439666991836285
-        },
-        "harness|ko_mmlu_sociology|5": {
-            "acc": 0.6666666666666666,
-            "acc_stderr": 0.03333333333333333,
-            "acc_norm": 0.6666666666666666,
-            "acc_norm_stderr": 0.03333333333333333
-        },
-        "harness|ko_mmlu_college_medicine|5": {
-            "acc": 0.5375722543352601,
-            "acc_stderr": 0.03801685104524458,
-            "acc_norm": 0.5375722543352601,
-            "acc_norm_stderr": 0.03801685104524458
-        },
-        "harness|ko_mmlu_elementary_mathematics|5": {
-            "acc": 0.4973544973544973,
-            "acc_stderr": 0.02575094967813038,
-            "acc_norm": 0.4973544973544973,
-            "acc_norm_stderr": 0.02575094967813038
-        },
-        "harness|ko_mmlu_college_biology|5": {
-            "acc": 0.4236111111111111,
-            "acc_stderr": 0.04132125019723368,
-            "acc_norm": 0.4236111111111111,
-            "acc_norm_stderr": 0.04132125019723368
-        },
-        "harness|ko_mmlu_college_chemistry|5": {
-            "acc": 0.38,
-            "acc_stderr": 0.048783173121456316,
-            "acc_norm": 0.38,
-            "acc_norm_stderr": 0.048783173121456316
-        },
-        "harness|ko_mmlu_us_foreign_policy|5": {
-            "acc": 0.65,
-            "acc_stderr": 0.04793724854411019,
-            "acc_norm": 0.65,
-            "acc_norm_stderr": 0.04793724854411019
-        },
-        "harness|ko_mmlu_moral_disputes|5": {
-            "acc": 0.6011560693641619,
-            "acc_stderr": 0.026362437574546545,
-            "acc_norm": 0.6011560693641619,
-            "acc_norm_stderr": 0.026362437574546545
-        },
-        "harness|ko_mmlu_logical_fallacies|5": {
-            "acc": 0.5214723926380368,
-            "acc_stderr": 0.03924746876751129,
-            "acc_norm": 0.5214723926380368,
-            "acc_norm_stderr": 0.03924746876751129
-        },
-        "harness|ko_mmlu_prehistory|5": {
-            "acc": 0.5524691358024691,
-            "acc_stderr": 0.027667138569422708,
-            "acc_norm": 0.5524691358024691,
-            "acc_norm_stderr": 0.027667138569422708
-        },
-        "harness|ko_mmlu_college_mathematics|5": {
-            "acc": 0.34,
-            "acc_stderr": 0.047609522856952365,
-            "acc_norm": 0.34,
-            "acc_norm_stderr": 0.047609522856952365
-        },
-        "harness|ko_mmlu_high_school_government_and_politics|5": {
-            "acc": 0.6321243523316062,
-            "acc_stderr": 0.034801756684660366,
-            "acc_norm": 0.6321243523316062,
-            "acc_norm_stderr": 0.034801756684660366
-        },
-        "harness|ko_mmlu_econometrics|5": {
-            "acc": 0.4473684210526316,
-            "acc_stderr": 0.046774730044912,
-            "acc_norm": 0.4473684210526316,
-            "acc_norm_stderr": 0.046774730044912
-        },
-        "harness|ko_mmlu_high_school_psychology|5": {
-            "acc": 0.6440366972477064,
-            "acc_stderr": 0.020528559278244214,
-            "acc_norm": 0.6440366972477064,
-            "acc_norm_stderr": 0.020528559278244214
-        },
-        "harness|ko_mmlu_formal_logic|5": {
-            "acc": 0.47619047619047616,
-            "acc_stderr": 0.04467062628403273,
-            "acc_norm": 0.47619047619047616,
-            "acc_norm_stderr": 0.04467062628403273
-        },
-        "harness|ko_mmlu_nutrition|5": {
-            "acc": 0.5751633986928104,
-            "acc_stderr": 0.02830457667314111,
-            "acc_norm": 0.5751633986928104,
-            "acc_norm_stderr": 0.02830457667314111
-        },
-        "harness|ko_mmlu_business_ethics|5": {
-            "acc": 0.61,
-            "acc_stderr": 0.049020713000019756,
-            "acc_norm": 0.61,
-            "acc_norm_stderr": 0.049020713000019756
-        },
-        "harness|ko_mmlu_international_law|5": {
-            "acc": 0.7272727272727273,
-            "acc_stderr": 0.040655781409087044,
-            "acc_norm": 0.7272727272727273,
-            "acc_norm_stderr": 0.040655781409087044
-        },
-        "harness|ko_mmlu_astronomy|5": {
-            "acc": 0.6447368421052632,
-            "acc_stderr": 0.038947344870133176,
-            "acc_norm": 0.6447368421052632,
-            "acc_norm_stderr": 0.038947344870133176
-        },
-        "harness|ko_mmlu_professional_psychology|5": {
-            "acc": 0.5098039215686274,
-            "acc_stderr": 0.0202239460050743,
-            "acc_norm": 0.5098039215686274,
-            "acc_norm_stderr": 0.0202239460050743
-        },
-        "harness|ko_mmlu_professional_accounting|5": {
-            "acc": 0.375886524822695,
-            "acc_stderr": 0.028893955412115882,
-            "acc_norm": 0.375886524822695,
-            "acc_norm_stderr": 0.028893955412115882
-        },
-        "harness|ko_mmlu_machine_learning|5": {
-            "acc": 0.4732142857142857,
-            "acc_stderr": 0.04738975119274153,
-            "acc_norm": 0.4732142857142857,
-            "acc_norm_stderr": 0.04738975119274153
-        },
-        "harness|ko_mmlu_high_school_statistics|5": {
-            "acc": 0.44907407407407407,
-            "acc_stderr": 0.03392238405321617,
-            "acc_norm": 0.44907407407407407,
-            "acc_norm_stderr": 0.03392238405321617
-        },
-        "harness|ko_mmlu_moral_scenarios|5": {
-            "acc": 0.3418994413407821,
-            "acc_stderr": 0.015864506461604654,
-            "acc_norm": 0.3418994413407821,
-            "acc_norm_stderr": 0.015864506461604654
-        },
-        "harness|ko_mmlu_college_computer_science|5": {
-            "acc": 0.47,
-            "acc_stderr": 0.050161355804659205,
-            "acc_norm": 0.47,
-            "acc_norm_stderr": 0.050161355804659205
-        },
-        "harness|ko_mmlu_high_school_computer_science|5": {
-            "acc": 0.7,
-            "acc_stderr": 0.046056618647183814,
-            "acc_norm": 0.7,
-            "acc_norm_stderr": 0.046056618647183814
-        },
-        "harness|ko_mmlu_professional_medicine|5": {
-            "acc": 0.4485294117647059,
-            "acc_stderr": 0.030211479609121596,
-            "acc_norm": 0.4485294117647059,
-            "acc_norm_stderr": 0.030211479609121596
-        },
-        "harness|ko_mmlu_security_studies|5": {
-            "acc": 0.6285714285714286,
-            "acc_stderr": 0.03093285879278985,
-            "acc_norm": 0.6285714285714286,
-            "acc_norm_stderr": 0.03093285879278985
-        },
-        "harness|ko_mmlu_high_school_world_history|5": {
-            "acc": 0.7172995780590717,
-            "acc_stderr": 0.029312814153955934,
-            "acc_norm": 0.7172995780590717,
-            "acc_norm_stderr": 0.029312814153955934
-        },
-        "harness|ko_mmlu_professional_law|5": {
-            "acc": 0.36962190352020863,
-            "acc_stderr": 0.01232844577857526,
-            "acc_norm": 0.36962190352020863,
-            "acc_norm_stderr": 0.01232844577857526
-        },
-        "harness|ko_mmlu_high_school_us_history|5": {
-            "acc": 0.6470588235294118,
-            "acc_stderr": 0.03354092437591519,
-            "acc_norm": 0.6470588235294118,
-            "acc_norm_stderr": 0.03354092437591519
-        },
-        "harness|ko_mmlu_high_school_european_history|5": {
-            "acc": 0.6424242424242425,
-            "acc_stderr": 0.03742597043806587,
-            "acc_norm": 0.6424242424242425,
-            "acc_norm_stderr": 0.03742597043806587
-        },
-        "harness|ko_truthfulqa_mc|0": {
-            "mc1": 0.3843329253365973,
-            "mc1_stderr": 0.0170287073012452,
-            "mc2": 0.5573073600753922,
-            "mc2_stderr": 0.016173107129410107
-        },
-        "harness|ko_commongen_v2|2": {
-            "acc": 0.5360094451003542,
-            "acc_stderr": 0.017145715365486657,
-            "acc_norm": 0.5749704840613932,
-            "acc_norm_stderr": 0.016996016308362887
-        }
-    },
-    "versions": {
-        "all": 0,
-        "harness|ko_arc_challenge|25": 0,
-        "harness|ko_hellaswag|10": 0,
-        "harness|ko_mmlu_world_religions|5": 1,
-
"harness|ko_mmlu_management|5": 1,
|
376 |
-
"harness|ko_mmlu_miscellaneous|5": 1,
|
377 |
-
"harness|ko_mmlu_anatomy|5": 1,
|
378 |
-
"harness|ko_mmlu_abstract_algebra|5": 1,
|
379 |
-
"harness|ko_mmlu_conceptual_physics|5": 1,
|
380 |
-
"harness|ko_mmlu_virology|5": 1,
|
381 |
-
"harness|ko_mmlu_philosophy|5": 1,
|
382 |
-
"harness|ko_mmlu_human_aging|5": 1,
|
383 |
-
"harness|ko_mmlu_human_sexuality|5": 1,
|
384 |
-
"harness|ko_mmlu_medical_genetics|5": 1,
|
385 |
-
"harness|ko_mmlu_high_school_geography|5": 1,
|
386 |
-
"harness|ko_mmlu_electrical_engineering|5": 1,
|
387 |
-
"harness|ko_mmlu_college_physics|5": 1,
|
388 |
-
"harness|ko_mmlu_high_school_microeconomics|5": 1,
|
389 |
-
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
|
390 |
-
"harness|ko_mmlu_computer_security|5": 1,
|
391 |
-
"harness|ko_mmlu_global_facts|5": 1,
|
392 |
-
"harness|ko_mmlu_jurisprudence|5": 1,
|
393 |
-
"harness|ko_mmlu_high_school_chemistry|5": 1,
|
394 |
-
"harness|ko_mmlu_high_school_biology|5": 1,
|
395 |
-
"harness|ko_mmlu_marketing|5": 1,
|
396 |
-
"harness|ko_mmlu_clinical_knowledge|5": 1,
|
397 |
-
"harness|ko_mmlu_public_relations|5": 1,
|
398 |
-
"harness|ko_mmlu_high_school_mathematics|5": 1,
|
399 |
-
"harness|ko_mmlu_high_school_physics|5": 1,
|
400 |
-
"harness|ko_mmlu_sociology|5": 1,
|
401 |
-
"harness|ko_mmlu_college_medicine|5": 1,
|
402 |
-
"harness|ko_mmlu_elementary_mathematics|5": 1,
|
403 |
-
"harness|ko_mmlu_college_biology|5": 1,
|
404 |
-
"harness|ko_mmlu_college_chemistry|5": 1,
|
405 |
-
"harness|ko_mmlu_us_foreign_policy|5": 1,
|
406 |
-
"harness|ko_mmlu_moral_disputes|5": 1,
|
407 |
-
"harness|ko_mmlu_logical_fallacies|5": 1,
|
408 |
-
"harness|ko_mmlu_prehistory|5": 1,
|
409 |
-
"harness|ko_mmlu_college_mathematics|5": 1,
|
410 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
|
411 |
-
"harness|ko_mmlu_econometrics|5": 1,
|
412 |
-
"harness|ko_mmlu_high_school_psychology|5": 1,
|
413 |
-
"harness|ko_mmlu_formal_logic|5": 1,
|
414 |
-
"harness|ko_mmlu_nutrition|5": 1,
|
415 |
-
"harness|ko_mmlu_business_ethics|5": 1,
|
416 |
-
"harness|ko_mmlu_international_law|5": 1,
|
417 |
-
"harness|ko_mmlu_astronomy|5": 1,
|
418 |
-
"harness|ko_mmlu_professional_psychology|5": 1,
|
419 |
-
"harness|ko_mmlu_professional_accounting|5": 1,
|
420 |
-
"harness|ko_mmlu_machine_learning|5": 1,
|
421 |
-
"harness|ko_mmlu_high_school_statistics|5": 1,
|
422 |
-
"harness|ko_mmlu_moral_scenarios|5": 1,
|
423 |
-
"harness|ko_mmlu_college_computer_science|5": 1,
|
424 |
-
"harness|ko_mmlu_high_school_computer_science|5": 1,
|
425 |
-
"harness|ko_mmlu_professional_medicine|5": 1,
|
426 |
-
"harness|ko_mmlu_security_studies|5": 1,
|
427 |
-
"harness|ko_mmlu_high_school_world_history|5": 1,
|
428 |
-
"harness|ko_mmlu_professional_law|5": 1,
|
429 |
-
"harness|ko_mmlu_high_school_us_history|5": 1,
|
430 |
-
"harness|ko_mmlu_high_school_european_history|5": 1,
|
431 |
-
"harness|ko_truthfulqa_mc|0": 0,
|
432 |
-
"harness|ko_commongen_v2|2": 1
|
433 |
-
},
|
434 |
-
"config_general": {
|
435 |
-
"model_name": "Qwen/Qwen1.5-14B-Chat",
|
436 |
-
"model_sha": "9492b22871f43e975435455f5c616c77fe7a50ec",
|
437 |
-
"model_dtype": "torch.float16",
|
438 |
-
"lighteval_sha": "",
|
439 |
-
"num_few_shot_default": 0,
|
440 |
-
"num_fewshot_seeds": 1,
|
441 |
-
"override_batch_size": 1,
|
442 |
-
"max_samples": null
|
443 |
-
}
|
444 |
-
}
Qwen/Qwen1.5-14B/result_2024-05-13 18:15:41.json
DELETED
@@ -1,444 +0,0 @@
-{
-    "results": {
-        "harness|ko_arc_challenge|25": {
-            "acc": 0.34726962457337884,
-            "acc_stderr": 0.013913034529620437,
-            "acc_norm": 0.3890784982935154,
-            "acc_norm_stderr": 0.014247309976045605
-        },
-        "harness|ko_hellaswag|10": {
-            "acc": 0.37343158733320053,
-            "acc_stderr": 0.004827266662144028,
-            "acc_norm": 0.49203345947022503,
-            "acc_norm_stderr": 0.0049891480106251185
-        },
-        "harness|ko_mmlu_world_religions|5": {
-            "acc": 0.6198830409356725,
-            "acc_stderr": 0.037229657413855394,
-            "acc_norm": 0.6198830409356725,
-            "acc_norm_stderr": 0.037229657413855394
-        },
-        "harness|ko_mmlu_management|5": {
-            "acc": 0.6796116504854369,
-            "acc_stderr": 0.04620284082280042,
-            "acc_norm": 0.6796116504854369,
-            "acc_norm_stderr": 0.04620284082280042
-        },
-        "harness|ko_mmlu_miscellaneous|5": {
-            "acc": 0.6040868454661558,
-            "acc_stderr": 0.017488247006979266,
-            "acc_norm": 0.6040868454661558,
-            "acc_norm_stderr": 0.017488247006979266
-        },
-        "harness|ko_mmlu_anatomy|5": {
-            "acc": 0.35555555555555557,
-            "acc_stderr": 0.04135176749720386,
-            "acc_norm": 0.35555555555555557,
-            "acc_norm_stderr": 0.04135176749720386
-        },
-        "harness|ko_mmlu_abstract_algebra|5": {
-            "acc": 0.31,
-            "acc_stderr": 0.04648231987117316,
-            "acc_norm": 0.31,
-            "acc_norm_stderr": 0.04648231987117316
-        },
-        "harness|ko_mmlu_conceptual_physics|5": {
-            "acc": 0.6,
-            "acc_stderr": 0.032025630761017373,
-            "acc_norm": 0.6,
-            "acc_norm_stderr": 0.032025630761017373
-        },
-        "harness|ko_mmlu_virology|5": {
-            "acc": 0.41566265060240964,
-            "acc_stderr": 0.038367221765980515,
-            "acc_norm": 0.41566265060240964,
-            "acc_norm_stderr": 0.038367221765980515
-        },
-        "harness|ko_mmlu_philosophy|5": {
-            "acc": 0.5691318327974276,
-            "acc_stderr": 0.028125340983972714,
-            "acc_norm": 0.5691318327974276,
-            "acc_norm_stderr": 0.028125340983972714
-        },
-        "harness|ko_mmlu_human_aging|5": {
-            "acc": 0.5336322869955157,
-            "acc_stderr": 0.033481800170603065,
-            "acc_norm": 0.5336322869955157,
-            "acc_norm_stderr": 0.033481800170603065
-        },
-        "harness|ko_mmlu_human_sexuality|5": {
-            "acc": 0.5877862595419847,
-            "acc_stderr": 0.04317171194870254,
-            "acc_norm": 0.5877862595419847,
-            "acc_norm_stderr": 0.04317171194870254
-        },
-        "harness|ko_mmlu_medical_genetics|5": {
-            "acc": 0.46,
-            "acc_stderr": 0.05009082659620332,
-            "acc_norm": 0.46,
-            "acc_norm_stderr": 0.05009082659620332
-        },
-        "harness|ko_mmlu_high_school_geography|5": {
-            "acc": 0.7222222222222222,
-            "acc_stderr": 0.031911782267135445,
-            "acc_norm": 0.7222222222222222,
-            "acc_norm_stderr": 0.031911782267135445
-        },
-        "harness|ko_mmlu_electrical_engineering|5": {
-            "acc": 0.5448275862068965,
-            "acc_stderr": 0.04149886942192118,
-            "acc_norm": 0.5448275862068965,
-            "acc_norm_stderr": 0.04149886942192118
-        },
-        "harness|ko_mmlu_college_physics|5": {
-            "acc": 0.3235294117647059,
-            "acc_stderr": 0.04655010411319616,
-            "acc_norm": 0.3235294117647059,
-            "acc_norm_stderr": 0.04655010411319616
-        },
-        "harness|ko_mmlu_high_school_microeconomics|5": {
-            "acc": 0.6092436974789915,
-            "acc_stderr": 0.03169380235712997,
-            "acc_norm": 0.6092436974789915,
-            "acc_norm_stderr": 0.03169380235712997
-        },
-        "harness|ko_mmlu_high_school_macroeconomics|5": {
-            "acc": 0.5717948717948718,
-            "acc_stderr": 0.025088301454694824,
-            "acc_norm": 0.5717948717948718,
-            "acc_norm_stderr": 0.025088301454694824
-        },
-        "harness|ko_mmlu_computer_security|5": {
-            "acc": 0.66,
-            "acc_stderr": 0.04760952285695237,
-            "acc_norm": 0.66,
-            "acc_norm_stderr": 0.04760952285695237
-        },
-        "harness|ko_mmlu_global_facts|5": {
-            "acc": 0.38,
-            "acc_stderr": 0.048783173121456316,
-            "acc_norm": 0.38,
-            "acc_norm_stderr": 0.048783173121456316
-        },
-        "harness|ko_mmlu_jurisprudence|5": {
-            "acc": 0.6203703703703703,
-            "acc_stderr": 0.04691521224077742,
-            "acc_norm": 0.6203703703703703,
-            "acc_norm_stderr": 0.04691521224077742
-        },
-        "harness|ko_mmlu_high_school_chemistry|5": {
-            "acc": 0.4729064039408867,
-            "acc_stderr": 0.03512819077876106,
-            "acc_norm": 0.4729064039408867,
-            "acc_norm_stderr": 0.03512819077876106
-        },
-        "harness|ko_mmlu_high_school_biology|5": {
-            "acc": 0.6258064516129033,
-            "acc_stderr": 0.027528904299845697,
-            "acc_norm": 0.6258064516129033,
-            "acc_norm_stderr": 0.027528904299845697
-        },
-        "harness|ko_mmlu_marketing|5": {
-            "acc": 0.7905982905982906,
-            "acc_stderr": 0.026655699653922737,
-            "acc_norm": 0.7905982905982906,
-            "acc_norm_stderr": 0.026655699653922737
-        },
-        "harness|ko_mmlu_clinical_knowledge|5": {
-            "acc": 0.5471698113207547,
-            "acc_stderr": 0.03063562795796182,
-            "acc_norm": 0.5471698113207547,
-            "acc_norm_stderr": 0.03063562795796182
-        },
-        "harness|ko_mmlu_public_relations|5": {
-            "acc": 0.6181818181818182,
-            "acc_stderr": 0.046534298079135075,
-            "acc_norm": 0.6181818181818182,
-            "acc_norm_stderr": 0.046534298079135075
-        },
-        "harness|ko_mmlu_high_school_mathematics|5": {
-            "acc": 0.3851851851851852,
-            "acc_stderr": 0.02967090612463088,
-            "acc_norm": 0.3851851851851852,
-            "acc_norm_stderr": 0.02967090612463088
-        },
-        "harness|ko_mmlu_high_school_physics|5": {
-            "acc": 0.3576158940397351,
-            "acc_stderr": 0.03913453431177258,
-            "acc_norm": 0.3576158940397351,
-            "acc_norm_stderr": 0.03913453431177258
-        },
-        "harness|ko_mmlu_sociology|5": {
-            "acc": 0.6865671641791045,
-            "acc_stderr": 0.03280188205348642,
-            "acc_norm": 0.6865671641791045,
-            "acc_norm_stderr": 0.03280188205348642
-        },
-        "harness|ko_mmlu_college_medicine|5": {
-            "acc": 0.5433526011560693,
-            "acc_stderr": 0.03798106566014498,
-            "acc_norm": 0.5433526011560693,
-            "acc_norm_stderr": 0.03798106566014498
-        },
-        "harness|ko_mmlu_elementary_mathematics|5": {
-            "acc": 0.49206349206349204,
-            "acc_stderr": 0.025748065871673297,
-            "acc_norm": 0.49206349206349204,
-            "acc_norm_stderr": 0.025748065871673297
-        },
-        "harness|ko_mmlu_college_biology|5": {
-            "acc": 0.4722222222222222,
-            "acc_stderr": 0.04174752578923185,
-            "acc_norm": 0.4722222222222222,
-            "acc_norm_stderr": 0.04174752578923185
-        },
-        "harness|ko_mmlu_college_chemistry|5": {
-            "acc": 0.43,
-            "acc_stderr": 0.049756985195624284,
-            "acc_norm": 0.43,
-            "acc_norm_stderr": 0.049756985195624284
-        },
-        "harness|ko_mmlu_us_foreign_policy|5": {
-            "acc": 0.71,
-            "acc_stderr": 0.04560480215720684,
-            "acc_norm": 0.71,
-            "acc_norm_stderr": 0.04560480215720684
-        },
-        "harness|ko_mmlu_moral_disputes|5": {
-            "acc": 0.6127167630057804,
-            "acc_stderr": 0.02622615860512465,
-            "acc_norm": 0.6127167630057804,
-            "acc_norm_stderr": 0.02622615860512465
-        },
-        "harness|ko_mmlu_logical_fallacies|5": {
-            "acc": 0.558282208588957,
-            "acc_stderr": 0.03901591825836184,
-            "acc_norm": 0.558282208588957,
-            "acc_norm_stderr": 0.03901591825836184
-        },
-        "harness|ko_mmlu_prehistory|5": {
-            "acc": 0.5216049382716049,
-            "acc_stderr": 0.02779476010500874,
-            "acc_norm": 0.5216049382716049,
-            "acc_norm_stderr": 0.02779476010500874
-        },
-        "harness|ko_mmlu_college_mathematics|5": {
-            "acc": 0.39,
-            "acc_stderr": 0.04902071300001975,
-            "acc_norm": 0.39,
-            "acc_norm_stderr": 0.04902071300001975
-        },
-        "harness|ko_mmlu_high_school_government_and_politics|5": {
-            "acc": 0.6321243523316062,
-            "acc_stderr": 0.034801756684660366,
-            "acc_norm": 0.6321243523316062,
-            "acc_norm_stderr": 0.034801756684660366
-        },
-        "harness|ko_mmlu_econometrics|5": {
-            "acc": 0.39473684210526316,
-            "acc_stderr": 0.04598188057816542,
-            "acc_norm": 0.39473684210526316,
-            "acc_norm_stderr": 0.04598188057816542
-        },
-        "harness|ko_mmlu_high_school_psychology|5": {
-            "acc": 0.673394495412844,
-            "acc_stderr": 0.020106990889937303,
-            "acc_norm": 0.673394495412844,
-            "acc_norm_stderr": 0.020106990889937303
-        },
-        "harness|ko_mmlu_formal_logic|5": {
-            "acc": 0.4603174603174603,
-            "acc_stderr": 0.04458029125470973,
-            "acc_norm": 0.4603174603174603,
-            "acc_norm_stderr": 0.04458029125470973
-        },
-        "harness|ko_mmlu_nutrition|5": {
-            "acc": 0.6013071895424836,
-            "acc_stderr": 0.028036092273891776,
-            "acc_norm": 0.6013071895424836,
-            "acc_norm_stderr": 0.028036092273891776
-        },
-        "harness|ko_mmlu_business_ethics|5": {
-            "acc": 0.62,
-            "acc_stderr": 0.04878317312145632,
-            "acc_norm": 0.62,
-            "acc_norm_stderr": 0.04878317312145632
-        },
-        "harness|ko_mmlu_international_law|5": {
-            "acc": 0.768595041322314,
-            "acc_stderr": 0.03849856098794089,
-            "acc_norm": 0.768595041322314,
-            "acc_norm_stderr": 0.03849856098794089
-        },
-        "harness|ko_mmlu_astronomy|5": {
-            "acc": 0.5855263157894737,
-            "acc_stderr": 0.04008973785779205,
-            "acc_norm": 0.5855263157894737,
-            "acc_norm_stderr": 0.04008973785779205
-        },
-        "harness|ko_mmlu_professional_psychology|5": {
-            "acc": 0.4869281045751634,
-            "acc_stderr": 0.020220920829626916,
-            "acc_norm": 0.4869281045751634,
-            "acc_norm_stderr": 0.020220920829626916
-        },
-        "harness|ko_mmlu_professional_accounting|5": {
-            "acc": 0.3829787234042553,
-            "acc_stderr": 0.02899908090480618,
-            "acc_norm": 0.3829787234042553,
-            "acc_norm_stderr": 0.02899908090480618
-        },
-        "harness|ko_mmlu_machine_learning|5": {
-            "acc": 0.4642857142857143,
-            "acc_stderr": 0.04733667890053757,
-            "acc_norm": 0.4642857142857143,
-            "acc_norm_stderr": 0.04733667890053757
-        },
-        "harness|ko_mmlu_high_school_statistics|5": {
-            "acc": 0.5046296296296297,
-            "acc_stderr": 0.03409825519163572,
-            "acc_norm": 0.5046296296296297,
-            "acc_norm_stderr": 0.03409825519163572
-        },
-        "harness|ko_mmlu_moral_scenarios|5": {
-            "acc": 0.2759776536312849,
-            "acc_stderr": 0.014950103002475363,
-            "acc_norm": 0.2759776536312849,
-            "acc_norm_stderr": 0.014950103002475363
-        },
-        "harness|ko_mmlu_college_computer_science|5": {
-            "acc": 0.43,
-            "acc_stderr": 0.049756985195624284,
-            "acc_norm": 0.43,
-            "acc_norm_stderr": 0.049756985195624284
-        },
-        "harness|ko_mmlu_high_school_computer_science|5": {
-            "acc": 0.72,
-            "acc_stderr": 0.045126085985421296,
-            "acc_norm": 0.72,
-            "acc_norm_stderr": 0.045126085985421296
-        },
-        "harness|ko_mmlu_professional_medicine|5": {
-            "acc": 0.47058823529411764,
-            "acc_stderr": 0.030320243265004137,
-            "acc_norm": 0.47058823529411764,
-            "acc_norm_stderr": 0.030320243265004137
-        },
-        "harness|ko_mmlu_security_studies|5": {
-            "acc": 0.6693877551020408,
-            "acc_stderr": 0.03011642629654061,
-            "acc_norm": 0.6693877551020408,
-            "acc_norm_stderr": 0.03011642629654061
-        },
-        "harness|ko_mmlu_high_school_world_history|5": {
-            "acc": 0.7215189873417721,
-            "acc_stderr": 0.02917868230484252,
-            "acc_norm": 0.7215189873417721,
-            "acc_norm_stderr": 0.02917868230484252
-        },
-        "harness|ko_mmlu_professional_law|5": {
-            "acc": 0.3767926988265971,
-            "acc_stderr": 0.012376459593894402,
-            "acc_norm": 0.3767926988265971,
-            "acc_norm_stderr": 0.012376459593894402
-        },
-        "harness|ko_mmlu_high_school_us_history|5": {
-            "acc": 0.6225490196078431,
-            "acc_stderr": 0.03402272044340703,
-            "acc_norm": 0.6225490196078431,
-            "acc_norm_stderr": 0.03402272044340703
-        },
-        "harness|ko_mmlu_high_school_european_history|5": {
-            "acc": 0.696969696969697,
-            "acc_stderr": 0.035886248000917075,
-            "acc_norm": 0.696969696969697,
-            "acc_norm_stderr": 0.035886248000917075
-        },
-        "harness|ko_truthfulqa_mc|0": {
-            "mc1": 0.3292533659730722,
-            "mc1_stderr": 0.016451264440068246,
-            "mc2": 0.48890432930564176,
-            "mc2_stderr": 0.01607466925683564
-        },
-        "harness|ko_commongen_v2|2": {
-            "acc": 0.5088547815820543,
-            "acc_stderr": 0.017187658199336736,
-            "acc_norm": 0.6092089728453365,
-            "acc_norm_stderr": 0.016775298465108255
-        }
-    },
-    "versions": {
-        "all": 0,
-        "harness|ko_arc_challenge|25": 0,
-        "harness|ko_hellaswag|10": 0,
-        "harness|ko_mmlu_world_religions|5": 1,
-        "harness|ko_mmlu_management|5": 1,
-        "harness|ko_mmlu_miscellaneous|5": 1,
-        "harness|ko_mmlu_anatomy|5": 1,
-        "harness|ko_mmlu_abstract_algebra|5": 1,
-        "harness|ko_mmlu_conceptual_physics|5": 1,
-        "harness|ko_mmlu_virology|5": 1,
-        "harness|ko_mmlu_philosophy|5": 1,
-        "harness|ko_mmlu_human_aging|5": 1,
-        "harness|ko_mmlu_human_sexuality|5": 1,
-        "harness|ko_mmlu_medical_genetics|5": 1,
-        "harness|ko_mmlu_high_school_geography|5": 1,
-        "harness|ko_mmlu_electrical_engineering|5": 1,
-        "harness|ko_mmlu_college_physics|5": 1,
-        "harness|ko_mmlu_high_school_microeconomics|5": 1,
-        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
-        "harness|ko_mmlu_computer_security|5": 1,
-        "harness|ko_mmlu_global_facts|5": 1,
-        "harness|ko_mmlu_jurisprudence|5": 1,
-        "harness|ko_mmlu_high_school_chemistry|5": 1,
-        "harness|ko_mmlu_high_school_biology|5": 1,
-        "harness|ko_mmlu_marketing|5": 1,
-        "harness|ko_mmlu_clinical_knowledge|5": 1,
-        "harness|ko_mmlu_public_relations|5": 1,
-        "harness|ko_mmlu_high_school_mathematics|5": 1,
-        "harness|ko_mmlu_high_school_physics|5": 1,
-        "harness|ko_mmlu_sociology|5": 1,
-        "harness|ko_mmlu_college_medicine|5": 1,
-        "harness|ko_mmlu_elementary_mathematics|5": 1,
-        "harness|ko_mmlu_college_biology|5": 1,
-        "harness|ko_mmlu_college_chemistry|5": 1,
-        "harness|ko_mmlu_us_foreign_policy|5": 1,
-        "harness|ko_mmlu_moral_disputes|5": 1,
-        "harness|ko_mmlu_logical_fallacies|5": 1,
-        "harness|ko_mmlu_prehistory|5": 1,
-        "harness|ko_mmlu_college_mathematics|5": 1,
-        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
-        "harness|ko_mmlu_econometrics|5": 1,
-        "harness|ko_mmlu_high_school_psychology|5": 1,
-        "harness|ko_mmlu_formal_logic|5": 1,
-        "harness|ko_mmlu_nutrition|5": 1,
-        "harness|ko_mmlu_business_ethics|5": 1,
-        "harness|ko_mmlu_international_law|5": 1,
-        "harness|ko_mmlu_astronomy|5": 1,
-        "harness|ko_mmlu_professional_psychology|5": 1,
-        "harness|ko_mmlu_professional_accounting|5": 1,
-        "harness|ko_mmlu_machine_learning|5": 1,
-        "harness|ko_mmlu_high_school_statistics|5": 1,
-        "harness|ko_mmlu_moral_scenarios|5": 1,
-        "harness|ko_mmlu_college_computer_science|5": 1,
-        "harness|ko_mmlu_high_school_computer_science|5": 1,
-        "harness|ko_mmlu_professional_medicine|5": 1,
-        "harness|ko_mmlu_security_studies|5": 1,
-        "harness|ko_mmlu_high_school_world_history|5": 1,
-        "harness|ko_mmlu_professional_law|5": 1,
-        "harness|ko_mmlu_high_school_us_history|5": 1,
-        "harness|ko_mmlu_high_school_european_history|5": 1,
-        "harness|ko_truthfulqa_mc|0": 0,
-        "harness|ko_commongen_v2|2": 1
-    },
-    "config_general": {
-        "model_name": "Qwen/Qwen1.5-14B",
-        "model_sha": "dce4b190d34470818e5bec2a92cb8233aaa02ca2",
-        "model_dtype": "torch.float16",
-        "lighteval_sha": "",
-        "num_few_shot_default": 0,
-        "num_fewshot_seeds": 1,
-        "override_batch_size": 1,
-        "max_samples": null
-    }
-}
Qwen/Qwen2-7B-Instruct/result_2024-06-06 17:19:27.json
DELETED
@@ -1,444 +0,0 @@
-{
-    "results": {
-        "harness|ko_arc_challenge|25": {
-            "acc": 0.3924914675767918,
-            "acc_stderr": 0.014269634635670712,
-            "acc_norm": 0.4522184300341297,
-            "acc_norm_stderr": 0.014544519880633839
-        },
-        "harness|ko_hellaswag|10": {
-            "acc": 0.2504481179047998,
-            "acc_stderr": 0.004323856300539177,
-            "acc_norm": 0.2504481179047998,
-            "acc_norm_stderr": 0.004323856300539177
-        },
-        "harness|ko_mmlu_world_religions|5": {
-            "acc": 0.6783625730994152,
-            "acc_stderr": 0.03582529442573122,
-            "acc_norm": 0.6783625730994152,
-            "acc_norm_stderr": 0.03582529442573122
-        },
-        "harness|ko_mmlu_management|5": {
-            "acc": 0.8058252427184466,
-            "acc_stderr": 0.03916667762822585,
-            "acc_norm": 0.8058252427184466,
-            "acc_norm_stderr": 0.03916667762822585
-        },
-        "harness|ko_mmlu_miscellaneous|5": {
-            "acc": 0.6590038314176245,
-            "acc_stderr": 0.01695178138322332,
-            "acc_norm": 0.6590038314176245,
-            "acc_norm_stderr": 0.01695178138322332
-        },
-        "harness|ko_mmlu_anatomy|5": {
-            "acc": 0.37037037037037035,
-            "acc_stderr": 0.04171654161354544,
-            "acc_norm": 0.37037037037037035,
-            "acc_norm_stderr": 0.04171654161354544
-        },
-        "harness|ko_mmlu_abstract_algebra|5": {
-            "acc": 0.38,
-            "acc_stderr": 0.04878317312145633,
-            "acc_norm": 0.38,
-            "acc_norm_stderr": 0.04878317312145633
-        },
-        "harness|ko_mmlu_conceptual_physics|5": {
-            "acc": 0.6127659574468085,
-            "acc_stderr": 0.03184389265339525,
-            "acc_norm": 0.6127659574468085,
-            "acc_norm_stderr": 0.03184389265339525
-        },
-        "harness|ko_mmlu_virology|5": {
-            "acc": 0.4578313253012048,
-            "acc_stderr": 0.03878626771002361,
-            "acc_norm": 0.4578313253012048,
-            "acc_norm_stderr": 0.03878626771002361
-        },
-        "harness|ko_mmlu_philosophy|5": {
-            "acc": 0.6366559485530546,
-            "acc_stderr": 0.027316847674192714,
-            "acc_norm": 0.6366559485530546,
-            "acc_norm_stderr": 0.027316847674192714
-        },
-        "harness|ko_mmlu_human_aging|5": {
-            "acc": 0.6053811659192825,
-            "acc_stderr": 0.03280400504755291,
-            "acc_norm": 0.6053811659192825,
-            "acc_norm_stderr": 0.03280400504755291
-        },
-        "harness|ko_mmlu_human_sexuality|5": {
-            "acc": 0.6183206106870229,
-            "acc_stderr": 0.0426073515764456,
-            "acc_norm": 0.6183206106870229,
-            "acc_norm_stderr": 0.0426073515764456
-        },
-        "harness|ko_mmlu_medical_genetics|5": {
-            "acc": 0.57,
-            "acc_stderr": 0.04975698519562428,
-            "acc_norm": 0.57,
-            "acc_norm_stderr": 0.04975698519562428
-        },
-        "harness|ko_mmlu_high_school_geography|5": {
-            "acc": 0.7222222222222222,
-            "acc_stderr": 0.03191178226713547,
-            "acc_norm": 0.7222222222222222,
-            "acc_norm_stderr": 0.03191178226713547
-        },
-        "harness|ko_mmlu_electrical_engineering|5": {
-            "acc": 0.6275862068965518,
-            "acc_stderr": 0.0402873153294756,
-            "acc_norm": 0.6275862068965518,
-            "acc_norm_stderr": 0.0402873153294756
-        },
-        "harness|ko_mmlu_college_physics|5": {
-            "acc": 0.29411764705882354,
-            "acc_stderr": 0.04533838195929778,
-            "acc_norm": 0.29411764705882354,
-            "acc_norm_stderr": 0.04533838195929778
-        },
-        "harness|ko_mmlu_high_school_microeconomics|5": {
-            "acc": 0.6638655462184874,
-            "acc_stderr": 0.030684737115135353,
-            "acc_norm": 0.6638655462184874,
-            "acc_norm_stderr": 0.030684737115135353
-        },
-        "harness|ko_mmlu_high_school_macroeconomics|5": {
-            "acc": 0.6641025641025641,
-            "acc_stderr": 0.023946724741563986,
-            "acc_norm": 0.6641025641025641,
-            "acc_norm_stderr": 0.023946724741563986
-        },
-        "harness|ko_mmlu_computer_security|5": {
-            "acc": 0.69,
-            "acc_stderr": 0.046482319871173156,
-            "acc_norm": 0.69,
-            "acc_norm_stderr": 0.046482319871173156
-        },
-        "harness|ko_mmlu_global_facts|5": {
-            "acc": 0.36,
-            "acc_stderr": 0.048241815132442176,
-            "acc_norm": 0.36,
-            "acc_norm_stderr": 0.048241815132442176
-        },
-        "harness|ko_mmlu_jurisprudence|5": {
-            "acc": 0.7129629629629629,
-            "acc_stderr": 0.043733130409147614,
-            "acc_norm": 0.7129629629629629,
-            "acc_norm_stderr": 0.043733130409147614
-        },
-        "harness|ko_mmlu_high_school_chemistry|5": {
-            "acc": 0.5221674876847291,
-            "acc_stderr": 0.035145285621750066,
-            "acc_norm": 0.5221674876847291,
-            "acc_norm_stderr": 0.035145285621750066
-        },
-        "harness|ko_mmlu_high_school_biology|5": {
-            "acc": 0.635483870967742,
-            "acc_stderr": 0.027379871229943252,
-            "acc_norm": 0.635483870967742,
-            "acc_norm_stderr": 0.027379871229943252
-        },
-        "harness|ko_mmlu_marketing|5": {
-            "acc": 0.8162393162393162,
-            "acc_stderr": 0.02537213967172293,
-            "acc_norm": 0.8162393162393162,
-            "acc_norm_stderr": 0.02537213967172293
-        },
-        "harness|ko_mmlu_clinical_knowledge|5": {
-            "acc": 0.6150943396226415,
-            "acc_stderr": 0.029946498567699948,
-            "acc_norm": 0.6150943396226415,
-            "acc_norm_stderr": 0.029946498567699948
-        },
-        "harness|ko_mmlu_public_relations|5": {
-            "acc": 0.5545454545454546,
-            "acc_stderr": 0.047605488214603246,
-            "acc_norm": 0.5545454545454546,
-            "acc_norm_stderr": 0.047605488214603246
-        },
-        "harness|ko_mmlu_high_school_mathematics|5": {
-            "acc": 0.5037037037037037,
-            "acc_stderr": 0.030484701665084362,
-            "acc_norm": 0.5037037037037037,
-            "acc_norm_stderr": 0.030484701665084362
-        },
-        "harness|ko_mmlu_high_school_physics|5": {
-            "acc": 0.3509933774834437,
-            "acc_stderr": 0.03896981964257375,
-            "acc_norm": 0.3509933774834437,
-            "acc_norm_stderr": 0.03896981964257375
-        },
-        "harness|ko_mmlu_sociology|5": {
-            "acc": 0.7711442786069652,
-            "acc_stderr": 0.029705284056772443,
-            "acc_norm": 0.7711442786069652,
-            "acc_norm_stderr": 0.029705284056772443
-        },
-        "harness|ko_mmlu_college_medicine|5": {
-            "acc": 0.5491329479768786,
-            "acc_stderr": 0.03794012674697029,
-            "acc_norm": 0.5491329479768786,
-            "acc_norm_stderr": 0.03794012674697029
-        },
-        "harness|ko_mmlu_elementary_mathematics|5": {
-            "acc": 0.582010582010582,
-            "acc_stderr": 0.02540255550326091,
-            "acc_norm": 0.582010582010582,
-            "acc_norm_stderr": 0.02540255550326091
-        },
-        "harness|ko_mmlu_college_biology|5": {
-            "acc": 0.5416666666666666,
-            "acc_stderr": 0.041666666666666664,
-            "acc_norm": 0.5416666666666666,
-            "acc_norm_stderr": 0.041666666666666664
-        },
-        "harness|ko_mmlu_college_chemistry|5": {
-            "acc": 0.42,
-            "acc_stderr": 0.04960449637488584,
-            "acc_norm": 0.42,
-            "acc_norm_stderr": 0.04960449637488584
-        },
-        "harness|ko_mmlu_us_foreign_policy|5": {
-            "acc": 0.72,
-            "acc_stderr": 0.04512608598542127,
-            "acc_norm": 0.72,
-            "acc_norm_stderr": 0.04512608598542127
-        },
-        "harness|ko_mmlu_moral_disputes|5": {
-            "acc": 0.6329479768786127,
-            "acc_stderr": 0.025950054337654085,
-            "acc_norm": 0.6329479768786127,
-            "acc_norm_stderr": 0.025950054337654085
-        },
-        "harness|ko_mmlu_logical_fallacies|5": {
-            "acc": 0.6073619631901841,
-            "acc_stderr": 0.0383674090783103,
-            "acc_norm": 0.6073619631901841,
-            "acc_norm_stderr": 0.0383674090783103
-        },
-        "harness|ko_mmlu_prehistory|5": {
-            "acc": 0.6203703703703703,
-            "acc_stderr": 0.027002521034516478,
-            "acc_norm": 0.6203703703703703,
-            "acc_norm_stderr": 0.027002521034516478
-        },
-        "harness|ko_mmlu_college_mathematics|5": {
-            "acc": 0.42,
-            "acc_stderr": 0.049604496374885836,
-            "acc_norm": 0.42,
-            "acc_norm_stderr": 0.049604496374885836
-        },
-        "harness|ko_mmlu_high_school_government_and_politics|5": {
-            "acc": 0.6580310880829016,
-            "acc_stderr": 0.03423465100104284,
-            "acc_norm": 0.6580310880829016,
-            "acc_norm_stderr": 0.03423465100104284
-        },
-        "harness|ko_mmlu_econometrics|5": {
-            "acc": 0.49122807017543857,
-            "acc_stderr": 0.047028804320496165,
-            "acc_norm": 0.49122807017543857,
-            "acc_norm_stderr": 0.047028804320496165
-        },
-        "harness|ko_mmlu_high_school_psychology|5": {
-            "acc": 0.6880733944954128,
-            "acc_stderr": 0.019862967976707245,
-            "acc_norm": 0.6880733944954128,
-            "acc_norm_stderr": 0.019862967976707245
-        },
-        "harness|ko_mmlu_formal_logic|5": {
-            "acc": 0.5238095238095238,
-            "acc_stderr": 0.04467062628403273,
-            "acc_norm": 0.5238095238095238,
-            "acc_norm_stderr": 0.04467062628403273
-        },
-        "harness|ko_mmlu_nutrition|5": {
-            "acc": 0.5915032679738562,
-            "acc_stderr": 0.028146405993096358,
-            "acc_norm": 0.5915032679738562,
-            "acc_norm_stderr": 0.028146405993096358
-        },
-        "harness|ko_mmlu_business_ethics|5": {
-            "acc": 0.68,
-            "acc_stderr": 0.046882617226215034,
-            "acc_norm": 0.68,
-            "acc_norm_stderr": 0.046882617226215034
-        },
-        "harness|ko_mmlu_international_law|5": {
-            "acc": 0.7933884297520661,
-            "acc_stderr": 0.03695980128098824,
-            "acc_norm": 0.7933884297520661,
-            "acc_norm_stderr": 0.03695980128098824
-        },
-        "harness|ko_mmlu_astronomy|5": {
-            "acc": 0.7105263157894737,
-            "acc_stderr": 0.036906779861372814,
-            "acc_norm": 0.7105263157894737,
-            "acc_norm_stderr": 0.036906779861372814
-        },
-        "harness|ko_mmlu_professional_psychology|5": {
-            "acc": 0.5261437908496732,
-            "acc_stderr": 0.020200164564804588,
-            "acc_norm": 0.5261437908496732,
-            "acc_norm_stderr": 0.020200164564804588
-        },
-        "harness|ko_mmlu_professional_accounting|5": {
-            "acc": 0.4574468085106383,
-            "acc_stderr": 0.02971928127223684,
-            "acc_norm": 0.4574468085106383,
-            "acc_norm_stderr": 0.02971928127223684
-        },
-        "harness|ko_mmlu_machine_learning|5": {
-            "acc": 0.4642857142857143,
-            "acc_stderr": 0.04733667890053756,
-            "acc_norm": 0.4642857142857143,
-            "acc_norm_stderr": 0.04733667890053756
-        },
-        "harness|ko_mmlu_high_school_statistics|5": {
-            "acc": 0.5694444444444444,
-            "acc_stderr": 0.03376922151252336,
-            "acc_norm": 0.5694444444444444,
-            "acc_norm_stderr": 0.03376922151252336
-        },
-        "harness|ko_mmlu_moral_scenarios|5": {
-            "acc": 0.4301675977653631,
-            "acc_stderr": 0.01655860163604103,
-            "acc_norm": 0.4301675977653631,
-            "acc_norm_stderr": 0.01655860163604103
-        },
-        "harness|ko_mmlu_college_computer_science|5": {
-            "acc": 0.55,
-            "acc_stderr": 0.04999999999999999,
-            "acc_norm": 0.55,
-            "acc_norm_stderr": 0.04999999999999999
-        },
-        "harness|ko_mmlu_high_school_computer_science|5": {
-            "acc": 0.71,
-            "acc_stderr": 0.045604802157206845,
-            "acc_norm": 0.71,
-            "acc_norm_stderr": 0.045604802157206845
-        },
-        "harness|ko_mmlu_professional_medicine|5": {
-            "acc": 0.5036764705882353,
-            "acc_stderr": 0.030372015885428195,
-            "acc_norm": 0.5036764705882353,
-            "acc_norm_stderr": 0.030372015885428195
-        },
-        "harness|ko_mmlu_security_studies|5": {
-            "acc": 0.6326530612244898,
-            "acc_stderr": 0.03086214492108757,
-            "acc_norm": 0.6326530612244898,
-            "acc_norm_stderr": 0.03086214492108757
-        },
-        "harness|ko_mmlu_high_school_world_history|5": {
-            "acc": 0.7341772151898734,
-            "acc_stderr": 0.02875679962965834,
-            "acc_norm": 0.7341772151898734,
-            "acc_norm_stderr": 0.02875679962965834
-        },
-        "harness|ko_mmlu_professional_law|5": {
-            "acc": 0.39504563233376794,
-            "acc_stderr": 0.012485727813251565,
-            "acc_norm": 0.39504563233376794,
-            "acc_norm_stderr": 0.012485727813251565
-        },
-        "harness|ko_mmlu_high_school_us_history|5": {
-            "acc": 0.6911764705882353,
-            "acc_stderr": 0.03242661719827218,
-            "acc_norm": 0.6911764705882353,
-            "acc_norm_stderr": 0.03242661719827218
-        },
-        "harness|ko_mmlu_high_school_european_history|5": {
-            "acc": 0.696969696969697,
-            "acc_stderr": 0.03588624800091708,
-            "acc_norm": 0.696969696969697,
-            "acc_norm_stderr": 0.03588624800091708
-        },
-        "harness|ko_truthfulqa_mc|0": {
-            "mc1": 0.3880048959608323,
-            "mc1_stderr": 0.017058761501347962,
-            "mc2": 0.5601057396457566,
-            "mc2_stderr": 0.01603029702389325
-        },
-        "harness|ko_commongen_v2|2": {
-            "acc": 0.5525383707201889,
-            "acc_stderr": 0.01709519030150058,
-            "acc_norm": 0.5714285714285714,
-            "acc_norm_stderr": 0.01701403811929749
-        }
-    },
-    "versions": {
-        "all": 0,
-        "harness|ko_arc_challenge|25": 0,
-        "harness|ko_hellaswag|10": 0,
-        "harness|ko_mmlu_world_religions|5": 1,
-        "harness|ko_mmlu_management|5": 1,
-        "harness|ko_mmlu_miscellaneous|5": 1,
-        "harness|ko_mmlu_anatomy|5": 1,
-        "harness|ko_mmlu_abstract_algebra|5": 1,
-        "harness|ko_mmlu_conceptual_physics|5": 1,
-        "harness|ko_mmlu_virology|5": 1,
-        "harness|ko_mmlu_philosophy|5": 1,
-        "harness|ko_mmlu_human_aging|5": 1,
-        "harness|ko_mmlu_human_sexuality|5": 1,
-        "harness|ko_mmlu_medical_genetics|5": 1,
-        "harness|ko_mmlu_high_school_geography|5": 1,
-        "harness|ko_mmlu_electrical_engineering|5": 1,
-        "harness|ko_mmlu_college_physics|5": 1,
-        "harness|ko_mmlu_high_school_microeconomics|5": 1,
-        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
-        "harness|ko_mmlu_computer_security|5": 1,
-        "harness|ko_mmlu_global_facts|5": 1,
-        "harness|ko_mmlu_jurisprudence|5": 1,
-        "harness|ko_mmlu_high_school_chemistry|5": 1,
-        "harness|ko_mmlu_high_school_biology|5": 1,
-        "harness|ko_mmlu_marketing|5": 1,
-        "harness|ko_mmlu_clinical_knowledge|5": 1,
-        "harness|ko_mmlu_public_relations|5": 1,
-        "harness|ko_mmlu_high_school_mathematics|5": 1,
-        "harness|ko_mmlu_high_school_physics|5": 1,
-        "harness|ko_mmlu_sociology|5": 1,
-        "harness|ko_mmlu_college_medicine|5": 1,
-        "harness|ko_mmlu_elementary_mathematics|5": 1,
-        "harness|ko_mmlu_college_biology|5": 1,
-        "harness|ko_mmlu_college_chemistry|5": 1,
-        "harness|ko_mmlu_us_foreign_policy|5": 1,
-        "harness|ko_mmlu_moral_disputes|5": 1,
-        "harness|ko_mmlu_logical_fallacies|5": 1,
-        "harness|ko_mmlu_prehistory|5": 1,
-        "harness|ko_mmlu_college_mathematics|5": 1,
-        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
-        "harness|ko_mmlu_econometrics|5": 1,
-        "harness|ko_mmlu_high_school_psychology|5": 1,
-        "harness|ko_mmlu_formal_logic|5": 1,
-        "harness|ko_mmlu_nutrition|5": 1,
-        "harness|ko_mmlu_business_ethics|5": 1,
-        "harness|ko_mmlu_international_law|5": 1,
-        "harness|ko_mmlu_astronomy|5": 1,
-        "harness|ko_mmlu_professional_psychology|5": 1,
-        "harness|ko_mmlu_professional_accounting|5": 1,
-        "harness|ko_mmlu_machine_learning|5": 1,
-        "harness|ko_mmlu_high_school_statistics|5": 1,
-        "harness|ko_mmlu_moral_scenarios|5": 1,
-        "harness|ko_mmlu_college_computer_science|5": 1,
-        "harness|ko_mmlu_high_school_computer_science|5": 1,
-        "harness|ko_mmlu_professional_medicine|5": 1,
-        "harness|ko_mmlu_security_studies|5": 1,
-        "harness|ko_mmlu_high_school_world_history|5": 1,
-        "harness|ko_mmlu_professional_law|5": 1,
-        "harness|ko_mmlu_high_school_us_history|5": 1,
-        "harness|ko_mmlu_high_school_european_history|5": 1,
-        "harness|ko_truthfulqa_mc|0": 0,
-        "harness|ko_commongen_v2|2": 1
-    },
-    "config_general": {
-        "model_name": "Qwen/Qwen2-7B-Instruct",
-        "model_sha": "41c66b0be1c3081f13defc6bdf946c2ef240d6a6",
-        "model_dtype": "torch.float16",
-        "lighteval_sha": "",
-        "num_few_shot_default": 0,
-        "num_fewshot_seeds": 1,
-        "override_batch_size": 1,
-        "max_samples": null
-    }
-}
Qwen/Qwen2-7B/result_2024-06-06 17:19:35.json
DELETED
@@ -1,444 +0,0 @@
-{
-    "results": {
-        "harness|ko_arc_challenge|25": {
-            "acc": 0.3651877133105802,
-            "acc_stderr": 0.014070265519268804,
-            "acc_norm": 0.42406143344709896,
-            "acc_norm_stderr": 0.01444188962746439
-        },
-        "harness|ko_hellaswag|10": {
-            "acc": 0.38856801433977295,
-            "acc_stderr": 0.004864286176731831,
-            "acc_norm": 0.5241983668591914,
-            "acc_norm_stderr": 0.004983934343250459
-        },
-        "harness|ko_mmlu_world_religions|5": {
-            "acc": 0.6783625730994152,
-            "acc_stderr": 0.03582529442573122,
-            "acc_norm": 0.6783625730994152,
-            "acc_norm_stderr": 0.03582529442573122
-        },
-        "harness|ko_mmlu_management|5": {
-            "acc": 0.7864077669902912,
-            "acc_stderr": 0.040580420156460364,
-            "acc_norm": 0.7864077669902912,
-            "acc_norm_stderr": 0.040580420156460364
-        },
-        "harness|ko_mmlu_miscellaneous|5": {
-            "acc": 0.669220945083014,
-            "acc_stderr": 0.016824818462563756,
-            "acc_norm": 0.669220945083014,
-            "acc_norm_stderr": 0.016824818462563756
-        },
-        "harness|ko_mmlu_anatomy|5": {
-            "acc": 0.4,
-            "acc_stderr": 0.04232073695151589,
-            "acc_norm": 0.4,
-            "acc_norm_stderr": 0.04232073695151589
-        },
-        "harness|ko_mmlu_abstract_algebra|5": {
-            "acc": 0.36,
-            "acc_stderr": 0.04824181513244218,
-            "acc_norm": 0.36,
-            "acc_norm_stderr": 0.04824181513244218
-        },
-        "harness|ko_mmlu_conceptual_physics|5": {
-            "acc": 0.6127659574468085,
-            "acc_stderr": 0.03184389265339525,
-            "acc_norm": 0.6127659574468085,
-            "acc_norm_stderr": 0.03184389265339525
-        },
-        "harness|ko_mmlu_virology|5": {
-            "acc": 0.536144578313253,
-            "acc_stderr": 0.03882310850890593,
-            "acc_norm": 0.536144578313253,
-            "acc_norm_stderr": 0.03882310850890593
-        },
-        "harness|ko_mmlu_philosophy|5": {
-            "acc": 0.662379421221865,
-            "acc_stderr": 0.026858825879488554,
-            "acc_norm": 0.662379421221865,
-            "acc_norm_stderr": 0.026858825879488554
-        },
-        "harness|ko_mmlu_human_aging|5": {
-            "acc": 0.6233183856502242,
-            "acc_stderr": 0.032521134899291884,
-            "acc_norm": 0.6233183856502242,
-            "acc_norm_stderr": 0.032521134899291884
-        },
-        "harness|ko_mmlu_human_sexuality|5": {
-            "acc": 0.648854961832061,
-            "acc_stderr": 0.0418644516301375,
-            "acc_norm": 0.648854961832061,
-            "acc_norm_stderr": 0.0418644516301375
-        },
-        "harness|ko_mmlu_medical_genetics|5": {
-            "acc": 0.53,
-            "acc_stderr": 0.050161355804659205,
-            "acc_norm": 0.53,
-            "acc_norm_stderr": 0.050161355804659205
-        },
-        "harness|ko_mmlu_high_school_geography|5": {
-            "acc": 0.7525252525252525,
-            "acc_stderr": 0.030746300742124505,
-            "acc_norm": 0.7525252525252525,
-            "acc_norm_stderr": 0.030746300742124505
-        },
-        "harness|ko_mmlu_electrical_engineering|5": {
-            "acc": 0.6758620689655173,
-            "acc_stderr": 0.039004320691855554,
-            "acc_norm": 0.6758620689655173,
-            "acc_norm_stderr": 0.039004320691855554
-        },
-        "harness|ko_mmlu_college_physics|5": {
-            "acc": 0.3333333333333333,
-            "acc_stderr": 0.04690650298201942,
-            "acc_norm": 0.3333333333333333,
-            "acc_norm_stderr": 0.04690650298201942
-        },
-        "harness|ko_mmlu_high_school_microeconomics|5": {
-            "acc": 0.6890756302521008,
-            "acc_stderr": 0.03006676158297794,
-            "acc_norm": 0.6890756302521008,
-            "acc_norm_stderr": 0.03006676158297794
-        },
-        "harness|ko_mmlu_high_school_macroeconomics|5": {
-            "acc": 0.6435897435897436,
-            "acc_stderr": 0.02428314052946731,
-            "acc_norm": 0.6435897435897436,
-            "acc_norm_stderr": 0.02428314052946731
-        },
-        "harness|ko_mmlu_computer_security|5": {
-            "acc": 0.72,
-            "acc_stderr": 0.045126085985421296,
-            "acc_norm": 0.72,
-            "acc_norm_stderr": 0.045126085985421296
-        },
-        "harness|ko_mmlu_global_facts|5": {
-            "acc": 0.36,
-            "acc_stderr": 0.048241815132442176,
-            "acc_norm": 0.36,
-            "acc_norm_stderr": 0.048241815132442176
-        },
-        "harness|ko_mmlu_jurisprudence|5": {
-            "acc": 0.7129629629629629,
-            "acc_stderr": 0.043733130409147614,
-            "acc_norm": 0.7129629629629629,
-            "acc_norm_stderr": 0.043733130409147614
-        },
-        "harness|ko_mmlu_high_school_chemistry|5": {
-            "acc": 0.5467980295566502,
-            "acc_stderr": 0.03502544650845872,
-            "acc_norm": 0.5467980295566502,
-            "acc_norm_stderr": 0.03502544650845872
-        },
-        "harness|ko_mmlu_high_school_biology|5": {
-            "acc": 0.6741935483870968,
-            "acc_stderr": 0.026662010578567097,
-            "acc_norm": 0.6741935483870968,
-            "acc_norm_stderr": 0.026662010578567097
-        },
-        "harness|ko_mmlu_marketing|5": {
-            "acc": 0.8205128205128205,
-            "acc_stderr": 0.02514093595033544,
-            "acc_norm": 0.8205128205128205,
-            "acc_norm_stderr": 0.02514093595033544
-        },
-        "harness|ko_mmlu_clinical_knowledge|5": {
-            "acc": 0.6377358490566037,
-            "acc_stderr": 0.029582245128384303,
-            "acc_norm": 0.6377358490566037,
-            "acc_norm_stderr": 0.029582245128384303
-        },
-        "harness|ko_mmlu_public_relations|5": {
-            "acc": 0.5909090909090909,
-            "acc_stderr": 0.04709306978661896,
-            "acc_norm": 0.5909090909090909,
-            "acc_norm_stderr": 0.04709306978661896
-        },
-        "harness|ko_mmlu_high_school_mathematics|5": {
-            "acc": 0.4703703703703704,
-            "acc_stderr": 0.030431963547936577,
-            "acc_norm": 0.4703703703703704,
-            "acc_norm_stderr": 0.030431963547936577
-        },
-        "harness|ko_mmlu_high_school_physics|5": {
-            "acc": 0.3443708609271523,
-            "acc_stderr": 0.038796870240733264,
-            "acc_norm": 0.3443708609271523,
-            "acc_norm_stderr": 0.038796870240733264
-        },
-        "harness|ko_mmlu_sociology|5": {
-            "acc": 0.7512437810945274,
-            "acc_stderr": 0.030567675938916714,
-            "acc_norm": 0.7512437810945274,
-            "acc_norm_stderr": 0.030567675938916714
-        },
-        "harness|ko_mmlu_college_medicine|5": {
-            "acc": 0.5491329479768786,
-            "acc_stderr": 0.037940126746970296,
-            "acc_norm": 0.5491329479768786,
-            "acc_norm_stderr": 0.037940126746970296
-        },
-        "harness|ko_mmlu_elementary_mathematics|5": {
-            "acc": 0.5846560846560847,
-            "acc_stderr": 0.02537952491077839,
-            "acc_norm": 0.5846560846560847,
-            "acc_norm_stderr": 0.02537952491077839
-        },
-        "harness|ko_mmlu_college_biology|5": {
-            "acc": 0.5069444444444444,
-            "acc_stderr": 0.04180806750294939,
-            "acc_norm": 0.5069444444444444,
-            "acc_norm_stderr": 0.04180806750294939
-        },
-        "harness|ko_mmlu_college_chemistry|5": {
-            "acc": 0.47,
-            "acc_stderr": 0.05016135580465919,
-            "acc_norm": 0.47,
-            "acc_norm_stderr": 0.05016135580465919
-        },
-        "harness|ko_mmlu_us_foreign_policy|5": {
-            "acc": 0.73,
-            "acc_stderr": 0.0446196043338474,
-            "acc_norm": 0.73,
-            "acc_norm_stderr": 0.0446196043338474
-        },
-        "harness|ko_mmlu_moral_disputes|5": {
-            "acc": 0.6445086705202312,
-            "acc_stderr": 0.025770292082977254,
-            "acc_norm": 0.6445086705202312,
-            "acc_norm_stderr": 0.025770292082977254
-        },
-        "harness|ko_mmlu_logical_fallacies|5": {
-            "acc": 0.6012269938650306,
-            "acc_stderr": 0.03847021420456024,
-            "acc_norm": 0.6012269938650306,
-            "acc_norm_stderr": 0.03847021420456024
-        },
-        "harness|ko_mmlu_prehistory|5": {
-            "acc": 0.6327160493827161,
-            "acc_stderr": 0.026822801759507894,
-            "acc_norm": 0.6327160493827161,
-            "acc_norm_stderr": 0.026822801759507894
-        },
-        "harness|ko_mmlu_college_mathematics|5": {
-            "acc": 0.41,
-            "acc_stderr": 0.049431107042371025,
-            "acc_norm": 0.41,
-            "acc_norm_stderr": 0.049431107042371025
-        },
-        "harness|ko_mmlu_high_school_government_and_politics|5": {
-            "acc": 0.6476683937823834,
-            "acc_stderr": 0.03447478286414357,
-            "acc_norm": 0.6476683937823834,
-            "acc_norm_stderr": 0.03447478286414357
-        },
-        "harness|ko_mmlu_econometrics|5": {
-            "acc": 0.4473684210526316,
-            "acc_stderr": 0.04677473004491199,
-            "acc_norm": 0.4473684210526316,
-            "acc_norm_stderr": 0.04677473004491199
-        },
-        "harness|ko_mmlu_high_school_psychology|5": {
-            "acc": 0.7009174311926606,
-            "acc_stderr": 0.01963041728541517,
-            "acc_norm": 0.7009174311926606,
-            "acc_norm_stderr": 0.01963041728541517
-        },
-        "harness|ko_mmlu_formal_logic|5": {
-            "acc": 0.49206349206349204,
-            "acc_stderr": 0.044715725362943486,
-            "acc_norm": 0.49206349206349204,
-            "acc_norm_stderr": 0.044715725362943486
-        },
-        "harness|ko_mmlu_nutrition|5": {
-            "acc": 0.6111111111111112,
-            "acc_stderr": 0.027914055510468,
-            "acc_norm": 0.6111111111111112,
-            "acc_norm_stderr": 0.027914055510468
-        },
-        "harness|ko_mmlu_business_ethics|5": {
-            "acc": 0.7,
-            "acc_stderr": 0.046056618647183814,
-            "acc_norm": 0.7,
-            "acc_norm_stderr": 0.046056618647183814
-        },
-        "harness|ko_mmlu_international_law|5": {
-            "acc": 0.7603305785123967,
-            "acc_stderr": 0.03896878985070416,
-            "acc_norm": 0.7603305785123967,
-            "acc_norm_stderr": 0.03896878985070416
-        },
-        "harness|ko_mmlu_astronomy|5": {
-            "acc": 0.7171052631578947,
-            "acc_stderr": 0.03665349695640767,
-            "acc_norm": 0.7171052631578947,
-            "acc_norm_stderr": 0.03665349695640767
-        },
-        "harness|ko_mmlu_professional_psychology|5": {
-            "acc": 0.5359477124183006,
-            "acc_stderr": 0.020175488765484043,
-            "acc_norm": 0.5359477124183006,
-            "acc_norm_stderr": 0.020175488765484043
-        },
-        "harness|ko_mmlu_professional_accounting|5": {
-            "acc": 0.475177304964539,
-            "acc_stderr": 0.029790719243829707,
-            "acc_norm": 0.475177304964539,
-            "acc_norm_stderr": 0.029790719243829707
-        },
-        "harness|ko_mmlu_machine_learning|5": {
-            "acc": 0.4375,
-            "acc_stderr": 0.04708567521880525,
-            "acc_norm": 0.4375,
-            "acc_norm_stderr": 0.04708567521880525
-        },
-        "harness|ko_mmlu_high_school_statistics|5": {
-            "acc": 0.5370370370370371,
-            "acc_stderr": 0.03400603625538272,
-            "acc_norm": 0.5370370370370371,
-            "acc_norm_stderr": 0.03400603625538272
-        },
-        "harness|ko_mmlu_moral_scenarios|5": {
-            "acc": 0.4022346368715084,
-            "acc_stderr": 0.016399716732847146,
-            "acc_norm": 0.4022346368715084,
-            "acc_norm_stderr": 0.016399716732847146
-        },
-        "harness|ko_mmlu_college_computer_science|5": {
-            "acc": 0.57,
-            "acc_stderr": 0.04975698519562428,
-            "acc_norm": 0.57,
-            "acc_norm_stderr": 0.04975698519562428
-        },
-        "harness|ko_mmlu_high_school_computer_science|5": {
-            "acc": 0.76,
-            "acc_stderr": 0.042923469599092816,
-            "acc_norm": 0.76,
-            "acc_norm_stderr": 0.042923469599092816
-        },
-        "harness|ko_mmlu_professional_medicine|5": {
-            "acc": 0.5772058823529411,
-            "acc_stderr": 0.030008562845003476,
-            "acc_norm": 0.5772058823529411,
-            "acc_norm_stderr": 0.030008562845003476
-        },
-        "harness|ko_mmlu_security_studies|5": {
-            "acc": 0.673469387755102,
-            "acc_stderr": 0.030021056238440317,
330 |
-
"acc_norm": 0.673469387755102,
|
331 |
-
"acc_norm_stderr": 0.030021056238440317
|
332 |
-
},
|
333 |
-
"harness|ko_mmlu_high_school_world_history|5": {
|
334 |
-
"acc": 0.7383966244725738,
|
335 |
-
"acc_stderr": 0.028609516716994934,
|
336 |
-
"acc_norm": 0.7383966244725738,
|
337 |
-
"acc_norm_stderr": 0.028609516716994934
|
338 |
-
},
|
339 |
-
"harness|ko_mmlu_professional_law|5": {
|
340 |
-
"acc": 0.42698826597131684,
|
341 |
-
"acc_stderr": 0.012633353557534423,
|
342 |
-
"acc_norm": 0.42698826597131684,
|
343 |
-
"acc_norm_stderr": 0.012633353557534423
|
344 |
-
},
|
345 |
-
"harness|ko_mmlu_high_school_us_history|5": {
|
346 |
-
"acc": 0.7303921568627451,
|
347 |
-
"acc_stderr": 0.03114557065948678,
|
348 |
-
"acc_norm": 0.7303921568627451,
|
349 |
-
"acc_norm_stderr": 0.03114557065948678
|
350 |
-
},
|
351 |
-
"harness|ko_mmlu_high_school_european_history|5": {
|
352 |
-
"acc": 0.7212121212121212,
|
353 |
-
"acc_stderr": 0.03501438706296781,
|
354 |
-
"acc_norm": 0.7212121212121212,
|
355 |
-
"acc_norm_stderr": 0.03501438706296781
|
356 |
-
},
|
357 |
-
"harness|ko_truthfulqa_mc|0": {
|
358 |
-
"mc1": 0.3219094247246022,
|
359 |
-
"mc1_stderr": 0.016355567611960383,
|
360 |
-
"mc2": 0.49264334051888825,
|
361 |
-
"mc2_stderr": 0.015667716428599748
|
362 |
-
},
|
363 |
-
"harness|ko_commongen_v2|2": {
|
364 |
-
"acc": 0.538370720188902,
|
365 |
-
"acc_stderr": 0.01713966022184556,
|
366 |
-
"acc_norm": 0.6115702479338843,
|
367 |
-
"acc_norm_stderr": 0.016756921571069422
|
368 |
-
}
|
369 |
-
},
|
370 |
-
"versions": {
|
371 |
-
"all": 0,
|
372 |
-
"harness|ko_arc_challenge|25": 0,
|
373 |
-
"harness|ko_hellaswag|10": 0,
|
374 |
-
"harness|ko_mmlu_world_religions|5": 1,
|
375 |
-
"harness|ko_mmlu_management|5": 1,
|
376 |
-
"harness|ko_mmlu_miscellaneous|5": 1,
|
377 |
-
"harness|ko_mmlu_anatomy|5": 1,
|
378 |
-
"harness|ko_mmlu_abstract_algebra|5": 1,
|
379 |
-
"harness|ko_mmlu_conceptual_physics|5": 1,
|
380 |
-
"harness|ko_mmlu_virology|5": 1,
|
381 |
-
"harness|ko_mmlu_philosophy|5": 1,
|
382 |
-
"harness|ko_mmlu_human_aging|5": 1,
|
383 |
-
"harness|ko_mmlu_human_sexuality|5": 1,
|
384 |
-
"harness|ko_mmlu_medical_genetics|5": 1,
|
385 |
-
"harness|ko_mmlu_high_school_geography|5": 1,
|
386 |
-
"harness|ko_mmlu_electrical_engineering|5": 1,
|
387 |
-
"harness|ko_mmlu_college_physics|5": 1,
|
388 |
-
"harness|ko_mmlu_high_school_microeconomics|5": 1,
|
389 |
-
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
|
390 |
-
"harness|ko_mmlu_computer_security|5": 1,
|
391 |
-
"harness|ko_mmlu_global_facts|5": 1,
|
392 |
-
"harness|ko_mmlu_jurisprudence|5": 1,
|
393 |
-
"harness|ko_mmlu_high_school_chemistry|5": 1,
|
394 |
-
"harness|ko_mmlu_high_school_biology|5": 1,
|
395 |
-
"harness|ko_mmlu_marketing|5": 1,
|
396 |
-
"harness|ko_mmlu_clinical_knowledge|5": 1,
|
397 |
-
"harness|ko_mmlu_public_relations|5": 1,
|
398 |
-
"harness|ko_mmlu_high_school_mathematics|5": 1,
|
399 |
-
"harness|ko_mmlu_high_school_physics|5": 1,
|
400 |
-
"harness|ko_mmlu_sociology|5": 1,
|
401 |
-
"harness|ko_mmlu_college_medicine|5": 1,
|
402 |
-
"harness|ko_mmlu_elementary_mathematics|5": 1,
|
403 |
-
"harness|ko_mmlu_college_biology|5": 1,
|
404 |
-
"harness|ko_mmlu_college_chemistry|5": 1,
|
405 |
-
"harness|ko_mmlu_us_foreign_policy|5": 1,
|
406 |
-
"harness|ko_mmlu_moral_disputes|5": 1,
|
407 |
-
"harness|ko_mmlu_logical_fallacies|5": 1,
|
408 |
-
"harness|ko_mmlu_prehistory|5": 1,
|
409 |
-
"harness|ko_mmlu_college_mathematics|5": 1,
|
410 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
|
411 |
-
"harness|ko_mmlu_econometrics|5": 1,
|
412 |
-
"harness|ko_mmlu_high_school_psychology|5": 1,
|
413 |
-
"harness|ko_mmlu_formal_logic|5": 1,
|
414 |
-
"harness|ko_mmlu_nutrition|5": 1,
|
415 |
-
"harness|ko_mmlu_business_ethics|5": 1,
|
416 |
-
"harness|ko_mmlu_international_law|5": 1,
|
417 |
-
"harness|ko_mmlu_astronomy|5": 1,
|
418 |
-
"harness|ko_mmlu_professional_psychology|5": 1,
|
419 |
-
"harness|ko_mmlu_professional_accounting|5": 1,
|
420 |
-
"harness|ko_mmlu_machine_learning|5": 1,
|
421 |
-
"harness|ko_mmlu_high_school_statistics|5": 1,
|
422 |
-
"harness|ko_mmlu_moral_scenarios|5": 1,
|
423 |
-
"harness|ko_mmlu_college_computer_science|5": 1,
|
424 |
-
"harness|ko_mmlu_high_school_computer_science|5": 1,
|
425 |
-
"harness|ko_mmlu_professional_medicine|5": 1,
|
426 |
-
"harness|ko_mmlu_security_studies|5": 1,
|
427 |
-
"harness|ko_mmlu_high_school_world_history|5": 1,
|
428 |
-
"harness|ko_mmlu_professional_law|5": 1,
|
429 |
-
"harness|ko_mmlu_high_school_us_history|5": 1,
|
430 |
-
"harness|ko_mmlu_high_school_european_history|5": 1,
|
431 |
-
"harness|ko_truthfulqa_mc|0": 0,
|
432 |
-
"harness|ko_commongen_v2|2": 1
|
433 |
-
},
|
434 |
-
"config_general": {
|
435 |
-
"model_name": "Qwen/Qwen2-7B",
|
436 |
-
"model_sha": "453ed1575b739b5b03ce3758b23befdb0967f40e",
|
437 |
-
"model_dtype": "torch.float16",
|
438 |
-
"lighteval_sha": "",
|
439 |
-
"num_few_shot_default": 0,
|
440 |
-
"num_fewshot_seeds": 1,
|
441 |
-
"override_batch_size": 1,
|
442 |
-
"max_samples": null
|
443 |
-
}
|
444 |
-
}
|
RLHFlow/LLaMA3-iterative-DPO-final/result_2024-06-05 14:46:44.json
DELETED
@@ -1,444 +0,0 @@
{
  "results": {
    "harness|ko_arc_challenge|25": {"acc": 0.4104095563139932, "acc_stderr": 0.014374922192642667, "acc_norm": 0.4709897610921502, "acc_norm_stderr": 0.014586776355294307},
    "harness|ko_hellaswag|10": {"acc": 0.4032065325632344, "acc_stderr": 0.004895390341445628, "acc_norm": 0.5456084445329615, "acc_norm_stderr": 0.0049689792597383325},
    "harness|ko_mmlu_world_religions|5": {"acc": 0.6549707602339181, "acc_stderr": 0.03645981377388807, "acc_norm": 0.6549707602339181, "acc_norm_stderr": 0.03645981377388807},
    "harness|ko_mmlu_management|5": {"acc": 0.6699029126213593, "acc_stderr": 0.0465614711001235, "acc_norm": 0.6699029126213593, "acc_norm_stderr": 0.0465614711001235},
    "harness|ko_mmlu_miscellaneous|5": {"acc": 0.5555555555555556, "acc_stderr": 0.017769250583533253, "acc_norm": 0.5555555555555556, "acc_norm_stderr": 0.017769250583533253},
    "harness|ko_mmlu_anatomy|5": {"acc": 0.3851851851851852, "acc_stderr": 0.042039210401562783, "acc_norm": 0.3851851851851852, "acc_norm_stderr": 0.042039210401562783},
    "harness|ko_mmlu_abstract_algebra|5": {"acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814},
    "harness|ko_mmlu_conceptual_physics|5": {"acc": 0.4851063829787234, "acc_stderr": 0.032671518489247764, "acc_norm": 0.4851063829787234, "acc_norm_stderr": 0.032671518489247764},
    "harness|ko_mmlu_virology|5": {"acc": 0.39156626506024095, "acc_stderr": 0.03799857454479636, "acc_norm": 0.39156626506024095, "acc_norm_stderr": 0.03799857454479636},
    "harness|ko_mmlu_philosophy|5": {"acc": 0.5498392282958199, "acc_stderr": 0.028256660723360177, "acc_norm": 0.5498392282958199, "acc_norm_stderr": 0.028256660723360177},
    "harness|ko_mmlu_human_aging|5": {"acc": 0.48878923766816146, "acc_stderr": 0.033549366530984746, "acc_norm": 0.48878923766816146, "acc_norm_stderr": 0.033549366530984746},
    "harness|ko_mmlu_human_sexuality|5": {"acc": 0.549618320610687, "acc_stderr": 0.04363643698524779, "acc_norm": 0.549618320610687, "acc_norm_stderr": 0.04363643698524779},
    "harness|ko_mmlu_medical_genetics|5": {"acc": 0.43, "acc_stderr": 0.049756985195624284, "acc_norm": 0.43, "acc_norm_stderr": 0.049756985195624284},
    "harness|ko_mmlu_high_school_geography|5": {"acc": 0.601010101010101, "acc_stderr": 0.03488901616852729, "acc_norm": 0.601010101010101, "acc_norm_stderr": 0.03488901616852729},
    "harness|ko_mmlu_electrical_engineering|5": {"acc": 0.5103448275862069, "acc_stderr": 0.04165774775728762, "acc_norm": 0.5103448275862069, "acc_norm_stderr": 0.04165774775728762},
    "harness|ko_mmlu_college_physics|5": {"acc": 0.38235294117647056, "acc_stderr": 0.04835503696107223, "acc_norm": 0.38235294117647056, "acc_norm_stderr": 0.04835503696107223},
    "harness|ko_mmlu_high_school_microeconomics|5": {"acc": 0.5546218487394958, "acc_stderr": 0.03228410626716391, "acc_norm": 0.5546218487394958, "acc_norm_stderr": 0.03228410626716391},
    "harness|ko_mmlu_high_school_macroeconomics|5": {"acc": 0.5128205128205128, "acc_stderr": 0.025342671293807247, "acc_norm": 0.5128205128205128, "acc_norm_stderr": 0.025342671293807247},
    "harness|ko_mmlu_computer_security|5": {"acc": 0.62, "acc_stderr": 0.0487831731214563, "acc_norm": 0.62, "acc_norm_stderr": 0.0487831731214563},
    "harness|ko_mmlu_global_facts|5": {"acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218},
    "harness|ko_mmlu_jurisprudence|5": {"acc": 0.6203703703703703, "acc_stderr": 0.04691521224077742, "acc_norm": 0.6203703703703703, "acc_norm_stderr": 0.04691521224077742},
    "harness|ko_mmlu_high_school_chemistry|5": {"acc": 0.4187192118226601, "acc_stderr": 0.034711928605184676, "acc_norm": 0.4187192118226601, "acc_norm_stderr": 0.034711928605184676},
    "harness|ko_mmlu_high_school_biology|5": {"acc": 0.5225806451612903, "acc_stderr": 0.02841498501970786, "acc_norm": 0.5225806451612903, "acc_norm_stderr": 0.02841498501970786},
    "harness|ko_mmlu_marketing|5": {"acc": 0.7521367521367521, "acc_stderr": 0.028286324075564407, "acc_norm": 0.7521367521367521, "acc_norm_stderr": 0.028286324075564407},
    "harness|ko_mmlu_clinical_knowledge|5": {"acc": 0.5283018867924528, "acc_stderr": 0.030723535249006107, "acc_norm": 0.5283018867924528, "acc_norm_stderr": 0.030723535249006107},
    "harness|ko_mmlu_public_relations|5": {"acc": 0.5727272727272728, "acc_stderr": 0.047381987035454834, "acc_norm": 0.5727272727272728, "acc_norm_stderr": 0.047381987035454834},
    "harness|ko_mmlu_high_school_mathematics|5": {"acc": 0.3962962962962963, "acc_stderr": 0.029822619458533997, "acc_norm": 0.3962962962962963, "acc_norm_stderr": 0.029822619458533997},
    "harness|ko_mmlu_high_school_physics|5": {"acc": 0.33112582781456956, "acc_stderr": 0.038425817186598696, "acc_norm": 0.33112582781456956, "acc_norm_stderr": 0.038425817186598696},
    "harness|ko_mmlu_sociology|5": {"acc": 0.6616915422885572, "acc_stderr": 0.03345563070339191, "acc_norm": 0.6616915422885572, "acc_norm_stderr": 0.03345563070339191},
    "harness|ko_mmlu_college_medicine|5": {"acc": 0.4624277456647399, "acc_stderr": 0.038016851045244604, "acc_norm": 0.4624277456647399, "acc_norm_stderr": 0.038016851045244604},
    "harness|ko_mmlu_elementary_mathematics|5": {"acc": 0.42328042328042326, "acc_stderr": 0.025446365634406783, "acc_norm": 0.42328042328042326, "acc_norm_stderr": 0.025446365634406783},
    "harness|ko_mmlu_college_biology|5": {"acc": 0.4930555555555556, "acc_stderr": 0.04180806750294938, "acc_norm": 0.4930555555555556, "acc_norm_stderr": 0.04180806750294938},
    "harness|ko_mmlu_college_chemistry|5": {"acc": 0.45, "acc_stderr": 0.049999999999999996, "acc_norm": 0.45, "acc_norm_stderr": 0.049999999999999996},
    "harness|ko_mmlu_us_foreign_policy|5": {"acc": 0.67, "acc_stderr": 0.04725815626252606, "acc_norm": 0.67, "acc_norm_stderr": 0.04725815626252606},
    "harness|ko_mmlu_moral_disputes|5": {"acc": 0.5751445086705202, "acc_stderr": 0.026613350840261733, "acc_norm": 0.5751445086705202, "acc_norm_stderr": 0.026613350840261733},
    "harness|ko_mmlu_logical_fallacies|5": {"acc": 0.4785276073619632, "acc_stderr": 0.03924746876751129, "acc_norm": 0.4785276073619632, "acc_norm_stderr": 0.03924746876751129},
    "harness|ko_mmlu_prehistory|5": {"acc": 0.5493827160493827, "acc_stderr": 0.027684721415656196, "acc_norm": 0.5493827160493827, "acc_norm_stderr": 0.027684721415656196},
    "harness|ko_mmlu_college_mathematics|5": {"acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196},
    "harness|ko_mmlu_high_school_government_and_politics|5": {"acc": 0.6010362694300518, "acc_stderr": 0.03533999094065696, "acc_norm": 0.6010362694300518, "acc_norm_stderr": 0.03533999094065696},
    "harness|ko_mmlu_econometrics|5": {"acc": 0.40350877192982454, "acc_stderr": 0.04615186962583703, "acc_norm": 0.40350877192982454, "acc_norm_stderr": 0.04615186962583703},
    "harness|ko_mmlu_high_school_psychology|5": {"acc": 0.6128440366972477, "acc_stderr": 0.02088423199264345, "acc_norm": 0.6128440366972477, "acc_norm_stderr": 0.02088423199264345},
    "harness|ko_mmlu_formal_logic|5": {"acc": 0.4444444444444444, "acc_stderr": 0.044444444444444495, "acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.044444444444444495},
    "harness|ko_mmlu_nutrition|5": {"acc": 0.5522875816993464, "acc_stderr": 0.028472938478033522, "acc_norm": 0.5522875816993464, "acc_norm_stderr": 0.028472938478033522},
    "harness|ko_mmlu_business_ethics|5": {"acc": 0.58, "acc_stderr": 0.049604496374885836, "acc_norm": 0.58, "acc_norm_stderr": 0.049604496374885836},
    "harness|ko_mmlu_international_law|5": {"acc": 0.7107438016528925, "acc_stderr": 0.041391127276354626, "acc_norm": 0.7107438016528925, "acc_norm_stderr": 0.041391127276354626},
    "harness|ko_mmlu_astronomy|5": {"acc": 0.5526315789473685, "acc_stderr": 0.0404633688397825, "acc_norm": 0.5526315789473685, "acc_norm_stderr": 0.0404633688397825},
    "harness|ko_mmlu_professional_psychology|5": {"acc": 0.434640522875817, "acc_stderr": 0.020054269200726452, "acc_norm": 0.434640522875817, "acc_norm_stderr": 0.020054269200726452},
    "harness|ko_mmlu_professional_accounting|5": {"acc": 0.3262411347517731, "acc_stderr": 0.02796845304356317, "acc_norm": 0.3262411347517731, "acc_norm_stderr": 0.02796845304356317},
    "harness|ko_mmlu_machine_learning|5": {"acc": 0.5357142857142857, "acc_stderr": 0.04733667890053756, "acc_norm": 0.5357142857142857, "acc_norm_stderr": 0.04733667890053756},
    "harness|ko_mmlu_high_school_statistics|5": {"acc": 0.4351851851851852, "acc_stderr": 0.033812000056435254, "acc_norm": 0.4351851851851852, "acc_norm_stderr": 0.033812000056435254},
    "harness|ko_mmlu_moral_scenarios|5": {"acc": 0.30837988826815643, "acc_stderr": 0.015445716910998877, "acc_norm": 0.30837988826815643, "acc_norm_stderr": 0.015445716910998877},
    "harness|ko_mmlu_college_computer_science|5": {"acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836},
    "harness|ko_mmlu_high_school_computer_science|5": {"acc": 0.67, "acc_stderr": 0.047258156262526094, "acc_norm": 0.67, "acc_norm_stderr": 0.047258156262526094},
    "harness|ko_mmlu_professional_medicine|5": {"acc": 0.40808823529411764, "acc_stderr": 0.029855261393483924, "acc_norm": 0.40808823529411764, "acc_norm_stderr": 0.029855261393483924},
    "harness|ko_mmlu_security_studies|5": {"acc": 0.6204081632653061, "acc_stderr": 0.03106721126287246, "acc_norm": 0.6204081632653061, "acc_norm_stderr": 0.03106721126287246},
    "harness|ko_mmlu_high_school_world_history|5": {"acc": 0.6751054852320675, "acc_stderr": 0.030486039389105317, "acc_norm": 0.6751054852320675, "acc_norm_stderr": 0.030486039389105317},
    "harness|ko_mmlu_professional_law|5": {"acc": 0.3683181225554107, "acc_stderr": 0.012319403369564644, "acc_norm": 0.3683181225554107, "acc_norm_stderr": 0.012319403369564644},
    "harness|ko_mmlu_high_school_us_history|5": {"acc": 0.5588235294117647, "acc_stderr": 0.034849415144292316, "acc_norm": 0.5588235294117647, "acc_norm_stderr": 0.034849415144292316},
    "harness|ko_mmlu_high_school_european_history|5": {"acc": 0.6363636363636364, "acc_stderr": 0.03756335775187895, "acc_norm": 0.6363636363636364, "acc_norm_stderr": 0.03756335775187895},
    "harness|ko_truthfulqa_mc|0": {"mc1": 0.3953488372093023, "mc1_stderr": 0.017115815632418183, "mc2": 0.5576718488551453, "mc2_stderr": 0.01598772875231436},
    "harness|ko_commongen_v2|2": {"acc": 0.5100354191263282, "acc_stderr": 0.017186891286894053, "acc_norm": 0.5088547815820543, "acc_norm_stderr": 0.017187658199336736}
  },
  "versions": {
    "all": 0,
    "harness|ko_arc_challenge|25": 0,
    "harness|ko_hellaswag|10": 0,
    "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1,
    "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1,
    "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1,
    "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1,
    "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1,
    "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1,
    "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1,
    "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1,
    "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1,
    "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1,
    "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1,
    "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1,
    "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1,
    "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1,
    "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1,
    "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1,
    "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1,
    "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1,
    "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1,
    "harness|ko_truthfulqa_mc|0": 0,
    "harness|ko_commongen_v2|2": 1
  },
  "config_general": {
    "model_name": "RLHFlow/LLaMA3-iterative-DPO-final",
    "model_sha": "40b73bd07a019795837f80579fe95470484ca82b",
    "model_dtype": "torch.float16",
    "lighteval_sha": "",
    "num_few_shot_default": 0,
    "num_fewshot_seeds": 1,
    "override_batch_size": 1,
    "max_samples": null
  }
}
Raphael21/Raphael21-SOLAR-10.7B/result_2024-02-26 11:01:49.json
DELETED
@@ -1,444 +0,0 @@
{
  "results": {
    "harness|ko_arc_challenge|25": {"acc": 0.48293515358361777, "acc_stderr": 0.014602878388536597, "acc_norm": 0.5418088737201365, "acc_norm_stderr": 0.0145602203087147},
    "harness|ko_hellaswag|10": {"acc": 0.4885480979884485, "acc_stderr": 0.004988472459418033, "acc_norm": 0.675363473411671, "acc_norm_stderr": 0.004672819355838559},
    "harness|ko_mmlu_world_religions|5": {"acc": 0.5847953216374269, "acc_stderr": 0.03779275945503201, "acc_norm": 0.5847953216374269, "acc_norm_stderr": 0.03779275945503201},
    "harness|ko_mmlu_management|5": {"acc": 0.6504854368932039, "acc_stderr": 0.047211885060971716, "acc_norm": 0.6504854368932039, "acc_norm_stderr": 0.047211885060971716},
    "harness|ko_mmlu_miscellaneous|5": {"acc": 0.6538952745849298, "acc_stderr": 0.017011965266412077, "acc_norm": 0.6538952745849298, "acc_norm_stderr": 0.017011965266412077},
    "harness|ko_mmlu_anatomy|5": {"acc": 0.43703703703703706, "acc_stderr": 0.04284958639753399, "acc_norm": 0.43703703703703706, "acc_norm_stderr": 0.04284958639753399},
    "harness|ko_mmlu_abstract_algebra|5": {"acc": 0.22, "acc_stderr": 0.04163331998932269, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932269},
    "harness|ko_mmlu_conceptual_physics|5": {"acc": 0.46808510638297873, "acc_stderr": 0.03261936918467381, "acc_norm": 0.46808510638297873, "acc_norm_stderr": 0.03261936918467381},
    "harness|ko_mmlu_virology|5": {"acc": 0.463855421686747, "acc_stderr": 0.03882310850890594, "acc_norm": 0.463855421686747, "acc_norm_stderr": 0.03882310850890594},
    "harness|ko_mmlu_philosophy|5": {"acc": 0.6205787781350482, "acc_stderr": 0.027559949802347824, "acc_norm": 0.6205787781350482, "acc_norm_stderr": 0.027559949802347824},
    "harness|ko_mmlu_human_aging|5": {"acc": 0.5650224215246636, "acc_stderr": 0.033272833702713445, "acc_norm": 0.5650224215246636, "acc_norm_stderr": 0.033272833702713445},
    "harness|ko_mmlu_human_sexuality|5": {"acc": 0.6335877862595419, "acc_stderr": 0.042258754519696386, "acc_norm": 0.6335877862595419, "acc_norm_stderr": 0.042258754519696386},
    "harness|ko_mmlu_medical_genetics|5": {"acc": 0.53, "acc_stderr": 0.050161355804659205, "acc_norm": 0.53, "acc_norm_stderr": 0.050161355804659205},
    "harness|ko_mmlu_high_school_geography|5": {"acc": 0.7070707070707071, "acc_stderr": 0.03242497958178817, "acc_norm": 0.7070707070707071, "acc_norm_stderr": 0.03242497958178817},
    "harness|ko_mmlu_electrical_engineering|5": {"acc": 0.4689655172413793, "acc_stderr": 0.04158632762097828, "acc_norm": 0.4689655172413793, "acc_norm_stderr": 0.04158632762097828},
    "harness|ko_mmlu_college_physics|5": {"acc": 0.37254901960784315, "acc_stderr": 0.04810840148082635, "acc_norm": 0.37254901960784315, "acc_norm_stderr": 0.04810840148082635},
    "harness|ko_mmlu_high_school_microeconomics|5": {"acc": 0.6638655462184874, "acc_stderr": 0.03068473711513537, "acc_norm": 0.6638655462184874, "acc_norm_stderr": 0.03068473711513537},
    "harness|ko_mmlu_high_school_macroeconomics|5": {"acc": 0.558974358974359, "acc_stderr": 0.025174048384000718, "acc_norm": 0.558974358974359, "acc_norm_stderr": 0.025174048384000718},
    "harness|ko_mmlu_computer_security|5": {"acc": 0.55, "acc_stderr": 0.04999999999999999, "acc_norm": 0.55, "acc_norm_stderr": 0.04999999999999999},
    "harness|ko_mmlu_global_facts|5": {"acc": 0.37, "acc_stderr": 0.048523658709391, "acc_norm": 0.37, "acc_norm_stderr": 0.048523658709391},
    "harness|ko_mmlu_jurisprudence|5": {"acc": 0.6111111111111112, "acc_stderr": 0.0471282125742677, "acc_norm": 0.6111111111111112, "acc_norm_stderr": 0.0471282125742677},
    "harness|ko_mmlu_high_school_chemistry|5": {"acc": 0.4039408866995074, "acc_stderr": 0.03452453903822039, "acc_norm": 0.4039408866995074, "acc_norm_stderr": 0.03452453903822039},
    "harness|ko_mmlu_high_school_biology|5": {"acc": 0.5838709677419355, "acc_stderr": 0.028040981380761533, "acc_norm": 0.5838709677419355, "acc_norm_stderr": 0.028040981380761533},
    "harness|ko_mmlu_marketing|5": {"acc": 0.7478632478632479, "acc_stderr": 0.02844796547623102, "acc_norm": 0.7478632478632479, "acc_norm_stderr": 0.02844796547623102},
    "harness|ko_mmlu_clinical_knowledge|5": {"acc": 0.5245283018867924, "acc_stderr": 0.030735822206205615, "acc_norm": 0.5245283018867924, "acc_norm_stderr": 0.030735822206205615},
    "harness|ko_mmlu_public_relations|5": {"acc": 0.5636363636363636, "acc_stderr": 0.04750185058907296, "acc_norm": 0.5636363636363636, "acc_norm_stderr": 0.04750185058907296},
    "harness|ko_mmlu_high_school_mathematics|5": {"acc": 0.37037037037037035, "acc_stderr": 0.02944316932303154, "acc_norm": 0.37037037037037035, "acc_norm_stderr": 0.02944316932303154},
    "harness|ko_mmlu_high_school_physics|5": {"acc": 0.33112582781456956, "acc_stderr": 0.038425817186598696, "acc_norm": 0.33112582781456956, "acc_norm_stderr": 0.038425817186598696},
    "harness|ko_mmlu_sociology|5": {"acc": 0.6467661691542289, "acc_stderr": 0.03379790611796777, "acc_norm": 0.6467661691542289, "acc_norm_stderr": 0.03379790611796777},
    "harness|ko_mmlu_college_medicine|5": {"acc": 0.5028901734104047, "acc_stderr": 0.038124005659748335, "acc_norm": 0.5028901734104047, "acc_norm_stderr": 0.038124005659748335},
    "harness|ko_mmlu_elementary_mathematics|5": {"acc": 0.42857142857142855, "acc_stderr": 0.025487187147859372, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.025487187147859372},
    "harness|ko_mmlu_college_biology|5": {"acc": 0.5, "acc_stderr": 0.04181210050035455, "acc_norm": 0.5, "acc_norm_stderr": 0.04181210050035455},
    "harness|ko_mmlu_college_chemistry|5": {"acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975},
    "harness|ko_mmlu_us_foreign_policy|5": {"acc": 0.72, "acc_stderr": 0.04512608598542127, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542127},
    "harness|ko_mmlu_moral_disputes|5": {"acc": 0.5549132947976878, "acc_stderr": 0.02675625512966377, "acc_norm": 0.5549132947976878, "acc_norm_stderr": 0.02675625512966377},
    "harness|ko_mmlu_logical_fallacies|5": {"acc": 0.5153374233128835, "acc_stderr": 0.039265223787088445, "acc_norm": 0.5153374233128835, "acc_norm_stderr": 0.039265223787088445},
    "harness|ko_mmlu_prehistory|5": {"acc": 0.6080246913580247, "acc_stderr": 0.027163686038271146, "acc_norm": 0.6080246913580247, "acc_norm_stderr": 0.027163686038271146},
    "harness|ko_mmlu_college_mathematics|5": {"acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034},
    "harness|ko_mmlu_high_school_government_and_politics|5": {"acc": 0.694300518134715, "acc_stderr": 0.033248379397581594, "acc_norm": 0.694300518134715, "acc_norm_stderr": 0.033248379397581594},
    "harness|ko_mmlu_econometrics|5": {"acc": 0.45614035087719296, "acc_stderr": 0.046854730419077895, "acc_norm": 0.45614035087719296, "acc_norm_stderr": 0.046854730419077895},
    "harness|ko_mmlu_high_school_psychology|5": {"acc": 0.6972477064220184, "acc_stderr": 0.019698711434756346, "acc_norm": 0.6972477064220184, "acc_norm_stderr": 0.019698711434756346},
    "harness|ko_mmlu_formal_logic|5": {"acc": 0.35714285714285715, "acc_stderr": 0.04285714285714281, "acc_norm": 0.35714285714285715, "acc_norm_stderr": 0.04285714285714281},
    "harness|ko_mmlu_nutrition|5": {"acc": 0.6013071895424836, "acc_stderr": 0.02803609227389177, "acc_norm": 0.6013071895424836, "acc_norm_stderr": 0.02803609227389177},
    "harness|ko_mmlu_business_ethics|5": {"acc": 0.56, "acc_stderr": 0.04988876515698589, "acc_norm": 0.56, "acc_norm_stderr": 0.04988876515698589},
    "harness|ko_mmlu_international_law|5": {"acc": 0.6694214876033058, "acc_stderr": 0.042943408452120926, "acc_norm": 0.6694214876033058, "acc_norm_stderr": 0.042943408452120926},
    "harness|ko_mmlu_astronomy|5": {"acc": 0.5723684210526315, "acc_stderr": 0.04026097083296564, "acc_norm": 0.5723684210526315, "acc_norm_stderr": 0.04026097083296564},
    "harness|ko_mmlu_professional_psychology|5": {"acc": 0.4918300653594771, "acc_stderr": 0.020225134343057255, "acc_norm": 0.4918300653594771, "acc_norm_stderr": 0.020225134343057255},
    "harness|ko_mmlu_professional_accounting|5": {"acc": 0.3546099290780142, "acc_stderr": 0.02853865002887864, "acc_norm": 0.3546099290780142, "acc_norm_stderr": 0.02853865002887864},
    "harness|ko_mmlu_machine_learning|5": {"acc": 0.38392857142857145, "acc_stderr": 0.04616143075028546, "acc_norm": 0.38392857142857145, "acc_norm_stderr": 0.04616143075028546},
    "harness|ko_mmlu_high_school_statistics|5": {"acc": 0.5416666666666666, "acc_stderr": 0.03398110890294636, "acc_norm": 0.5416666666666666, "acc_norm_stderr": 0.03398110890294636},
    "harness|ko_mmlu_moral_scenarios|5": {"acc": 0.3463687150837989, "acc_stderr": 0.015913546784020117, "acc_norm": 0.3463687150837989, "acc_norm_stderr": 0.015913546784020117},
    "harness|ko_mmlu_college_computer_science|5": {"acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795},
    "harness|ko_mmlu_high_school_computer_science|5": {"acc": 0.62, "acc_stderr": 0.048783173121456316, "acc_norm": 0.62, "acc_norm_stderr": 0.048783173121456316},
    "harness|ko_mmlu_professional_medicine|5": {"acc": 0.4963235294117647, "acc_stderr": 0.030372015885428195, "acc_norm": 0.4963235294117647, "acc_norm_stderr": 0.030372015885428195},
    "harness|ko_mmlu_security_studies|5": {"acc": 0.6204081632653061, "acc_stderr": 0.031067211262872464, "acc_norm": 0.6204081632653061, "acc_norm_stderr": 0.031067211262872464},
    "harness|ko_mmlu_high_school_world_history|5": {"acc": 0.7510548523206751, "acc_stderr": 0.028146970599422644, "acc_norm": 0.7510548523206751, "acc_norm_stderr": 0.028146970599422644},
    "harness|ko_mmlu_professional_law|5": {"acc": 0.423728813559322, "acc_stderr": 0.01262078515588599, "acc_norm": 0.423728813559322, "acc_norm_stderr": 0.01262078515588599},
    "harness|ko_mmlu_high_school_us_history|5": {"acc": 0.5931372549019608, "acc_stderr": 0.03447891136353382, "acc_norm": 0.5931372549019608, "acc_norm_stderr": 0.03447891136353382},
    "harness|ko_mmlu_high_school_european_history|5": {"acc": 0.6848484848484848, "acc_stderr": 0.0362773057502241, "acc_norm": 0.6848484848484848, "acc_norm_stderr": 0.0362773057502241},
    "harness|ko_truthfulqa_mc|0": {"mc1": 0.47613219094247244, "mc1_stderr": 0.017483547156961585, "mc2": 0.6437655582948802, "mc2_stderr": 0.015843103346719872},
    "harness|ko_commongen_v2|2": {"acc": 0.45690672963400236, "acc_stderr": 0.017126389093086784, "acc_norm": 0.5289256198347108, "acc_norm_stderr": 0.017161563949916348}
  },
  "versions": {
    "all": 0,
    "harness|ko_arc_challenge|25": 0,
    "harness|ko_hellaswag|10": 0,
    "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1,
    "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1,
    "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1,
    "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1,
    "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1,
    "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1,
    "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1,
    "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1,
    "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1,
    "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1,
    "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1,
    "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1,
    "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1,
    "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1,
    "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1,
    "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1,
    "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1,
    "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1,
    "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1,
    "harness|ko_truthfulqa_mc|0": 0,
    "harness|ko_commongen_v2|2": 1
  },
  "config_general": {
    "model_name": "Raphael21/Raphael21-SOLAR-10.7B",
    "model_sha": "7e0a60cde6431778dd80b90376415ad8bb171de7",
    "model_dtype": "torch.float16",
    "lighteval_sha": "",
    "num_few_shot_default": 0,
    "num_fewshot_seeds": 1,
    "override_batch_size": 1,
    "max_samples": null
  }
}
RubielLabarta/LogoS-7Bx2-MoE-13B-v0.2/result_2024-07-03 09:55:23.json
DELETED
@@ -1,444 +0,0 @@
{
  "results": {
    "harness|ko_arc_challenge|25": {"acc": 0.40017064846416384, "acc_stderr": 0.014317197787809178, "acc_norm": 0.4598976109215017, "acc_norm_stderr": 0.01456431885692485},
    "harness|ko_hellaswag|10": {"acc": 0.4041027683728341, "acc_stderr": 0.004897146690596257, "acc_norm": 0.536247759410476, "acc_norm_stderr": 0.004976651989757642},
    "harness|ko_mmlu_world_religions|5": {"acc": 0.47953216374269003, "acc_stderr": 0.0383161053282193, "acc_norm": 0.47953216374269003, "acc_norm_stderr": 0.0383161053282193},
    "harness|ko_mmlu_management|5": {"acc": 0.5922330097087378, "acc_stderr": 0.04865777570410768, "acc_norm": 0.5922330097087378, "acc_norm_stderr": 0.04865777570410768},
    "harness|ko_mmlu_miscellaneous|5": {"acc": 0.4763729246487867, "acc_stderr": 0.017859989765176453, "acc_norm": 0.4763729246487867, "acc_norm_stderr": 0.017859989765176453},
    "harness|ko_mmlu_anatomy|5": {"acc": 0.37037037037037035, "acc_stderr": 0.04171654161354543, "acc_norm": 0.37037037037037035, "acc_norm_stderr": 0.04171654161354543},
    "harness|ko_mmlu_abstract_algebra|5": {"acc": 0.28, "acc_stderr": 0.04512608598542127, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542127},
    "harness|ko_mmlu_conceptual_physics|5": {"acc": 0.40425531914893614, "acc_stderr": 0.03208115750788684, "acc_norm": 0.40425531914893614, "acc_norm_stderr": 0.03208115750788684},
    "harness|ko_mmlu_virology|5": {"acc": 0.37349397590361444, "acc_stderr": 0.03765845117168863, "acc_norm": 0.37349397590361444, "acc_norm_stderr": 0.03765845117168863},
    "harness|ko_mmlu_philosophy|5": {"acc": 0.44694533762057875, "acc_stderr": 0.02823776942208533, "acc_norm": 0.44694533762057875, "acc_norm_stderr": 0.02823776942208533},
    "harness|ko_mmlu_human_aging|5": {"acc": 0.4260089686098655, "acc_stderr": 0.033188332862172806, "acc_norm": 0.4260089686098655, "acc_norm_stderr": 0.033188332862172806},
    "harness|ko_mmlu_human_sexuality|5": {"acc": 0.4351145038167939, "acc_stderr": 0.04348208051644858, "acc_norm": 0.4351145038167939, "acc_norm_stderr": 0.04348208051644858},
    "harness|ko_mmlu_medical_genetics|5": {"acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589},
    "harness|ko_mmlu_high_school_geography|5": {"acc": 0.5606060606060606, "acc_stderr": 0.035360859475294805, "acc_norm": 0.5606060606060606, "acc_norm_stderr": 0.035360859475294805},
    "harness|ko_mmlu_electrical_engineering|5": {"acc": 0.4413793103448276, "acc_stderr": 0.04137931034482758, "acc_norm": 0.4413793103448276, "acc_norm_stderr": 0.04137931034482758},
    "harness|ko_mmlu_college_physics|5": {"acc": 0.2549019607843137, "acc_stderr": 0.04336432707993178, "acc_norm": 0.2549019607843137, "acc_norm_stderr": 0.04336432707993178},
    "harness|ko_mmlu_high_school_microeconomics|5": {"acc": 0.5252100840336135, "acc_stderr": 0.03243718055137411, "acc_norm": 0.5252100840336135, "acc_norm_stderr": 0.03243718055137411},
    "harness|ko_mmlu_high_school_macroeconomics|5": {"acc": 0.46923076923076923, "acc_stderr": 0.025302958890850154, "acc_norm": 0.46923076923076923, "acc_norm_stderr": 0.025302958890850154},
    "harness|ko_mmlu_computer_security|5": {"acc": 0.63, "acc_stderr": 0.04852365870939098, "acc_norm": 0.63, "acc_norm_stderr": 0.04852365870939098},
    "harness|ko_mmlu_global_facts|5": {"acc": 0.27, "acc_stderr": 0.0446196043338474, "acc_norm": 0.27, "acc_norm_stderr": 0.0446196043338474},
    "harness|ko_mmlu_jurisprudence|5": {"acc": 0.49074074074074076, "acc_stderr": 0.04832853553437055, "acc_norm": 0.49074074074074076, "acc_norm_stderr": 0.04832853553437055},
    "harness|ko_mmlu_high_school_chemistry|5": {"acc": 0.3694581280788177, "acc_stderr": 0.033959703819985754, "acc_norm": 0.3694581280788177, "acc_norm_stderr": 0.033959703819985754},
    "harness|ko_mmlu_high_school_biology|5": {"acc": 0.44516129032258067, "acc_stderr": 0.028272410186214906, "acc_norm": 0.44516129032258067, "acc_norm_stderr": 0.028272410186214906},
    "harness|ko_mmlu_marketing|5": {"acc": 0.7777777777777778, "acc_stderr": 0.027236013946196687, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.027236013946196687},
    "harness|ko_mmlu_clinical_knowledge|5": {"acc": 0.4716981132075472, "acc_stderr": 0.030723535249006107, "acc_norm": 0.4716981132075472, "acc_norm_stderr": 0.030723535249006107},
    "harness|ko_mmlu_public_relations|5": {"acc": 0.5181818181818182, "acc_stderr": 0.04785964010794916, "acc_norm": 0.5181818181818182, "acc_norm_stderr": 0.04785964010794916},
    "harness|ko_mmlu_high_school_mathematics|5": {"acc": 0.34814814814814815, "acc_stderr": 0.029045600290616255, "acc_norm": 0.34814814814814815, "acc_norm_stderr": 0.029045600290616255},
    "harness|ko_mmlu_high_school_physics|5": {"acc": 0.2781456953642384, "acc_stderr": 0.03658603262763744, "acc_norm": 0.2781456953642384, "acc_norm_stderr": 0.03658603262763744},
    "harness|ko_mmlu_sociology|5": {"acc": 0.572139303482587, "acc_stderr": 0.03498541988407795, "acc_norm": 0.572139303482587, "acc_norm_stderr": 0.03498541988407795},
    "harness|ko_mmlu_college_medicine|5": {"acc": 0.4161849710982659, "acc_stderr": 0.03758517775404947, "acc_norm": 0.4161849710982659, "acc_norm_stderr": 0.03758517775404947},
    "harness|ko_mmlu_elementary_mathematics|5": {"acc": 0.3888888888888889, "acc_stderr": 0.025107425481137282, "acc_norm": 0.3888888888888889, "acc_norm_stderr": 0.025107425481137282},
    "harness|ko_mmlu_college_biology|5": {"acc": 0.3611111111111111, "acc_stderr": 0.040166600304512336, "acc_norm": 0.3611111111111111, "acc_norm_stderr": 0.040166600304512336},
    "harness|ko_mmlu_college_chemistry|5": {"acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845},
    "harness|ko_mmlu_us_foreign_policy|5": {"acc": 0.53, "acc_stderr": 0.050161355804659205, "acc_norm": 0.53, "acc_norm_stderr": 0.050161355804659205},
    "harness|ko_mmlu_moral_disputes|5": {"acc": 0.5115606936416185, "acc_stderr": 0.026911898686377913, "acc_norm": 0.5115606936416185, "acc_norm_stderr": 0.026911898686377913},
    "harness|ko_mmlu_logical_fallacies|5": {"acc": 0.5337423312883436, "acc_stderr": 0.039194155450484096, "acc_norm": 0.5337423312883436, "acc_norm_stderr": 0.039194155450484096},
    "harness|ko_mmlu_prehistory|5": {"acc": 0.4506172839506173, "acc_stderr": 0.027684721415656203, "acc_norm": 0.4506172839506173, "acc_norm_stderr": 0.027684721415656203},
    "harness|ko_mmlu_college_mathematics|5": {"acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045},
    "harness|ko_mmlu_high_school_government_and_politics|5": {"acc": 0.49740932642487046, "acc_stderr": 0.03608390745384488, "acc_norm": 0.49740932642487046, "acc_norm_stderr": 0.03608390745384488},
    "harness|ko_mmlu_econometrics|5": {"acc": 0.2894736842105263, "acc_stderr": 0.04266339443159394, "acc_norm": 0.2894736842105263, "acc_norm_stderr": 0.04266339443159394},
    "harness|ko_mmlu_high_school_psychology|5": {"acc": 0.5321100917431193, "acc_stderr": 0.02139307122268081, "acc_norm": 0.5321100917431193, "acc_norm_stderr": 0.02139307122268081},
    "harness|ko_mmlu_formal_logic|5": {"acc": 0.4444444444444444, "acc_stderr": 0.044444444444444495, "acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.044444444444444495},
    "harness|ko_mmlu_nutrition|5": {"acc": 0.48366013071895425, "acc_stderr": 0.028614624752805407, "acc_norm": 0.48366013071895425, "acc_norm_stderr": 0.028614624752805407},
    "harness|ko_mmlu_business_ethics|5": {"acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr": 0.050251890762960605},
    "harness|ko_mmlu_international_law|5": {"acc": 0.6198347107438017, "acc_stderr": 0.04431324501968431, "acc_norm": 0.6198347107438017, "acc_norm_stderr": 0.04431324501968431},
    "harness|ko_mmlu_astronomy|5": {"acc": 0.40131578947368424, "acc_stderr": 0.03988903703336285, "acc_norm": 0.40131578947368424, "acc_norm_stderr": 0.03988903703336285},
    "harness|ko_mmlu_professional_psychology|5": {"acc": 0.38562091503267976,
-
"acc_stderr": 0.019691459052354154,
|
282 |
-
"acc_norm": 0.38562091503267976,
|
283 |
-
"acc_norm_stderr": 0.019691459052354154
|
284 |
-
},
|
285 |
-
"harness|ko_mmlu_professional_accounting|5": {
|
286 |
-
"acc": 0.3546099290780142,
|
287 |
-
"acc_stderr": 0.02853865002887864,
|
288 |
-
"acc_norm": 0.3546099290780142,
|
289 |
-
"acc_norm_stderr": 0.02853865002887864
|
290 |
-
},
|
291 |
-
"harness|ko_mmlu_machine_learning|5": {
|
292 |
-
"acc": 0.38392857142857145,
|
293 |
-
"acc_stderr": 0.04616143075028546,
|
294 |
-
"acc_norm": 0.38392857142857145,
|
295 |
-
"acc_norm_stderr": 0.04616143075028546
|
296 |
-
},
|
297 |
-
"harness|ko_mmlu_high_school_statistics|5": {
|
298 |
-
"acc": 0.4074074074074074,
|
299 |
-
"acc_stderr": 0.033509916046960436,
|
300 |
-
"acc_norm": 0.4074074074074074,
|
301 |
-
"acc_norm_stderr": 0.033509916046960436
|
302 |
-
},
|
303 |
-
"harness|ko_mmlu_moral_scenarios|5": {
|
304 |
-
"acc": 0.2860335195530726,
|
305 |
-
"acc_stderr": 0.015113972129062129,
|
306 |
-
"acc_norm": 0.2860335195530726,
|
307 |
-
"acc_norm_stderr": 0.015113972129062129
|
308 |
-
},
|
309 |
-
"harness|ko_mmlu_college_computer_science|5": {
|
310 |
-
"acc": 0.38,
|
311 |
-
"acc_stderr": 0.04878317312145633,
|
312 |
-
"acc_norm": 0.38,
|
313 |
-
"acc_norm_stderr": 0.04878317312145633
|
314 |
-
},
|
315 |
-
"harness|ko_mmlu_high_school_computer_science|5": {
|
316 |
-
"acc": 0.67,
|
317 |
-
"acc_stderr": 0.04725815626252611,
|
318 |
-
"acc_norm": 0.67,
|
319 |
-
"acc_norm_stderr": 0.04725815626252611
|
320 |
-
},
|
321 |
-
"harness|ko_mmlu_professional_medicine|5": {
|
322 |
-
"acc": 0.35661764705882354,
|
323 |
-
"acc_stderr": 0.029097209568411945,
|
324 |
-
"acc_norm": 0.35661764705882354,
|
325 |
-
"acc_norm_stderr": 0.029097209568411945
|
326 |
-
},
|
327 |
-
"harness|ko_mmlu_security_studies|5": {
|
328 |
-
"acc": 0.5306122448979592,
|
329 |
-
"acc_stderr": 0.031949171367580624,
|
330 |
-
"acc_norm": 0.5306122448979592,
|
331 |
-
"acc_norm_stderr": 0.031949171367580624
|
332 |
-
},
|
333 |
-
"harness|ko_mmlu_high_school_world_history|5": {
|
334 |
-
"acc": 0.6118143459915611,
|
335 |
-
"acc_stderr": 0.0317229500433233,
|
336 |
-
"acc_norm": 0.6118143459915611,
|
337 |
-
"acc_norm_stderr": 0.0317229500433233
|
338 |
-
},
|
339 |
-
"harness|ko_mmlu_professional_law|5": {
|
340 |
-
"acc": 0.3500651890482399,
|
341 |
-
"acc_stderr": 0.012182552313215163,
|
342 |
-
"acc_norm": 0.3500651890482399,
|
343 |
-
"acc_norm_stderr": 0.012182552313215163
|
344 |
-
},
|
345 |
-
"harness|ko_mmlu_high_school_us_history|5": {
|
346 |
-
"acc": 0.49019607843137253,
|
347 |
-
"acc_stderr": 0.03508637358630572,
|
348 |
-
"acc_norm": 0.49019607843137253,
|
349 |
-
"acc_norm_stderr": 0.03508637358630572
|
350 |
-
},
|
351 |
-
"harness|ko_mmlu_high_school_european_history|5": {
|
352 |
-
"acc": 0.5272727272727272,
|
353 |
-
"acc_stderr": 0.03898531605579419,
|
354 |
-
"acc_norm": 0.5272727272727272,
|
355 |
-
"acc_norm_stderr": 0.03898531605579419
|
356 |
-
},
|
357 |
-
"harness|ko_truthfulqa_mc|0": {
|
358 |
-
"mc1": 0.4283965728274174,
|
359 |
-
"mc1_stderr": 0.017323088597314767,
|
360 |
-
"mc2": 0.5874844740097508,
|
361 |
-
"mc2_stderr": 0.01634195500942428
|
362 |
-
},
|
363 |
-
"harness|ko_commongen_v2|2": {
|
364 |
-
"acc": 0.44155844155844154,
|
365 |
-
"acc_stderr": 0.017072525875563103,
|
366 |
-
"acc_norm": 0.448642266824085,
|
367 |
-
"acc_norm_stderr": 0.01709943051472578
|
368 |
-
}
|
369 |
-
},
|
370 |
-
"versions": {
|
371 |
-
"all": 0,
|
372 |
-
"harness|ko_arc_challenge|25": 0,
|
373 |
-
"harness|ko_hellaswag|10": 0,
|
374 |
-
"harness|ko_mmlu_world_religions|5": 1,
|
375 |
-
"harness|ko_mmlu_management|5": 1,
|
376 |
-
"harness|ko_mmlu_miscellaneous|5": 1,
|
377 |
-
"harness|ko_mmlu_anatomy|5": 1,
|
378 |
-
"harness|ko_mmlu_abstract_algebra|5": 1,
|
379 |
-
"harness|ko_mmlu_conceptual_physics|5": 1,
|
380 |
-
"harness|ko_mmlu_virology|5": 1,
|
381 |
-
"harness|ko_mmlu_philosophy|5": 1,
|
382 |
-
"harness|ko_mmlu_human_aging|5": 1,
|
383 |
-
"harness|ko_mmlu_human_sexuality|5": 1,
|
384 |
-
"harness|ko_mmlu_medical_genetics|5": 1,
|
385 |
-
"harness|ko_mmlu_high_school_geography|5": 1,
|
386 |
-
"harness|ko_mmlu_electrical_engineering|5": 1,
|
387 |
-
"harness|ko_mmlu_college_physics|5": 1,
|
388 |
-
"harness|ko_mmlu_high_school_microeconomics|5": 1,
|
389 |
-
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
|
390 |
-
"harness|ko_mmlu_computer_security|5": 1,
|
391 |
-
"harness|ko_mmlu_global_facts|5": 1,
|
392 |
-
"harness|ko_mmlu_jurisprudence|5": 1,
|
393 |
-
"harness|ko_mmlu_high_school_chemistry|5": 1,
|
394 |
-
"harness|ko_mmlu_high_school_biology|5": 1,
|
395 |
-
"harness|ko_mmlu_marketing|5": 1,
|
396 |
-
"harness|ko_mmlu_clinical_knowledge|5": 1,
|
397 |
-
"harness|ko_mmlu_public_relations|5": 1,
|
398 |
-
"harness|ko_mmlu_high_school_mathematics|5": 1,
|
399 |
-
"harness|ko_mmlu_high_school_physics|5": 1,
|
400 |
-
"harness|ko_mmlu_sociology|5": 1,
|
401 |
-
"harness|ko_mmlu_college_medicine|5": 1,
|
402 |
-
"harness|ko_mmlu_elementary_mathematics|5": 1,
|
403 |
-
"harness|ko_mmlu_college_biology|5": 1,
|
404 |
-
"harness|ko_mmlu_college_chemistry|5": 1,
|
405 |
-
"harness|ko_mmlu_us_foreign_policy|5": 1,
|
406 |
-
"harness|ko_mmlu_moral_disputes|5": 1,
|
407 |
-
"harness|ko_mmlu_logical_fallacies|5": 1,
|
408 |
-
"harness|ko_mmlu_prehistory|5": 1,
|
409 |
-
"harness|ko_mmlu_college_mathematics|5": 1,
|
410 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
|
411 |
-
"harness|ko_mmlu_econometrics|5": 1,
|
412 |
-
"harness|ko_mmlu_high_school_psychology|5": 1,
|
413 |
-
"harness|ko_mmlu_formal_logic|5": 1,
|
414 |
-
"harness|ko_mmlu_nutrition|5": 1,
|
415 |
-
"harness|ko_mmlu_business_ethics|5": 1,
|
416 |
-
"harness|ko_mmlu_international_law|5": 1,
|
417 |
-
"harness|ko_mmlu_astronomy|5": 1,
|
418 |
-
"harness|ko_mmlu_professional_psychology|5": 1,
|
419 |
-
"harness|ko_mmlu_professional_accounting|5": 1,
|
420 |
-
"harness|ko_mmlu_machine_learning|5": 1,
|
421 |
-
"harness|ko_mmlu_high_school_statistics|5": 1,
|
422 |
-
"harness|ko_mmlu_moral_scenarios|5": 1,
|
423 |
-
"harness|ko_mmlu_college_computer_science|5": 1,
|
424 |
-
"harness|ko_mmlu_high_school_computer_science|5": 1,
|
425 |
-
"harness|ko_mmlu_professional_medicine|5": 1,
|
426 |
-
"harness|ko_mmlu_security_studies|5": 1,
|
427 |
-
"harness|ko_mmlu_high_school_world_history|5": 1,
|
428 |
-
"harness|ko_mmlu_professional_law|5": 1,
|
429 |
-
"harness|ko_mmlu_high_school_us_history|5": 1,
|
430 |
-
"harness|ko_mmlu_high_school_european_history|5": 1,
|
431 |
-
"harness|ko_truthfulqa_mc|0": 0,
|
432 |
-
"harness|ko_commongen_v2|2": 1
|
433 |
-
},
|
434 |
-
"config_general": {
|
435 |
-
"model_name": "RubielLabarta/LogoS-7Bx2-MoE-13B-v0.2",
|
436 |
-
"model_sha": "fb0f72b9914a81892bfeea5a04fcd9676c883d64",
|
437 |
-
"model_dtype": "torch.float16",
|
438 |
-
"lighteval_sha": "",
|
439 |
-
"num_few_shot_default": 0,
|
440 |
-
"num_fewshot_seeds": 1,
|
441 |
-
"override_batch_size": 1,
|
442 |
-
"max_samples": null
|
443 |
-
}
|
444 |
-
}
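Every result file removed by this commit shares the same schema: a "results" map keyed by harness task name, a "versions" map, and a "config_general" block. Below is a minimal sketch, in Python, of reading one such file back from a local copy; the file path is hypothetical, and the equal-weight macro average over ko-MMLU subtasks is an illustrative aggregation, not necessarily the leaderboard's own scoring.

import json

# Hypothetical local copy of any one of the deleted result files.
path = "result.json"
with open(path, encoding="utf-8") as f:
    data = json.load(f)

results = data["results"]
# Equal-weight macro average of acc_norm over the 57 ko-MMLU subtasks
# (an assumed aggregation for illustration).
mmlu = [v["acc_norm"] for k, v in results.items() if k.startswith("harness|ko_mmlu_")]
print("model:", data["config_general"]["model_name"])
print(f"ko-MMLU macro acc_norm: {sum(mmlu) / len(mmlu):.4f} over {len(mmlu)} subtasks")
# ko_truthfulqa_mc reports mc1/mc2 instead of acc/acc_norm.
tqa = results["harness|ko_truthfulqa_mc|0"]
print(f"ko-TruthfulQA mc2: {tqa['mc2']:.4f}")

Because the schema is identical across files, the same snippet applies unchanged to every model removed in this commit.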
SJ-Donald/SJ-SOLAR-10.7b-DPO/result_2024-01-25 00:56:50.json
DELETED
@@ -1,444 +0,0 @@
- {
-   "results": {
-    "harness|ko_arc_challenge|25": { "acc": 0.4718430034129693, "acc_stderr": 0.014588204105102203, "acc_norm": 0.5366894197952219, "acc_norm_stderr": 0.014572000527756994 },
-    "harness|ko_hellaswag|10": { "acc": 0.4493128858793069, "acc_stderr": 0.004964075870120345, "acc_norm": 0.619896434973113, "acc_norm_stderr": 0.004844199910173041 },
-    "harness|ko_mmlu_world_religions|5": { "acc": 0.5789473684210527, "acc_stderr": 0.03786720706234214, "acc_norm": 0.5789473684210527, "acc_norm_stderr": 0.03786720706234214 },
-    "harness|ko_mmlu_management|5": { "acc": 0.6213592233009708, "acc_stderr": 0.04802694698258974, "acc_norm": 0.6213592233009708, "acc_norm_stderr": 0.04802694698258974 },
-    "harness|ko_mmlu_miscellaneous|5": { "acc": 0.6500638569604087, "acc_stderr": 0.017055679797150426, "acc_norm": 0.6500638569604087, "acc_norm_stderr": 0.017055679797150426 },
-    "harness|ko_mmlu_anatomy|5": { "acc": 0.4740740740740741, "acc_stderr": 0.04313531696750574, "acc_norm": 0.4740740740740741, "acc_norm_stderr": 0.04313531696750574 },
-    "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 },
-    "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.4851063829787234, "acc_stderr": 0.03267151848924777, "acc_norm": 0.4851063829787234, "acc_norm_stderr": 0.03267151848924777 },
-    "harness|ko_mmlu_virology|5": { "acc": 0.5120481927710844, "acc_stderr": 0.03891364495835817, "acc_norm": 0.5120481927710844, "acc_norm_stderr": 0.03891364495835817 },
-    "harness|ko_mmlu_philosophy|5": { "acc": 0.6366559485530546, "acc_stderr": 0.02731684767419271, "acc_norm": 0.6366559485530546, "acc_norm_stderr": 0.02731684767419271 },
-    "harness|ko_mmlu_human_aging|5": { "acc": 0.5829596412556054, "acc_stderr": 0.03309266936071721, "acc_norm": 0.5829596412556054, "acc_norm_stderr": 0.03309266936071721 },
-    "harness|ko_mmlu_human_sexuality|5": { "acc": 0.6259541984732825, "acc_stderr": 0.04243869242230524, "acc_norm": 0.6259541984732825, "acc_norm_stderr": 0.04243869242230524 },
-    "harness|ko_mmlu_medical_genetics|5": { "acc": 0.48, "acc_stderr": 0.05021167315686779, "acc_norm": 0.48, "acc_norm_stderr": 0.05021167315686779 },
-    "harness|ko_mmlu_high_school_geography|5": { "acc": 0.7373737373737373, "acc_stderr": 0.03135305009533086, "acc_norm": 0.7373737373737373, "acc_norm_stderr": 0.03135305009533086 },
-    "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.45517241379310347, "acc_stderr": 0.04149886942192117, "acc_norm": 0.45517241379310347, "acc_norm_stderr": 0.04149886942192117 },
-    "harness|ko_mmlu_college_physics|5": { "acc": 0.3431372549019608, "acc_stderr": 0.04724007352383888, "acc_norm": 0.3431372549019608, "acc_norm_stderr": 0.04724007352383888 },
-    "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.6260504201680672, "acc_stderr": 0.03142946637883708, "acc_norm": 0.6260504201680672, "acc_norm_stderr": 0.03142946637883708 },
-    "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.5615384615384615, "acc_stderr": 0.02515826601686861, "acc_norm": 0.5615384615384615, "acc_norm_stderr": 0.02515826601686861 },
-    "harness|ko_mmlu_computer_security|5": { "acc": 0.6, "acc_stderr": 0.049236596391733084, "acc_norm": 0.6, "acc_norm_stderr": 0.049236596391733084 },
-    "harness|ko_mmlu_global_facts|5": { "acc": 0.36, "acc_stderr": 0.048241815132442176, "acc_norm": 0.36, "acc_norm_stderr": 0.048241815132442176 },
-    "harness|ko_mmlu_jurisprudence|5": { "acc": 0.6296296296296297, "acc_stderr": 0.04668408033024931, "acc_norm": 0.6296296296296297, "acc_norm_stderr": 0.04668408033024931 },
-    "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.4088669950738916, "acc_stderr": 0.034590588158832314, "acc_norm": 0.4088669950738916, "acc_norm_stderr": 0.034590588158832314 },
-    "harness|ko_mmlu_high_school_biology|5": { "acc": 0.5903225806451613, "acc_stderr": 0.02797605491534736, "acc_norm": 0.5903225806451613, "acc_norm_stderr": 0.02797605491534736 },
-    "harness|ko_mmlu_marketing|5": { "acc": 0.7649572649572649, "acc_stderr": 0.02777883590493543, "acc_norm": 0.7649572649572649, "acc_norm_stderr": 0.02777883590493543 },
-    "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.5283018867924528, "acc_stderr": 0.0307235352490061, "acc_norm": 0.5283018867924528, "acc_norm_stderr": 0.0307235352490061 },
-    "harness|ko_mmlu_public_relations|5": { "acc": 0.5727272727272728, "acc_stderr": 0.04738198703545483, "acc_norm": 0.5727272727272728, "acc_norm_stderr": 0.04738198703545483 },
-    "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.3296296296296296, "acc_stderr": 0.028661201116524582, "acc_norm": 0.3296296296296296, "acc_norm_stderr": 0.028661201116524582 },
-    "harness|ko_mmlu_high_school_physics|5": { "acc": 0.304635761589404, "acc_stderr": 0.03757949922943343, "acc_norm": 0.304635761589404, "acc_norm_stderr": 0.03757949922943343 },
-    "harness|ko_mmlu_sociology|5": { "acc": 0.7164179104477612, "acc_stderr": 0.031871875379197945, "acc_norm": 0.7164179104477612, "acc_norm_stderr": 0.031871875379197945 },
-    "harness|ko_mmlu_college_medicine|5": { "acc": 0.47398843930635837, "acc_stderr": 0.03807301726504511, "acc_norm": 0.47398843930635837, "acc_norm_stderr": 0.03807301726504511 },
-    "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.43386243386243384, "acc_stderr": 0.025525034382474887, "acc_norm": 0.43386243386243384, "acc_norm_stderr": 0.025525034382474887 },
-    "harness|ko_mmlu_college_biology|5": { "acc": 0.5069444444444444, "acc_stderr": 0.04180806750294938, "acc_norm": 0.5069444444444444, "acc_norm_stderr": 0.04180806750294938 },
-    "harness|ko_mmlu_college_chemistry|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 },
-    "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 },
-    "harness|ko_mmlu_moral_disputes|5": { "acc": 0.5549132947976878, "acc_stderr": 0.02675625512966377, "acc_norm": 0.5549132947976878, "acc_norm_stderr": 0.02675625512966377 },
-    "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.5521472392638037, "acc_stderr": 0.03906947479456606, "acc_norm": 0.5521472392638037, "acc_norm_stderr": 0.03906947479456606 },
-    "harness|ko_mmlu_prehistory|5": { "acc": 0.6327160493827161, "acc_stderr": 0.02682280175950789, "acc_norm": 0.6327160493827161, "acc_norm_stderr": 0.02682280175950789 },
-    "harness|ko_mmlu_college_mathematics|5": { "acc": 0.33, "acc_stderr": 0.04725815626252604, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252604 },
-    "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.7253886010362695, "acc_stderr": 0.032210245080411544, "acc_norm": 0.7253886010362695, "acc_norm_stderr": 0.032210245080411544 },
-    "harness|ko_mmlu_econometrics|5": { "acc": 0.4473684210526316, "acc_stderr": 0.046774730044912005, "acc_norm": 0.4473684210526316, "acc_norm_stderr": 0.046774730044912005 },
-    "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.6697247706422018, "acc_stderr": 0.02016446633634298, "acc_norm": 0.6697247706422018, "acc_norm_stderr": 0.02016446633634298 },
-    "harness|ko_mmlu_formal_logic|5": { "acc": 0.3492063492063492, "acc_stderr": 0.04263906892795133, "acc_norm": 0.3492063492063492, "acc_norm_stderr": 0.04263906892795133 },
-    "harness|ko_mmlu_nutrition|5": { "acc": 0.5980392156862745, "acc_stderr": 0.028074158947600656, "acc_norm": 0.5980392156862745, "acc_norm_stderr": 0.028074158947600656 },
-    "harness|ko_mmlu_business_ethics|5": { "acc": 0.57, "acc_stderr": 0.049756985195624284, "acc_norm": 0.57, "acc_norm_stderr": 0.049756985195624284 },
-    "harness|ko_mmlu_international_law|5": { "acc": 0.7272727272727273, "acc_stderr": 0.04065578140908705, "acc_norm": 0.7272727272727273, "acc_norm_stderr": 0.04065578140908705 },
-    "harness|ko_mmlu_astronomy|5": { "acc": 0.5394736842105263, "acc_stderr": 0.04056242252249034, "acc_norm": 0.5394736842105263, "acc_norm_stderr": 0.04056242252249034 },
-    "harness|ko_mmlu_professional_psychology|5": { "acc": 0.5245098039215687, "acc_stderr": 0.020203517280261447, "acc_norm": 0.5245098039215687, "acc_norm_stderr": 0.020203517280261447 },
-    "harness|ko_mmlu_professional_accounting|5": { "acc": 0.38652482269503546, "acc_stderr": 0.02904919034254346, "acc_norm": 0.38652482269503546, "acc_norm_stderr": 0.02904919034254346 },
-    "harness|ko_mmlu_machine_learning|5": { "acc": 0.39285714285714285, "acc_stderr": 0.04635550135609976, "acc_norm": 0.39285714285714285, "acc_norm_stderr": 0.04635550135609976 },
-    "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.5231481481481481, "acc_stderr": 0.034063153607115065, "acc_norm": 0.5231481481481481, "acc_norm_stderr": 0.034063153607115065 },
-    "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.26033519553072626, "acc_stderr": 0.014676252009319475, "acc_norm": 0.26033519553072626, "acc_norm_stderr": 0.014676252009319475 },
-    "harness|ko_mmlu_college_computer_science|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 },
-    "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 },
-    "harness|ko_mmlu_professional_medicine|5": { "acc": 0.5110294117647058, "acc_stderr": 0.030365446477275675, "acc_norm": 0.5110294117647058, "acc_norm_stderr": 0.030365446477275675 },
-    "harness|ko_mmlu_security_studies|5": { "acc": 0.5836734693877551, "acc_stderr": 0.03155782816556165, "acc_norm": 0.5836734693877551, "acc_norm_stderr": 0.03155782816556165 },
-    "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.759493670886076, "acc_stderr": 0.027820781981149678, "acc_norm": 0.759493670886076, "acc_norm_stderr": 0.027820781981149678 },
-    "harness|ko_mmlu_professional_law|5": { "acc": 0.4152542372881356, "acc_stderr": 0.012585471793400665, "acc_norm": 0.4152542372881356, "acc_norm_stderr": 0.012585471793400665 },
-    "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.6617647058823529, "acc_stderr": 0.03320574612945431, "acc_norm": 0.6617647058823529, "acc_norm_stderr": 0.03320574612945431 },
-    "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.6787878787878788, "acc_stderr": 0.03646204963253813, "acc_norm": 0.6787878787878788, "acc_norm_stderr": 0.03646204963253813 },
-    "harness|ko_truthfulqa_mc|0": { "mc1": 0.4039167686658507, "mc1_stderr": 0.01717727682258428, "mc2": 0.5720346967291646, "mc2_stderr": 0.01586677021938394 },
-    "harness|ko_commongen_v2|2": { "acc": 0.5702479338842975, "acc_stderr": 0.01701984753597221, "acc_norm": 0.5844155844155844, "acc_norm_stderr": 0.01694358631307656 }
-  },
-  "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1, plus all 57 "harness|ko_mmlu_*|5" subtasks at version 1 },
-  "config_general": { "model_name": "SJ-Donald/SJ-SOLAR-10.7b-DPO", "model_sha": "a0cb2fbc20b3b65e8e6c626893eaf5456054f49c", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null }
- }
SJ-Donald/SOLAR-10.7B-slerp/result_2024-01-11 05:42:26.json
DELETED
@@ -1,444 +0,0 @@
- {
-   "results": {
-    "harness|ko_arc_challenge|25": { "acc": 0.4726962457337884, "acc_stderr": 0.014589589101985994, "acc_norm": 0.5358361774744027, "acc_norm_stderr": 0.014573813664735712 },
-    "harness|ko_hellaswag|10": { "acc": 0.4508066122286397, "acc_stderr": 0.00496557224680386, "acc_norm": 0.6202947619996017, "acc_norm_stderr": 0.004843216325090246 },
-    "harness|ko_mmlu_world_religions|5": { "acc": 0.5730994152046783, "acc_stderr": 0.03793620616529916, "acc_norm": 0.5730994152046783, "acc_norm_stderr": 0.03793620616529916 },
-    "harness|ko_mmlu_management|5": { "acc": 0.6213592233009708, "acc_stderr": 0.04802694698258974, "acc_norm": 0.6213592233009708, "acc_norm_stderr": 0.04802694698258974 },
-    "harness|ko_mmlu_miscellaneous|5": { "acc": 0.6526181353767561, "acc_stderr": 0.017026671748655728, "acc_norm": 0.6526181353767561, "acc_norm_stderr": 0.017026671748655728 },
-    "harness|ko_mmlu_anatomy|5": { "acc": 0.4888888888888889, "acc_stderr": 0.04318275491977976, "acc_norm": 0.4888888888888889, "acc_norm_stderr": 0.04318275491977976 },
-    "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 },
-    "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.4851063829787234, "acc_stderr": 0.032671518489247764, "acc_norm": 0.4851063829787234, "acc_norm_stderr": 0.032671518489247764 },
-    "harness|ko_mmlu_virology|5": { "acc": 0.5120481927710844, "acc_stderr": 0.03891364495835817, "acc_norm": 0.5120481927710844, "acc_norm_stderr": 0.03891364495835817 },
-    "harness|ko_mmlu_philosophy|5": { "acc": 0.6495176848874598, "acc_stderr": 0.027098652621301747, "acc_norm": 0.6495176848874598, "acc_norm_stderr": 0.027098652621301747 },
-    "harness|ko_mmlu_human_aging|5": { "acc": 0.5829596412556054, "acc_stderr": 0.03309266936071721, "acc_norm": 0.5829596412556054, "acc_norm_stderr": 0.03309266936071721 },
-    "harness|ko_mmlu_human_sexuality|5": { "acc": 0.6259541984732825, "acc_stderr": 0.04243869242230524, "acc_norm": 0.6259541984732825, "acc_norm_stderr": 0.04243869242230524 },
-    "harness|ko_mmlu_medical_genetics|5": { "acc": 0.48, "acc_stderr": 0.05021167315686779, "acc_norm": 0.48, "acc_norm_stderr": 0.05021167315686779 },
-    "harness|ko_mmlu_high_school_geography|5": { "acc": 0.7323232323232324, "acc_stderr": 0.031544498882702866, "acc_norm": 0.7323232323232324, "acc_norm_stderr": 0.031544498882702866 },
-    "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.4689655172413793, "acc_stderr": 0.04158632762097828, "acc_norm": 0.4689655172413793, "acc_norm_stderr": 0.04158632762097828 },
-    "harness|ko_mmlu_college_physics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.04690650298201943, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.04690650298201943 },
-    "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.6218487394957983, "acc_stderr": 0.031499305777849054, "acc_norm": 0.6218487394957983, "acc_norm_stderr": 0.031499305777849054 },
-    "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.5666666666666667, "acc_stderr": 0.0251246535258851, "acc_norm": 0.5666666666666667, "acc_norm_stderr": 0.0251246535258851 },
-    "harness|ko_mmlu_computer_security|5": { "acc": 0.6, "acc_stderr": 0.049236596391733084, "acc_norm": 0.6, "acc_norm_stderr": 0.049236596391733084 },
-    "harness|ko_mmlu_global_facts|5": { "acc": 0.35, "acc_stderr": 0.04793724854411019, "acc_norm": 0.35, "acc_norm_stderr": 0.04793724854411019 },
-    "harness|ko_mmlu_jurisprudence|5": { "acc": 0.6296296296296297, "acc_stderr": 0.04668408033024931, "acc_norm": 0.6296296296296297, "acc_norm_stderr": 0.04668408033024931 },
-    "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.4039408866995074, "acc_stderr": 0.0345245390382204, "acc_norm": 0.4039408866995074, "acc_norm_stderr": 0.0345245390382204 },
-    "harness|ko_mmlu_high_school_biology|5": { "acc": 0.5870967741935483, "acc_stderr": 0.02800913812540038, "acc_norm": 0.5870967741935483, "acc_norm_stderr": 0.02800913812540038 },
-    "harness|ko_mmlu_marketing|5": { "acc": 0.7606837606837606, "acc_stderr": 0.027951826808924336, "acc_norm": 0.7606837606837606, "acc_norm_stderr": 0.027951826808924336 },
-    "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.5358490566037736, "acc_stderr": 0.030693675018458003, "acc_norm": 0.5358490566037736, "acc_norm_stderr": 0.030693675018458003 },
-    "harness|ko_mmlu_public_relations|5": { "acc": 0.5727272727272728, "acc_stderr": 0.04738198703545483, "acc_norm": 0.5727272727272728, "acc_norm_stderr": 0.04738198703545483 },
-    "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.028742040903948496, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.028742040903948496 },
-    "harness|ko_mmlu_high_school_physics|5": { "acc": 0.31125827814569534, "acc_stderr": 0.03780445850526732, "acc_norm": 0.31125827814569534, "acc_norm_stderr": 0.03780445850526732 },
-    "harness|ko_mmlu_sociology|5": { "acc": 0.7114427860696517, "acc_stderr": 0.03203841040213319, "acc_norm": 0.7114427860696517, "acc_norm_stderr": 0.03203841040213319 },
-    "harness|ko_mmlu_college_medicine|5": { "acc": 0.47398843930635837, "acc_stderr": 0.03807301726504511, "acc_norm": 0.47398843930635837, "acc_norm_stderr": 0.03807301726504511 },
-    "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.43386243386243384, "acc_stderr": 0.025525034382474887, "acc_norm": 0.43386243386243384, "acc_norm_stderr": 0.025525034382474887 },
-    "harness|ko_mmlu_college_biology|5": { "acc": 0.5, "acc_stderr": 0.04181210050035455, "acc_norm": 0.5, "acc_norm_stderr": 0.04181210050035455 },
-    "harness|ko_mmlu_college_chemistry|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 },
-    "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 },
-    "harness|ko_mmlu_moral_disputes|5": { "acc": 0.5578034682080925, "acc_stderr": 0.026738603643807403, "acc_norm": 0.5578034682080925, "acc_norm_stderr": 0.026738603643807403 },
-    "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.5460122699386503, "acc_stderr": 0.0391170190467718, "acc_norm": 0.5460122699386503, "acc_norm_stderr": 0.0391170190467718 },
-    "harness|ko_mmlu_prehistory|5": { "acc": 0.6296296296296297, "acc_stderr": 0.02686949074481526, "acc_norm": 0.6296296296296297, "acc_norm_stderr": 0.02686949074481526 },
-    "harness|ko_mmlu_college_mathematics|5": { "acc": 0.33, "acc_stderr": 0.04725815626252604, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252604 },
-    "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.7150259067357513, "acc_stderr": 0.032577140777096614, "acc_norm": 0.7150259067357513, "acc_norm_stderr": 0.032577140777096614 },
-    "harness|ko_mmlu_econometrics|5": { "acc": 0.4473684210526316, "acc_stderr": 0.046774730044912, "acc_norm": 0.4473684210526316, "acc_norm_stderr": 0.046774730044912 },
-    "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.6678899082568808, "acc_stderr": 0.02019268298542334, "acc_norm": 0.6678899082568808, "acc_norm_stderr": 0.02019268298542334 },
-    "harness|ko_mmlu_formal_logic|5": { "acc": 0.3492063492063492, "acc_stderr": 0.04263906892795133, "acc_norm": 0.3492063492063492, "acc_norm_stderr": 0.04263906892795133 },
-    "harness|ko_mmlu_nutrition|5": { "acc": 0.5980392156862745, "acc_stderr": 0.028074158947600653, "acc_norm": 0.5980392156862745, "acc_norm_stderr": 0.028074158947600653 },
-    "harness|ko_mmlu_business_ethics|5": { "acc": 0.57, "acc_stderr": 0.049756985195624284, "acc_norm": 0.57, "acc_norm_stderr": 0.049756985195624284 },
-    "harness|ko_mmlu_international_law|5": { "acc": 0.7272727272727273, "acc_stderr": 0.04065578140908705, "acc_norm": 0.7272727272727273, "acc_norm_stderr": 0.04065578140908705 },
-    "harness|ko_mmlu_astronomy|5": { "acc": 0.5328947368421053, "acc_stderr": 0.040601270352363966, "acc_norm": 0.5328947368421053, "acc_norm_stderr": 0.040601270352363966 },
-    "harness|ko_mmlu_professional_psychology|5": { "acc": 0.5245098039215687, "acc_stderr": 0.020203517280261447, "acc_norm": 0.5245098039215687, "acc_norm_stderr": 0.020203517280261447 },
-    "harness|ko_mmlu_professional_accounting|5": { "acc": 0.38652482269503546, "acc_stderr": 0.02904919034254346, "acc_norm": 0.38652482269503546, "acc_norm_stderr": 0.02904919034254346 },
-    "harness|ko_mmlu_machine_learning|5": { "acc": 0.375, "acc_stderr": 0.04595091388086298, "acc_norm": 0.375, "acc_norm_stderr": 0.04595091388086298 },
-    "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.5185185185185185, "acc_stderr": 0.03407632093854054, "acc_norm": 0.5185185185185185, "acc_norm_stderr": 0.03407632093854054 },
-    "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.26145251396648045, "acc_stderr": 0.014696599650364555, "acc_norm": 0.26145251396648045, "acc_norm_stderr": 0.014696599650364555 },
-    "harness|ko_mmlu_college_computer_science|5": { "acc": 0.46, "acc_stderr": 0.05009082659620332, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620332 },
-    "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.68, "acc_stderr": 0.046882617226215034, "acc_norm": 0.68, "acc_norm_stderr": 0.046882617226215034 },
-    "harness|ko_mmlu_professional_medicine|5": { "acc": 0.5147058823529411, "acc_stderr": 0.03035969707904611, "acc_norm": 0.5147058823529411, "acc_norm_stderr": 0.03035969707904611 },
-    "harness|ko_mmlu_security_studies|5": { "acc": 0.5836734693877551, "acc_stderr": 0.03155782816556165, "acc_norm": 0.5836734693877551, "acc_norm_stderr": 0.03155782816556165 },
-    "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.7552742616033755, "acc_stderr": 0.027985699387036416, "acc_norm": 0.7552742616033755, "acc_norm_stderr": 0.027985699387036416 },
-    "harness|ko_mmlu_professional_law|5": { "acc": 0.41590612777053454, "acc_stderr": 0.012588323850313592, "acc_norm": 0.41590612777053454, "acc_norm_stderr": 0.012588323850313592 },
-    "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.6617647058823529, "acc_stderr": 0.03320574612945431, "acc_norm": 0.6617647058823529, "acc_norm_stderr": 0.03320574612945431 },
-    "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.6787878787878788, "acc_stderr": 0.03646204963253813, "acc_norm": 0.6787878787878788, "acc_norm_stderr": 0.03646204963253813 },
-    "harness|ko_truthfulqa_mc|0": { "mc1": 0.41003671970624234, "mc1_stderr": 0.017217844717449318, "mc2": 0.5715844361100709, "mc2_stderr": 0.015837361919137687 },
-    "harness|ko_commongen_v2|2": { "acc": 0.577331759149941, "acc_stderr": 0.016983506079577604, "acc_norm": 0.5855962219598583, "acc_norm_stderr": 0.016936583383943608 }
-  },
-  "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1, plus all 57 "harness|ko_mmlu_*|5" subtasks at version 1 },
-  "config_general": { "model_name": "SJ-Donald/SOLAR-10.7B-slerp", "model_sha": "d6c0e1eb5dc5c3c0f087e875b5e8d6962eb1a24e", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null }
- }
SJ-Donald/llama3-passthrough-chat/result_2024-05-17 07:48:22.json
DELETED
@@ -1,444 +0,0 @@
- {
-   "results": {
-    "harness|ko_arc_challenge|25": { "acc": 0.3643344709897611, "acc_stderr": 0.014063260279882419, "acc_norm": 0.4351535836177474, "acc_norm_stderr": 0.014487986197186045 },
-    "harness|ko_hellaswag|10": { "acc": 0.35570603465445133, "acc_stderr": 0.004777483159634026, "acc_norm": 0.4560844453296156, "acc_norm_stderr": 0.004970497804772314 },
-    "harness|ko_mmlu_world_religions|5": { "acc": 0.49122807017543857, "acc_stderr": 0.038342347441649924, "acc_norm": 0.49122807017543857, "acc_norm_stderr": 0.038342347441649924 },
-    "harness|ko_mmlu_management|5": { "acc": 0.5728155339805825, "acc_stderr": 0.04897957737781168, "acc_norm": 0.5728155339805825, "acc_norm_stderr": 0.04897957737781168 },
-    "harness|ko_mmlu_miscellaneous|5": { "acc": 0.454661558109834, "acc_stderr": 0.017806304585052606, "acc_norm": 0.454661558109834, "acc_norm_stderr": 0.017806304585052606 },
-    "harness|ko_mmlu_anatomy|5": { "acc": 0.34814814814814815, "acc_stderr": 0.041153246103369526, "acc_norm": 0.34814814814814815, "acc_norm_stderr": 0.041153246103369526 },
-    "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.33, "acc_stderr": 0.04725815626252604, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252604 },
-    "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.5063829787234042, "acc_stderr": 0.03268335899936336, "acc_norm": 0.5063829787234042, "acc_norm_stderr": 0.03268335899936336 },
-    "harness|ko_mmlu_virology|5": { "acc": 0.37349397590361444, "acc_stderr": 0.037658451171688624, "acc_norm": 0.37349397590361444, "acc_norm_stderr": 0.037658451171688624 },
-    "harness|ko_mmlu_philosophy|5": { "acc": 0.5305466237942122, "acc_stderr": 0.028345045864840636, "acc_norm": 0.5305466237942122, "acc_norm_stderr": 0.028345045864840636 },
-    "harness|ko_mmlu_human_aging|5": { "acc": 0.484304932735426, "acc_stderr": 0.0335412657542081, "acc_norm": 0.484304932735426, "acc_norm_stderr": 0.0335412657542081 },
-    "harness|ko_mmlu_human_sexuality|5": { "acc": 0.5114503816793893, "acc_stderr": 0.043841400240780176, "acc_norm": 0.5114503816793893, "acc_norm_stderr": 0.043841400240780176 },
-    "harness|ko_mmlu_medical_genetics|5": { "acc": 0.45, "acc_stderr": 0.04999999999999999, "acc_norm": 0.45, "acc_norm_stderr": 0.04999999999999999 },
-    "harness|ko_mmlu_high_school_geography|5": { "acc": 0.5151515151515151, "acc_stderr": 0.0356071651653106, "acc_norm": 0.5151515151515151, "acc_norm_stderr": 0.0356071651653106 },
-    "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.5793103448275863, "acc_stderr": 0.0411391498118926, "acc_norm": 0.5793103448275863, "acc_norm_stderr": 0.0411391498118926 },
-    "harness|ko_mmlu_college_physics|5": { "acc": 0.35294117647058826, "acc_stderr": 0.04755129616062946, "acc_norm": 0.35294117647058826, "acc_norm_stderr": 0.04755129616062946 },
-    "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.5252100840336135, "acc_stderr": 0.0324371805513741, "acc_norm": 0.5252100840336135, "acc_norm_stderr": 0.0324371805513741 },
-    "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.49230769230769234, "acc_stderr": 0.0253480060315348, "acc_norm": 0.49230769230769234, "acc_norm_stderr": 0.0253480060315348 },
-    "harness|ko_mmlu_computer_security|5": { "acc": 0.58, "acc_stderr": 0.04960449637488583, "acc_norm": 0.58, "acc_norm_stderr": 0.04960449637488583 },
-    "harness|ko_mmlu_global_facts|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 },
-    "harness|ko_mmlu_jurisprudence|5": { "acc": 0.5833333333333334, "acc_stderr": 0.04766075165356461, "acc_norm": 0.5833333333333334, "acc_norm_stderr": 0.04766075165356461 },
-    "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.458128078817734, "acc_stderr": 0.03505630140785741, "acc_norm": 0.458128078817734, "acc_norm_stderr": 0.03505630140785741 },
-    "harness|ko_mmlu_high_school_biology|5": { "acc": 0.5032258064516129, "acc_stderr": 0.02844341422643831, "acc_norm": 0.5032258064516129, "acc_norm_stderr": 0.02844341422643831 },
-    "harness|ko_mmlu_marketing|5": { "acc": 0.7606837606837606, "acc_stderr": 0.027951826808924336, "acc_norm": 0.7606837606837606, "acc_norm_stderr": 0.027951826808924336 },
-    "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.4981132075471698, "acc_stderr": 0.030772653642075664, "acc_norm": 0.4981132075471698, "acc_norm_stderr": 0.030772653642075664 },
-    "harness|ko_mmlu_public_relations|5": { "acc": 0.509090909090909, "acc_stderr": 0.04788339768702861, "acc_norm": 0.509090909090909, "acc_norm_stderr": 0.04788339768702861 },
-    "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.31851851851851853, "acc_stderr": 0.028406533090608463, "acc_norm": 0.31851851851851853, "acc_norm_stderr": 0.028406533090608463 },
-    "harness|ko_mmlu_high_school_physics|5": { "acc": 0.2980132450331126, "acc_stderr": 0.037345356767871984, "acc_norm": 0.2980132450331126, "acc_norm_stderr": 0.037345356767871984 },
-    "harness|ko_mmlu_sociology|5": { "acc": 0.6467661691542289, "acc_stderr": 0.03379790611796777, "acc_norm": 0.6467661691542289, "acc_norm_stderr": 0.03379790611796777 },
-    "harness|ko_mmlu_college_medicine|5": { "acc": 0.4277456647398844, "acc_stderr": 0.03772446857518027, "acc_norm": 0.4277456647398844, "acc_norm_stderr": 0.03772446857518027 },
-    "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.3783068783068783, "acc_stderr": 0.024976954053155254, "acc_norm": 0.3783068783068783, "acc_norm_stderr": 0.024976954053155254 },
-    "harness|ko_mmlu_college_biology|5": { "acc": 0.4305555555555556, "acc_stderr": 0.041406856391115014, "acc_norm": 0.4305555555555556, "acc_norm_stderr": 0.041406856391115014 },
-    "harness|ko_mmlu_college_chemistry|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 },
-    "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.68, "acc_stderr": 0.04688261722621504, "acc_norm": 0.68, "acc_norm_stderr": 0.04688261722621504 },
-    "harness|ko_mmlu_moral_disputes|5": { "acc": 0.5375722543352601, "acc_stderr": 0.026842985519615375, "acc_norm": 0.5375722543352601, "acc_norm_stderr": 0.026842985519615375 },
-    "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.4294478527607362, "acc_stderr": 0.038890666191127216, "acc_norm": 0.4294478527607362, "acc_norm_stderr": 0.038890666191127216 },
-    "harness|ko_mmlu_prehistory|5": { "acc": 0.4845679012345679, "acc_stderr": 0.027807490044276198, "acc_norm": 0.4845679012345679, "acc_norm_stderr": 0.027807490044276198 },
-    "harness|ko_mmlu_college_mathematics|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 },
-    "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.5492227979274611, "acc_stderr": 0.03590910952235524, "acc_norm": 0.5492227979274611,
|
235 |
-
"acc_norm_stderr": 0.03590910952235524
|
236 |
-
},
|
237 |
-
"harness|ko_mmlu_econometrics|5": {
|
238 |
-
"acc": 0.35964912280701755,
|
239 |
-
"acc_stderr": 0.04514496132873633,
|
240 |
-
"acc_norm": 0.35964912280701755,
|
241 |
-
"acc_norm_stderr": 0.04514496132873633
|
242 |
-
},
|
243 |
-
"harness|ko_mmlu_high_school_psychology|5": {
|
244 |
-
"acc": 0.5577981651376147,
|
245 |
-
"acc_stderr": 0.0212936132075202,
|
246 |
-
"acc_norm": 0.5577981651376147,
|
247 |
-
"acc_norm_stderr": 0.0212936132075202
|
248 |
-
},
|
249 |
-
"harness|ko_mmlu_formal_logic|5": {
|
250 |
-
"acc": 0.40476190476190477,
|
251 |
-
"acc_stderr": 0.043902592653775614,
|
252 |
-
"acc_norm": 0.40476190476190477,
|
253 |
-
"acc_norm_stderr": 0.043902592653775614
|
254 |
-
},
|
255 |
-
"harness|ko_mmlu_nutrition|5": {
|
256 |
-
"acc": 0.5,
|
257 |
-
"acc_stderr": 0.028629916715693413,
|
258 |
-
"acc_norm": 0.5,
|
259 |
-
"acc_norm_stderr": 0.028629916715693413
|
260 |
-
},
|
261 |
-
"harness|ko_mmlu_business_ethics|5": {
|
262 |
-
"acc": 0.57,
|
263 |
-
"acc_stderr": 0.049756985195624284,
|
264 |
-
"acc_norm": 0.57,
|
265 |
-
"acc_norm_stderr": 0.049756985195624284
|
266 |
-
},
|
267 |
-
"harness|ko_mmlu_international_law|5": {
|
268 |
-
"acc": 0.6859504132231405,
|
269 |
-
"acc_stderr": 0.04236964753041018,
|
270 |
-
"acc_norm": 0.6859504132231405,
|
271 |
-
"acc_norm_stderr": 0.04236964753041018
|
272 |
-
},
|
273 |
-
"harness|ko_mmlu_astronomy|5": {
|
274 |
-
"acc": 0.4342105263157895,
|
275 |
-
"acc_stderr": 0.040335656678483184,
|
276 |
-
"acc_norm": 0.4342105263157895,
|
277 |
-
"acc_norm_stderr": 0.040335656678483184
|
278 |
-
},
|
279 |
-
"harness|ko_mmlu_professional_psychology|5": {
|
280 |
-
"acc": 0.434640522875817,
|
281 |
-
"acc_stderr": 0.020054269200726452,
|
282 |
-
"acc_norm": 0.434640522875817,
|
283 |
-
"acc_norm_stderr": 0.020054269200726452
|
284 |
-
},
|
285 |
-
"harness|ko_mmlu_professional_accounting|5": {
|
286 |
-
"acc": 0.3617021276595745,
|
287 |
-
"acc_stderr": 0.028663820147199492,
|
288 |
-
"acc_norm": 0.3617021276595745,
|
289 |
-
"acc_norm_stderr": 0.028663820147199492
|
290 |
-
},
|
291 |
-
"harness|ko_mmlu_machine_learning|5": {
|
292 |
-
"acc": 0.4375,
|
293 |
-
"acc_stderr": 0.04708567521880525,
|
294 |
-
"acc_norm": 0.4375,
|
295 |
-
"acc_norm_stderr": 0.04708567521880525
|
296 |
-
},
|
297 |
-
"harness|ko_mmlu_high_school_statistics|5": {
|
298 |
-
"acc": 0.39351851851851855,
|
299 |
-
"acc_stderr": 0.03331747876370312,
|
300 |
-
"acc_norm": 0.39351851851851855,
|
301 |
-
"acc_norm_stderr": 0.03331747876370312
|
302 |
-
},
|
303 |
-
"harness|ko_mmlu_moral_scenarios|5": {
|
304 |
-
"acc": 0.311731843575419,
|
305 |
-
"acc_stderr": 0.015491756531894637,
|
306 |
-
"acc_norm": 0.311731843575419,
|
307 |
-
"acc_norm_stderr": 0.015491756531894637
|
308 |
-
},
|
309 |
-
"harness|ko_mmlu_college_computer_science|5": {
|
310 |
-
"acc": 0.44,
|
311 |
-
"acc_stderr": 0.0498887651569859,
|
312 |
-
"acc_norm": 0.44,
|
313 |
-
"acc_norm_stderr": 0.0498887651569859
|
314 |
-
},
|
315 |
-
"harness|ko_mmlu_high_school_computer_science|5": {
|
316 |
-
"acc": 0.67,
|
317 |
-
"acc_stderr": 0.047258156262526094,
|
318 |
-
"acc_norm": 0.67,
|
319 |
-
"acc_norm_stderr": 0.047258156262526094
|
320 |
-
},
|
321 |
-
"harness|ko_mmlu_professional_medicine|5": {
|
322 |
-
"acc": 0.3786764705882353,
|
323 |
-
"acc_stderr": 0.02946513363977613,
|
324 |
-
"acc_norm": 0.3786764705882353,
|
325 |
-
"acc_norm_stderr": 0.02946513363977613
|
326 |
-
},
|
327 |
-
"harness|ko_mmlu_security_studies|5": {
|
328 |
-
"acc": 0.6408163265306123,
|
329 |
-
"acc_stderr": 0.03071356045510849,
|
330 |
-
"acc_norm": 0.6408163265306123,
|
331 |
-
"acc_norm_stderr": 0.03071356045510849
|
332 |
-
},
|
333 |
-
"harness|ko_mmlu_high_school_world_history|5": {
|
334 |
-
"acc": 0.6371308016877637,
|
335 |
-
"acc_stderr": 0.03129920825530213,
|
336 |
-
"acc_norm": 0.6371308016877637,
|
337 |
-
"acc_norm_stderr": 0.03129920825530213
|
338 |
-
},
|
339 |
-
"harness|ko_mmlu_professional_law|5": {
|
340 |
-
"acc": 0.3617992177314211,
|
341 |
-
"acc_stderr": 0.012272736233262943,
|
342 |
-
"acc_norm": 0.3617992177314211,
|
343 |
-
"acc_norm_stderr": 0.012272736233262943
|
344 |
-
},
|
345 |
-
"harness|ko_mmlu_high_school_us_history|5": {
|
346 |
-
"acc": 0.5343137254901961,
|
347 |
-
"acc_stderr": 0.03501038327635897,
|
348 |
-
"acc_norm": 0.5343137254901961,
|
349 |
-
"acc_norm_stderr": 0.03501038327635897
|
350 |
-
},
|
351 |
-
"harness|ko_mmlu_high_school_european_history|5": {
|
352 |
-
"acc": 0.6303030303030303,
|
353 |
-
"acc_stderr": 0.03769430314512568,
|
354 |
-
"acc_norm": 0.6303030303030303,
|
355 |
-
"acc_norm_stderr": 0.03769430314512568
|
356 |
-
},
|
357 |
-
"harness|ko_truthfulqa_mc|0": {
|
358 |
-
"mc1": 0.32313341493268055,
|
359 |
-
"mc1_stderr": 0.016371836286454607,
|
360 |
-
"mc2": 0.5049311098779597,
|
361 |
-
"mc2_stderr": 0.016074222030752545
|
362 |
-
},
|
363 |
-
"harness|ko_commongen_v2|2": {
|
364 |
-
"acc": 0.4639905548996458,
|
365 |
-
"acc_stderr": 0.017145715365486664,
|
366 |
-
"acc_norm": 0.5194805194805194,
|
367 |
-
"acc_norm_stderr": 0.017177301992342544
|
368 |
-
}
|
369 |
-
},
|
370 |
-
"versions": {
|
371 |
-
"all": 0,
|
372 |
-
"harness|ko_arc_challenge|25": 0,
|
373 |
-
"harness|ko_hellaswag|10": 0,
|
374 |
-
"harness|ko_mmlu_world_religions|5": 1,
|
375 |
-
"harness|ko_mmlu_management|5": 1,
|
376 |
-
"harness|ko_mmlu_miscellaneous|5": 1,
|
377 |
-
"harness|ko_mmlu_anatomy|5": 1,
|
378 |
-
"harness|ko_mmlu_abstract_algebra|5": 1,
|
379 |
-
"harness|ko_mmlu_conceptual_physics|5": 1,
|
380 |
-
"harness|ko_mmlu_virology|5": 1,
|
381 |
-
"harness|ko_mmlu_philosophy|5": 1,
|
382 |
-
"harness|ko_mmlu_human_aging|5": 1,
|
383 |
-
"harness|ko_mmlu_human_sexuality|5": 1,
|
384 |
-
"harness|ko_mmlu_medical_genetics|5": 1,
|
385 |
-
"harness|ko_mmlu_high_school_geography|5": 1,
|
386 |
-
"harness|ko_mmlu_electrical_engineering|5": 1,
|
387 |
-
"harness|ko_mmlu_college_physics|5": 1,
|
388 |
-
"harness|ko_mmlu_high_school_microeconomics|5": 1,
|
389 |
-
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
|
390 |
-
"harness|ko_mmlu_computer_security|5": 1,
|
391 |
-
"harness|ko_mmlu_global_facts|5": 1,
|
392 |
-
"harness|ko_mmlu_jurisprudence|5": 1,
|
393 |
-
"harness|ko_mmlu_high_school_chemistry|5": 1,
|
394 |
-
"harness|ko_mmlu_high_school_biology|5": 1,
|
395 |
-
"harness|ko_mmlu_marketing|5": 1,
|
396 |
-
"harness|ko_mmlu_clinical_knowledge|5": 1,
|
397 |
-
"harness|ko_mmlu_public_relations|5": 1,
|
398 |
-
"harness|ko_mmlu_high_school_mathematics|5": 1,
|
399 |
-
"harness|ko_mmlu_high_school_physics|5": 1,
|
400 |
-
"harness|ko_mmlu_sociology|5": 1,
|
401 |
-
"harness|ko_mmlu_college_medicine|5": 1,
|
402 |
-
"harness|ko_mmlu_elementary_mathematics|5": 1,
|
403 |
-
"harness|ko_mmlu_college_biology|5": 1,
|
404 |
-
"harness|ko_mmlu_college_chemistry|5": 1,
|
405 |
-
"harness|ko_mmlu_us_foreign_policy|5": 1,
|
406 |
-
"harness|ko_mmlu_moral_disputes|5": 1,
|
407 |
-
"harness|ko_mmlu_logical_fallacies|5": 1,
|
408 |
-
"harness|ko_mmlu_prehistory|5": 1,
|
409 |
-
"harness|ko_mmlu_college_mathematics|5": 1,
|
410 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
|
411 |
-
"harness|ko_mmlu_econometrics|5": 1,
|
412 |
-
"harness|ko_mmlu_high_school_psychology|5": 1,
|
413 |
-
"harness|ko_mmlu_formal_logic|5": 1,
|
414 |
-
"harness|ko_mmlu_nutrition|5": 1,
|
415 |
-
"harness|ko_mmlu_business_ethics|5": 1,
|
416 |
-
"harness|ko_mmlu_international_law|5": 1,
|
417 |
-
"harness|ko_mmlu_astronomy|5": 1,
|
418 |
-
"harness|ko_mmlu_professional_psychology|5": 1,
|
419 |
-
"harness|ko_mmlu_professional_accounting|5": 1,
|
420 |
-
"harness|ko_mmlu_machine_learning|5": 1,
|
421 |
-
"harness|ko_mmlu_high_school_statistics|5": 1,
|
422 |
-
"harness|ko_mmlu_moral_scenarios|5": 1,
|
423 |
-
"harness|ko_mmlu_college_computer_science|5": 1,
|
424 |
-
"harness|ko_mmlu_high_school_computer_science|5": 1,
|
425 |
-
"harness|ko_mmlu_professional_medicine|5": 1,
|
426 |
-
"harness|ko_mmlu_security_studies|5": 1,
|
427 |
-
"harness|ko_mmlu_high_school_world_history|5": 1,
|
428 |
-
"harness|ko_mmlu_professional_law|5": 1,
|
429 |
-
"harness|ko_mmlu_high_school_us_history|5": 1,
|
430 |
-
"harness|ko_mmlu_high_school_european_history|5": 1,
|
431 |
-
"harness|ko_truthfulqa_mc|0": 0,
|
432 |
-
"harness|ko_commongen_v2|2": 1
|
433 |
-
},
|
434 |
-
"config_general": {
|
435 |
-
"model_name": "SJ-Donald/llama3-passthrough-chat",
|
436 |
-
"model_sha": "ac11fd8473e7e057c7b1ec8abc30e201867be6ec",
|
437 |
-
"model_dtype": "torch.float16",
|
438 |
-
"lighteval_sha": "",
|
439 |
-
"num_few_shot_default": 0,
|
440 |
-
"num_fewshot_seeds": 1,
|
441 |
-
"override_batch_size": 1,
|
442 |
-
"max_samples": null
|
443 |
-
}
|
444 |
-
}
|
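Every file removed in this commit shares the same result schema: a "results" map keyed by harness|<task>|<num_fewshot>, a "versions" map, and a "config_general" block. As a minimal sketch of reading one of these files (the local filename below is a hypothetical stand-in, not part of this commit), the ko_mmlu subtask accuracies can be macro-averaged like so:

import json

# Hypothetical local copy of one removed result file (path is illustrative).
with open("result.json") as f:
    data = json.load(f)

# Entries in "results" are keyed "harness|<task>|<num_fewshot>".
mmlu_accs = [
    scores["acc"]
    for task, scores in data["results"].items()
    if task.startswith("harness|ko_mmlu_")
]

model = data["config_general"]["model_name"]
print(f"{model}: mean ko_mmlu acc = {sum(mmlu_accs) / len(mmlu_accs):.4f} "
      f"({len(mmlu_accs)} subtasks)")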
SJ-Donald/llama3-passthrough/result_2024-05-16 12:56:07.json
DELETED
@@ -1,444 +0,0 @@
-{
-    "results": {
-        "harness|ko_arc_challenge|25": {
-            "acc": 0.3532423208191126,
-            "acc_stderr": 0.013967822714840055,
-            "acc_norm": 0.4129692832764505,
-            "acc_norm_stderr": 0.014388344935398324
-        },
-        "harness|ko_hellaswag|10": {
-            "acc": 0.37821151165106554,
-            "acc_stderr": 0.004839497020536609,
-            "acc_norm": 0.5052778331009758,
-            "acc_norm_stderr": 0.004989503417767287
-        },
-        "harness|ko_mmlu_world_religions|5": {
-            "acc": 0.6549707602339181,
-            "acc_stderr": 0.036459813773888065,
-            "acc_norm": 0.6549707602339181,
-            "acc_norm_stderr": 0.036459813773888065
-        },
-        "harness|ko_mmlu_management|5": {
-            "acc": 0.7087378640776699,
-            "acc_stderr": 0.044986763205729224,
-            "acc_norm": 0.7087378640776699,
-            "acc_norm_stderr": 0.044986763205729224
-        },
-        "harness|ko_mmlu_miscellaneous|5": {
-            "acc": 0.561941251596424,
-            "acc_stderr": 0.017742232238257244,
-            "acc_norm": 0.561941251596424,
-            "acc_norm_stderr": 0.017742232238257244
-        },
-        "harness|ko_mmlu_anatomy|5": {
-            "acc": 0.37777777777777777,
-            "acc_stderr": 0.04188307537595853,
-            "acc_norm": 0.37777777777777777,
-            "acc_norm_stderr": 0.04188307537595853
-        },
-        "harness|ko_mmlu_abstract_algebra|5": {
-            "acc": 0.33,
-            "acc_stderr": 0.04725815626252603,
-            "acc_norm": 0.33,
-            "acc_norm_stderr": 0.04725815626252603
-        },
-        "harness|ko_mmlu_conceptual_physics|5": {
-            "acc": 0.49361702127659574,
-            "acc_stderr": 0.03268335899936337,
-            "acc_norm": 0.49361702127659574,
-            "acc_norm_stderr": 0.03268335899936337
-        },
-        "harness|ko_mmlu_virology|5": {
-            "acc": 0.4397590361445783,
-            "acc_stderr": 0.03864139923699121,
-            "acc_norm": 0.4397590361445783,
-            "acc_norm_stderr": 0.03864139923699121
-        },
-        "harness|ko_mmlu_philosophy|5": {
-            "acc": 0.5048231511254019,
-            "acc_stderr": 0.028396770444111298,
-            "acc_norm": 0.5048231511254019,
-            "acc_norm_stderr": 0.028396770444111298
-        },
-        "harness|ko_mmlu_human_aging|5": {
-            "acc": 0.5381165919282511,
-            "acc_stderr": 0.03346015011973228,
-            "acc_norm": 0.5381165919282511,
-            "acc_norm_stderr": 0.03346015011973228
-        },
-        "harness|ko_mmlu_human_sexuality|5": {
-            "acc": 0.5343511450381679,
-            "acc_stderr": 0.043749285605997376,
-            "acc_norm": 0.5343511450381679,
-            "acc_norm_stderr": 0.043749285605997376
-        },
-        "harness|ko_mmlu_medical_genetics|5": {
-            "acc": 0.44,
-            "acc_stderr": 0.049888765156985905,
-            "acc_norm": 0.44,
-            "acc_norm_stderr": 0.049888765156985905
-        },
-        "harness|ko_mmlu_high_school_geography|5": {
-            "acc": 0.5454545454545454,
-            "acc_stderr": 0.0354760149400694,
-            "acc_norm": 0.5454545454545454,
-            "acc_norm_stderr": 0.0354760149400694
-        },
-        "harness|ko_mmlu_electrical_engineering|5": {
-            "acc": 0.5103448275862069,
-            "acc_stderr": 0.04165774775728762,
-            "acc_norm": 0.5103448275862069,
-            "acc_norm_stderr": 0.04165774775728762
-        },
-        "harness|ko_mmlu_college_physics|5": {
-            "acc": 0.27450980392156865,
-            "acc_stderr": 0.044405219061793275,
-            "acc_norm": 0.27450980392156865,
-            "acc_norm_stderr": 0.044405219061793275
-        },
-        "harness|ko_mmlu_high_school_microeconomics|5": {
-            "acc": 0.5882352941176471,
-            "acc_stderr": 0.03196876989195779,
-            "acc_norm": 0.5882352941176471,
-            "acc_norm_stderr": 0.03196876989195779
-        },
-        "harness|ko_mmlu_high_school_macroeconomics|5": {
-            "acc": 0.5076923076923077,
-            "acc_stderr": 0.025348006031534757,
-            "acc_norm": 0.5076923076923077,
-            "acc_norm_stderr": 0.025348006031534757
-        },
-        "harness|ko_mmlu_computer_security|5": {
-            "acc": 0.62,
-            "acc_stderr": 0.04878317312145632,
-            "acc_norm": 0.62,
-            "acc_norm_stderr": 0.04878317312145632
-        },
-        "harness|ko_mmlu_global_facts|5": {
-            "acc": 0.31,
-            "acc_stderr": 0.04648231987117316,
-            "acc_norm": 0.31,
-            "acc_norm_stderr": 0.04648231987117316
-        },
-        "harness|ko_mmlu_jurisprudence|5": {
-            "acc": 0.6296296296296297,
-            "acc_stderr": 0.04668408033024931,
-            "acc_norm": 0.6296296296296297,
-            "acc_norm_stderr": 0.04668408033024931
-        },
-        "harness|ko_mmlu_high_school_chemistry|5": {
-            "acc": 0.3891625615763547,
-            "acc_stderr": 0.034304624161038716,
-            "acc_norm": 0.3891625615763547,
-            "acc_norm_stderr": 0.034304624161038716
-        },
-        "harness|ko_mmlu_high_school_biology|5": {
-            "acc": 0.5258064516129032,
-            "acc_stderr": 0.028406095057653326,
-            "acc_norm": 0.5258064516129032,
-            "acc_norm_stderr": 0.028406095057653326
-        },
-        "harness|ko_mmlu_marketing|5": {
-            "acc": 0.782051282051282,
-            "acc_stderr": 0.027046857630716667,
-            "acc_norm": 0.782051282051282,
-            "acc_norm_stderr": 0.027046857630716667
-        },
-        "harness|ko_mmlu_clinical_knowledge|5": {
-            "acc": 0.4830188679245283,
-            "acc_stderr": 0.030755120364119898,
-            "acc_norm": 0.4830188679245283,
-            "acc_norm_stderr": 0.030755120364119898
-        },
-        "harness|ko_mmlu_public_relations|5": {
-            "acc": 0.5363636363636364,
-            "acc_stderr": 0.04776449162396197,
-            "acc_norm": 0.5363636363636364,
-            "acc_norm_stderr": 0.04776449162396197
-        },
-        "harness|ko_mmlu_high_school_mathematics|5": {
-            "acc": 0.36666666666666664,
-            "acc_stderr": 0.029381620726465076,
-            "acc_norm": 0.36666666666666664,
-            "acc_norm_stderr": 0.029381620726465076
-        },
-        "harness|ko_mmlu_high_school_physics|5": {
-            "acc": 0.33774834437086093,
-            "acc_stderr": 0.038615575462551684,
-            "acc_norm": 0.33774834437086093,
-            "acc_norm_stderr": 0.038615575462551684
-        },
-        "harness|ko_mmlu_sociology|5": {
-            "acc": 0.6467661691542289,
-            "acc_stderr": 0.03379790611796777,
-            "acc_norm": 0.6467661691542289,
-            "acc_norm_stderr": 0.03379790611796777
-        },
-        "harness|ko_mmlu_college_medicine|5": {
-            "acc": 0.44508670520231214,
-            "acc_stderr": 0.03789401760283648,
-            "acc_norm": 0.44508670520231214,
-            "acc_norm_stderr": 0.03789401760283648
-        },
-        "harness|ko_mmlu_elementary_mathematics|5": {
-            "acc": 0.3492063492063492,
-            "acc_stderr": 0.024552292209342658,
-            "acc_norm": 0.3492063492063492,
-            "acc_norm_stderr": 0.024552292209342658
-        },
-        "harness|ko_mmlu_college_biology|5": {
-            "acc": 0.4930555555555556,
-            "acc_stderr": 0.04180806750294938,
-            "acc_norm": 0.4930555555555556,
-            "acc_norm_stderr": 0.04180806750294938
-        },
-        "harness|ko_mmlu_college_chemistry|5": {
-            "acc": 0.43,
-            "acc_stderr": 0.049756985195624284,
-            "acc_norm": 0.43,
-            "acc_norm_stderr": 0.049756985195624284
-        },
-        "harness|ko_mmlu_us_foreign_policy|5": {
-            "acc": 0.7,
-            "acc_stderr": 0.046056618647183814,
-            "acc_norm": 0.7,
-            "acc_norm_stderr": 0.046056618647183814
-        },
-        "harness|ko_mmlu_moral_disputes|5": {
-            "acc": 0.523121387283237,
-            "acc_stderr": 0.026890297881303118,
-            "acc_norm": 0.523121387283237,
-            "acc_norm_stderr": 0.026890297881303118
-        },
-        "harness|ko_mmlu_logical_fallacies|5": {
-            "acc": 0.49079754601226994,
-            "acc_stderr": 0.039277056007874414,
-            "acc_norm": 0.49079754601226994,
-            "acc_norm_stderr": 0.039277056007874414
-        },
-        "harness|ko_mmlu_prehistory|5": {
-            "acc": 0.49074074074074076,
-            "acc_stderr": 0.027815973433878014,
-            "acc_norm": 0.49074074074074076,
-            "acc_norm_stderr": 0.027815973433878014
-        },
-        "harness|ko_mmlu_college_mathematics|5": {
-            "acc": 0.36,
-            "acc_stderr": 0.04824181513244218,
-            "acc_norm": 0.36,
-            "acc_norm_stderr": 0.04824181513244218
-        },
-        "harness|ko_mmlu_high_school_government_and_politics|5": {
-            "acc": 0.5906735751295337,
-            "acc_stderr": 0.03548608168860806,
-            "acc_norm": 0.5906735751295337,
-            "acc_norm_stderr": 0.03548608168860806
-        },
-        "harness|ko_mmlu_econometrics|5": {
-            "acc": 0.3684210526315789,
-            "acc_stderr": 0.04537815354939391,
-            "acc_norm": 0.3684210526315789,
-            "acc_norm_stderr": 0.04537815354939391
-        },
-        "harness|ko_mmlu_high_school_psychology|5": {
-            "acc": 0.5743119266055046,
-            "acc_stderr": 0.021199235972470802,
-            "acc_norm": 0.5743119266055046,
-            "acc_norm_stderr": 0.021199235972470802
-        },
-        "harness|ko_mmlu_formal_logic|5": {
-            "acc": 0.4126984126984127,
-            "acc_stderr": 0.04403438954768177,
-            "acc_norm": 0.4126984126984127,
-            "acc_norm_stderr": 0.04403438954768177
-        },
-        "harness|ko_mmlu_nutrition|5": {
-            "acc": 0.5294117647058824,
-            "acc_stderr": 0.028580341065138296,
-            "acc_norm": 0.5294117647058824,
-            "acc_norm_stderr": 0.028580341065138296
-        },
-        "harness|ko_mmlu_business_ethics|5": {
-            "acc": 0.54,
-            "acc_stderr": 0.05009082659620332,
-            "acc_norm": 0.54,
-            "acc_norm_stderr": 0.05009082659620332
-        },
-        "harness|ko_mmlu_international_law|5": {
-            "acc": 0.6859504132231405,
-            "acc_stderr": 0.042369647530410184,
-            "acc_norm": 0.6859504132231405,
-            "acc_norm_stderr": 0.042369647530410184
-        },
-        "harness|ko_mmlu_astronomy|5": {
-            "acc": 0.5394736842105263,
-            "acc_stderr": 0.04056242252249036,
-            "acc_norm": 0.5394736842105263,
-            "acc_norm_stderr": 0.04056242252249036
-        },
-        "harness|ko_mmlu_professional_psychology|5": {
-            "acc": 0.43790849673202614,
-            "acc_stderr": 0.020071257886886525,
-            "acc_norm": 0.43790849673202614,
-            "acc_norm_stderr": 0.020071257886886525
-        },
-        "harness|ko_mmlu_professional_accounting|5": {
-            "acc": 0.3191489361702128,
-            "acc_stderr": 0.027807990141320196,
-            "acc_norm": 0.3191489361702128,
-            "acc_norm_stderr": 0.027807990141320196
-        },
-        "harness|ko_mmlu_machine_learning|5": {
-            "acc": 0.5267857142857143,
-            "acc_stderr": 0.047389751192741546,
-            "acc_norm": 0.5267857142857143,
-            "acc_norm_stderr": 0.047389751192741546
-        },
-        "harness|ko_mmlu_high_school_statistics|5": {
-            "acc": 0.5138888888888888,
-            "acc_stderr": 0.03408655867977749,
-            "acc_norm": 0.5138888888888888,
-            "acc_norm_stderr": 0.03408655867977749
-        },
-        "harness|ko_mmlu_moral_scenarios|5": {
-            "acc": 0.32513966480446926,
-            "acc_stderr": 0.015666542785053562,
-            "acc_norm": 0.32513966480446926,
-            "acc_norm_stderr": 0.015666542785053562
-        },
-        "harness|ko_mmlu_college_computer_science|5": {
-            "acc": 0.41,
-            "acc_stderr": 0.049431107042371025,
-            "acc_norm": 0.41,
-            "acc_norm_stderr": 0.049431107042371025
-        },
-        "harness|ko_mmlu_high_school_computer_science|5": {
-            "acc": 0.65,
-            "acc_stderr": 0.0479372485441102,
-            "acc_norm": 0.65,
-            "acc_norm_stderr": 0.0479372485441102
-        },
-        "harness|ko_mmlu_professional_medicine|5": {
-            "acc": 0.39705882352941174,
-            "acc_stderr": 0.029722152099280058,
-            "acc_norm": 0.39705882352941174,
-            "acc_norm_stderr": 0.029722152099280058
-        },
-        "harness|ko_mmlu_security_studies|5": {
-            "acc": 0.5918367346938775,
-            "acc_stderr": 0.03146465712827424,
-            "acc_norm": 0.5918367346938775,
-            "acc_norm_stderr": 0.03146465712827424
-        },
-        "harness|ko_mmlu_high_school_world_history|5": {
-            "acc": 0.6497890295358649,
-            "acc_stderr": 0.03105239193758435,
-            "acc_norm": 0.6497890295358649,
-            "acc_norm_stderr": 0.03105239193758435
-        },
-        "harness|ko_mmlu_professional_law|5": {
-            "acc": 0.35984354628422427,
-            "acc_stderr": 0.012258260483689803,
-            "acc_norm": 0.35984354628422427,
-            "acc_norm_stderr": 0.012258260483689803
-        },
-        "harness|ko_mmlu_high_school_us_history|5": {
-            "acc": 0.5637254901960784,
-            "acc_stderr": 0.03480693138457039,
-            "acc_norm": 0.5637254901960784,
-            "acc_norm_stderr": 0.03480693138457039
-        },
-        "harness|ko_mmlu_high_school_european_history|5": {
-            "acc": 0.5878787878787879,
-            "acc_stderr": 0.038435669935887186,
-            "acc_norm": 0.5878787878787879,
-            "acc_norm_stderr": 0.038435669935887186
-        },
-        "harness|ko_truthfulqa_mc|0": {
-            "mc1": 0.26560587515299877,
-            "mc1_stderr": 0.0154610276272536,
-            "mc2": 0.436366604752637,
-            "mc2_stderr": 0.01527303310492667
-        },
-        "harness|ko_commongen_v2|2": {
-            "acc": 0.4628099173553719,
-            "acc_stderr": 0.0171427361176433,
-            "acc_norm": 0.58913813459268,
-            "acc_norm_stderr": 0.01691497276784106
-        }
-    },
-    "versions": {
-        "all": 0,
-        "harness|ko_arc_challenge|25": 0,
-        "harness|ko_hellaswag|10": 0,
-        "harness|ko_mmlu_world_religions|5": 1,
-        "harness|ko_mmlu_management|5": 1,
-        "harness|ko_mmlu_miscellaneous|5": 1,
-        "harness|ko_mmlu_anatomy|5": 1,
-        "harness|ko_mmlu_abstract_algebra|5": 1,
-        "harness|ko_mmlu_conceptual_physics|5": 1,
-        "harness|ko_mmlu_virology|5": 1,
-        "harness|ko_mmlu_philosophy|5": 1,
-        "harness|ko_mmlu_human_aging|5": 1,
-        "harness|ko_mmlu_human_sexuality|5": 1,
-        "harness|ko_mmlu_medical_genetics|5": 1,
-        "harness|ko_mmlu_high_school_geography|5": 1,
-        "harness|ko_mmlu_electrical_engineering|5": 1,
-        "harness|ko_mmlu_college_physics|5": 1,
-        "harness|ko_mmlu_high_school_microeconomics|5": 1,
-        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
-        "harness|ko_mmlu_computer_security|5": 1,
-        "harness|ko_mmlu_global_facts|5": 1,
-        "harness|ko_mmlu_jurisprudence|5": 1,
-        "harness|ko_mmlu_high_school_chemistry|5": 1,
-        "harness|ko_mmlu_high_school_biology|5": 1,
-        "harness|ko_mmlu_marketing|5": 1,
-        "harness|ko_mmlu_clinical_knowledge|5": 1,
-        "harness|ko_mmlu_public_relations|5": 1,
-        "harness|ko_mmlu_high_school_mathematics|5": 1,
-        "harness|ko_mmlu_high_school_physics|5": 1,
-        "harness|ko_mmlu_sociology|5": 1,
-        "harness|ko_mmlu_college_medicine|5": 1,
-        "harness|ko_mmlu_elementary_mathematics|5": 1,
-        "harness|ko_mmlu_college_biology|5": 1,
-        "harness|ko_mmlu_college_chemistry|5": 1,
-        "harness|ko_mmlu_us_foreign_policy|5": 1,
-        "harness|ko_mmlu_moral_disputes|5": 1,
-        "harness|ko_mmlu_logical_fallacies|5": 1,
-        "harness|ko_mmlu_prehistory|5": 1,
-        "harness|ko_mmlu_college_mathematics|5": 1,
-        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
-        "harness|ko_mmlu_econometrics|5": 1,
-        "harness|ko_mmlu_high_school_psychology|5": 1,
-        "harness|ko_mmlu_formal_logic|5": 1,
-        "harness|ko_mmlu_nutrition|5": 1,
-        "harness|ko_mmlu_business_ethics|5": 1,
-        "harness|ko_mmlu_international_law|5": 1,
-        "harness|ko_mmlu_astronomy|5": 1,
-        "harness|ko_mmlu_professional_psychology|5": 1,
-        "harness|ko_mmlu_professional_accounting|5": 1,
-        "harness|ko_mmlu_machine_learning|5": 1,
-        "harness|ko_mmlu_high_school_statistics|5": 1,
-        "harness|ko_mmlu_moral_scenarios|5": 1,
-        "harness|ko_mmlu_college_computer_science|5": 1,
-        "harness|ko_mmlu_high_school_computer_science|5": 1,
-        "harness|ko_mmlu_professional_medicine|5": 1,
-        "harness|ko_mmlu_security_studies|5": 1,
-        "harness|ko_mmlu_high_school_world_history|5": 1,
-        "harness|ko_mmlu_professional_law|5": 1,
-        "harness|ko_mmlu_high_school_us_history|5": 1,
-        "harness|ko_mmlu_high_school_european_history|5": 1,
-        "harness|ko_truthfulqa_mc|0": 0,
-        "harness|ko_commongen_v2|2": 1
-    },
-    "config_general": {
-        "model_name": "SJ-Donald/llama3-passthrough",
-        "model_sha": "ecd852966d5bc748196022aa1991d315844aa57e",
-        "model_dtype": "torch.float16",
-        "lighteval_sha": "",
-        "num_few_shot_default": 0,
-        "num_fewshot_seeds": 1,
-        "override_batch_size": 1,
-        "max_samples": null
-    }
-}
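Because the schema is identical across runs, comparing two of the removed files reduces to a join on the task key. A sketch under the same assumption of local copies (the filenames below are placeholders, not files in this repository):

import json

def load_results(path):
    # Returns the per-task score dict from one result file.
    with open(path) as f:
        return json.load(f)["results"]

base = load_results("llama3-passthrough.json")       # placeholder filename
chat = load_results("llama3-passthrough-chat.json")  # placeholder filename

# Per-task accuracy delta; ko_truthfulqa_mc reports mc1/mc2 instead of acc,
# so the "acc" membership check below skips it.
deltas = {
    task: chat[task]["acc"] - base[task]["acc"]
    for task in base.keys() & chat.keys()
    if "acc" in base[task] and "acc" in chat[task]
}

for task, delta in sorted(deltas.items(), key=lambda kv: kv[1]):
    print(f"{delta:+.4f}  {task}")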
SKYEEEE/llama3-Ko-3-8B-finetuned_ver2/result_2024-07-14 03:16:04.json
DELETED
@@ -1,444 +0,0 @@
-{
-    "results": {
-        "harness|ko_arc_challenge|25": {
-            "acc": 0.3464163822525597,
-            "acc_stderr": 0.013905011180063254,
-            "acc_norm": 0.38054607508532423,
-            "acc_norm_stderr": 0.014188277712349828
-        },
-        "harness|ko_hellaswag|10": {
-            "acc": 0.350726946823342,
-            "acc_stderr": 0.004762223492435252,
-            "acc_norm": 0.45140410276837284,
-            "acc_norm_stderr": 0.004966158142645415
-        },
-        "harness|ko_mmlu_world_religions|5": {
-            "acc": 0.49122807017543857,
-            "acc_stderr": 0.038342347441649924,
-            "acc_norm": 0.49122807017543857,
-            "acc_norm_stderr": 0.038342347441649924
-        },
-        "harness|ko_mmlu_management|5": {
-            "acc": 0.4368932038834951,
-            "acc_stderr": 0.04911147107365777,
-            "acc_norm": 0.4368932038834951,
-            "acc_norm_stderr": 0.04911147107365777
-        },
-        "harness|ko_mmlu_miscellaneous|5": {
-            "acc": 0.4278416347381865,
-            "acc_stderr": 0.01769278792780373,
-            "acc_norm": 0.4278416347381865,
-            "acc_norm_stderr": 0.01769278792780373
-        },
-        "harness|ko_mmlu_anatomy|5": {
-            "acc": 0.4074074074074074,
-            "acc_stderr": 0.04244633238353229,
-            "acc_norm": 0.4074074074074074,
-            "acc_norm_stderr": 0.04244633238353229
-        },
-        "harness|ko_mmlu_abstract_algebra|5": {
-            "acc": 0.32,
-            "acc_stderr": 0.04688261722621503,
-            "acc_norm": 0.32,
-            "acc_norm_stderr": 0.04688261722621503
-        },
-        "harness|ko_mmlu_conceptual_physics|5": {
-            "acc": 0.4,
-            "acc_stderr": 0.03202563076101735,
-            "acc_norm": 0.4,
-            "acc_norm_stderr": 0.03202563076101735
-        },
-        "harness|ko_mmlu_virology|5": {
-            "acc": 0.35542168674698793,
-            "acc_stderr": 0.03726214354322415,
-            "acc_norm": 0.35542168674698793,
-            "acc_norm_stderr": 0.03726214354322415
-        },
-        "harness|ko_mmlu_philosophy|5": {
-            "acc": 0.40836012861736337,
-            "acc_stderr": 0.027917050748484634,
-            "acc_norm": 0.40836012861736337,
-            "acc_norm_stderr": 0.027917050748484634
-        },
-        "harness|ko_mmlu_human_aging|5": {
-            "acc": 0.37668161434977576,
-            "acc_stderr": 0.03252113489929189,
-            "acc_norm": 0.37668161434977576,
-            "acc_norm_stderr": 0.03252113489929189
-        },
-        "harness|ko_mmlu_human_sexuality|5": {
-            "acc": 0.4198473282442748,
-            "acc_stderr": 0.04328577215262971,
-            "acc_norm": 0.4198473282442748,
-            "acc_norm_stderr": 0.04328577215262971
-        },
-        "harness|ko_mmlu_medical_genetics|5": {
-            "acc": 0.47,
-            "acc_stderr": 0.05016135580465919,
-            "acc_norm": 0.47,
-            "acc_norm_stderr": 0.05016135580465919
-        },
-        "harness|ko_mmlu_high_school_geography|5": {
-            "acc": 0.3787878787878788,
-            "acc_stderr": 0.03456088731993747,
-            "acc_norm": 0.3787878787878788,
-            "acc_norm_stderr": 0.03456088731993747
-        },
-        "harness|ko_mmlu_electrical_engineering|5": {
-            "acc": 0.46206896551724136,
-            "acc_stderr": 0.041546596717075474,
-            "acc_norm": 0.46206896551724136,
-            "acc_norm_stderr": 0.041546596717075474
-        },
-        "harness|ko_mmlu_college_physics|5": {
-            "acc": 0.23529411764705882,
-            "acc_stderr": 0.04220773659171452,
-            "acc_norm": 0.23529411764705882,
-            "acc_norm_stderr": 0.04220773659171452
-        },
-        "harness|ko_mmlu_high_school_microeconomics|5": {
-            "acc": 0.36134453781512604,
-            "acc_stderr": 0.031204691225150013,
-            "acc_norm": 0.36134453781512604,
-            "acc_norm_stderr": 0.031204691225150013
-        },
-        "harness|ko_mmlu_high_school_macroeconomics|5": {
-            "acc": 0.33589743589743587,
-            "acc_stderr": 0.023946724741563962,
-            "acc_norm": 0.33589743589743587,
-            "acc_norm_stderr": 0.023946724741563962
-        },
-        "harness|ko_mmlu_computer_security|5": {
-            "acc": 0.49,
-            "acc_stderr": 0.05024183937956911,
-            "acc_norm": 0.49,
-            "acc_norm_stderr": 0.05024183937956911
-        },
-        "harness|ko_mmlu_global_facts|5": {
-            "acc": 0.27,
-            "acc_stderr": 0.0446196043338474,
-            "acc_norm": 0.27,
-            "acc_norm_stderr": 0.0446196043338474
-        },
-        "harness|ko_mmlu_jurisprudence|5": {
-            "acc": 0.4444444444444444,
-            "acc_stderr": 0.04803752235190192,
-            "acc_norm": 0.4444444444444444,
-            "acc_norm_stderr": 0.04803752235190192
-        },
-        "harness|ko_mmlu_high_school_chemistry|5": {
-            "acc": 0.270935960591133,
-            "acc_stderr": 0.03127090713297698,
-            "acc_norm": 0.270935960591133,
-            "acc_norm_stderr": 0.03127090713297698
-        },
-        "harness|ko_mmlu_high_school_biology|5": {
-            "acc": 0.44193548387096776,
-            "acc_stderr": 0.028251557906849734,
-            "acc_norm": 0.44193548387096776,
-            "acc_norm_stderr": 0.028251557906849734
-        },
-        "harness|ko_mmlu_marketing|5": {
-            "acc": 0.5854700854700855,
-            "acc_stderr": 0.03227396567623779,
-            "acc_norm": 0.5854700854700855,
-            "acc_norm_stderr": 0.03227396567623779
-        },
-        "harness|ko_mmlu_clinical_knowledge|5": {
-            "acc": 0.42641509433962266,
-            "acc_stderr": 0.030437794342983045,
-            "acc_norm": 0.42641509433962266,
-            "acc_norm_stderr": 0.030437794342983045
-        },
-        "harness|ko_mmlu_public_relations|5": {
-            "acc": 0.43636363636363634,
-            "acc_stderr": 0.04750185058907297,
-            "acc_norm": 0.43636363636363634,
-            "acc_norm_stderr": 0.04750185058907297
-        },
-        "harness|ko_mmlu_high_school_mathematics|5": {
-            "acc": 0.2962962962962963,
-            "acc_stderr": 0.027840811495871916,
-            "acc_norm": 0.2962962962962963,
-            "acc_norm_stderr": 0.027840811495871916
-        },
-        "harness|ko_mmlu_high_school_physics|5": {
-            "acc": 0.2980132450331126,
-            "acc_stderr": 0.037345356767871984,
-            "acc_norm": 0.2980132450331126,
-            "acc_norm_stderr": 0.037345356767871984
-        },
-        "harness|ko_mmlu_sociology|5": {
-            "acc": 0.4577114427860697,
-            "acc_stderr": 0.035228658640995975,
-            "acc_norm": 0.4577114427860697,
-            "acc_norm_stderr": 0.035228658640995975
-        },
-        "harness|ko_mmlu_college_medicine|5": {
-            "acc": 0.2947976878612717,
-            "acc_stderr": 0.034765996075164785,
-            "acc_norm": 0.2947976878612717,
-            "acc_norm_stderr": 0.034765996075164785
-        },
-        "harness|ko_mmlu_elementary_mathematics|5": {
-            "acc": 0.2724867724867725,
-            "acc_stderr": 0.022930973071633345,
-            "acc_norm": 0.2724867724867725,
-            "acc_norm_stderr": 0.022930973071633345
-        },
-        "harness|ko_mmlu_college_biology|5": {
-            "acc": 0.3680555555555556,
-            "acc_stderr": 0.04032999053960718,
-            "acc_norm": 0.3680555555555556,
-            "acc_norm_stderr": 0.04032999053960718
-        },
-        "harness|ko_mmlu_college_chemistry|5": {
-            "acc": 0.29,
-            "acc_stderr": 0.045604802157206845,
-            "acc_norm": 0.29,
-            "acc_norm_stderr": 0.045604802157206845
-        },
-        "harness|ko_mmlu_us_foreign_policy|5": {
-            "acc": 0.46,
-            "acc_stderr": 0.05009082659620332,
-            "acc_norm": 0.46,
-            "acc_norm_stderr": 0.05009082659620332
-        },
-        "harness|ko_mmlu_moral_disputes|5": {
-            "acc": 0.3670520231213873,
-            "acc_stderr": 0.02595005433765408,
-            "acc_norm": 0.3670520231213873,
-            "acc_norm_stderr": 0.02595005433765408
-        },
-        "harness|ko_mmlu_logical_fallacies|5": {
-            "acc": 0.3312883435582822,
-            "acc_stderr": 0.03697983910025588,
-            "acc_norm": 0.3312883435582822,
-            "acc_norm_stderr": 0.03697983910025588
-        },
-        "harness|ko_mmlu_prehistory|5": {
-            "acc": 0.35802469135802467,
-            "acc_stderr": 0.026675611926037086,
-            "acc_norm": 0.35802469135802467,
-            "acc_norm_stderr": 0.026675611926037086
-        },
-        "harness|ko_mmlu_college_mathematics|5": {
-            "acc": 0.34,
-            "acc_stderr": 0.04760952285695235,
-            "acc_norm": 0.34,
-            "acc_norm_stderr": 0.04760952285695235
-        },
-        "harness|ko_mmlu_high_school_government_and_politics|5": {
-            "acc": 0.35751295336787564,
-            "acc_stderr": 0.034588160421810066,
-            "acc_norm": 0.35751295336787564,
-            "acc_norm_stderr": 0.034588160421810066
-        },
-        "harness|ko_mmlu_econometrics|5": {
-            "acc": 0.2543859649122807,
-            "acc_stderr": 0.040969851398436695,
-            "acc_norm": 0.2543859649122807,
-            "acc_norm_stderr": 0.040969851398436695
-        },
-        "harness|ko_mmlu_high_school_psychology|5": {
-            "acc": 0.3394495412844037,
-            "acc_stderr": 0.02030210934266235,
-            "acc_norm": 0.3394495412844037,
-            "acc_norm_stderr": 0.02030210934266235
-        },
-        "harness|ko_mmlu_formal_logic|5": {
-            "acc": 0.23015873015873015,
-            "acc_stderr": 0.03764950879790607,
-            "acc_norm": 0.23015873015873015,
-            "acc_norm_stderr": 0.03764950879790607
-        },
-        "harness|ko_mmlu_nutrition|5": {
-            "acc": 0.39215686274509803,
-            "acc_stderr": 0.02795604616542451,
-            "acc_norm": 0.39215686274509803,
-            "acc_norm_stderr": 0.02795604616542451
-        },
-        "harness|ko_mmlu_business_ethics|5": {
-            "acc": 0.47,
-            "acc_stderr": 0.050161355804659205,
-            "acc_norm": 0.47,
-            "acc_norm_stderr": 0.050161355804659205
-        },
-        "harness|ko_mmlu_international_law|5": {
-            "acc": 0.48760330578512395,
-            "acc_stderr": 0.04562951548180765,
-            "acc_norm": 0.48760330578512395,
-            "acc_norm_stderr": 0.04562951548180765
-        },
-        "harness|ko_mmlu_astronomy|5": {
-            "acc": 0.23026315789473684,
-            "acc_stderr": 0.03426059424403164,
-            "acc_norm": 0.23026315789473684,
-            "acc_norm_stderr": 0.03426059424403164
-        },
-        "harness|ko_mmlu_professional_psychology|5": {
-            "acc": 0.3382352941176471,
-            "acc_stderr": 0.019139943748487025,
-            "acc_norm": 0.3382352941176471,
-            "acc_norm_stderr": 0.019139943748487025
-        },
-        "harness|ko_mmlu_professional_accounting|5": {
-            "acc": 0.3120567375886525,
-            "acc_stderr": 0.027640120545169917,
-            "acc_norm": 0.3120567375886525,
-            "acc_norm_stderr": 0.027640120545169917
-        },
-        "harness|ko_mmlu_machine_learning|5": {
-            "acc": 0.35714285714285715,
-            "acc_stderr": 0.04547960999764376,
-            "acc_norm": 0.35714285714285715,
-            "acc_norm_stderr": 0.04547960999764376
-        },
-        "harness|ko_mmlu_high_school_statistics|5": {
-            "acc": 0.24074074074074073,
-            "acc_stderr": 0.029157522184605607,
-            "acc_norm": 0.24074074074074073,
-            "acc_norm_stderr": 0.029157522184605607
-        },
-        "harness|ko_mmlu_moral_scenarios|5": {
-            "acc": 0.2424581005586592,
-            "acc_stderr": 0.01433352205921789,
-            "acc_norm": 0.2424581005586592,
-            "acc_norm_stderr": 0.01433352205921789
-        },
-        "harness|ko_mmlu_college_computer_science|5": {
-            "acc": 0.35,
-            "acc_stderr": 0.0479372485441102,
-            "acc_norm": 0.35,
-            "acc_norm_stderr": 0.0479372485441102
-        },
-        "harness|ko_mmlu_high_school_computer_science|5": {
-            "acc": 0.38,
-            "acc_stderr": 0.04878317312145634,
-            "acc_norm": 0.38,
-            "acc_norm_stderr": 0.04878317312145634
-        },
-        "harness|ko_mmlu_professional_medicine|5": {
-            "acc": 0.3014705882352941,
-            "acc_stderr": 0.027875982114273168,
-            "acc_norm": 0.3014705882352941,
-            "acc_norm_stderr": 0.027875982114273168
-        },
-        "harness|ko_mmlu_security_studies|5": {
-            "acc": 0.22040816326530613,
-            "acc_stderr": 0.02653704531214529,
-            "acc_norm": 0.22040816326530613,
-            "acc_norm_stderr": 0.02653704531214529
-        },
-        "harness|ko_mmlu_high_school_world_history|5": {
-            "acc": 0.4641350210970464,
-            "acc_stderr": 0.03246338898055659,
-            "acc_norm": 0.4641350210970464,
-            "acc_norm_stderr": 0.03246338898055659
-        },
-        "harness|ko_mmlu_professional_law|5": {
-            "acc": 0.27509778357235987,
-            "acc_stderr": 0.011405443620996924,
-            "acc_norm": 0.27509778357235987,
-            "acc_norm_stderr": 0.011405443620996924
-        },
-        "harness|ko_mmlu_high_school_us_history|5": {
-            "acc": 0.3088235294117647,
-            "acc_stderr": 0.03242661719827218,
-            "acc_norm": 0.3088235294117647,
-            "acc_norm_stderr": 0.03242661719827218
-        },
-        "harness|ko_mmlu_high_school_european_history|5": {
-            "acc": 0.34545454545454546,
-            "acc_stderr": 0.03713158067481912,
-            "acc_norm": 0.34545454545454546,
-            "acc_norm_stderr": 0.03713158067481912
-        },
-        "harness|ko_truthfulqa_mc|0": {
-            "mc1": 0.29253365973072215,
-            "mc1_stderr": 0.015925597445286165,
-            "mc2": 0.468616885925621,
-            "mc2_stderr": 0.015261121105350028
-        },
-        "harness|ko_commongen_v2|2": {
-            "acc": 0.36481700118063753,
-            "acc_stderr": 0.01655014433704659,
-            "acc_norm": 0.43211334120425027,
-            "acc_norm_stderr": 0.017031170198851753
-        }
-    },
-    "versions": {
-        "all": 0,
-        "harness|ko_arc_challenge|25": 0,
-        "harness|ko_hellaswag|10": 0,
-        "harness|ko_mmlu_world_religions|5": 1,
-        "harness|ko_mmlu_management|5": 1,
-        "harness|ko_mmlu_miscellaneous|5": 1,
-        "harness|ko_mmlu_anatomy|5": 1,
-        "harness|ko_mmlu_abstract_algebra|5": 1,
-        "harness|ko_mmlu_conceptual_physics|5": 1,
-        "harness|ko_mmlu_virology|5": 1,
-        "harness|ko_mmlu_philosophy|5": 1,
-        "harness|ko_mmlu_human_aging|5": 1,
-        "harness|ko_mmlu_human_sexuality|5": 1,
-        "harness|ko_mmlu_medical_genetics|5": 1,
-        "harness|ko_mmlu_high_school_geography|5": 1,
-        "harness|ko_mmlu_electrical_engineering|5": 1,
-        "harness|ko_mmlu_college_physics|5": 1,
-        "harness|ko_mmlu_high_school_microeconomics|5": 1,
-        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
-        "harness|ko_mmlu_computer_security|5": 1,
-        "harness|ko_mmlu_global_facts|5": 1,
-        "harness|ko_mmlu_jurisprudence|5": 1,
-        "harness|ko_mmlu_high_school_chemistry|5": 1,
-        "harness|ko_mmlu_high_school_biology|5": 1,
-        "harness|ko_mmlu_marketing|5": 1,
-        "harness|ko_mmlu_clinical_knowledge|5": 1,
-        "harness|ko_mmlu_public_relations|5": 1,
-        "harness|ko_mmlu_high_school_mathematics|5": 1,
-        "harness|ko_mmlu_high_school_physics|5": 1,
-        "harness|ko_mmlu_sociology|5": 1,
-        "harness|ko_mmlu_college_medicine|5": 1,
-        "harness|ko_mmlu_elementary_mathematics|5": 1,
-        "harness|ko_mmlu_college_biology|5": 1,
-        "harness|ko_mmlu_college_chemistry|5": 1,
-        "harness|ko_mmlu_us_foreign_policy|5": 1,
-        "harness|ko_mmlu_moral_disputes|5": 1,
-        "harness|ko_mmlu_logical_fallacies|5": 1,
-        "harness|ko_mmlu_prehistory|5": 1,
-        "harness|ko_mmlu_college_mathematics|5": 1,
-        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
-        "harness|ko_mmlu_econometrics|5": 1,
-        "harness|ko_mmlu_high_school_psychology|5": 1,
-        "harness|ko_mmlu_formal_logic|5": 1,
-        "harness|ko_mmlu_nutrition|5": 1,
-        "harness|ko_mmlu_business_ethics|5": 1,
-        "harness|ko_mmlu_international_law|5": 1,
-        "harness|ko_mmlu_astronomy|5": 1,
-        "harness|ko_mmlu_professional_psychology|5": 1,
-        "harness|ko_mmlu_professional_accounting|5": 1,
-        "harness|ko_mmlu_machine_learning|5": 1,
-        "harness|ko_mmlu_high_school_statistics|5": 1,
-        "harness|ko_mmlu_moral_scenarios|5": 1,
-        "harness|ko_mmlu_college_computer_science|5": 1,
-        "harness|ko_mmlu_high_school_computer_science|5": 1,
-        "harness|ko_mmlu_professional_medicine|5": 1,
-        "harness|ko_mmlu_security_studies|5": 1,
-        "harness|ko_mmlu_high_school_world_history|5": 1,
-        "harness|ko_mmlu_professional_law|5": 1,
-        "harness|ko_mmlu_high_school_us_history|5": 1,
-        "harness|ko_mmlu_high_school_european_history|5": 1,
-        "harness|ko_truthfulqa_mc|0": 0,
-        "harness|ko_commongen_v2|2": 1
-    },
-    "config_general": {
-        "model_name": "SKYEEEE/llama3-Ko-3-8B-finetuned_ver2",
-        "model_sha": "338d770ca78f9aeef89675578142349c39d7c195",
-        "model_dtype": "torch.float16",
-        "lighteval_sha": "",
-        "num_few_shot_default": 0,
-        "num_fewshot_seeds": 1,
-        "override_batch_size": 1,
-        "max_samples": null
-    }
-}
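A single headline number for a run can be formed by averaging one metric per benchmark family. The grouping below (acc_norm for ARC, HellaSwag, and CommonGen; mean acc over the ko_mmlu subtasks; mc2 for TruthfulQA) is an assumption for illustration, not a weighting stated anywhere in this diff:

import json
from statistics import mean

with open("result.json") as f:  # hypothetical local copy of a removed file
    res = json.load(f)["results"]

# Assumed per-benchmark headline metrics (see the caveat above).
arc = res["harness|ko_arc_challenge|25"]["acc_norm"]
hellaswag = res["harness|ko_hellaswag|10"]["acc_norm"]
mmlu = mean(v["acc"] for k, v in res.items() if k.startswith("harness|ko_mmlu_"))
truthfulqa = res["harness|ko_truthfulqa_mc|0"]["mc2"]
commongen = res["harness|ko_commongen_v2|2"]["acc_norm"]

print(f"average = {mean([arc, hellaswag, mmlu, truthfulqa, commongen]):.4f}")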
SKYEEEE/llama3-Ko-3-8B-finetuned_ver2/result_2024-07-14 03:35:13.json
DELETED
@@ -1,444 +0,0 @@
-{
-    "results": {
-        "harness|ko_arc_challenge|25": {
-            "acc": 0.3464163822525597,
-            "acc_stderr": 0.013905011180063254,
-            "acc_norm": 0.38054607508532423,
-            "acc_norm_stderr": 0.014188277712349828
-        },
-        "harness|ko_hellaswag|10": {
-            "acc": 0.350726946823342,
-            "acc_stderr": 0.004762223492435252,
-            "acc_norm": 0.45140410276837284,
-            "acc_norm_stderr": 0.004966158142645415
-        },
-        "harness|ko_mmlu_world_religions|5": {
-            "acc": 0.49122807017543857,
-            "acc_stderr": 0.038342347441649924,
-            "acc_norm": 0.49122807017543857,
-            "acc_norm_stderr": 0.038342347441649924
-        },
-        "harness|ko_mmlu_management|5": {
-            "acc": 0.4368932038834951,
-            "acc_stderr": 0.04911147107365777,
-            "acc_norm": 0.4368932038834951,
-            "acc_norm_stderr": 0.04911147107365777
-        },
-        "harness|ko_mmlu_miscellaneous|5": {
-            "acc": 0.4278416347381865,
-            "acc_stderr": 0.01769278792780373,
-            "acc_norm": 0.4278416347381865,
-            "acc_norm_stderr": 0.01769278792780373
-        },
-        "harness|ko_mmlu_anatomy|5": {
-            "acc": 0.4074074074074074,
-            "acc_stderr": 0.04244633238353229,
-            "acc_norm": 0.4074074074074074,
-            "acc_norm_stderr": 0.04244633238353229
-        },
-        "harness|ko_mmlu_abstract_algebra|5": {
-            "acc": 0.32,
-            "acc_stderr": 0.04688261722621503,
-            "acc_norm": 0.32,
-            "acc_norm_stderr": 0.04688261722621503
-        },
-        "harness|ko_mmlu_conceptual_physics|5": {
-            "acc": 0.4,
-            "acc_stderr": 0.03202563076101735,
-            "acc_norm": 0.4,
-            "acc_norm_stderr": 0.03202563076101735
-        },
-        "harness|ko_mmlu_virology|5": {
-            "acc": 0.35542168674698793,
-            "acc_stderr": 0.03726214354322415,
-            "acc_norm": 0.35542168674698793,
-            "acc_norm_stderr": 0.03726214354322415
-        },
-        "harness|ko_mmlu_philosophy|5": {
-            "acc": 0.40836012861736337,
-            "acc_stderr": 0.027917050748484634,
-            "acc_norm": 0.40836012861736337,
-            "acc_norm_stderr": 0.027917050748484634
-        },
-        "harness|ko_mmlu_human_aging|5": {
-            "acc": 0.37668161434977576,
-            "acc_stderr": 0.03252113489929189,
-            "acc_norm": 0.37668161434977576,
-            "acc_norm_stderr": 0.03252113489929189
-        },
-        "harness|ko_mmlu_human_sexuality|5": {
-            "acc": 0.4198473282442748,
-            "acc_stderr": 0.04328577215262971,
-            "acc_norm": 0.4198473282442748,
-            "acc_norm_stderr": 0.04328577215262971
-        },
-        "harness|ko_mmlu_medical_genetics|5": {
-            "acc": 0.47,
-            "acc_stderr": 0.05016135580465919,
-            "acc_norm": 0.47,
-            "acc_norm_stderr": 0.05016135580465919
-        },
-        "harness|ko_mmlu_high_school_geography|5": {
-            "acc": 0.3787878787878788,
-            "acc_stderr": 0.03456088731993747,
-            "acc_norm": 0.3787878787878788,
-            "acc_norm_stderr": 0.03456088731993747
-        },
-        "harness|ko_mmlu_electrical_engineering|5": {
-            "acc": 0.46206896551724136,
-            "acc_stderr": 0.041546596717075474,
-            "acc_norm": 0.46206896551724136,
-            "acc_norm_stderr": 0.041546596717075474
-        },
-        "harness|ko_mmlu_college_physics|5": {
-            "acc": 0.23529411764705882,
-            "acc_stderr": 0.04220773659171452,
-            "acc_norm": 0.23529411764705882,
-            "acc_norm_stderr": 0.04220773659171452
-        },
-        "harness|ko_mmlu_high_school_microeconomics|5": {
-            "acc": 0.36134453781512604,
-            "acc_stderr": 0.031204691225150013,
-            "acc_norm": 0.36134453781512604,
-            "acc_norm_stderr": 0.031204691225150013
-        },
-        "harness|ko_mmlu_high_school_macroeconomics|5": {
-            "acc": 0.33589743589743587,
-            "acc_stderr": 0.023946724741563962,
-            "acc_norm": 0.33589743589743587,
-            "acc_norm_stderr": 0.023946724741563962
-        },
-        "harness|ko_mmlu_computer_security|5": {
-            "acc": 0.49,
-            "acc_stderr": 0.05024183937956911,
-            "acc_norm": 0.49,
-            "acc_norm_stderr": 0.05024183937956911
-        },
-        "harness|ko_mmlu_global_facts|5": {
-            "acc": 0.27,
-            "acc_stderr": 0.0446196043338474,
-            "acc_norm": 0.27,
-            "acc_norm_stderr": 0.0446196043338474
-        },
-        "harness|ko_mmlu_jurisprudence|5": {
-            "acc": 0.4444444444444444,
-            "acc_stderr": 0.04803752235190192,
-            "acc_norm": 0.4444444444444444,
-            "acc_norm_stderr": 0.04803752235190192
-        },
-        "harness|ko_mmlu_high_school_chemistry|5": {
-            "acc": 0.270935960591133,
-            "acc_stderr": 0.03127090713297698,
-            "acc_norm": 0.270935960591133,
-            "acc_norm_stderr": 0.03127090713297698
-        },
-        "harness|ko_mmlu_high_school_biology|5": {
-            "acc": 0.44193548387096776,
-            "acc_stderr": 0.028251557906849734,
-            "acc_norm": 0.44193548387096776,
-            "acc_norm_stderr": 0.028251557906849734
-        },
-        "harness|ko_mmlu_marketing|5": {
-            "acc": 0.5854700854700855,
-            "acc_stderr": 0.03227396567623779,
-            "acc_norm": 0.5854700854700855,
-            "acc_norm_stderr": 0.03227396567623779
-        },
-        "harness|ko_mmlu_clinical_knowledge|5": {
-            "acc": 0.42641509433962266,
-            "acc_stderr": 0.030437794342983045,
-            "acc_norm": 0.42641509433962266,
-            "acc_norm_stderr": 0.030437794342983045
-        },
-        "harness|ko_mmlu_public_relations|5": {
-            "acc": 0.43636363636363634,
-            "acc_stderr": 0.04750185058907297,
-            "acc_norm": 0.43636363636363634,
-            "acc_norm_stderr": 0.04750185058907297
-        },
-        "harness|ko_mmlu_high_school_mathematics|5": {
-            "acc": 0.2962962962962963,
-            "acc_stderr": 0.027840811495871916,
-            "acc_norm": 0.2962962962962963,
-            "acc_norm_stderr": 0.027840811495871916
-        },
-        "harness|ko_mmlu_high_school_physics|5": {
-            "acc": 0.2980132450331126,
-            "acc_stderr": 0.037345356767871984,
-            "acc_norm": 0.2980132450331126,
-            "acc_norm_stderr": 0.037345356767871984
-        },
-        "harness|ko_mmlu_sociology|5": {
-            "acc": 0.4577114427860697,
-            "acc_stderr": 0.035228658640995975,
-            "acc_norm": 0.4577114427860697,
-            "acc_norm_stderr": 0.035228658640995975
-        },
-        "harness|ko_mmlu_college_medicine|5": {
-            "acc": 0.2947976878612717,
-            "acc_stderr": 0.034765996075164785,
-            "acc_norm": 0.2947976878612717,
-            "acc_norm_stderr": 0.034765996075164785
-        },
-        "harness|ko_mmlu_elementary_mathematics|5": {
-            "acc": 0.2724867724867725,
-            "acc_stderr": 0.022930973071633345,
|
186 |
-
"acc_norm": 0.2724867724867725,
|
187 |
-
"acc_norm_stderr": 0.022930973071633345
|
188 |
-
},
|
189 |
-
"harness|ko_mmlu_college_biology|5": {
|
190 |
-
"acc": 0.3680555555555556,
|
191 |
-
"acc_stderr": 0.04032999053960718,
|
192 |
-
"acc_norm": 0.3680555555555556,
|
193 |
-
"acc_norm_stderr": 0.04032999053960718
|
194 |
-
},
|
195 |
-
"harness|ko_mmlu_college_chemistry|5": {
|
196 |
-
"acc": 0.29,
|
197 |
-
"acc_stderr": 0.045604802157206845,
|
198 |
-
"acc_norm": 0.29,
|
199 |
-
"acc_norm_stderr": 0.045604802157206845
|
200 |
-
},
|
201 |
-
"harness|ko_mmlu_us_foreign_policy|5": {
|
202 |
-
"acc": 0.46,
|
203 |
-
"acc_stderr": 0.05009082659620332,
|
204 |
-
"acc_norm": 0.46,
|
205 |
-
"acc_norm_stderr": 0.05009082659620332
|
206 |
-
},
|
207 |
-
"harness|ko_mmlu_moral_disputes|5": {
|
208 |
-
"acc": 0.3670520231213873,
|
209 |
-
"acc_stderr": 0.02595005433765408,
|
210 |
-
"acc_norm": 0.3670520231213873,
|
211 |
-
"acc_norm_stderr": 0.02595005433765408
|
212 |
-
},
|
213 |
-
"harness|ko_mmlu_logical_fallacies|5": {
|
214 |
-
"acc": 0.3312883435582822,
|
215 |
-
"acc_stderr": 0.03697983910025588,
|
216 |
-
"acc_norm": 0.3312883435582822,
|
217 |
-
"acc_norm_stderr": 0.03697983910025588
|
218 |
-
},
|
219 |
-
"harness|ko_mmlu_prehistory|5": {
|
220 |
-
"acc": 0.35802469135802467,
|
221 |
-
"acc_stderr": 0.026675611926037086,
|
222 |
-
"acc_norm": 0.35802469135802467,
|
223 |
-
"acc_norm_stderr": 0.026675611926037086
|
224 |
-
},
|
225 |
-
"harness|ko_mmlu_college_mathematics|5": {
|
226 |
-
"acc": 0.34,
|
227 |
-
"acc_stderr": 0.04760952285695235,
|
228 |
-
"acc_norm": 0.34,
|
229 |
-
"acc_norm_stderr": 0.04760952285695235
|
230 |
-
},
|
231 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": {
|
232 |
-
"acc": 0.35751295336787564,
|
233 |
-
"acc_stderr": 0.034588160421810066,
|
234 |
-
"acc_norm": 0.35751295336787564,
|
235 |
-
"acc_norm_stderr": 0.034588160421810066
|
236 |
-
},
|
237 |
-
"harness|ko_mmlu_econometrics|5": {
|
238 |
-
"acc": 0.2543859649122807,
|
239 |
-
"acc_stderr": 0.040969851398436695,
|
240 |
-
"acc_norm": 0.2543859649122807,
|
241 |
-
"acc_norm_stderr": 0.040969851398436695
|
242 |
-
},
|
243 |
-
"harness|ko_mmlu_high_school_psychology|5": {
|
244 |
-
"acc": 0.3394495412844037,
|
245 |
-
"acc_stderr": 0.02030210934266235,
|
246 |
-
"acc_norm": 0.3394495412844037,
|
247 |
-
"acc_norm_stderr": 0.02030210934266235
|
248 |
-
},
|
249 |
-
"harness|ko_mmlu_formal_logic|5": {
|
250 |
-
"acc": 0.23015873015873015,
|
251 |
-
"acc_stderr": 0.03764950879790607,
|
252 |
-
"acc_norm": 0.23015873015873015,
|
253 |
-
"acc_norm_stderr": 0.03764950879790607
|
254 |
-
},
|
255 |
-
"harness|ko_mmlu_nutrition|5": {
|
256 |
-
"acc": 0.39215686274509803,
|
257 |
-
"acc_stderr": 0.02795604616542451,
|
258 |
-
"acc_norm": 0.39215686274509803,
|
259 |
-
"acc_norm_stderr": 0.02795604616542451
|
260 |
-
},
|
261 |
-
"harness|ko_mmlu_business_ethics|5": {
|
262 |
-
"acc": 0.47,
|
263 |
-
"acc_stderr": 0.050161355804659205,
|
264 |
-
"acc_norm": 0.47,
|
265 |
-
"acc_norm_stderr": 0.050161355804659205
|
266 |
-
},
|
267 |
-
"harness|ko_mmlu_international_law|5": {
|
268 |
-
"acc": 0.48760330578512395,
|
269 |
-
"acc_stderr": 0.04562951548180765,
|
270 |
-
"acc_norm": 0.48760330578512395,
|
271 |
-
"acc_norm_stderr": 0.04562951548180765
|
272 |
-
},
|
273 |
-
"harness|ko_mmlu_astronomy|5": {
|
274 |
-
"acc": 0.23026315789473684,
|
275 |
-
"acc_stderr": 0.03426059424403164,
|
276 |
-
"acc_norm": 0.23026315789473684,
|
277 |
-
"acc_norm_stderr": 0.03426059424403164
|
278 |
-
},
|
279 |
-
"harness|ko_mmlu_professional_psychology|5": {
|
280 |
-
"acc": 0.3382352941176471,
|
281 |
-
"acc_stderr": 0.019139943748487025,
|
282 |
-
"acc_norm": 0.3382352941176471,
|
283 |
-
"acc_norm_stderr": 0.019139943748487025
|
284 |
-
},
|
285 |
-
"harness|ko_mmlu_professional_accounting|5": {
|
286 |
-
"acc": 0.3120567375886525,
|
287 |
-
"acc_stderr": 0.027640120545169917,
|
288 |
-
"acc_norm": 0.3120567375886525,
|
289 |
-
"acc_norm_stderr": 0.027640120545169917
|
290 |
-
},
|
291 |
-
"harness|ko_mmlu_machine_learning|5": {
|
292 |
-
"acc": 0.35714285714285715,
|
293 |
-
"acc_stderr": 0.04547960999764376,
|
294 |
-
"acc_norm": 0.35714285714285715,
|
295 |
-
"acc_norm_stderr": 0.04547960999764376
|
296 |
-
},
|
297 |
-
"harness|ko_mmlu_high_school_statistics|5": {
|
298 |
-
"acc": 0.24074074074074073,
|
299 |
-
"acc_stderr": 0.029157522184605607,
|
300 |
-
"acc_norm": 0.24074074074074073,
|
301 |
-
"acc_norm_stderr": 0.029157522184605607
|
302 |
-
},
|
303 |
-
"harness|ko_mmlu_moral_scenarios|5": {
|
304 |
-
"acc": 0.2424581005586592,
|
305 |
-
"acc_stderr": 0.01433352205921789,
|
306 |
-
"acc_norm": 0.2424581005586592,
|
307 |
-
"acc_norm_stderr": 0.01433352205921789
|
308 |
-
},
|
309 |
-
"harness|ko_mmlu_college_computer_science|5": {
|
310 |
-
"acc": 0.35,
|
311 |
-
"acc_stderr": 0.0479372485441102,
|
312 |
-
"acc_norm": 0.35,
|
313 |
-
"acc_norm_stderr": 0.0479372485441102
|
314 |
-
},
|
315 |
-
"harness|ko_mmlu_high_school_computer_science|5": {
|
316 |
-
"acc": 0.38,
|
317 |
-
"acc_stderr": 0.04878317312145634,
|
318 |
-
"acc_norm": 0.38,
|
319 |
-
"acc_norm_stderr": 0.04878317312145634
|
320 |
-
},
|
321 |
-
"harness|ko_mmlu_professional_medicine|5": {
|
322 |
-
"acc": 0.3014705882352941,
|
323 |
-
"acc_stderr": 0.027875982114273168,
|
324 |
-
"acc_norm": 0.3014705882352941,
|
325 |
-
"acc_norm_stderr": 0.027875982114273168
|
326 |
-
},
|
327 |
-
"harness|ko_mmlu_security_studies|5": {
|
328 |
-
"acc": 0.22040816326530613,
|
329 |
-
"acc_stderr": 0.02653704531214529,
|
330 |
-
"acc_norm": 0.22040816326530613,
|
331 |
-
"acc_norm_stderr": 0.02653704531214529
|
332 |
-
},
|
333 |
-
"harness|ko_mmlu_high_school_world_history|5": {
|
334 |
-
"acc": 0.4641350210970464,
|
335 |
-
"acc_stderr": 0.03246338898055659,
|
336 |
-
"acc_norm": 0.4641350210970464,
|
337 |
-
"acc_norm_stderr": 0.03246338898055659
|
338 |
-
},
|
339 |
-
"harness|ko_mmlu_professional_law|5": {
|
340 |
-
"acc": 0.27509778357235987,
|
341 |
-
"acc_stderr": 0.011405443620996924,
|
342 |
-
"acc_norm": 0.27509778357235987,
|
343 |
-
"acc_norm_stderr": 0.011405443620996924
|
344 |
-
},
|
345 |
-
"harness|ko_mmlu_high_school_us_history|5": {
|
346 |
-
"acc": 0.3088235294117647,
|
347 |
-
"acc_stderr": 0.03242661719827218,
|
348 |
-
"acc_norm": 0.3088235294117647,
|
349 |
-
"acc_norm_stderr": 0.03242661719827218
|
350 |
-
},
|
351 |
-
"harness|ko_mmlu_high_school_european_history|5": {
|
352 |
-
"acc": 0.34545454545454546,
|
353 |
-
"acc_stderr": 0.03713158067481912,
|
354 |
-
"acc_norm": 0.34545454545454546,
|
355 |
-
"acc_norm_stderr": 0.03713158067481912
|
356 |
-
},
|
357 |
-
"harness|ko_truthfulqa_mc|0": {
|
358 |
-
"mc1": 0.29253365973072215,
|
359 |
-
"mc1_stderr": 0.015925597445286165,
|
360 |
-
"mc2": 0.468616885925621,
|
361 |
-
"mc2_stderr": 0.015261121105350028
|
362 |
-
},
|
363 |
-
"harness|ko_commongen_v2|2": {
|
364 |
-
"acc": 0.36481700118063753,
|
365 |
-
"acc_stderr": 0.01655014433704659,
|
366 |
-
"acc_norm": 0.43211334120425027,
|
367 |
-
"acc_norm_stderr": 0.017031170198851753
|
368 |
-
}
|
369 |
-
},
|
370 |
-
"versions": {
|
371 |
-
"all": 0,
|
372 |
-
"harness|ko_arc_challenge|25": 0,
|
373 |
-
"harness|ko_hellaswag|10": 0,
|
374 |
-
"harness|ko_mmlu_world_religions|5": 1,
|
375 |
-
"harness|ko_mmlu_management|5": 1,
|
376 |
-
"harness|ko_mmlu_miscellaneous|5": 1,
|
377 |
-
"harness|ko_mmlu_anatomy|5": 1,
|
378 |
-
"harness|ko_mmlu_abstract_algebra|5": 1,
|
379 |
-
"harness|ko_mmlu_conceptual_physics|5": 1,
|
380 |
-
"harness|ko_mmlu_virology|5": 1,
|
381 |
-
"harness|ko_mmlu_philosophy|5": 1,
|
382 |
-
"harness|ko_mmlu_human_aging|5": 1,
|
383 |
-
"harness|ko_mmlu_human_sexuality|5": 1,
|
384 |
-
"harness|ko_mmlu_medical_genetics|5": 1,
|
385 |
-
"harness|ko_mmlu_high_school_geography|5": 1,
|
386 |
-
"harness|ko_mmlu_electrical_engineering|5": 1,
|
387 |
-
"harness|ko_mmlu_college_physics|5": 1,
|
388 |
-
"harness|ko_mmlu_high_school_microeconomics|5": 1,
|
389 |
-
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
|
390 |
-
"harness|ko_mmlu_computer_security|5": 1,
|
391 |
-
"harness|ko_mmlu_global_facts|5": 1,
|
392 |
-
"harness|ko_mmlu_jurisprudence|5": 1,
|
393 |
-
"harness|ko_mmlu_high_school_chemistry|5": 1,
|
394 |
-
"harness|ko_mmlu_high_school_biology|5": 1,
|
395 |
-
"harness|ko_mmlu_marketing|5": 1,
|
396 |
-
"harness|ko_mmlu_clinical_knowledge|5": 1,
|
397 |
-
"harness|ko_mmlu_public_relations|5": 1,
|
398 |
-
"harness|ko_mmlu_high_school_mathematics|5": 1,
|
399 |
-
"harness|ko_mmlu_high_school_physics|5": 1,
|
400 |
-
"harness|ko_mmlu_sociology|5": 1,
|
401 |
-
"harness|ko_mmlu_college_medicine|5": 1,
|
402 |
-
"harness|ko_mmlu_elementary_mathematics|5": 1,
|
403 |
-
"harness|ko_mmlu_college_biology|5": 1,
|
404 |
-
"harness|ko_mmlu_college_chemistry|5": 1,
|
405 |
-
"harness|ko_mmlu_us_foreign_policy|5": 1,
|
406 |
-
"harness|ko_mmlu_moral_disputes|5": 1,
|
407 |
-
"harness|ko_mmlu_logical_fallacies|5": 1,
|
408 |
-
"harness|ko_mmlu_prehistory|5": 1,
|
409 |
-
"harness|ko_mmlu_college_mathematics|5": 1,
|
410 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
|
411 |
-
"harness|ko_mmlu_econometrics|5": 1,
|
412 |
-
"harness|ko_mmlu_high_school_psychology|5": 1,
|
413 |
-
"harness|ko_mmlu_formal_logic|5": 1,
|
414 |
-
"harness|ko_mmlu_nutrition|5": 1,
|
415 |
-
"harness|ko_mmlu_business_ethics|5": 1,
|
416 |
-
"harness|ko_mmlu_international_law|5": 1,
|
417 |
-
"harness|ko_mmlu_astronomy|5": 1,
|
418 |
-
"harness|ko_mmlu_professional_psychology|5": 1,
|
419 |
-
"harness|ko_mmlu_professional_accounting|5": 1,
|
420 |
-
"harness|ko_mmlu_machine_learning|5": 1,
|
421 |
-
"harness|ko_mmlu_high_school_statistics|5": 1,
|
422 |
-
"harness|ko_mmlu_moral_scenarios|5": 1,
|
423 |
-
"harness|ko_mmlu_college_computer_science|5": 1,
|
424 |
-
"harness|ko_mmlu_high_school_computer_science|5": 1,
|
425 |
-
"harness|ko_mmlu_professional_medicine|5": 1,
|
426 |
-
"harness|ko_mmlu_security_studies|5": 1,
|
427 |
-
"harness|ko_mmlu_high_school_world_history|5": 1,
|
428 |
-
"harness|ko_mmlu_professional_law|5": 1,
|
429 |
-
"harness|ko_mmlu_high_school_us_history|5": 1,
|
430 |
-
"harness|ko_mmlu_high_school_european_history|5": 1,
|
431 |
-
"harness|ko_truthfulqa_mc|0": 0,
|
432 |
-
"harness|ko_commongen_v2|2": 1
|
433 |
-
},
|
434 |
-
"config_general": {
|
435 |
-
"model_name": "SKYEEEE/llama3-Ko-3-8B-finetuned_ver2",
|
436 |
-
"model_sha": "338d770ca78f9aeef89675578142349c39d7c195",
|
437 |
-
"model_dtype": "torch.float16",
|
438 |
-
"lighteval_sha": "",
|
439 |
-
"num_few_shot_default": 0,
|
440 |
-
"num_fewshot_seeds": 1,
|
441 |
-
"override_batch_size": 1,
|
442 |
-
"max_samples": null
|
443 |
-
}
|
444 |
-
}
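
Every result file removed in this commit follows the same three-block schema: per-task metrics under "results" (acc/acc_norm with standard errors, or mc1/mc2 for ko_truthfulqa_mc), harness task versions under "versions", and run metadata under "config_general". Below is a minimal Python sketch of how one such file could be summarized, assuming a local copy of the JSON above from a checkout that predates this commit; the unweighted macro-average is this sketch's own choice, not necessarily the leaderboard's aggregation rule.

import json

# Hypothetical local path to the deleted file reconstructed above.
path = "SKYEEEE/llama3-Ko-3-8B-finetuned_ver2/result_2024-07-14 03:35:13.json"
with open(path) as f:
    data = json.load(f)

scores = {}
for task, metrics in data["results"].items():
    # ko_truthfulqa_mc has no acc_norm; fall back to its mc2 score.
    scores[task] = metrics.get("acc_norm", metrics.get("mc2"))

mmlu = [v for t, v in scores.items() if t.startswith("harness|ko_mmlu")]
print("model:", data["config_general"]["model_name"])
print("macro-average, all tasks: %.4f" % (sum(scores.values()) / len(scores)))
print("macro-average, MMLU subtasks: %.4f" % (sum(mmlu) / len(mmlu)))
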
SakanaAI/DiscoPOP-zephyr-7b-gemma/result_2024-06-17 05:07:55.json
DELETED
@@ -1,444 +0,0 @@
{
  "results": {
    "harness|ko_arc_challenge|25": {"acc": 0.3856655290102389, "acc_stderr": 0.01422425097325718, "acc_norm": 0.4129692832764505, "acc_norm_stderr": 0.014388344935398326},
    "harness|ko_hellaswag|10": {"acc": 0.3800039832702649, "acc_stderr": 0.0048439543384514415, "acc_norm": 0.49083847839075884, "acc_norm_stderr": 0.0049889437217112125},
    "harness|ko_mmlu_world_religions|5": {"acc": 0.543859649122807, "acc_stderr": 0.03820042586602966, "acc_norm": 0.543859649122807, "acc_norm_stderr": 0.03820042586602966},
    "harness|ko_mmlu_management|5": {"acc": 0.6310679611650486, "acc_stderr": 0.0477761518115674, "acc_norm": 0.6310679611650486, "acc_norm_stderr": 0.0477761518115674},
    "harness|ko_mmlu_miscellaneous|5": {"acc": 0.48659003831417624, "acc_stderr": 0.01787353173651038, "acc_norm": 0.48659003831417624, "acc_norm_stderr": 0.01787353173651038},
    "harness|ko_mmlu_anatomy|5": {"acc": 0.34814814814814815, "acc_stderr": 0.041153246103369526, "acc_norm": 0.34814814814814815, "acc_norm_stderr": 0.041153246103369526},
    "harness|ko_mmlu_abstract_algebra|5": {"acc": 0.26, "acc_stderr": 0.0440844002276808, "acc_norm": 0.26, "acc_norm_stderr": 0.0440844002276808},
    "harness|ko_mmlu_conceptual_physics|5": {"acc": 0.48936170212765956, "acc_stderr": 0.03267862331014063, "acc_norm": 0.48936170212765956, "acc_norm_stderr": 0.03267862331014063},
    "harness|ko_mmlu_virology|5": {"acc": 0.42771084337349397, "acc_stderr": 0.03851597683718533, "acc_norm": 0.42771084337349397, "acc_norm_stderr": 0.03851597683718533},
    "harness|ko_mmlu_philosophy|5": {"acc": 0.43729903536977494, "acc_stderr": 0.02817391776176288, "acc_norm": 0.43729903536977494, "acc_norm_stderr": 0.02817391776176288},
    "harness|ko_mmlu_human_aging|5": {"acc": 0.49327354260089684, "acc_stderr": 0.033554765962343545, "acc_norm": 0.49327354260089684, "acc_norm_stderr": 0.033554765962343545},
    "harness|ko_mmlu_human_sexuality|5": {"acc": 0.45038167938931295, "acc_stderr": 0.04363643698524779, "acc_norm": 0.45038167938931295, "acc_norm_stderr": 0.04363643698524779},
    "harness|ko_mmlu_medical_genetics|5": {"acc": 0.45, "acc_stderr": 0.04999999999999998, "acc_norm": 0.45, "acc_norm_stderr": 0.04999999999999998},
    "harness|ko_mmlu_high_school_geography|5": {"acc": 0.5404040404040404, "acc_stderr": 0.035507024651313425, "acc_norm": 0.5404040404040404, "acc_norm_stderr": 0.035507024651313425},
    "harness|ko_mmlu_electrical_engineering|5": {"acc": 0.5379310344827586, "acc_stderr": 0.04154659671707548, "acc_norm": 0.5379310344827586, "acc_norm_stderr": 0.04154659671707548},
    "harness|ko_mmlu_college_physics|5": {"acc": 0.27450980392156865, "acc_stderr": 0.044405219061793275, "acc_norm": 0.27450980392156865, "acc_norm_stderr": 0.044405219061793275},
    "harness|ko_mmlu_high_school_microeconomics|5": {"acc": 0.47478991596638653, "acc_stderr": 0.03243718055137411, "acc_norm": 0.47478991596638653, "acc_norm_stderr": 0.03243718055137411},
    "harness|ko_mmlu_high_school_macroeconomics|5": {"acc": 0.4641025641025641, "acc_stderr": 0.02528558599001784, "acc_norm": 0.4641025641025641, "acc_norm_stderr": 0.02528558599001784},
    "harness|ko_mmlu_computer_security|5": {"acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795},
    "harness|ko_mmlu_global_facts|5": {"acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316},
    "harness|ko_mmlu_jurisprudence|5": {"acc": 0.49074074074074076, "acc_stderr": 0.04832853553437055, "acc_norm": 0.49074074074074076, "acc_norm_stderr": 0.04832853553437055},
    "harness|ko_mmlu_high_school_chemistry|5": {"acc": 0.4187192118226601, "acc_stderr": 0.03471192860518468, "acc_norm": 0.4187192118226601, "acc_norm_stderr": 0.03471192860518468},
    "harness|ko_mmlu_high_school_biology|5": {"acc": 0.5032258064516129, "acc_stderr": 0.02844341422643831, "acc_norm": 0.5032258064516129, "acc_norm_stderr": 0.02844341422643831},
    "harness|ko_mmlu_marketing|5": {"acc": 0.6923076923076923, "acc_stderr": 0.0302363899421731, "acc_norm": 0.6923076923076923, "acc_norm_stderr": 0.0302363899421731},
    "harness|ko_mmlu_clinical_knowledge|5": {"acc": 0.4641509433962264, "acc_stderr": 0.030693675018458003, "acc_norm": 0.4641509433962264, "acc_norm_stderr": 0.030693675018458003},
    "harness|ko_mmlu_public_relations|5": {"acc": 0.5545454545454546, "acc_stderr": 0.047605488214603246, "acc_norm": 0.5545454545454546, "acc_norm_stderr": 0.047605488214603246},
    "harness|ko_mmlu_high_school_mathematics|5": {"acc": 0.362962962962963, "acc_stderr": 0.02931820364520686, "acc_norm": 0.362962962962963, "acc_norm_stderr": 0.02931820364520686},
    "harness|ko_mmlu_high_school_physics|5": {"acc": 0.2781456953642384, "acc_stderr": 0.036586032627637426, "acc_norm": 0.2781456953642384, "acc_norm_stderr": 0.036586032627637426},
    "harness|ko_mmlu_sociology|5": {"acc": 0.6169154228855721, "acc_stderr": 0.0343751933733825, "acc_norm": 0.6169154228855721, "acc_norm_stderr": 0.0343751933733825},
    "harness|ko_mmlu_college_medicine|5": {"acc": 0.47398843930635837, "acc_stderr": 0.03807301726504511, "acc_norm": 0.47398843930635837, "acc_norm_stderr": 0.03807301726504511},
    "harness|ko_mmlu_elementary_mathematics|5": {"acc": 0.4126984126984127, "acc_stderr": 0.02535574126305528, "acc_norm": 0.4126984126984127, "acc_norm_stderr": 0.02535574126305528},
    "harness|ko_mmlu_college_biology|5": {"acc": 0.4583333333333333, "acc_stderr": 0.04166666666666665, "acc_norm": 0.4583333333333333, "acc_norm_stderr": 0.04166666666666665},
    "harness|ko_mmlu_college_chemistry|5": {"acc": 0.38, "acc_stderr": 0.048783173121456344, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456344},
    "harness|ko_mmlu_us_foreign_policy|5": {"acc": 0.6, "acc_stderr": 0.04923659639173309, "acc_norm": 0.6, "acc_norm_stderr": 0.04923659639173309},
    "harness|ko_mmlu_moral_disputes|5": {"acc": 0.4508670520231214, "acc_stderr": 0.02678881193156276, "acc_norm": 0.4508670520231214, "acc_norm_stderr": 0.02678881193156276},
    "harness|ko_mmlu_logical_fallacies|5": {"acc": 0.43558282208588955, "acc_stderr": 0.03895632464138936, "acc_norm": 0.43558282208588955, "acc_norm_stderr": 0.03895632464138936},
    "harness|ko_mmlu_prehistory|5": {"acc": 0.4876543209876543, "acc_stderr": 0.027812262269327242, "acc_norm": 0.4876543209876543, "acc_norm_stderr": 0.027812262269327242},
    "harness|ko_mmlu_college_mathematics|5": {"acc": 0.33, "acc_stderr": 0.04725815626252606, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252606},
    "harness|ko_mmlu_high_school_government_and_politics|5": {"acc": 0.5233160621761658, "acc_stderr": 0.036045136724422014, "acc_norm": 0.5233160621761658, "acc_norm_stderr": 0.036045136724422014},
    "harness|ko_mmlu_econometrics|5": {"acc": 0.35964912280701755, "acc_stderr": 0.04514496132873633, "acc_norm": 0.35964912280701755, "acc_norm_stderr": 0.04514496132873633},
    "harness|ko_mmlu_high_school_psychology|5": {"acc": 0.5743119266055046, "acc_stderr": 0.0211992359724708, "acc_norm": 0.5743119266055046, "acc_norm_stderr": 0.0211992359724708},
    "harness|ko_mmlu_formal_logic|5": {"acc": 0.30158730158730157, "acc_stderr": 0.04104947269903394, "acc_norm": 0.30158730158730157, "acc_norm_stderr": 0.04104947269903394},
    "harness|ko_mmlu_nutrition|5": {"acc": 0.49019607843137253, "acc_stderr": 0.028624412550167958, "acc_norm": 0.49019607843137253, "acc_norm_stderr": 0.028624412550167958},
    "harness|ko_mmlu_business_ethics|5": {"acc": 0.56, "acc_stderr": 0.04988876515698589, "acc_norm": 0.56, "acc_norm_stderr": 0.04988876515698589},
    "harness|ko_mmlu_international_law|5": {"acc": 0.5867768595041323, "acc_stderr": 0.04495087843548408, "acc_norm": 0.5867768595041323, "acc_norm_stderr": 0.04495087843548408},
    "harness|ko_mmlu_astronomy|5": {"acc": 0.5789473684210527, "acc_stderr": 0.040179012759817494, "acc_norm": 0.5789473684210527, "acc_norm_stderr": 0.040179012759817494},
    "harness|ko_mmlu_professional_psychology|5": {"acc": 0.3937908496732026, "acc_stderr": 0.01976621199107307, "acc_norm": 0.3937908496732026, "acc_norm_stderr": 0.01976621199107307},
    "harness|ko_mmlu_professional_accounting|5": {"acc": 0.2801418439716312, "acc_stderr": 0.02678917235114025, "acc_norm": 0.2801418439716312, "acc_norm_stderr": 0.02678917235114025},
    "harness|ko_mmlu_machine_learning|5": {"acc": 0.4375, "acc_stderr": 0.04708567521880525, "acc_norm": 0.4375, "acc_norm_stderr": 0.04708567521880525},
    "harness|ko_mmlu_high_school_statistics|5": {"acc": 0.375, "acc_stderr": 0.033016908987210894, "acc_norm": 0.375, "acc_norm_stderr": 0.033016908987210894},
    "harness|ko_mmlu_moral_scenarios|5": {"acc": 0.2536312849162011, "acc_stderr": 0.014551553659369918, "acc_norm": 0.2536312849162011, "acc_norm_stderr": 0.014551553659369918},
    "harness|ko_mmlu_college_computer_science|5": {"acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836},
    "harness|ko_mmlu_high_school_computer_science|5": {"acc": 0.57, "acc_stderr": 0.04975698519562428, "acc_norm": 0.57, "acc_norm_stderr": 0.04975698519562428},
    "harness|ko_mmlu_professional_medicine|5": {"acc": 0.4007352941176471, "acc_stderr": 0.02976826352893311, "acc_norm": 0.4007352941176471, "acc_norm_stderr": 0.02976826352893311},
    "harness|ko_mmlu_security_studies|5": {"acc": 0.49795918367346936, "acc_stderr": 0.0320089533497105, "acc_norm": 0.49795918367346936, "acc_norm_stderr": 0.0320089533497105},
    "harness|ko_mmlu_high_school_world_history|5": {"acc": 0.5780590717299579, "acc_stderr": 0.032148146302403695, "acc_norm": 0.5780590717299579, "acc_norm_stderr": 0.032148146302403695},
    "harness|ko_mmlu_professional_law|5": {"acc": 0.3344198174706649, "acc_stderr": 0.012049668983214945, "acc_norm": 0.3344198174706649, "acc_norm_stderr": 0.012049668983214945},
    "harness|ko_mmlu_high_school_us_history|5": {"acc": 0.5196078431372549, "acc_stderr": 0.03506612560524866, "acc_norm": 0.5196078431372549, "acc_norm_stderr": 0.03506612560524866},
    "harness|ko_mmlu_high_school_european_history|5": {"acc": 0.47878787878787876, "acc_stderr": 0.03900828913737301, "acc_norm": 0.47878787878787876, "acc_norm_stderr": 0.03900828913737301},
    "harness|ko_truthfulqa_mc|0": {"mc1": 0.28518971848225216, "mc1_stderr": 0.015805827874454895, "mc2": 0.46098554675551745, "mc2_stderr": 0.01607738581359731},
    "harness|ko_commongen_v2|2": {"acc": 0.4380165289256198, "acc_stderr": 0.017057753702160283, "acc_norm": 0.45808736717827625, "acc_norm_stderr": 0.017129852117911147}
  },
  "versions": {
    "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0,
    "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1,
    "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1,
    "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1,
    "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1,
    "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1,
    "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1,
    "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1,
    "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1,
    "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1,
    "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1,
    "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1,
    "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1,
    "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1,
    "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1,
    "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1,
    "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1,
    "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1,
    "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1,
    "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1,
    "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1
  },
  "config_general": {
    "model_name": "SakanaAI/DiscoPOP-zephyr-7b-gemma",
    "model_sha": "161d63fca6218a102cbbbbd55ebdc0517eafe42d",
    "model_dtype": "torch.float16",
    "lighteval_sha": "",
    "num_few_shot_default": 0,
    "num_fewshot_seeds": 1,
    "override_batch_size": 1,
    "max_samples": null
  }
}
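
Since every file carries identical task keys, two runs can be compared task-by-task. Below is a sketch under the same assumption (local copies of this DiscoPOP file and the SKYEEEE file reconstructed earlier in this diff), printing the five largest per-task score gaps:

import json

def load(path):
    with open(path) as f:
        return json.load(f)

def score(metrics):
    # Same fallback as above: mc2 stands in for the missing acc_norm.
    return metrics.get("acc_norm", metrics.get("mc2"))

a = load("SKYEEEE/llama3-Ko-3-8B-finetuned_ver2/result_2024-07-14 03:35:13.json")
b = load("SakanaAI/DiscoPOP-zephyr-7b-gemma/result_2024-06-17 05:07:55.json")

deltas = {t: score(b["results"][t]) - score(a["results"][t]) for t in a["results"]}
for task, d in sorted(deltas.items(), key=lambda kv: abs(kv[1]), reverse=True)[:5]:
    print(f"{task}: {d:+.4f}")
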
Saxo/yunsung-llama-2-koen-13b-linkbricks-sft-basic-v1/result_2024-03-12 18:32:39.json
DELETED
@@ -1,444 +0,0 @@
{
  "results": {
    "harness|ko_arc_challenge|25": {"acc": 0.3848122866894198, "acc_stderr": 0.014218371065251102, "acc_norm": 0.439419795221843, "acc_norm_stderr": 0.014503747823580125},
    "harness|ko_hellaswag|10": {"acc": 0.41495717984465247, "acc_stderr": 0.0049170767266237935, "acc_norm": 0.561840270862378, "acc_norm_stderr": 0.004951470301995878},
    "harness|ko_mmlu_world_religions|5": {"acc": 0.4327485380116959, "acc_stderr": 0.03799978644370607, "acc_norm": 0.4327485380116959, "acc_norm_stderr": 0.03799978644370607},
    "harness|ko_mmlu_management|5": {"acc": 0.34951456310679613, "acc_stderr": 0.047211885060971716, "acc_norm": 0.34951456310679613, "acc_norm_stderr": 0.047211885060971716},
    "harness|ko_mmlu_miscellaneous|5": {"acc": 0.5134099616858238, "acc_stderr": 0.017873531736510385, "acc_norm": 0.5134099616858238, "acc_norm_stderr": 0.017873531736510385},
    "harness|ko_mmlu_anatomy|5": {"acc": 0.45185185185185184, "acc_stderr": 0.04299268905480863, "acc_norm": 0.45185185185185184, "acc_norm_stderr": 0.04299268905480863},
    "harness|ko_mmlu_abstract_algebra|5": {"acc": 0.27, "acc_stderr": 0.0446196043338474, "acc_norm": 0.27, "acc_norm_stderr": 0.0446196043338474},
    "harness|ko_mmlu_conceptual_physics|5": {"acc": 0.40425531914893614, "acc_stderr": 0.032081157507886836, "acc_norm": 0.40425531914893614, "acc_norm_stderr": 0.032081157507886836},
    "harness|ko_mmlu_virology|5": {"acc": 0.37349397590361444, "acc_stderr": 0.03765845117168863, "acc_norm": 0.37349397590361444, "acc_norm_stderr": 0.03765845117168863},
    "harness|ko_mmlu_philosophy|5": {"acc": 0.4533762057877814, "acc_stderr": 0.02827435985489425, "acc_norm": 0.4533762057877814, "acc_norm_stderr": 0.02827435985489425},
    "harness|ko_mmlu_human_aging|5": {"acc": 0.4798206278026906, "acc_stderr": 0.033530461674123005, "acc_norm": 0.4798206278026906, "acc_norm_stderr": 0.033530461674123005},
    "harness|ko_mmlu_human_sexuality|5": {"acc": 0.4122137404580153, "acc_stderr": 0.04317171194870255, "acc_norm": 0.4122137404580153, "acc_norm_stderr": 0.04317171194870255},
    "harness|ko_mmlu_medical_genetics|5": {"acc": 0.33, "acc_stderr": 0.04725815626252605, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252605},
    "harness|ko_mmlu_high_school_geography|5": {"acc": 0.43434343434343436, "acc_stderr": 0.03531505879359183, "acc_norm": 0.43434343434343436, "acc_norm_stderr": 0.03531505879359183},
    "harness|ko_mmlu_electrical_engineering|5": {"acc": 0.3793103448275862, "acc_stderr": 0.04043461861916747, "acc_norm": 0.3793103448275862, "acc_norm_stderr": 0.04043461861916747},
    "harness|ko_mmlu_college_physics|5": {"acc": 0.20588235294117646, "acc_stderr": 0.04023382273617747, "acc_norm": 0.20588235294117646, "acc_norm_stderr": 0.04023382273617747},
    "harness|ko_mmlu_high_school_microeconomics|5": {"acc": 0.41596638655462187, "acc_stderr": 0.03201650100739615, "acc_norm": 0.41596638655462187, "acc_norm_stderr": 0.03201650100739615},
    "harness|ko_mmlu_high_school_macroeconomics|5": {"acc": 0.37435897435897436, "acc_stderr": 0.024537591572830517, "acc_norm": 0.37435897435897436, "acc_norm_stderr": 0.024537591572830517},
    "harness|ko_mmlu_computer_security|5": {"acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795},
    "harness|ko_mmlu_global_facts|5": {"acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975},
    "harness|ko_mmlu_jurisprudence|5": {"acc": 0.4166666666666667, "acc_stderr": 0.04766075165356461, "acc_norm": 0.4166666666666667, "acc_norm_stderr": 0.04766075165356461},
    "harness|ko_mmlu_high_school_chemistry|5": {"acc": 0.35960591133004927, "acc_stderr": 0.03376458246509567, "acc_norm": 0.35960591133004927, "acc_norm_stderr": 0.03376458246509567},
    "harness|ko_mmlu_high_school_biology|5": {"acc": 0.3903225806451613, "acc_stderr": 0.027751256636969576, "acc_norm": 0.3903225806451613, "acc_norm_stderr": 0.027751256636969576},
    "harness|ko_mmlu_marketing|5": {"acc": 0.5598290598290598, "acc_stderr": 0.032520741720630506, "acc_norm": 0.5598290598290598, "acc_norm_stderr": 0.032520741720630506},
    "harness|ko_mmlu_clinical_knowledge|5": {"acc": 0.4226415094339623, "acc_stderr": 0.03040233144576954, "acc_norm": 0.4226415094339623, "acc_norm_stderr": 0.03040233144576954},
    "harness|ko_mmlu_public_relations|5": {"acc": 0.4636363636363636, "acc_stderr": 0.047764491623961985, "acc_norm": 0.4636363636363636, "acc_norm_stderr": 0.047764491623961985},
    "harness|ko_mmlu_high_school_mathematics|5": {"acc": 0.25555555555555554, "acc_stderr": 0.02659393910184406, "acc_norm": 0.25555555555555554, "acc_norm_stderr": 0.02659393910184406},
    "harness|ko_mmlu_high_school_physics|5": {"acc": 0.2913907284768212, "acc_stderr": 0.037101857261199946, "acc_norm": 0.2913907284768212, "acc_norm_stderr": 0.037101857261199946},
    "harness|ko_mmlu_sociology|5": {"acc": 0.5174129353233831, "acc_stderr": 0.03533389234739245, "acc_norm": 0.5174129353233831, "acc_norm_stderr": 0.03533389234739245},
    "harness|ko_mmlu_college_medicine|5": {"acc": 0.3352601156069364, "acc_stderr": 0.03599586301247078, "acc_norm": 0.3352601156069364, "acc_norm_stderr": 0.03599586301247078},
    "harness|ko_mmlu_elementary_mathematics|5": {"acc": 0.2830687830687831, "acc_stderr": 0.023201392938194974, "acc_norm": 0.2830687830687831, "acc_norm_stderr": 0.023201392938194974},
    "harness|ko_mmlu_college_biology|5": {"acc": 0.3402777777777778, "acc_stderr": 0.03962135573486219, "acc_norm": 0.3402777777777778, "acc_norm_stderr": 0.03962135573486219},
    "harness|ko_mmlu_college_chemistry|5": {"acc": 0.27, "acc_stderr": 0.0446196043338474, "acc_norm": 0.27, "acc_norm_stderr": 0.0446196043338474},
    "harness|ko_mmlu_us_foreign_policy|5": {"acc": 0.58, "acc_stderr": 0.049604496374885836, "acc_norm": 0.58, "acc_norm_stderr": 0.049604496374885836},
    "harness|ko_mmlu_moral_disputes|5": {"acc": 0.41329479768786126, "acc_stderr": 0.026511261369409237, "acc_norm": 0.41329479768786126, "acc_norm_stderr": 0.026511261369409237},
    "harness|ko_mmlu_logical_fallacies|5": {"acc": 0.4049079754601227, "acc_stderr": 0.03856672163548914, "acc_norm": 0.4049079754601227, "acc_norm_stderr": 0.03856672163548914},
    "harness|ko_mmlu_prehistory|5": {"acc": 0.41358024691358025, "acc_stderr": 0.027402042040269955, "acc_norm": 0.41358024691358025, "acc_norm_stderr": 0.027402042040269955},
    "harness|ko_mmlu_college_mathematics|5": {"acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845},
    "harness|ko_mmlu_high_school_government_and_politics|5": {"acc": 0.37305699481865284, "acc_stderr": 0.034902055920485744, "acc_norm": 0.37305699481865284, "acc_norm_stderr": 0.034902055920485744},
    "harness|ko_mmlu_econometrics|5": {"acc": 0.21929824561403508, "acc_stderr": 0.03892431106518754, "acc_norm": 0.21929824561403508, "acc_norm_stderr": 0.03892431106518754},
    "harness|ko_mmlu_high_school_psychology|5": {"acc": 0.47339449541284406, "acc_stderr": 0.02140695268815158, "acc_norm": 0.47339449541284406, "acc_norm_stderr": 0.02140695268815158},
    "harness|ko_mmlu_formal_logic|5": {"acc": 0.25396825396825395, "acc_stderr": 0.03893259610604675, "acc_norm": 0.25396825396825395, "acc_norm_stderr": 0.03893259610604675},
    "harness|ko_mmlu_nutrition|5": {"acc": 0.3562091503267974, "acc_stderr": 0.027420477662629242, "acc_norm": 0.3562091503267974, "acc_norm_stderr": 0.027420477662629242},
    "harness|ko_mmlu_business_ethics|5": {"acc": 0.4, "acc_stderr": 0.04923659639173309, "acc_norm": 0.4, "acc_norm_stderr": 0.04923659639173309},
    "harness|ko_mmlu_international_law|5": {"acc": 0.5785123966942148, "acc_stderr": 0.045077322787750874, "acc_norm": 0.5785123966942148, "acc_norm_stderr": 0.045077322787750874},
    "harness|ko_mmlu_astronomy|5": {"acc": 0.3092105263157895, "acc_stderr": 0.03761070869867479, "acc_norm": 0.3092105263157895, "acc_norm_stderr": 0.03761070869867479},
    "harness|ko_mmlu_professional_psychology|5": {"acc": 0.3415032679738562, "acc_stderr": 0.019184639328092487, "acc_norm": 0.3415032679738562, "acc_norm_stderr": 0.019184639328092487},
    "harness|ko_mmlu_professional_accounting|5": {"acc": 0.25886524822695034, "acc_stderr": 0.026129572527180844, "acc_norm": 0.25886524822695034, "acc_norm_stderr": 0.026129572527180844},
    "harness|ko_mmlu_machine_learning|5": {"acc": 0.26785714285714285, "acc_stderr": 0.042032772914677614, "acc_norm": 0.26785714285714285, "acc_norm_stderr": 0.042032772914677614},
    "harness|ko_mmlu_high_school_statistics|5": {"acc": 0.21296296296296297, "acc_stderr": 0.027920963147993666, "acc_norm": 0.21296296296296297, "acc_norm_stderr": 0.027920963147993666},
    "harness|ko_mmlu_moral_scenarios|5": {"acc": 0.2424581005586592, "acc_stderr": 0.01433352205921789, "acc_norm": 0.2424581005586592, "acc_norm_stderr": 0.01433352205921789},
    "harness|ko_mmlu_college_computer_science|5": {"acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845},
    "harness|ko_mmlu_high_school_computer_science|5": {"acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099},
    "harness|ko_mmlu_professional_medicine|5": {"acc": 0.30514705882352944, "acc_stderr": 0.0279715413701706, "acc_norm": 0.30514705882352944, "acc_norm_stderr": 0.0279715413701706},
    "harness|ko_mmlu_security_studies|5": {"acc": 0.32653061224489793, "acc_stderr": 0.03002105623844031, "acc_norm": 0.32653061224489793, "acc_norm_stderr": 0.03002105623844031},
    "harness|ko_mmlu_high_school_world_history|5": {"acc": 0.5358649789029536, "acc_stderr": 0.03246338898055659, "acc_norm": 0.5358649789029536, "acc_norm_stderr": 0.03246338898055659},
    "harness|ko_mmlu_professional_law|5": {"acc": 0.2803129074315515, "acc_stderr": 0.01147155594495862, "acc_norm": 0.2803129074315515, "acc_norm_stderr": 0.01147155594495862},
    "harness|ko_mmlu_high_school_us_history|5": {"acc": 0.3480392156862745, "acc_stderr": 0.03343311240488419, "acc_norm": 0.3480392156862745, "acc_norm_stderr": 0.03343311240488419},
    "harness|ko_mmlu_high_school_european_history|5": {"acc": 0.46060606060606063, "acc_stderr": 0.03892207016552013, "acc_norm": 0.46060606060606063, "acc_norm_stderr": 0.03892207016552013},
    "harness|ko_truthfulqa_mc|0": {"mc1": 0.2594859241126071, "mc1_stderr": 0.015345409485557964, "mc2": 0.42044312118862787, "mc2_stderr": 0.014880041238971613},
    "harness|ko_commongen_v2|2": {"acc": 0.5277449822904369, "acc_stderr": 0.017163867979456016, "acc_norm": 0.6127508854781583, "acc_norm_stderr": 0.01674757799164278}
  },
  "versions": {
    "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0,
    "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1,
    "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1,
    "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1,
    "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1,
    "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1,
    "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1,
    "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1,
    "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1,
    "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1,
    "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1,
    "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1,
    "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1,
    "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1,
    "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1,
    "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1,
    "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1,
    "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1,
    "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1,
    "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1,
    "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1
  },
  "config_general": {
    "model_name": "Saxo/yunsung-llama-2-koen-13b-linkbricks-sft-basic-v1",
    "model_sha": "8175d2e66bd6d45185e2f6d80cef1fd1f7b3b86b",
    "model_dtype": "torch.float16",
    "lighteval_sha": "",
    "num_few_shot_default": 0,
    "num_fewshot_seeds": 1,
    "override_batch_size": 1,
    "max_samples": null
  }
}
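
In each of these files the "versions" map is expected to cover exactly the task keys of "results" plus one extra "all" entry. A small consistency check, again assuming a local copy of the file above:

import json

with open("Saxo/yunsung-llama-2-koen-13b-linkbricks-sft-basic-v1/result_2024-03-12 18:32:39.json") as f:
    data = json.load(f)

tasks = set(data["results"])
versioned = set(data["versions"]) - {"all"}
# The symmetric difference lists any task missing on either side.
assert versioned == tasks, versioned ^ tasks
print(len(tasks), "tasks; versions block is consistent")
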
SeaLLMs/SeaLLM-7B-v2.5/result_2024-05-13 17:46:50.json
DELETED
@@ -1,444 +0,0 @@
-{
-  "results": {
-    "harness|ko_arc_challenge|25": {
-      "acc": 0.19539249146757678,
-      "acc_stderr": 0.011586907189952911,
-      "acc_norm": 0.24744027303754265,
-      "acc_norm_stderr": 0.012610352663292673
-    },
-    "harness|ko_hellaswag|10": {
-      "acc": 0.25403306114319857,
-      "acc_stderr": 0.00434426617963492,
-      "acc_norm": 0.2504481179047998,
-      "acc_norm_stderr": 0.004323856300539175
-    },
-    "harness|ko_mmlu_world_religions|5": {
-      "acc": 0.3216374269005848,
-      "acc_stderr": 0.03582529442573122,
-      "acc_norm": 0.3216374269005848,
-      "acc_norm_stderr": 0.03582529442573122
-    },
-    "harness|ko_mmlu_management|5": {
-      "acc": 0.1941747572815534,
-      "acc_stderr": 0.03916667762822582,
-      "acc_norm": 0.1941747572815534,
-      "acc_norm_stderr": 0.03916667762822582
-    },
-    "harness|ko_mmlu_miscellaneous|5": {
-      "acc": 0.25287356321839083,
-      "acc_stderr": 0.015543377313719681,
-      "acc_norm": 0.25287356321839083,
-      "acc_norm_stderr": 0.015543377313719681
-    },
-    "harness|ko_mmlu_anatomy|5": {
-      "acc": 0.2074074074074074,
-      "acc_stderr": 0.03502553170678319,
-      "acc_norm": 0.2074074074074074,
-      "acc_norm_stderr": 0.03502553170678319
-    },
-    "harness|ko_mmlu_abstract_algebra|5": {
-      "acc": 0.24,
-      "acc_stderr": 0.042923469599092816,
-      "acc_norm": 0.24,
-      "acc_norm_stderr": 0.042923469599092816
-    },
-    "harness|ko_mmlu_conceptual_physics|5": {
-      "acc": 0.19574468085106383,
-      "acc_stderr": 0.025937853139977148,
-      "acc_norm": 0.19574468085106383,
-      "acc_norm_stderr": 0.025937853139977148
-    },
-    "harness|ko_mmlu_virology|5": {
-      "acc": 0.25301204819277107,
-      "acc_stderr": 0.03384429155233135,
-      "acc_norm": 0.25301204819277107,
-      "acc_norm_stderr": 0.03384429155233135
-    },
-    "harness|ko_mmlu_philosophy|5": {
-      "acc": 0.26366559485530544,
-      "acc_stderr": 0.02502553850053234,
-      "acc_norm": 0.26366559485530544,
-      "acc_norm_stderr": 0.02502553850053234
-    },
-    "harness|ko_mmlu_human_aging|5": {
-      "acc": 0.2062780269058296,
-      "acc_stderr": 0.02715715047956382,
-      "acc_norm": 0.2062780269058296,
-      "acc_norm_stderr": 0.02715715047956382
-    },
-    "harness|ko_mmlu_human_sexuality|5": {
-      "acc": 0.2366412213740458,
-      "acc_stderr": 0.03727673575596918,
-      "acc_norm": 0.2366412213740458,
-      "acc_norm_stderr": 0.03727673575596918
-    },
-    "harness|ko_mmlu_medical_genetics|5": {
-      "acc": 0.23,
-      "acc_stderr": 0.04229525846816506,
-      "acc_norm": 0.23,
-      "acc_norm_stderr": 0.04229525846816506
-    },
-    "harness|ko_mmlu_high_school_geography|5": {
-      "acc": 0.1919191919191919,
-      "acc_stderr": 0.02805779167298901,
-      "acc_norm": 0.1919191919191919,
-      "acc_norm_stderr": 0.02805779167298901
-    },
-    "harness|ko_mmlu_electrical_engineering|5": {
-      "acc": 0.23448275862068965,
-      "acc_stderr": 0.035306258743465914,
-      "acc_norm": 0.23448275862068965,
-      "acc_norm_stderr": 0.035306258743465914
-    },
-    "harness|ko_mmlu_college_physics|5": {
-      "acc": 0.20588235294117646,
-      "acc_stderr": 0.04023382273617746,
-      "acc_norm": 0.20588235294117646,
-      "acc_norm_stderr": 0.04023382273617746
-    },
-    "harness|ko_mmlu_high_school_microeconomics|5": {
-      "acc": 0.21008403361344538,
-      "acc_stderr": 0.026461398717471874,
-      "acc_norm": 0.21008403361344538,
-      "acc_norm_stderr": 0.026461398717471874
-    },
-    "harness|ko_mmlu_high_school_macroeconomics|5": {
-      "acc": 0.21025641025641026,
-      "acc_stderr": 0.02066059748502692,
-      "acc_norm": 0.21025641025641026,
-      "acc_norm_stderr": 0.02066059748502692
-    },
-    "harness|ko_mmlu_computer_security|5": {
-      "acc": 0.29,
-      "acc_stderr": 0.045604802157206845,
-      "acc_norm": 0.29,
-      "acc_norm_stderr": 0.045604802157206845
-    },
-    "harness|ko_mmlu_global_facts|5": {
-      "acc": 0.33,
-      "acc_stderr": 0.04725815626252604,
-      "acc_norm": 0.33,
-      "acc_norm_stderr": 0.04725815626252604
-    },
-    "harness|ko_mmlu_jurisprudence|5": {
-      "acc": 0.2222222222222222,
-      "acc_stderr": 0.0401910747255735,
-      "acc_norm": 0.2222222222222222,
-      "acc_norm_stderr": 0.0401910747255735
-    },
-    "harness|ko_mmlu_high_school_chemistry|5": {
-      "acc": 0.23645320197044334,
-      "acc_stderr": 0.029896114291733552,
-      "acc_norm": 0.23645320197044334,
-      "acc_norm_stderr": 0.029896114291733552
-    },
-    "harness|ko_mmlu_high_school_biology|5": {
-      "acc": 0.22258064516129034,
-      "acc_stderr": 0.023664216671642518,
-      "acc_norm": 0.22258064516129034,
-      "acc_norm_stderr": 0.023664216671642518
-    },
-    "harness|ko_mmlu_marketing|5": {
-      "acc": 0.2863247863247863,
-      "acc_stderr": 0.02961432369045665,
-      "acc_norm": 0.2863247863247863,
-      "acc_norm_stderr": 0.02961432369045665
-    },
-    "harness|ko_mmlu_clinical_knowledge|5": {
-      "acc": 0.23018867924528302,
-      "acc_stderr": 0.025907897122408173,
-      "acc_norm": 0.23018867924528302,
-      "acc_norm_stderr": 0.025907897122408173
-    },
-    "harness|ko_mmlu_public_relations|5": {
-      "acc": 0.21818181818181817,
-      "acc_stderr": 0.03955932861795833,
-      "acc_norm": 0.21818181818181817,
-      "acc_norm_stderr": 0.03955932861795833
-    },
-    "harness|ko_mmlu_high_school_mathematics|5": {
-      "acc": 0.25925925925925924,
-      "acc_stderr": 0.026719240783712177,
-      "acc_norm": 0.25925925925925924,
-      "acc_norm_stderr": 0.026719240783712177
-    },
-    "harness|ko_mmlu_high_school_physics|5": {
-      "acc": 0.271523178807947,
-      "acc_stderr": 0.036313298039696545,
-      "acc_norm": 0.271523178807947,
-      "acc_norm_stderr": 0.036313298039696545
-    },
-    "harness|ko_mmlu_sociology|5": {
-      "acc": 0.2537313432835821,
-      "acc_stderr": 0.03076944496729602,
-      "acc_norm": 0.2537313432835821,
-      "acc_norm_stderr": 0.03076944496729602
-    },
-    "harness|ko_mmlu_college_medicine|5": {
-      "acc": 0.24277456647398843,
-      "acc_stderr": 0.0326926380614177,
-      "acc_norm": 0.24277456647398843,
-      "acc_norm_stderr": 0.0326926380614177
-    },
-    "harness|ko_mmlu_elementary_mathematics|5": {
-      "acc": 0.26455026455026454,
-      "acc_stderr": 0.022717467897708607,
-      "acc_norm": 0.26455026455026454,
-      "acc_norm_stderr": 0.022717467897708607
-    },
-    "harness|ko_mmlu_college_biology|5": {
-      "acc": 0.2708333333333333,
-      "acc_stderr": 0.03716177437566017,
-      "acc_norm": 0.2708333333333333,
-      "acc_norm_stderr": 0.03716177437566017
-    },
-    "harness|ko_mmlu_college_chemistry|5": {
-      "acc": 0.25,
-      "acc_stderr": 0.04351941398892446,
-      "acc_norm": 0.25,
-      "acc_norm_stderr": 0.04351941398892446
-    },
-    "harness|ko_mmlu_us_foreign_policy|5": {
-      "acc": 0.24,
-      "acc_stderr": 0.042923469599092816,
-      "acc_norm": 0.24,
-      "acc_norm_stderr": 0.042923469599092816
-    },
-    "harness|ko_mmlu_moral_disputes|5": {
-      "acc": 0.21965317919075145,
-      "acc_stderr": 0.022289638852617904,
-      "acc_norm": 0.21965317919075145,
-      "acc_norm_stderr": 0.022289638852617904
-    },
-    "harness|ko_mmlu_logical_fallacies|5": {
-      "acc": 0.3067484662576687,
-      "acc_stderr": 0.036230899157241474,
-      "acc_norm": 0.3067484662576687,
-      "acc_norm_stderr": 0.036230899157241474
-    },
-    "harness|ko_mmlu_prehistory|5": {
-      "acc": 0.2839506172839506,
-      "acc_stderr": 0.02508947852376513,
-      "acc_norm": 0.2839506172839506,
-      "acc_norm_stderr": 0.02508947852376513
-    },
-    "harness|ko_mmlu_college_mathematics|5": {
-      "acc": 0.2,
-      "acc_stderr": 0.04020151261036845,
-      "acc_norm": 0.2,
-      "acc_norm_stderr": 0.04020151261036845
-    },
-    "harness|ko_mmlu_high_school_government_and_politics|5": {
-      "acc": 0.22797927461139897,
-      "acc_stderr": 0.030276909945178256,
-      "acc_norm": 0.22797927461139897,
-      "acc_norm_stderr": 0.030276909945178256
-    },
-    "harness|ko_mmlu_econometrics|5": {
-      "acc": 0.22807017543859648,
-      "acc_stderr": 0.03947152782669415,
-      "acc_norm": 0.22807017543859648,
-      "acc_norm_stderr": 0.03947152782669415
-    },
-    "harness|ko_mmlu_high_school_psychology|5": {
-      "acc": 0.21284403669724772,
-      "acc_stderr": 0.017549376389313694,
-      "acc_norm": 0.21284403669724772,
-      "acc_norm_stderr": 0.017549376389313694
-    },
-    "harness|ko_mmlu_formal_logic|5": {
-      "acc": 0.15079365079365079,
-      "acc_stderr": 0.03200686497287392,
-      "acc_norm": 0.15079365079365079,
-      "acc_norm_stderr": 0.03200686497287392
-    },
-    "harness|ko_mmlu_nutrition|5": {
-      "acc": 0.21241830065359477,
-      "acc_stderr": 0.02342037547829613,
-      "acc_norm": 0.21241830065359477,
-      "acc_norm_stderr": 0.02342037547829613
-    },
-    "harness|ko_mmlu_business_ethics|5": {
-      "acc": 0.26,
-      "acc_stderr": 0.044084400227680794,
-      "acc_norm": 0.26,
-      "acc_norm_stderr": 0.044084400227680794
-    },
-    "harness|ko_mmlu_international_law|5": {
-      "acc": 0.371900826446281,
-      "acc_stderr": 0.04412015806624503,
-      "acc_norm": 0.371900826446281,
-      "acc_norm_stderr": 0.04412015806624503
-    },
-    "harness|ko_mmlu_astronomy|5": {
-      "acc": 0.3092105263157895,
-      "acc_stderr": 0.03761070869867479,
-      "acc_norm": 0.3092105263157895,
-      "acc_norm_stderr": 0.03761070869867479
-    },
-    "harness|ko_mmlu_professional_psychology|5": {
-      "acc": 0.2679738562091503,
-      "acc_stderr": 0.017917974069594722,
-      "acc_norm": 0.2679738562091503,
-      "acc_norm_stderr": 0.017917974069594722
-    },
-    "harness|ko_mmlu_professional_accounting|5": {
-      "acc": 0.2695035460992908,
-      "acc_stderr": 0.026469036818590638,
-      "acc_norm": 0.2695035460992908,
-      "acc_norm_stderr": 0.026469036818590638
-    },
-    "harness|ko_mmlu_machine_learning|5": {
-      "acc": 0.23214285714285715,
-      "acc_stderr": 0.04007341809755807,
-      "acc_norm": 0.23214285714285715,
-      "acc_norm_stderr": 0.04007341809755807
-    },
-    "harness|ko_mmlu_high_school_statistics|5": {
-      "acc": 0.2037037037037037,
-      "acc_stderr": 0.027467401804057993,
-      "acc_norm": 0.2037037037037037,
-      "acc_norm_stderr": 0.027467401804057993
-    },
-    "harness|ko_mmlu_moral_scenarios|5": {
-      "acc": 0.24692737430167597,
-      "acc_stderr": 0.014422292204808852,
-      "acc_norm": 0.24692737430167597,
-      "acc_norm_stderr": 0.014422292204808852
-    },
-    "harness|ko_mmlu_college_computer_science|5": {
-      "acc": 0.26,
-      "acc_stderr": 0.04408440022768079,
-      "acc_norm": 0.26,
-      "acc_norm_stderr": 0.04408440022768079
-    },
-    "harness|ko_mmlu_high_school_computer_science|5": {
-      "acc": 0.34,
-      "acc_stderr": 0.04760952285695235,
-      "acc_norm": 0.34,
-      "acc_norm_stderr": 0.04760952285695235
-    },
-    "harness|ko_mmlu_professional_medicine|5": {
-      "acc": 0.16176470588235295,
-      "acc_stderr": 0.02236867256288675,
-      "acc_norm": 0.16176470588235295,
-      "acc_norm_stderr": 0.02236867256288675
-    },
-    "harness|ko_mmlu_security_studies|5": {
-      "acc": 0.24897959183673468,
-      "acc_stderr": 0.027682979522960227,
-      "acc_norm": 0.24897959183673468,
-      "acc_norm_stderr": 0.027682979522960227
-    },
-    "harness|ko_mmlu_high_school_world_history|5": {
-      "acc": 0.26582278481012656,
-      "acc_stderr": 0.028756799629658332,
-      "acc_norm": 0.26582278481012656,
-      "acc_norm_stderr": 0.028756799629658332
-    },
-    "harness|ko_mmlu_professional_law|5": {
-      "acc": 0.27183833116036504,
-      "acc_stderr": 0.011363135278651411,
-      "acc_norm": 0.27183833116036504,
-      "acc_norm_stderr": 0.011363135278651411
-    },
-    "harness|ko_mmlu_high_school_us_history|5": {
-      "acc": 0.25980392156862747,
-      "acc_stderr": 0.030778554678693264,
-      "acc_norm": 0.25980392156862747,
-      "acc_norm_stderr": 0.030778554678693264
-    },
-    "harness|ko_mmlu_high_school_european_history|5": {
-      "acc": 0.28484848484848485,
-      "acc_stderr": 0.035243908445117836,
-      "acc_norm": 0.28484848484848485,
-      "acc_norm_stderr": 0.035243908445117836
-    },
-    "harness|ko_truthfulqa_mc|0": {
-      "mc1": 0.2533659730722154,
-      "mc1_stderr": 0.015225899340826842,
-      "mc2": 0.48553928017206527,
-      "mc2_stderr": 0.017277450887367634
-    },
-    "harness|ko_commongen_v2|2": {
-      "acc": 0.08146399055489964,
-      "acc_stderr": 0.009404717441946264,
-      "acc_norm": 0.1959858323494687,
-      "acc_norm_stderr": 0.013647685567768873
-    }
-  },
-  "versions": {
-    "all": 0,
-    "harness|ko_arc_challenge|25": 0,
-    "harness|ko_hellaswag|10": 0,
-    "harness|ko_mmlu_world_religions|5": 1,
-    "harness|ko_mmlu_management|5": 1,
-    "harness|ko_mmlu_miscellaneous|5": 1,
-    "harness|ko_mmlu_anatomy|5": 1,
-    "harness|ko_mmlu_abstract_algebra|5": 1,
-    "harness|ko_mmlu_conceptual_physics|5": 1,
-    "harness|ko_mmlu_virology|5": 1,
-    "harness|ko_mmlu_philosophy|5": 1,
-    "harness|ko_mmlu_human_aging|5": 1,
-    "harness|ko_mmlu_human_sexuality|5": 1,
-    "harness|ko_mmlu_medical_genetics|5": 1,
-    "harness|ko_mmlu_high_school_geography|5": 1,
-    "harness|ko_mmlu_electrical_engineering|5": 1,
-    "harness|ko_mmlu_college_physics|5": 1,
-    "harness|ko_mmlu_high_school_microeconomics|5": 1,
-    "harness|ko_mmlu_high_school_macroeconomics|5": 1,
-    "harness|ko_mmlu_computer_security|5": 1,
-    "harness|ko_mmlu_global_facts|5": 1,
-    "harness|ko_mmlu_jurisprudence|5": 1,
-    "harness|ko_mmlu_high_school_chemistry|5": 1,
-    "harness|ko_mmlu_high_school_biology|5": 1,
-    "harness|ko_mmlu_marketing|5": 1,
-    "harness|ko_mmlu_clinical_knowledge|5": 1,
-    "harness|ko_mmlu_public_relations|5": 1,
-    "harness|ko_mmlu_high_school_mathematics|5": 1,
-    "harness|ko_mmlu_high_school_physics|5": 1,
-    "harness|ko_mmlu_sociology|5": 1,
-    "harness|ko_mmlu_college_medicine|5": 1,
-    "harness|ko_mmlu_elementary_mathematics|5": 1,
-    "harness|ko_mmlu_college_biology|5": 1,
-    "harness|ko_mmlu_college_chemistry|5": 1,
-    "harness|ko_mmlu_us_foreign_policy|5": 1,
-    "harness|ko_mmlu_moral_disputes|5": 1,
-    "harness|ko_mmlu_logical_fallacies|5": 1,
-    "harness|ko_mmlu_prehistory|5": 1,
-    "harness|ko_mmlu_college_mathematics|5": 1,
-    "harness|ko_mmlu_high_school_government_and_politics|5": 1,
-    "harness|ko_mmlu_econometrics|5": 1,
-    "harness|ko_mmlu_high_school_psychology|5": 1,
-    "harness|ko_mmlu_formal_logic|5": 1,
-    "harness|ko_mmlu_nutrition|5": 1,
-    "harness|ko_mmlu_business_ethics|5": 1,
-    "harness|ko_mmlu_international_law|5": 1,
-    "harness|ko_mmlu_astronomy|5": 1,
-    "harness|ko_mmlu_professional_psychology|5": 1,
-    "harness|ko_mmlu_professional_accounting|5": 1,
-    "harness|ko_mmlu_machine_learning|5": 1,
-    "harness|ko_mmlu_high_school_statistics|5": 1,
-    "harness|ko_mmlu_moral_scenarios|5": 1,
-    "harness|ko_mmlu_college_computer_science|5": 1,
-    "harness|ko_mmlu_high_school_computer_science|5": 1,
-    "harness|ko_mmlu_professional_medicine|5": 1,
-    "harness|ko_mmlu_security_studies|5": 1,
-    "harness|ko_mmlu_high_school_world_history|5": 1,
-    "harness|ko_mmlu_professional_law|5": 1,
-    "harness|ko_mmlu_high_school_us_history|5": 1,
-    "harness|ko_mmlu_high_school_european_history|5": 1,
-    "harness|ko_truthfulqa_mc|0": 0,
-    "harness|ko_commongen_v2|2": 1
-  },
-  "config_general": {
-    "model_name": "SeaLLMs/SeaLLM-7B-v2.5",
-    "model_sha": "78fcb9db2398fefe0919e510861ea23c6b2d1cb6",
-    "model_dtype": "torch.float16",
-    "lighteval_sha": "",
-    "num_few_shot_default": 0,
-    "num_fewshot_seeds": 1,
-    "override_batch_size": 1,
-    "max_samples": null
-  }
-}
SeaLLMs/SeaLLM-7B-v2/result_2024-08-06 11:14:54.json
DELETED
@@ -1,444 +0,0 @@
-{
-  "results": {
-    "harness|ko_arc_challenge|25": {
-      "acc": 0.3361774744027304,
-      "acc_stderr": 0.013804855026205763,
-      "acc_norm": 0.37372013651877134,
-      "acc_norm_stderr": 0.014137708601759086
-    },
-    "harness|ko_hellaswag|10": {
-      "acc": 0.36217884883489343,
-      "acc_stderr": 0.004796478664403843,
-      "acc_norm": 0.4576777534355706,
-      "acc_norm_stderr": 0.004971874159777694
-    },
-    "harness|ko_mmlu_world_religions|5": {
-      "acc": 0.4444444444444444,
-      "acc_stderr": 0.03811079669833531,
-      "acc_norm": 0.4444444444444444,
-      "acc_norm_stderr": 0.03811079669833531
-    },
-    "harness|ko_mmlu_management|5": {
-      "acc": 0.6019417475728155,
-      "acc_stderr": 0.048467482539772386,
-      "acc_norm": 0.6019417475728155,
-      "acc_norm_stderr": 0.048467482539772386
-    },
-    "harness|ko_mmlu_miscellaneous|5": {
-      "acc": 0.4661558109833972,
-      "acc_stderr": 0.017838956009136802,
-      "acc_norm": 0.4661558109833972,
-      "acc_norm_stderr": 0.017838956009136802
-    },
-    "harness|ko_mmlu_anatomy|5": {
-      "acc": 0.3333333333333333,
-      "acc_stderr": 0.04072314811876837,
-      "acc_norm": 0.3333333333333333,
-      "acc_norm_stderr": 0.04072314811876837
-    },
-    "harness|ko_mmlu_abstract_algebra|5": {
-      "acc": 0.31,
-      "acc_stderr": 0.04648231987117316,
-      "acc_norm": 0.31,
-      "acc_norm_stderr": 0.04648231987117316
-    },
-    "harness|ko_mmlu_conceptual_physics|5": {
-      "acc": 0.41702127659574467,
-      "acc_stderr": 0.03223276266711712,
-      "acc_norm": 0.41702127659574467,
-      "acc_norm_stderr": 0.03223276266711712
-    },
-    "harness|ko_mmlu_virology|5": {
-      "acc": 0.37349397590361444,
-      "acc_stderr": 0.03765845117168863,
-      "acc_norm": 0.37349397590361444,
-      "acc_norm_stderr": 0.03765845117168863
-    },
-    "harness|ko_mmlu_philosophy|5": {
-      "acc": 0.42443729903536975,
-      "acc_stderr": 0.028071928247946205,
-      "acc_norm": 0.42443729903536975,
-      "acc_norm_stderr": 0.028071928247946205
-    },
-    "harness|ko_mmlu_human_aging|5": {
-      "acc": 0.4798206278026906,
-      "acc_stderr": 0.03353046167412299,
-      "acc_norm": 0.4798206278026906,
-      "acc_norm_stderr": 0.03353046167412299
-    },
-    "harness|ko_mmlu_human_sexuality|5": {
-      "acc": 0.4580152671755725,
-      "acc_stderr": 0.04369802690578756,
-      "acc_norm": 0.4580152671755725,
-      "acc_norm_stderr": 0.04369802690578756
-    },
-    "harness|ko_mmlu_medical_genetics|5": {
-      "acc": 0.38,
-      "acc_stderr": 0.048783173121456316,
-      "acc_norm": 0.38,
-      "acc_norm_stderr": 0.048783173121456316
-    },
-    "harness|ko_mmlu_high_school_geography|5": {
-      "acc": 0.5353535353535354,
-      "acc_stderr": 0.03553436368828061,
-      "acc_norm": 0.5353535353535354,
-      "acc_norm_stderr": 0.03553436368828061
-    },
-    "harness|ko_mmlu_electrical_engineering|5": {
-      "acc": 0.5103448275862069,
-      "acc_stderr": 0.04165774775728763,
-      "acc_norm": 0.5103448275862069,
-      "acc_norm_stderr": 0.04165774775728763
-    },
-    "harness|ko_mmlu_college_physics|5": {
-      "acc": 0.22549019607843138,
-      "acc_stderr": 0.041583075330832865,
-      "acc_norm": 0.22549019607843138,
-      "acc_norm_stderr": 0.041583075330832865
-    },
-    "harness|ko_mmlu_high_school_microeconomics|5": {
-      "acc": 0.5168067226890757,
-      "acc_stderr": 0.03246013680375308,
-      "acc_norm": 0.5168067226890757,
-      "acc_norm_stderr": 0.03246013680375308
-    },
-    "harness|ko_mmlu_high_school_macroeconomics|5": {
-      "acc": 0.4128205128205128,
-      "acc_stderr": 0.024962683564331813,
-      "acc_norm": 0.4128205128205128,
-      "acc_norm_stderr": 0.024962683564331813
-    },
-    "harness|ko_mmlu_computer_security|5": {
-      "acc": 0.55,
-      "acc_stderr": 0.04999999999999999,
-      "acc_norm": 0.55,
-      "acc_norm_stderr": 0.04999999999999999
-    },
-    "harness|ko_mmlu_global_facts|5": {
-      "acc": 0.33,
-      "acc_stderr": 0.04725815626252604,
-      "acc_norm": 0.33,
-      "acc_norm_stderr": 0.04725815626252604
-    },
-    "harness|ko_mmlu_jurisprudence|5": {
-      "acc": 0.5277777777777778,
-      "acc_stderr": 0.048262172941398944,
-      "acc_norm": 0.5277777777777778,
-      "acc_norm_stderr": 0.048262172941398944
-    },
-    "harness|ko_mmlu_high_school_chemistry|5": {
-      "acc": 0.3842364532019704,
-      "acc_stderr": 0.034223985656575515,
-      "acc_norm": 0.3842364532019704,
-      "acc_norm_stderr": 0.034223985656575515
-    },
-    "harness|ko_mmlu_high_school_biology|5": {
-      "acc": 0.43548387096774194,
-      "acc_stderr": 0.028206225591502744,
-      "acc_norm": 0.43548387096774194,
-      "acc_norm_stderr": 0.028206225591502744
-    },
-    "harness|ko_mmlu_marketing|5": {
-      "acc": 0.7222222222222222,
-      "acc_stderr": 0.029343114798094448,
-      "acc_norm": 0.7222222222222222,
-      "acc_norm_stderr": 0.029343114798094448
-    },
-    "harness|ko_mmlu_clinical_knowledge|5": {
-      "acc": 0.42641509433962266,
-      "acc_stderr": 0.030437794342983045,
-      "acc_norm": 0.42641509433962266,
-      "acc_norm_stderr": 0.030437794342983045
-    },
-    "harness|ko_mmlu_public_relations|5": {
-      "acc": 0.509090909090909,
-      "acc_stderr": 0.04788339768702861,
-      "acc_norm": 0.509090909090909,
-      "acc_norm_stderr": 0.04788339768702861
-    },
-    "harness|ko_mmlu_high_school_mathematics|5": {
-      "acc": 0.3592592592592593,
-      "acc_stderr": 0.029252905927251972,
-      "acc_norm": 0.3592592592592593,
-      "acc_norm_stderr": 0.029252905927251972
-    },
-    "harness|ko_mmlu_high_school_physics|5": {
-      "acc": 0.2847682119205298,
-      "acc_stderr": 0.03684881521389024,
-      "acc_norm": 0.2847682119205298,
-      "acc_norm_stderr": 0.03684881521389024
-    },
-    "harness|ko_mmlu_sociology|5": {
-      "acc": 0.5870646766169154,
-      "acc_stderr": 0.03481520803367348,
-      "acc_norm": 0.5870646766169154,
-      "acc_norm_stderr": 0.03481520803367348
-    },
-    "harness|ko_mmlu_college_medicine|5": {
-      "acc": 0.32947976878612717,
-      "acc_stderr": 0.03583901754736411,
-      "acc_norm": 0.32947976878612717,
-      "acc_norm_stderr": 0.03583901754736411
-    },
-    "harness|ko_mmlu_elementary_mathematics|5": {
-      "acc": 0.3544973544973545,
-      "acc_stderr": 0.024636830602842,
-      "acc_norm": 0.3544973544973545,
-      "acc_norm_stderr": 0.024636830602842
-    },
-    "harness|ko_mmlu_college_biology|5": {
-      "acc": 0.3541666666666667,
-      "acc_stderr": 0.039994111357535424,
-      "acc_norm": 0.3541666666666667,
-      "acc_norm_stderr": 0.039994111357535424
-    },
-    "harness|ko_mmlu_college_chemistry|5": {
-      "acc": 0.39,
-      "acc_stderr": 0.04902071300001975,
-      "acc_norm": 0.39,
-      "acc_norm_stderr": 0.04902071300001975
-    },
-    "harness|ko_mmlu_us_foreign_policy|5": {
-      "acc": 0.58,
-      "acc_stderr": 0.04960449637488584,
-      "acc_norm": 0.58,
-      "acc_norm_stderr": 0.04960449637488584
-    },
-    "harness|ko_mmlu_moral_disputes|5": {
-      "acc": 0.5,
-      "acc_stderr": 0.026919095102908273,
-      "acc_norm": 0.5,
-      "acc_norm_stderr": 0.026919095102908273
-    },
-    "harness|ko_mmlu_logical_fallacies|5": {
-      "acc": 0.50920245398773,
-      "acc_stderr": 0.03927705600787443,
-      "acc_norm": 0.50920245398773,
-      "acc_norm_stderr": 0.03927705600787443
-    },
-    "harness|ko_mmlu_prehistory|5": {
-      "acc": 0.4567901234567901,
-      "acc_stderr": 0.02771666165019404,
-      "acc_norm": 0.4567901234567901,
-      "acc_norm_stderr": 0.02771666165019404
-    },
-    "harness|ko_mmlu_college_mathematics|5": {
-      "acc": 0.32,
-      "acc_stderr": 0.04688261722621505,
-      "acc_norm": 0.32,
-      "acc_norm_stderr": 0.04688261722621505
-    },
-    "harness|ko_mmlu_high_school_government_and_politics|5": {
-      "acc": 0.5181347150259067,
-      "acc_stderr": 0.036060650018329185,
-      "acc_norm": 0.5181347150259067,
-      "acc_norm_stderr": 0.036060650018329185
-    },
-    "harness|ko_mmlu_econometrics|5": {
-      "acc": 0.2719298245614035,
-      "acc_stderr": 0.04185774424022056,
-      "acc_norm": 0.2719298245614035,
-      "acc_norm_stderr": 0.04185774424022056
-    },
-    "harness|ko_mmlu_high_school_psychology|5": {
-      "acc": 0.48256880733944957,
-      "acc_stderr": 0.021424291871853147,
-      "acc_norm": 0.48256880733944957,
-      "acc_norm_stderr": 0.021424291871853147
-    },
-    "harness|ko_mmlu_formal_logic|5": {
-      "acc": 0.36507936507936506,
-      "acc_stderr": 0.043062412591271526,
-      "acc_norm": 0.36507936507936506,
-      "acc_norm_stderr": 0.043062412591271526
-    },
-    "harness|ko_mmlu_nutrition|5": {
-      "acc": 0.48366013071895425,
-      "acc_stderr": 0.028614624752805407,
-      "acc_norm": 0.48366013071895425,
-      "acc_norm_stderr": 0.028614624752805407
-    },
-    "harness|ko_mmlu_business_ethics|5": {
-      "acc": 0.47,
-      "acc_stderr": 0.050161355804659205,
-      "acc_norm": 0.47,
-      "acc_norm_stderr": 0.050161355804659205
-    },
-    "harness|ko_mmlu_international_law|5": {
-      "acc": 0.6611570247933884,
-      "acc_stderr": 0.0432076780753667,
-      "acc_norm": 0.6611570247933884,
-      "acc_norm_stderr": 0.0432076780753667
-    },
-    "harness|ko_mmlu_astronomy|5": {
-      "acc": 0.4276315789473684,
-      "acc_stderr": 0.040260970832965585,
-      "acc_norm": 0.4276315789473684,
-      "acc_norm_stderr": 0.040260970832965585
-    },
-    "harness|ko_mmlu_professional_psychology|5": {
-      "acc": 0.4084967320261438,
-      "acc_stderr": 0.01988622103750187,
-      "acc_norm": 0.4084967320261438,
-      "acc_norm_stderr": 0.01988622103750187
-    },
-    "harness|ko_mmlu_professional_accounting|5": {
-      "acc": 0.3475177304964539,
-      "acc_stderr": 0.02840662780959095,
-      "acc_norm": 0.3475177304964539,
-      "acc_norm_stderr": 0.02840662780959095
-    },
-    "harness|ko_mmlu_machine_learning|5": {
-      "acc": 0.4107142857142857,
-      "acc_stderr": 0.04669510663875191,
-      "acc_norm": 0.4107142857142857,
-      "acc_norm_stderr": 0.04669510663875191
-    },
-    "harness|ko_mmlu_high_school_statistics|5": {
-      "acc": 0.39351851851851855,
-      "acc_stderr": 0.03331747876370312,
-      "acc_norm": 0.39351851851851855,
-      "acc_norm_stderr": 0.03331747876370312
-    },
-    "harness|ko_mmlu_moral_scenarios|5": {
-      "acc": 0.24692737430167597,
-      "acc_stderr": 0.014422292204808845,
-      "acc_norm": 0.24692737430167597,
-      "acc_norm_stderr": 0.014422292204808845
-    },
-    "harness|ko_mmlu_college_computer_science|5": {
-      "acc": 0.48,
-      "acc_stderr": 0.050211673156867795,
-      "acc_norm": 0.48,
-      "acc_norm_stderr": 0.050211673156867795
-    },
-    "harness|ko_mmlu_high_school_computer_science|5": {
-      "acc": 0.58,
-      "acc_stderr": 0.049604496374885836,
-      "acc_norm": 0.58,
-      "acc_norm_stderr": 0.049604496374885836
-    },
-    "harness|ko_mmlu_professional_medicine|5": {
-      "acc": 0.3713235294117647,
-      "acc_stderr": 0.02934980313976587,
-      "acc_norm": 0.3713235294117647,
-      "acc_norm_stderr": 0.02934980313976587
-    },
-    "harness|ko_mmlu_security_studies|5": {
-      "acc": 0.5428571428571428,
-      "acc_stderr": 0.03189141832421396,
-      "acc_norm": 0.5428571428571428,
-      "acc_norm_stderr": 0.03189141832421396
-    },
-    "harness|ko_mmlu_high_school_world_history|5": {
-      "acc": 0.6075949367088608,
-      "acc_stderr": 0.0317847187456473,
-      "acc_norm": 0.6075949367088608,
-      "acc_norm_stderr": 0.0317847187456473
-    },
-    "harness|ko_mmlu_professional_law|5": {
-      "acc": 0.3070404172099087,
-      "acc_stderr": 0.011780959114513783,
-      "acc_norm": 0.3070404172099087,
-      "acc_norm_stderr": 0.011780959114513783
-    },
-    "harness|ko_mmlu_high_school_us_history|5": {
-      "acc": 0.47549019607843135,
-      "acc_stderr": 0.03505093194348798,
-      "acc_norm": 0.47549019607843135,
-      "acc_norm_stderr": 0.03505093194348798
-    },
-    "harness|ko_mmlu_high_school_european_history|5": {
-      "acc": 0.34545454545454546,
-      "acc_stderr": 0.03713158067481913,
-      "acc_norm": 0.34545454545454546,
-      "acc_norm_stderr": 0.03713158067481913
-    },
-    "harness|ko_truthfulqa_mc|0": {
-      "mc1": 0.3157894736842105,
-      "mc1_stderr": 0.016272287957916923,
-      "mc2": 0.49266426314708567,
-      "mc2_stderr": 0.015737068435096736
-    },
-    "harness|ko_commongen_v2|2": {
-      "acc": 0.39669421487603307,
-      "acc_stderr": 0.016819438642971408,
-      "acc_norm": 0.46871310507674147,
-      "acc_norm_stderr": 0.01715666685978546
-    }
-  },
-  "versions": {
-    "all": 0,
-    "harness|ko_arc_challenge|25": 0,
-    "harness|ko_hellaswag|10": 0,
-    "harness|ko_mmlu_world_religions|5": 1,
-    "harness|ko_mmlu_management|5": 1,
-    "harness|ko_mmlu_miscellaneous|5": 1,
-    "harness|ko_mmlu_anatomy|5": 1,
-    "harness|ko_mmlu_abstract_algebra|5": 1,
-    "harness|ko_mmlu_conceptual_physics|5": 1,
-    "harness|ko_mmlu_virology|5": 1,
-    "harness|ko_mmlu_philosophy|5": 1,
-    "harness|ko_mmlu_human_aging|5": 1,
-    "harness|ko_mmlu_human_sexuality|5": 1,
-    "harness|ko_mmlu_medical_genetics|5": 1,
-    "harness|ko_mmlu_high_school_geography|5": 1,
-    "harness|ko_mmlu_electrical_engineering|5": 1,
-    "harness|ko_mmlu_college_physics|5": 1,
-    "harness|ko_mmlu_high_school_microeconomics|5": 1,
-    "harness|ko_mmlu_high_school_macroeconomics|5": 1,
-    "harness|ko_mmlu_computer_security|5": 1,
-    "harness|ko_mmlu_global_facts|5": 1,
-    "harness|ko_mmlu_jurisprudence|5": 1,
-    "harness|ko_mmlu_high_school_chemistry|5": 1,
-    "harness|ko_mmlu_high_school_biology|5": 1,
-    "harness|ko_mmlu_marketing|5": 1,
-    "harness|ko_mmlu_clinical_knowledge|5": 1,
-    "harness|ko_mmlu_public_relations|5": 1,
-    "harness|ko_mmlu_high_school_mathematics|5": 1,
-    "harness|ko_mmlu_high_school_physics|5": 1,
-    "harness|ko_mmlu_sociology|5": 1,
-    "harness|ko_mmlu_college_medicine|5": 1,
-    "harness|ko_mmlu_elementary_mathematics|5": 1,
-    "harness|ko_mmlu_college_biology|5": 1,
-    "harness|ko_mmlu_college_chemistry|5": 1,
-    "harness|ko_mmlu_us_foreign_policy|5": 1,
-    "harness|ko_mmlu_moral_disputes|5": 1,
-    "harness|ko_mmlu_logical_fallacies|5": 1,
-    "harness|ko_mmlu_prehistory|5": 1,
-    "harness|ko_mmlu_college_mathematics|5": 1,
-    "harness|ko_mmlu_high_school_government_and_politics|5": 1,
-    "harness|ko_mmlu_econometrics|5": 1,
-    "harness|ko_mmlu_high_school_psychology|5": 1,
-    "harness|ko_mmlu_formal_logic|5": 1,
-    "harness|ko_mmlu_nutrition|5": 1,
-    "harness|ko_mmlu_business_ethics|5": 1,
-    "harness|ko_mmlu_international_law|5": 1,
-    "harness|ko_mmlu_astronomy|5": 1,
-    "harness|ko_mmlu_professional_psychology|5": 1,
-    "harness|ko_mmlu_professional_accounting|5": 1,
-    "harness|ko_mmlu_machine_learning|5": 1,
-    "harness|ko_mmlu_high_school_statistics|5": 1,
-    "harness|ko_mmlu_moral_scenarios|5": 1,
-    "harness|ko_mmlu_college_computer_science|5": 1,
-    "harness|ko_mmlu_high_school_computer_science|5": 1,
-    "harness|ko_mmlu_professional_medicine|5": 1,
-    "harness|ko_mmlu_security_studies|5": 1,
-    "harness|ko_mmlu_high_school_world_history|5": 1,
-    "harness|ko_mmlu_professional_law|5": 1,
-    "harness|ko_mmlu_high_school_us_history|5": 1,
-    "harness|ko_mmlu_high_school_european_history|5": 1,
-    "harness|ko_truthfulqa_mc|0": 0,
-    "harness|ko_commongen_v2|2": 1
-  },
-  "config_general": {
-    "model_name": "SeaLLMs/SeaLLM-7B-v2",
-    "model_sha": "04d5d6102eb0865f4a0ca55fe8d12478605748f8",
-    "model_dtype": "torch.float16",
-    "lighteval_sha": "",
-    "num_few_shot_default": 0,
-    "num_fewshot_seeds": 1,
-    "override_batch_size": 1,
-    "max_samples": null
-  }
-}
SeaLLMs/SeaLLMs-v3-7B-Chat/result_2024-07-28 03:06:24.json
DELETED
@@ -1,444 +0,0 @@
-{
-  "results": {
-    "harness|ko_arc_challenge|25": {
-      "acc": 0.3890784982935154,
-      "acc_stderr": 0.014247309976045607,
-      "acc_norm": 0.4402730375426621,
-      "acc_norm_stderr": 0.014506769524804243
-    },
-    "harness|ko_hellaswag|10": {
-      "acc": 0.2504481179047998,
-      "acc_stderr": 0.004323856300539177,
-      "acc_norm": 0.2504481179047998,
-      "acc_norm_stderr": 0.004323856300539177
-    },
-    "harness|ko_mmlu_world_religions|5": {
-      "acc": 0.6842105263157895,
-      "acc_stderr": 0.03565079670708311,
-      "acc_norm": 0.6842105263157895,
-      "acc_norm_stderr": 0.03565079670708311
-    },
-    "harness|ko_mmlu_management|5": {
-      "acc": 0.7864077669902912,
-      "acc_stderr": 0.04058042015646036,
-      "acc_norm": 0.7864077669902912,
-      "acc_norm_stderr": 0.04058042015646036
-    },
-    "harness|ko_mmlu_miscellaneous|5": {
-      "acc": 0.6615581098339719,
-      "acc_stderr": 0.016920869586210675,
-      "acc_norm": 0.6615581098339719,
-      "acc_norm_stderr": 0.016920869586210675
-    },
-    "harness|ko_mmlu_anatomy|5": {
-      "acc": 0.3851851851851852,
-      "acc_stderr": 0.042039210401562783,
-      "acc_norm": 0.3851851851851852,
-      "acc_norm_stderr": 0.042039210401562783
-    },
-    "harness|ko_mmlu_abstract_algebra|5": {
-      "acc": 0.32,
-      "acc_stderr": 0.046882617226215034,
-      "acc_norm": 0.32,
-      "acc_norm_stderr": 0.046882617226215034
-    },
-    "harness|ko_mmlu_conceptual_physics|5": {
-      "acc": 0.6085106382978723,
-      "acc_stderr": 0.031907012423268113,
-      "acc_norm": 0.6085106382978723,
-      "acc_norm_stderr": 0.031907012423268113
-    },
-    "harness|ko_mmlu_virology|5": {
-      "acc": 0.4939759036144578,
-      "acc_stderr": 0.03892212195333047,
-      "acc_norm": 0.4939759036144578,
-      "acc_norm_stderr": 0.03892212195333047
-    },
-    "harness|ko_mmlu_philosophy|5": {
-      "acc": 0.6559485530546624,
-      "acc_stderr": 0.026981478043648047,
-      "acc_norm": 0.6559485530546624,
-      "acc_norm_stderr": 0.026981478043648047
-    },
-    "harness|ko_mmlu_human_aging|5": {
-      "acc": 0.6098654708520179,
-      "acc_stderr": 0.03273766725459156,
-      "acc_norm": 0.6098654708520179,
-      "acc_norm_stderr": 0.03273766725459156
-    },
-    "harness|ko_mmlu_human_sexuality|5": {
-      "acc": 0.5954198473282443,
-      "acc_stderr": 0.043046937953806645,
-      "acc_norm": 0.5954198473282443,
-      "acc_norm_stderr": 0.043046937953806645
-    },
-    "harness|ko_mmlu_medical_genetics|5": {
-      "acc": 0.55,
-      "acc_stderr": 0.05,
-      "acc_norm": 0.55,
-      "acc_norm_stderr": 0.05
-    },
-    "harness|ko_mmlu_high_school_geography|5": {
-      "acc": 0.7373737373737373,
-      "acc_stderr": 0.031353050095330855,
-      "acc_norm": 0.7373737373737373,
-      "acc_norm_stderr": 0.031353050095330855
-    },
-    "harness|ko_mmlu_electrical_engineering|5": {
-      "acc": 0.6344827586206897,
-      "acc_stderr": 0.04013124195424386,
-      "acc_norm": 0.6344827586206897,
-      "acc_norm_stderr": 0.04013124195424386
-    },
-    "harness|ko_mmlu_college_physics|5": {
-      "acc": 0.3235294117647059,
-      "acc_stderr": 0.046550104113196177,
-      "acc_norm": 0.3235294117647059,
-      "acc_norm_stderr": 0.046550104113196177
-    },
-    "harness|ko_mmlu_high_school_microeconomics|5": {
-      "acc": 0.7016806722689075,
-      "acc_stderr": 0.029719142876342853,
-      "acc_norm": 0.7016806722689075,
-      "acc_norm_stderr": 0.029719142876342853
-    },
-    "harness|ko_mmlu_high_school_macroeconomics|5": {
-      "acc": 0.6461538461538462,
-      "acc_stderr": 0.02424378399406214,
-      "acc_norm": 0.6461538461538462,
-      "acc_norm_stderr": 0.02424378399406214
-    },
-    "harness|ko_mmlu_computer_security|5": {
-      "acc": 0.74,
-      "acc_stderr": 0.0440844002276808,
-      "acc_norm": 0.74,
-      "acc_norm_stderr": 0.0440844002276808
-    },
-    "harness|ko_mmlu_global_facts|5": {
-      "acc": 0.36,
-      "acc_stderr": 0.048241815132442176,
-      "acc_norm": 0.36,
-      "acc_norm_stderr": 0.048241815132442176
-    },
-    "harness|ko_mmlu_jurisprudence|5": {
-      "acc": 0.7222222222222222,
-      "acc_stderr": 0.043300437496507437,
-      "acc_norm": 0.7222222222222222,
-      "acc_norm_stderr": 0.043300437496507437
-    },
-    "harness|ko_mmlu_high_school_chemistry|5": {
-      "acc": 0.5566502463054187,
-      "acc_stderr": 0.03495334582162933,
-      "acc_norm": 0.5566502463054187,
-      "acc_norm_stderr": 0.03495334582162933
-    },
-    "harness|ko_mmlu_high_school_biology|5": {
-      "acc": 0.6774193548387096,
-      "acc_stderr": 0.026593084516572277,
-      "acc_norm": 0.6774193548387096,
-      "acc_norm_stderr": 0.026593084516572277
-    },
-    "harness|ko_mmlu_marketing|5": {
-      "acc": 0.8247863247863247,
-      "acc_stderr": 0.02490443909891822,
-      "acc_norm": 0.8247863247863247,
-      "acc_norm_stderr": 0.02490443909891822
-    },
-    "harness|ko_mmlu_clinical_knowledge|5": {
-      "acc": 0.6226415094339622,
-      "acc_stderr": 0.029832808114796005,
-      "acc_norm": 0.6226415094339622,
-      "acc_norm_stderr": 0.029832808114796005
-    },
-    "harness|ko_mmlu_public_relations|5": {
-      "acc": 0.5636363636363636,
-      "acc_stderr": 0.04750185058907297,
-      "acc_norm": 0.5636363636363636,
-      "acc_norm_stderr": 0.04750185058907297
-    },
-    "harness|ko_mmlu_high_school_mathematics|5": {
-      "acc": 0.5148148148148148,
-      "acc_stderr": 0.030472153249328598,
-      "acc_norm": 0.5148148148148148,
-      "acc_norm_stderr": 0.030472153249328598
-    },
-    "harness|ko_mmlu_high_school_physics|5": {
-      "acc": 0.3576158940397351,
-      "acc_stderr": 0.03913453431177258,
-      "acc_norm": 0.3576158940397351,
-      "acc_norm_stderr": 0.03913453431177258
-    },
-    "harness|ko_mmlu_sociology|5": {
-      "acc": 0.7512437810945274,
-      "acc_stderr": 0.030567675938916707,
-      "acc_norm": 0.7512437810945274,
-      "acc_norm_stderr": 0.030567675938916707
-    },
-    "harness|ko_mmlu_college_medicine|5": {
-      "acc": 0.5780346820809249,
-      "acc_stderr": 0.03765746693865151,
-      "acc_norm": 0.5780346820809249,
-      "acc_norm_stderr": 0.03765746693865151
-    },
-    "harness|ko_mmlu_elementary_mathematics|5": {
-      "acc": 0.58994708994709,
-      "acc_stderr": 0.025331202438944423,
-      "acc_norm": 0.58994708994709,
-      "acc_norm_stderr": 0.025331202438944423
-    },
-    "harness|ko_mmlu_college_biology|5": {
-      "acc": 0.5763888888888888,
-      "acc_stderr": 0.041321250197233685,
-      "acc_norm": 0.5763888888888888,
-      "acc_norm_stderr": 0.041321250197233685
-    },
-    "harness|ko_mmlu_college_chemistry|5": {
-      "acc": 0.44,
-      "acc_stderr": 0.04988876515698589,
-      "acc_norm": 0.44,
-      "acc_norm_stderr": 0.04988876515698589
-    },
-    "harness|ko_mmlu_us_foreign_policy|5": {
-      "acc": 0.75,
-      "acc_stderr": 0.04351941398892446,
-      "acc_norm": 0.75,
-      "acc_norm_stderr": 0.04351941398892446
-    },
-    "harness|ko_mmlu_moral_disputes|5": {
-      "acc": 0.6213872832369942,
-      "acc_stderr": 0.02611374936131034,
-      "acc_norm": 0.6213872832369942,
-      "acc_norm_stderr": 0.02611374936131034
-    },
-    "harness|ko_mmlu_logical_fallacies|5": {
-      "acc": 0.5950920245398773,
-      "acc_stderr": 0.03856672163548913,
-      "acc_norm": 0.5950920245398773,
-      "acc_norm_stderr": 0.03856672163548913
-    },
-    "harness|ko_mmlu_prehistory|5": {
-      "acc": 0.6450617283950617,
-      "acc_stderr": 0.02662415247884585,
-      "acc_norm": 0.6450617283950617,
-      "acc_norm_stderr": 0.02662415247884585
-    },
-    "harness|ko_mmlu_college_mathematics|5": {
-      "acc": 0.41,
-      "acc_stderr": 0.049431107042371025,
-      "acc_norm": 0.41,
-      "acc_norm_stderr": 0.049431107042371025
-    },
-    "harness|ko_mmlu_high_school_government_and_politics|5": {
-      "acc": 0.6476683937823834,
-      "acc_stderr": 0.03447478286414357,
-      "acc_norm": 0.6476683937823834,
-      "acc_norm_stderr": 0.03447478286414357
-    },
-    "harness|ko_mmlu_econometrics|5": {
-      "acc": 0.5526315789473685,
-      "acc_stderr": 0.04677473004491199,
-      "acc_norm": 0.5526315789473685,
-      "acc_norm_stderr": 0.04677473004491199
-    },
-    "harness|ko_mmlu_high_school_psychology|5": {
-      "acc": 0.710091743119266,
-      "acc_stderr": 0.0194530666092016,
-      "acc_norm": 0.710091743119266,
-      "acc_norm_stderr": 0.0194530666092016
-    },
-    "harness|ko_mmlu_formal_logic|5": {
-      "acc": 0.5238095238095238,
-      "acc_stderr": 0.04467062628403273,
-      "acc_norm": 0.5238095238095238,
-      "acc_norm_stderr": 0.04467062628403273
-    },
-    "harness|ko_mmlu_nutrition|5": {
-      "acc": 0.6209150326797386,
-      "acc_stderr": 0.027780141207023355,
-      "acc_norm": 0.6209150326797386,
-      "acc_norm_stderr": 0.027780141207023355
-    },
-    "harness|ko_mmlu_business_ethics|5": {
-      "acc": 0.7,
-      "acc_stderr": 0.046056618647183814,
-      "acc_norm": 0.7,
-      "acc_norm_stderr": 0.046056618647183814
-    },
-    "harness|ko_mmlu_international_law|5": {
-      "acc": 0.7520661157024794,
-      "acc_stderr": 0.039418975265163025,
-      "acc_norm": 0.7520661157024794,
-      "acc_norm_stderr": 0.039418975265163025
-    },
-    "harness|ko_mmlu_astronomy|5": {
-      "acc": 0.6842105263157895,
-      "acc_stderr": 0.03782728980865469,
-      "acc_norm": 0.6842105263157895,
-      "acc_norm_stderr": 0.03782728980865469
-    },
-    "harness|ko_mmlu_professional_psychology|5": {
-      "acc": 0.5310457516339869,
-      "acc_stderr": 0.020188804456361887,
-      "acc_norm": 0.5310457516339869,
-      "acc_norm_stderr": 0.020188804456361887
-    },
-    "harness|ko_mmlu_professional_accounting|5": {
-      "acc": 0.4326241134751773,
-      "acc_stderr": 0.02955545423677885,
-      "acc_norm": 0.4326241134751773,
-      "acc_norm_stderr": 0.02955545423677885
-    },
-    "harness|ko_mmlu_machine_learning|5": {
-      "acc": 0.5,
-      "acc_stderr": 0.04745789978762494,
-      "acc_norm": 0.5,
-      "acc_norm_stderr": 0.04745789978762494
-    },
-    "harness|ko_mmlu_high_school_statistics|5": {
-      "acc": 0.5555555555555556,
-      "acc_stderr": 0.03388857118502325,
-      "acc_norm": 0.5555555555555556,
-      "acc_norm_stderr": 0.03388857118502325
-    },
-    "harness|ko_mmlu_moral_scenarios|5": {
-      "acc": 0.4491620111731844,
-      "acc_stderr": 0.01663583834163193,
-      "acc_norm": 0.4491620111731844,
-      "acc_norm_stderr": 0.01663583834163193
-    },
-    "harness|ko_mmlu_college_computer_science|5": {
-      "acc": 0.51,
-      "acc_stderr": 0.05024183937956912,
-      "acc_norm": 0.51,
-      "acc_norm_stderr": 0.05024183937956912
-    },
-    "harness|ko_mmlu_high_school_computer_science|5": {
-      "acc": 0.75,
-      "acc_stderr": 0.04351941398892446,
-      "acc_norm": 0.75,
-      "acc_norm_stderr": 0.04351941398892446
-    },
-    "harness|ko_mmlu_professional_medicine|5": {
-      "acc": 0.5661764705882353,
-      "acc_stderr": 0.030105636570016636,
-      "acc_norm": 0.5661764705882353,
-      "acc_norm_stderr": 0.030105636570016636
-    },
-    "harness|ko_mmlu_security_studies|5": {
-      "acc": 0.6653061224489796,
-      "acc_stderr": 0.030209235226242307,
-      "acc_norm": 0.6653061224489796,
-      "acc_norm_stderr": 0.030209235226242307
-    },
-    "harness|ko_mmlu_high_school_world_history|5": {
-      "acc": 0.7257383966244726,
-      "acc_stderr": 0.029041333510598025,
-      "acc_norm": 0.7257383966244726,
-      "acc_norm_stderr": 0.029041333510598025
-    },
-    "harness|ko_mmlu_professional_law|5": {
-      "acc": 0.4178617992177314,
-      "acc_stderr": 0.012596744108998564,
-      "acc_norm": 0.4178617992177314,
-      "acc_norm_stderr": 0.012596744108998564
-    },
-    "harness|ko_mmlu_high_school_us_history|5": {
-      "acc": 0.7352941176470589,
-      "acc_stderr": 0.030964517926923382,
-      "acc_norm": 0.7352941176470589,
-      "acc_norm_stderr": 0.030964517926923382
-    },
-    "harness|ko_mmlu_high_school_european_history|5": {
-      "acc": 0.7090909090909091,
-      "acc_stderr": 0.03546563019624335,
-      "acc_norm": 0.7090909090909091,
-      "acc_norm_stderr": 0.03546563019624335
-    },
-    "harness|ko_truthfulqa_mc|0": {
-      "mc1": 0.31946144430844553,
-      "mc1_stderr": 0.016322644182960498,
-      "mc2": 0.48779238416457826,
-      "mc2_stderr": 0.015735783220329043
-    },
-    "harness|ko_commongen_v2|2": {
-      "acc": 0.5914994096812278,
-      "acc_stderr": 0.016900062879427115,
-      "acc_norm": 0.6375442739079102,
-      "acc_norm_stderr": 0.016527131240453703
-    }
-  },
-  "versions": {
-    "all": 0,
-    "harness|ko_arc_challenge|25": 0,
-    "harness|ko_hellaswag|10": 0,
-    "harness|ko_mmlu_world_religions|5": 1,
-    "harness|ko_mmlu_management|5": 1,
-    "harness|ko_mmlu_miscellaneous|5": 1,
-    "harness|ko_mmlu_anatomy|5": 1,
-    "harness|ko_mmlu_abstract_algebra|5": 1,
-    "harness|ko_mmlu_conceptual_physics|5": 1,
-    "harness|ko_mmlu_virology|5": 1,
-    "harness|ko_mmlu_philosophy|5": 1,
-    "harness|ko_mmlu_human_aging|5": 1,
-    "harness|ko_mmlu_human_sexuality|5": 1,
-    "harness|ko_mmlu_medical_genetics|5": 1,
-    "harness|ko_mmlu_high_school_geography|5": 1,
-    "harness|ko_mmlu_electrical_engineering|5": 1,
-    "harness|ko_mmlu_college_physics|5": 1,
-    "harness|ko_mmlu_high_school_microeconomics|5": 1,
-    "harness|ko_mmlu_high_school_macroeconomics|5": 1,
-    "harness|ko_mmlu_computer_security|5": 1,
-    "harness|ko_mmlu_global_facts|5": 1,
-    "harness|ko_mmlu_jurisprudence|5": 1,
-    "harness|ko_mmlu_high_school_chemistry|5": 1,
-    "harness|ko_mmlu_high_school_biology|5": 1,
-    "harness|ko_mmlu_marketing|5": 1,
-    "harness|ko_mmlu_clinical_knowledge|5": 1,
-    "harness|ko_mmlu_public_relations|5": 1,
-    "harness|ko_mmlu_high_school_mathematics|5": 1,
-    "harness|ko_mmlu_high_school_physics|5": 1,
-    "harness|ko_mmlu_sociology|5": 1,
-    "harness|ko_mmlu_college_medicine|5": 1,
-    "harness|ko_mmlu_elementary_mathematics|5": 1,
-    "harness|ko_mmlu_college_biology|5": 1,
-    "harness|ko_mmlu_college_chemistry|5": 1,
-    "harness|ko_mmlu_us_foreign_policy|5": 1,
-    "harness|ko_mmlu_moral_disputes|5": 1,
-    "harness|ko_mmlu_logical_fallacies|5": 1,
-    "harness|ko_mmlu_prehistory|5": 1,
-    "harness|ko_mmlu_college_mathematics|5": 1,
-    "harness|ko_mmlu_high_school_government_and_politics|5": 1,
-    "harness|ko_mmlu_econometrics|5": 1,
-    "harness|ko_mmlu_high_school_psychology|5": 1,
-    "harness|ko_mmlu_formal_logic|5": 1,
-    "harness|ko_mmlu_nutrition|5": 1,
-    "harness|ko_mmlu_business_ethics|5": 1,
-    "harness|ko_mmlu_international_law|5": 1,
-    "harness|ko_mmlu_astronomy|5": 1,
-    "harness|ko_mmlu_professional_psychology|5": 1,
-    "harness|ko_mmlu_professional_accounting|5": 1,
-    "harness|ko_mmlu_machine_learning|5": 1,
-    "harness|ko_mmlu_high_school_statistics|5": 1,
-    "harness|ko_mmlu_moral_scenarios|5": 1,
-    "harness|ko_mmlu_college_computer_science|5": 1,
-    "harness|ko_mmlu_high_school_computer_science|5": 1,
-    "harness|ko_mmlu_professional_medicine|5": 1,
-    "harness|ko_mmlu_security_studies|5": 1,
-    "harness|ko_mmlu_high_school_world_history|5": 1,
-    "harness|ko_mmlu_professional_law|5": 1,
-    "harness|ko_mmlu_high_school_us_history|5": 1,
-    "harness|ko_mmlu_high_school_european_history|5": 1,
-    "harness|ko_truthfulqa_mc|0": 0,
-    "harness|ko_commongen_v2|2": 1
-  },
-  "config_general": {
-    "model_name": "SeaLLMs/SeaLLMs-v3-7B-Chat",
-    "model_sha": "67ef6dfd0a5df7af4be7a325786105a2ba4cbaf7",
-    "model_dtype": "torch.float16",
-    "lighteval_sha": "",
-    "num_few_shot_default": 0,
-    "num_fewshot_seeds": 1,
-    "override_batch_size": 1,
-    "max_samples": null
-  }
-}

SkyOrbis/SKY-Ko-Llama3-8B-lora/result_2024-06-23 07:48:25.json
DELETED
@@ -1,444 +0,0 @@
-{
-  "results": {
-    "harness|ko_arc_challenge|25": {"acc": 0.3720136518771331, "acc_stderr": 0.014124597881844461, "acc_norm": 0.40187713310580203, "acc_norm_stderr": 0.01432726861457828},
-    "harness|ko_hellaswag|10": {"acc": 0.3761202947619996, "acc_stderr": 0.004834207964061322, "acc_norm": 0.49063931487751444, "acc_norm_stderr": 0.004988906901307734},
-    "harness|ko_mmlu_world_religions|5": {"acc": 0.6608187134502924, "acc_stderr": 0.03631053496488905, "acc_norm": 0.6608187134502924, "acc_norm_stderr": 0.03631053496488905},
-    "harness|ko_mmlu_management|5": {"acc": 0.6796116504854369, "acc_stderr": 0.04620284082280041, "acc_norm": 0.6796116504854369, "acc_norm_stderr": 0.04620284082280041},
-    "harness|ko_mmlu_miscellaneous|5": {"acc": 0.5517241379310345, "acc_stderr": 0.017784034534992433, "acc_norm": 0.5517241379310345, "acc_norm_stderr": 0.017784034534992433},
-    "harness|ko_mmlu_anatomy|5": {"acc": 0.4444444444444444, "acc_stderr": 0.04292596718256981, "acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.04292596718256981},
-    "harness|ko_mmlu_abstract_algebra|5": {"acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446},
-    "harness|ko_mmlu_conceptual_physics|5": {"acc": 0.46382978723404256, "acc_stderr": 0.032600385118357715, "acc_norm": 0.46382978723404256, "acc_norm_stderr": 0.032600385118357715},
-    "harness|ko_mmlu_virology|5": {"acc": 0.43373493975903615, "acc_stderr": 0.03858158940685515, "acc_norm": 0.43373493975903615, "acc_norm_stderr": 0.03858158940685515},
-    "harness|ko_mmlu_philosophy|5": {"acc": 0.5273311897106109, "acc_stderr": 0.028355633568328174, "acc_norm": 0.5273311897106109, "acc_norm_stderr": 0.028355633568328174},
-    "harness|ko_mmlu_human_aging|5": {"acc": 0.47085201793721976, "acc_stderr": 0.03350073248773403, "acc_norm": 0.47085201793721976, "acc_norm_stderr": 0.03350073248773403},
-    "harness|ko_mmlu_human_sexuality|5": {"acc": 0.5725190839694656, "acc_stderr": 0.04338920305792401, "acc_norm": 0.5725190839694656, "acc_norm_stderr": 0.04338920305792401},
-    "harness|ko_mmlu_medical_genetics|5": {"acc": 0.51, "acc_stderr": 0.05024183937956911, "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956911},
-    "harness|ko_mmlu_high_school_geography|5": {"acc": 0.601010101010101, "acc_stderr": 0.03488901616852731, "acc_norm": 0.601010101010101, "acc_norm_stderr": 0.03488901616852731},
-    "harness|ko_mmlu_electrical_engineering|5": {"acc": 0.5724137931034483, "acc_stderr": 0.041227371113703316, "acc_norm": 0.5724137931034483, "acc_norm_stderr": 0.041227371113703316},
-    "harness|ko_mmlu_college_physics|5": {"acc": 0.2647058823529412, "acc_stderr": 0.043898699568087764, "acc_norm": 0.2647058823529412, "acc_norm_stderr": 0.043898699568087764},
-    "harness|ko_mmlu_high_school_microeconomics|5": {"acc": 0.5378151260504201, "acc_stderr": 0.0323854694875898, "acc_norm": 0.5378151260504201, "acc_norm_stderr": 0.0323854694875898},
-    "harness|ko_mmlu_high_school_macroeconomics|5": {"acc": 0.5282051282051282, "acc_stderr": 0.025310639254933865, "acc_norm": 0.5282051282051282, "acc_norm_stderr": 0.025310639254933865},
-    "harness|ko_mmlu_computer_security|5": {"acc": 0.59, "acc_stderr": 0.04943110704237102, "acc_norm": 0.59, "acc_norm_stderr": 0.04943110704237102},
-    "harness|ko_mmlu_global_facts|5": {"acc": 0.37, "acc_stderr": 0.048523658709391, "acc_norm": 0.37, "acc_norm_stderr": 0.048523658709391},
-    "harness|ko_mmlu_jurisprudence|5": {"acc": 0.5833333333333334, "acc_stderr": 0.04766075165356461, "acc_norm": 0.5833333333333334, "acc_norm_stderr": 0.04766075165356461},
-    "harness|ko_mmlu_high_school_chemistry|5": {"acc": 0.458128078817734, "acc_stderr": 0.03505630140785741, "acc_norm": 0.458128078817734, "acc_norm_stderr": 0.03505630140785741},
-    "harness|ko_mmlu_high_school_biology|5": {"acc": 0.5225806451612903, "acc_stderr": 0.02841498501970786, "acc_norm": 0.5225806451612903, "acc_norm_stderr": 0.02841498501970786},
-    "harness|ko_mmlu_marketing|5": {"acc": 0.7564102564102564, "acc_stderr": 0.028120966503914387, "acc_norm": 0.7564102564102564, "acc_norm_stderr": 0.028120966503914387},
-    "harness|ko_mmlu_clinical_knowledge|5": {"acc": 0.4981132075471698, "acc_stderr": 0.030772653642075664, "acc_norm": 0.4981132075471698, "acc_norm_stderr": 0.030772653642075664},
-    "harness|ko_mmlu_public_relations|5": {"acc": 0.509090909090909, "acc_stderr": 0.0478833976870286, "acc_norm": 0.509090909090909, "acc_norm_stderr": 0.0478833976870286},
-    "harness|ko_mmlu_high_school_mathematics|5": {"acc": 0.36666666666666664, "acc_stderr": 0.029381620726465073, "acc_norm": 0.36666666666666664, "acc_norm_stderr": 0.029381620726465073},
-    "harness|ko_mmlu_high_school_physics|5": {"acc": 0.3841059602649007, "acc_stderr": 0.03971301814719197, "acc_norm": 0.3841059602649007, "acc_norm_stderr": 0.03971301814719197},
-    "harness|ko_mmlu_sociology|5": {"acc": 0.6467661691542289, "acc_stderr": 0.03379790611796777, "acc_norm": 0.6467661691542289, "acc_norm_stderr": 0.03379790611796777},
-    "harness|ko_mmlu_college_medicine|5": {"acc": 0.44508670520231214, "acc_stderr": 0.03789401760283647, "acc_norm": 0.44508670520231214, "acc_norm_stderr": 0.03789401760283647},
-    "harness|ko_mmlu_elementary_mathematics|5": {"acc": 0.37037037037037035, "acc_stderr": 0.02487081525105709, "acc_norm": 0.37037037037037035, "acc_norm_stderr": 0.02487081525105709},
-    "harness|ko_mmlu_college_biology|5": {"acc": 0.4444444444444444, "acc_stderr": 0.041553199555931467, "acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.041553199555931467},
-    "harness|ko_mmlu_college_chemistry|5": {"acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836},
-    "harness|ko_mmlu_us_foreign_policy|5": {"acc": 0.66, "acc_stderr": 0.04760952285695237, "acc_norm": 0.66, "acc_norm_stderr": 0.04760952285695237},
-    "harness|ko_mmlu_moral_disputes|5": {"acc": 0.5606936416184971, "acc_stderr": 0.026720034380514998, "acc_norm": 0.5606936416184971, "acc_norm_stderr": 0.026720034380514998},
-    "harness|ko_mmlu_logical_fallacies|5": {"acc": 0.49079754601226994, "acc_stderr": 0.039277056007874414, "acc_norm": 0.49079754601226994, "acc_norm_stderr": 0.039277056007874414},
-    "harness|ko_mmlu_prehistory|5": {"acc": 0.5123456790123457, "acc_stderr": 0.027812262269327242, "acc_norm": 0.5123456790123457, "acc_norm_stderr": 0.027812262269327242},
-    "harness|ko_mmlu_college_mathematics|5": {"acc": 0.37, "acc_stderr": 0.048523658709390974, "acc_norm": 0.37, "acc_norm_stderr": 0.048523658709390974},
-    "harness|ko_mmlu_high_school_government_and_politics|5": {"acc": 0.5854922279792746, "acc_stderr": 0.035553003195576686, "acc_norm": 0.5854922279792746, "acc_norm_stderr": 0.035553003195576686},
-    "harness|ko_mmlu_econometrics|5": {"acc": 0.3508771929824561, "acc_stderr": 0.044895393502706986, "acc_norm": 0.3508771929824561, "acc_norm_stderr": 0.044895393502706986},
-    "harness|ko_mmlu_high_school_psychology|5": {"acc": 0.5486238532110091, "acc_stderr": 0.02133571471126879, "acc_norm": 0.5486238532110091, "acc_norm_stderr": 0.02133571471126879},
-    "harness|ko_mmlu_formal_logic|5": {"acc": 0.3888888888888889, "acc_stderr": 0.04360314860077459, "acc_norm": 0.3888888888888889, "acc_norm_stderr": 0.04360314860077459},
-    "harness|ko_mmlu_nutrition|5": {"acc": 0.5228758169934641, "acc_stderr": 0.028599936776089782, "acc_norm": 0.5228758169934641, "acc_norm_stderr": 0.028599936776089782},
-    "harness|ko_mmlu_business_ethics|5": {"acc": 0.58, "acc_stderr": 0.049604496374885836, "acc_norm": 0.58, "acc_norm_stderr": 0.049604496374885836},
-    "harness|ko_mmlu_international_law|5": {"acc": 0.6859504132231405, "acc_stderr": 0.04236964753041018, "acc_norm": 0.6859504132231405, "acc_norm_stderr": 0.04236964753041018},
-    "harness|ko_mmlu_astronomy|5": {"acc": 0.5197368421052632, "acc_stderr": 0.04065771002562603, "acc_norm": 0.5197368421052632, "acc_norm_stderr": 0.04065771002562603},
-    "harness|ko_mmlu_professional_psychology|5": {"acc": 0.4166666666666667, "acc_stderr": 0.019944914136873586, "acc_norm": 0.4166666666666667, "acc_norm_stderr": 0.019944914136873586},
-    "harness|ko_mmlu_professional_accounting|5": {"acc": 0.3617021276595745, "acc_stderr": 0.028663820147199492, "acc_norm": 0.3617021276595745, "acc_norm_stderr": 0.028663820147199492},
-    "harness|ko_mmlu_machine_learning|5": {"acc": 0.38392857142857145, "acc_stderr": 0.046161430750285455, "acc_norm": 0.38392857142857145, "acc_norm_stderr": 0.046161430750285455},
-    "harness|ko_mmlu_high_school_statistics|5": {"acc": 0.4583333333333333, "acc_stderr": 0.033981108902946366, "acc_norm": 0.4583333333333333, "acc_norm_stderr": 0.033981108902946366},
-    "harness|ko_mmlu_moral_scenarios|5": {"acc": 0.27150837988826815, "acc_stderr": 0.014874252168095277, "acc_norm": 0.27150837988826815, "acc_norm_stderr": 0.014874252168095277},
-    "harness|ko_mmlu_college_computer_science|5": {"acc": 0.48, "acc_stderr": 0.05021167315686779, "acc_norm": 0.48, "acc_norm_stderr": 0.05021167315686779},
-    "harness|ko_mmlu_high_school_computer_science|5": {"acc": 0.63, "acc_stderr": 0.04852365870939099, "acc_norm": 0.63, "acc_norm_stderr": 0.04852365870939099},
-    "harness|ko_mmlu_professional_medicine|5": {"acc": 0.39338235294117646, "acc_stderr": 0.02967428828131118, "acc_norm": 0.39338235294117646, "acc_norm_stderr": 0.02967428828131118},
-    "harness|ko_mmlu_security_studies|5": {"acc": 0.5918367346938775, "acc_stderr": 0.03146465712827424, "acc_norm": 0.5918367346938775, "acc_norm_stderr": 0.03146465712827424},
-    "harness|ko_mmlu_high_school_world_history|5": {"acc": 0.6582278481012658, "acc_stderr": 0.03087453753755362, "acc_norm": 0.6582278481012658, "acc_norm_stderr": 0.03087453753755362},
-    "harness|ko_mmlu_professional_law|5": {"acc": 0.3644067796610169, "acc_stderr": 0.012291694983056477, "acc_norm": 0.3644067796610169, "acc_norm_stderr": 0.012291694983056477},
-    "harness|ko_mmlu_high_school_us_history|5": {"acc": 0.5588235294117647, "acc_stderr": 0.034849415144292316, "acc_norm": 0.5588235294117647, "acc_norm_stderr": 0.034849415144292316},
-    "harness|ko_mmlu_high_school_european_history|5": {"acc": 0.6303030303030303, "acc_stderr": 0.03769430314512568, "acc_norm": 0.6303030303030303, "acc_norm_stderr": 0.03769430314512568},
-    "harness|ko_truthfulqa_mc|0": {"mc1": 0.2717258261933905, "mc1_stderr": 0.01557284045287583, "mc2": 0.44183716295863174, "mc2_stderr": 0.015484493265241027},
-    "harness|ko_commongen_v2|2": {"acc": 0.4474616292798111, "acc_stderr": 0.01709519030150058, "acc_norm": 0.5584415584415584, "acc_norm_stderr": 0.017072525875563103}
-  },
-  "versions": {
-    "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0,
-    "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1,
-    "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1,
-    "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1,
-    "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1,
-    "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1,
-    "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1,
-    "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1,
-    "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1,
-    "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1,
-    "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1,
-    "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1,
-    "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1,
-    "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1,
-    "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1,
-    "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1,
-    "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1,
-    "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1,
-    "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1,
-    "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1,
-    "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1
-  },
-  "config_general": {
-    "model_name": "SkyOrbis/SKY-Ko-Llama3-8B-lora", "model_sha": "de6afeb74d39dbcad6f5dba6d3334f5297a7cd33",
-    "model_dtype": "torch.float16", "lighteval_sha": "",
-    "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null
-  }
-}

SkyOrbis/SKY-Ko-Solar-10.7B-lora/result_2024-06-23 08:12:36.json
DELETED
@@ -1,444 +0,0 @@
-{
-  "results": {
-    "harness|ko_arc_challenge|25": {"acc": 0.41723549488054607, "acc_stderr": 0.014409825518403082, "acc_norm": 0.4726962457337884, "acc_norm_stderr": 0.014589589101985994},
-    "harness|ko_hellaswag|10": {"acc": 0.425911173073093, "acc_stderr": 0.004934698012050244, "acc_norm": 0.5734913363871739, "acc_norm_stderr": 0.004935587729948866},
-    "harness|ko_mmlu_world_religions|5": {"acc": 0.5906432748538012, "acc_stderr": 0.03771283107626545, "acc_norm": 0.5906432748538012, "acc_norm_stderr": 0.03771283107626545},
-    "harness|ko_mmlu_management|5": {"acc": 0.6407766990291263, "acc_stderr": 0.04750458399041696, "acc_norm": 0.6407766990291263, "acc_norm_stderr": 0.04750458399041696},
-    "harness|ko_mmlu_miscellaneous|5": {"acc": 0.6155810983397191, "acc_stderr": 0.017395688742819618, "acc_norm": 0.6155810983397191, "acc_norm_stderr": 0.017395688742819618},
-    "harness|ko_mmlu_anatomy|5": {"acc": 0.42962962962962964, "acc_stderr": 0.04276349494376599, "acc_norm": 0.42962962962962964, "acc_norm_stderr": 0.04276349494376599},
-    "harness|ko_mmlu_abstract_algebra|5": {"acc": 0.27, "acc_stderr": 0.044619604333847394, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394},
-    "harness|ko_mmlu_conceptual_physics|5": {"acc": 0.4425531914893617, "acc_stderr": 0.03246956919789958, "acc_norm": 0.4425531914893617, "acc_norm_stderr": 0.03246956919789958},
-    "harness|ko_mmlu_virology|5": {"acc": 0.46987951807228917, "acc_stderr": 0.03885425420866766, "acc_norm": 0.46987951807228917, "acc_norm_stderr": 0.03885425420866766},
-    "harness|ko_mmlu_philosophy|5": {"acc": 0.594855305466238, "acc_stderr": 0.027882383791325956, "acc_norm": 0.594855305466238, "acc_norm_stderr": 0.027882383791325956},
-    "harness|ko_mmlu_human_aging|5": {"acc": 0.5515695067264574, "acc_stderr": 0.03337883736255098, "acc_norm": 0.5515695067264574, "acc_norm_stderr": 0.03337883736255098},
-    "harness|ko_mmlu_human_sexuality|5": {"acc": 0.549618320610687, "acc_stderr": 0.04363643698524779, "acc_norm": 0.549618320610687, "acc_norm_stderr": 0.04363643698524779},
-    "harness|ko_mmlu_medical_genetics|5": {"acc": 0.45, "acc_stderr": 0.04999999999999999, "acc_norm": 0.45, "acc_norm_stderr": 0.04999999999999999},
-    "harness|ko_mmlu_high_school_geography|5": {"acc": 0.6767676767676768, "acc_stderr": 0.03332299921070645, "acc_norm": 0.6767676767676768, "acc_norm_stderr": 0.03332299921070645},
-    "harness|ko_mmlu_electrical_engineering|5": {"acc": 0.4689655172413793, "acc_stderr": 0.04158632762097828, "acc_norm": 0.4689655172413793, "acc_norm_stderr": 0.04158632762097828},
-    "harness|ko_mmlu_college_physics|5": {"acc": 0.3235294117647059, "acc_stderr": 0.04655010411319616, "acc_norm": 0.3235294117647059, "acc_norm_stderr": 0.04655010411319616},
-    "harness|ko_mmlu_high_school_microeconomics|5": {"acc": 0.5630252100840336, "acc_stderr": 0.03221943636566196, "acc_norm": 0.5630252100840336, "acc_norm_stderr": 0.03221943636566196},
-    "harness|ko_mmlu_high_school_macroeconomics|5": {"acc": 0.5076923076923077, "acc_stderr": 0.025348006031534743, "acc_norm": 0.5076923076923077, "acc_norm_stderr": 0.025348006031534743},
-    "harness|ko_mmlu_computer_security|5": {"acc": 0.58, "acc_stderr": 0.04960449637488583, "acc_norm": 0.58, "acc_norm_stderr": 0.04960449637488583},
-    "harness|ko_mmlu_global_facts|5": {"acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845},
-    "harness|ko_mmlu_jurisprudence|5": {"acc": 0.5833333333333334, "acc_stderr": 0.04766075165356461, "acc_norm": 0.5833333333333334, "acc_norm_stderr": 0.04766075165356461},
-    "harness|ko_mmlu_high_school_chemistry|5": {"acc": 0.45320197044334976, "acc_stderr": 0.03502544650845872, "acc_norm": 0.45320197044334976, "acc_norm_stderr": 0.03502544650845872},
-    "harness|ko_mmlu_high_school_biology|5": {"acc": 0.5612903225806452, "acc_stderr": 0.028229497320317206, "acc_norm": 0.5612903225806452, "acc_norm_stderr": 0.028229497320317206},
-    "harness|ko_mmlu_marketing|5": {"acc": 0.7991452991452992, "acc_stderr": 0.026246772946890474, "acc_norm": 0.7991452991452992, "acc_norm_stderr": 0.026246772946890474},
-    "harness|ko_mmlu_clinical_knowledge|5": {"acc": 0.4716981132075472, "acc_stderr": 0.030723535249006107, "acc_norm": 0.4716981132075472, "acc_norm_stderr": 0.030723535249006107},
-    "harness|ko_mmlu_public_relations|5": {"acc": 0.5727272727272728, "acc_stderr": 0.04738198703545483, "acc_norm": 0.5727272727272728, "acc_norm_stderr": 0.04738198703545483},
-    "harness|ko_mmlu_high_school_mathematics|5": {"acc": 0.3296296296296296, "acc_stderr": 0.02866120111652459, "acc_norm": 0.3296296296296296, "acc_norm_stderr": 0.02866120111652459},
-    "harness|ko_mmlu_high_school_physics|5": {"acc": 0.33774834437086093, "acc_stderr": 0.03861557546255168, "acc_norm": 0.33774834437086093, "acc_norm_stderr": 0.03861557546255168},
-    "harness|ko_mmlu_sociology|5": {"acc": 0.7014925373134329, "acc_stderr": 0.03235743789355043, "acc_norm": 0.7014925373134329, "acc_norm_stderr": 0.03235743789355043},
-    "harness|ko_mmlu_college_medicine|5": {"acc": 0.4624277456647399, "acc_stderr": 0.0380168510452446, "acc_norm": 0.4624277456647399, "acc_norm_stderr": 0.0380168510452446},
-    "harness|ko_mmlu_elementary_mathematics|5": {"acc": 0.3783068783068783, "acc_stderr": 0.024976954053155257, "acc_norm": 0.3783068783068783, "acc_norm_stderr": 0.024976954053155257},
-    "harness|ko_mmlu_college_biology|5": {"acc": 0.4166666666666667, "acc_stderr": 0.04122728707651282, "acc_norm": 0.4166666666666667, "acc_norm_stderr": 0.04122728707651282},
-    "harness|ko_mmlu_college_chemistry|5": {"acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975},
-    "harness|ko_mmlu_us_foreign_policy|5": {"acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316},
-    "harness|ko_mmlu_moral_disputes|5": {"acc": 0.5780346820809249, "acc_stderr": 0.02658923114217426, "acc_norm": 0.5780346820809249, "acc_norm_stderr": 0.02658923114217426},
-    "harness|ko_mmlu_logical_fallacies|5": {"acc": 0.558282208588957, "acc_stderr": 0.03901591825836184, "acc_norm": 0.558282208588957, "acc_norm_stderr": 0.03901591825836184},
-    "harness|ko_mmlu_prehistory|5": {"acc": 0.5709876543209876, "acc_stderr": 0.027538925613470863, "acc_norm": 0.5709876543209876, "acc_norm_stderr": 0.027538925613470863},
-    "harness|ko_mmlu_college_mathematics|5": {"acc": 0.39, "acc_stderr": 0.04902071300001974, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001974},
-    "harness|ko_mmlu_high_school_government_and_politics|5": {"acc": 0.694300518134715, "acc_stderr": 0.03324837939758159, "acc_norm": 0.694300518134715, "acc_norm_stderr": 0.03324837939758159},
-    "harness|ko_mmlu_econometrics|5": {"acc": 0.43859649122807015, "acc_stderr": 0.04668000738510455, "acc_norm": 0.43859649122807015, "acc_norm_stderr": 0.04668000738510455},
-    "harness|ko_mmlu_high_school_psychology|5": {"acc": 0.6293577981651376, "acc_stderr": 0.020707458164352984, "acc_norm": 0.6293577981651376, "acc_norm_stderr": 0.020707458164352984},
-    "harness|ko_mmlu_formal_logic|5": {"acc": 0.373015873015873, "acc_stderr": 0.04325506042017086, "acc_norm": 0.373015873015873, "acc_norm_stderr": 0.04325506042017086},
-    "harness|ko_mmlu_nutrition|5": {"acc": 0.5490196078431373, "acc_stderr": 0.028491993586171563, "acc_norm": 0.5490196078431373, "acc_norm_stderr": 0.028491993586171563},
-    "harness|ko_mmlu_business_ethics|5": {"acc": 0.58, "acc_stderr": 0.04960449637488583, "acc_norm": 0.58, "acc_norm_stderr": 0.04960449637488583},
-    "harness|ko_mmlu_international_law|5": {"acc": 0.7272727272727273, "acc_stderr": 0.04065578140908705, "acc_norm": 0.7272727272727273, "acc_norm_stderr": 0.04065578140908705},
-    "harness|ko_mmlu_astronomy|5": {"acc": 0.5657894736842105, "acc_stderr": 0.04033565667848319, "acc_norm": 0.5657894736842105, "acc_norm_stderr": 0.04033565667848319},
-    "harness|ko_mmlu_professional_psychology|5": {"acc": 0.4673202614379085, "acc_stderr": 0.020184583359102202, "acc_norm": 0.4673202614379085, "acc_norm_stderr": 0.020184583359102202},
-    "harness|ko_mmlu_professional_accounting|5": {"acc": 0.3475177304964539, "acc_stderr": 0.028406627809590947, "acc_norm": 0.3475177304964539, "acc_norm_stderr": 0.028406627809590947},
-    "harness|ko_mmlu_machine_learning|5": {"acc": 0.38392857142857145, "acc_stderr": 0.046161430750285455, "acc_norm": 0.38392857142857145, "acc_norm_stderr": 0.046161430750285455},
-    "harness|ko_mmlu_high_school_statistics|5": {"acc": 0.5416666666666666, "acc_stderr": 0.03398110890294636, "acc_norm": 0.5416666666666666, "acc_norm_stderr": 0.03398110890294636},
-    "harness|ko_mmlu_moral_scenarios|5": {"acc": 0.23128491620111732, "acc_stderr": 0.014102223623152594, "acc_norm": 0.23128491620111732, "acc_norm_stderr": 0.014102223623152594},
-    "harness|ko_mmlu_college_computer_science|5": {"acc": 0.46, "acc_stderr": 0.05009082659620332, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620332},
-    "harness|ko_mmlu_high_school_computer_science|5": {"acc": 0.65, "acc_stderr": 0.0479372485441102, "acc_norm": 0.65, "acc_norm_stderr": 0.0479372485441102},
-    "harness|ko_mmlu_professional_medicine|5": {"acc": 0.45588235294117646, "acc_stderr": 0.03025437257397669, "acc_norm": 0.45588235294117646, "acc_norm_stderr": 0.03025437257397669},
-    "harness|ko_mmlu_security_studies|5": {"acc": 0.5551020408163265, "acc_stderr": 0.031814251181977865, "acc_norm": 0.5551020408163265, "acc_norm_stderr": 0.031814251181977865},
-    "harness|ko_mmlu_high_school_world_history|5": {"acc": 0.6835443037974683, "acc_stderr": 0.03027497488021898, "acc_norm": 0.6835443037974683, "acc_norm_stderr": 0.03027497488021898},
-    "harness|ko_mmlu_professional_law|5": {"acc": 0.3728813559322034, "acc_stderr": 0.012350630058333362, "acc_norm": 0.3728813559322034, "acc_norm_stderr": 0.012350630058333362},
-    "harness|ko_mmlu_high_school_us_history|5": {"acc": 0.5686274509803921, "acc_stderr": 0.03476099060501636, "acc_norm": 0.5686274509803921, "acc_norm_stderr": 0.03476099060501636},
-    "harness|ko_mmlu_high_school_european_history|5": {"acc": 0.5878787878787879, "acc_stderr": 0.03843566993588718, "acc_norm": 0.5878787878787879, "acc_norm_stderr": 0.03843566993588718},
-    "harness|ko_truthfulqa_mc|0": {"mc1": 0.2607099143206854, "mc1_stderr": 0.015368841620766373, "mc2": 0.42390975148946014, "mc2_stderr": 0.015001490344964346},
-    "harness|ko_commongen_v2|2": {"acc": 0.46871310507674147, "acc_stderr": 0.017156666859785456, "acc_norm": 0.4923258559622196, "acc_norm_stderr": 0.01718832921965428}
-  },
-  "versions": {
-    "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0,
-    "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1,
-    "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1,
-    "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1,
-    "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1,
-    "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1,
-    "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1,
-    "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1,
-    "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1,
-    "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1,
-    "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1,
-    "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1,
-    "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1,
-    "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1,
-    "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1,
-    "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1,
-    "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1,
-    "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1,
-    "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1,
-    "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1,
-    "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1
-  },
-  "config_general": {
-    "model_name": "SkyOrbis/SKY-Ko-Solar-10.7B-lora", "model_sha": "e75e3c33b7385c513686435c497f05121fec370a",
-    "model_dtype": "torch.float16", "lighteval_sha": "",
-    "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null
-  }
-}

StatPan/SinGung7B-DPO-v0.1-12600c/result_2024-01-06 08:57:13.json
DELETED
@@ -1,444 +0,0 @@
-{
-  "results": {
-    "harness|ko_arc_challenge|25": {"acc": 0.3651877133105802, "acc_stderr": 0.0140702655192688, "acc_norm": 0.4129692832764505, "acc_norm_stderr": 0.014388344935398322},
-    "harness|ko_hellaswag|10": {"acc": 0.37422824138617805, "acc_stderr": 0.00482933992638833, "acc_norm": 0.477096195976897, "acc_norm_stderr": 0.004984543540932338},
-    "harness|ko_mmlu_world_religions|5": {"acc": 0.4152046783625731, "acc_stderr": 0.03779275945503201, "acc_norm": 0.4152046783625731, "acc_norm_stderr": 0.03779275945503201},
-    "harness|ko_mmlu_management|5": {"acc": 0.5242718446601942, "acc_stderr": 0.049449010929737795, "acc_norm": 0.5242718446601942, "acc_norm_stderr": 0.049449010929737795},
-    "harness|ko_mmlu_miscellaneous|5": {"acc": 0.4559386973180077, "acc_stderr": 0.017810403925435363, "acc_norm": 0.4559386973180077, "acc_norm_stderr": 0.017810403925435363},
-    "harness|ko_mmlu_anatomy|5": {"acc": 0.37777777777777777, "acc_stderr": 0.04188307537595853, "acc_norm": 0.37777777777777777, "acc_norm_stderr": 0.04188307537595853},
-    "harness|ko_mmlu_abstract_algebra|5": {"acc": 0.29, "acc_stderr": 0.04560480215720683, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720683},
-    "harness|ko_mmlu_conceptual_physics|5": {"acc": 0.34893617021276596, "acc_stderr": 0.031158522131357762, "acc_norm": 0.34893617021276596, "acc_norm_stderr": 0.031158522131357762},
-    "harness|ko_mmlu_virology|5": {"acc": 0.37349397590361444, "acc_stderr": 0.03765845117168863, "acc_norm": 0.37349397590361444, "acc_norm_stderr": 0.03765845117168863},
-    "harness|ko_mmlu_philosophy|5": {"acc": 0.4212218649517685, "acc_stderr": 0.02804339985821063, "acc_norm": 0.4212218649517685, "acc_norm_stderr": 0.02804339985821063},
-    "harness|ko_mmlu_human_aging|5": {"acc": 0.4484304932735426, "acc_stderr": 0.03337883736255099, "acc_norm": 0.4484304932735426, "acc_norm_stderr": 0.03337883736255099},
-    "harness|ko_mmlu_human_sexuality|5": {"acc": 0.3893129770992366, "acc_stderr": 0.04276486542814591, "acc_norm": 0.3893129770992366, "acc_norm_stderr": 0.04276486542814591},
-    "harness|ko_mmlu_medical_genetics|5": {"acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836},
-    "harness|ko_mmlu_high_school_geography|5": {"acc": 0.4898989898989899, "acc_stderr": 0.035616254886737454, "acc_norm": 0.4898989898989899, "acc_norm_stderr": 0.035616254886737454},
-    "harness|ko_mmlu_electrical_engineering|5": {"acc": 0.36551724137931035, "acc_stderr": 0.04013124195424385, "acc_norm": 0.36551724137931035, "acc_norm_stderr": 0.04013124195424385},
-    "harness|ko_mmlu_college_physics|5": {"acc": 0.14705882352941177, "acc_stderr": 0.0352406895156745, "acc_norm": 0.14705882352941177, "acc_norm_stderr": 0.0352406895156745},
-    "harness|ko_mmlu_high_school_microeconomics|5": {"acc": 0.42857142857142855, "acc_stderr": 0.032145368597886394, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.032145368597886394},
-    "harness|ko_mmlu_high_school_macroeconomics|5": {"acc": 0.4205128205128205, "acc_stderr": 0.025028610276710855, "acc_norm": 0.4205128205128205, "acc_norm_stderr": 0.025028610276710855},
-    "harness|ko_mmlu_computer_security|5": {"acc": 0.52, "acc_stderr": 0.05021167315686779, "acc_norm": 0.52, "acc_norm_stderr": 0.05021167315686779},
-    "harness|ko_mmlu_global_facts|5": {"acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814},
-    "harness|ko_mmlu_jurisprudence|5": {"acc": 0.49074074074074076, "acc_stderr": 0.04832853553437056, "acc_norm": 0.49074074074074076, "acc_norm_stderr": 0.04832853553437056},
-    "harness|ko_mmlu_high_school_chemistry|5": {"acc": 0.3694581280788177, "acc_stderr": 0.033959703819985726, "acc_norm": 0.3694581280788177, "acc_norm_stderr": 0.033959703819985726},
-    "harness|ko_mmlu_high_school_biology|5": {"acc": 0.42258064516129035, "acc_stderr": 0.02810096472427264, "acc_norm": 0.42258064516129035, "acc_norm_stderr": 0.02810096472427264},
-    "harness|ko_mmlu_marketing|5": {"acc": 0.717948717948718, "acc_stderr": 0.02948036054954119, "acc_norm": 0.717948717948718, "acc_norm_stderr": 0.02948036054954119},
-    "harness|ko_mmlu_clinical_knowledge|5": {"acc": 0.4528301886792453, "acc_stderr": 0.030635627957961823, "acc_norm": 0.4528301886792453, "acc_norm_stderr": 0.030635627957961823},
-    "harness|ko_mmlu_public_relations|5": {"acc": 0.44545454545454544, "acc_stderr": 0.047605488214603246, "acc_norm": 0.44545454545454544, "acc_norm_stderr": 0.047605488214603246},
-    "harness|ko_mmlu_high_school_mathematics|5": {"acc": 0.337037037037037, "acc_stderr": 0.028820884666253255, "acc_norm": 0.337037037037037, "acc_norm_stderr": 0.028820884666253255},
-    "harness|ko_mmlu_high_school_physics|5": {"acc": 0.304635761589404, "acc_stderr": 0.03757949922943343, "acc_norm": 0.304635761589404, "acc_norm_stderr": 0.03757949922943343},
-    "harness|ko_mmlu_sociology|5": {"acc": 0.48756218905472637, "acc_stderr": 0.03534439848539579, "acc_norm": 0.48756218905472637, "acc_norm_stderr": 0.03534439848539579},
-    "harness|ko_mmlu_college_medicine|5": {"acc": 0.3815028901734104, "acc_stderr": 0.03703851193099521, "acc_norm": 0.3815028901734104, "acc_norm_stderr": 0.03703851193099521},
-    "harness|ko_mmlu_elementary_mathematics|5": {"acc": 0.37037037037037035, "acc_stderr": 0.024870815251057103, "acc_norm": 0.37037037037037035, "acc_norm_stderr": 0.024870815251057103},
-    "harness|ko_mmlu_college_biology|5": {"acc": 0.3472222222222222, "acc_stderr": 0.039812405437178615, "acc_norm": 0.3472222222222222, "acc_norm_stderr": 0.039812405437178615},
-    "harness|ko_mmlu_college_chemistry|5": {"acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128},
-    "harness|ko_mmlu_us_foreign_policy|5": {"acc": 0.54, "acc_stderr": 0.05009082659620333, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620333},
-    "harness|ko_mmlu_moral_disputes|5": {"acc": 0.44508670520231214, "acc_stderr": 0.026756255129663762, "acc_norm": 0.44508670520231214, "acc_norm_stderr": 0.026756255129663762},
-    "harness|ko_mmlu_logical_fallacies|5": {"acc": 0.49079754601226994, "acc_stderr": 0.03927705600787443, "acc_norm": 0.49079754601226994, "acc_norm_stderr": 0.03927705600787443},
-    "harness|ko_mmlu_prehistory|5": {"acc": 0.39814814814814814, "acc_stderr": 0.02723741509459247, "acc_norm": 0.39814814814814814, "acc_norm_stderr": 0.02723741509459247},
-    "harness|ko_mmlu_college_mathematics|5": {"acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235},
-    "harness|ko_mmlu_high_school_government_and_politics|5": {"acc": 0.47668393782383417, "acc_stderr": 0.03604513672442205, "acc_norm": 0.47668393782383417, "acc_norm_stderr": 0.03604513672442205},
-    "harness|ko_mmlu_econometrics|5": {"acc": 0.30701754385964913, "acc_stderr": 0.04339138322579861, "acc_norm": 0.30701754385964913, "acc_norm_stderr": 0.04339138322579861},
-    "harness|ko_mmlu_high_school_psychology|5": {"acc": 0.44220183486238535, "acc_stderr": 0.021293613207520205, "acc_norm": 0.44220183486238535, "acc_norm_stderr": 0.021293613207520205},
-    "harness|ko_mmlu_formal_logic|5": {"acc": 0.35714285714285715, "acc_stderr": 0.04285714285714281, "acc_norm": 0.35714285714285715, "acc_norm_stderr": 0.04285714285714281},
-    "harness|ko_mmlu_nutrition|5": {"acc": 0.4117647058823529, "acc_stderr": 0.028180596328259293, "acc_norm": 0.4117647058823529, "acc_norm_stderr": 0.028180596328259293},
-    "harness|ko_mmlu_business_ethics|5": {"acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025},
-    "harness|ko_mmlu_international_law|5": {"acc": 0.5619834710743802, "acc_stderr": 0.04529146804435792, "acc_norm": 0.5619834710743802, "acc_norm_stderr": 0.04529146804435792},
-    "harness|ko_mmlu_astronomy|5": {"acc": 0.4342105263157895, "acc_stderr": 0.040335656678483184, "acc_norm": 0.4342105263157895, "acc_norm_stderr": 0.040335656678483184},
-    "harness|ko_mmlu_professional_psychology|5": {"acc": 0.38235294117647056, "acc_stderr": 0.019659922493623336, "acc_norm": 0.38235294117647056, "acc_norm_stderr": 0.019659922493623336},
-    "harness|ko_mmlu_professional_accounting|5": {"acc": 0.32978723404255317, "acc_stderr": 0.028045946942042398, "acc_norm": 0.32978723404255317, "acc_norm_stderr": 0.028045946942042398},
-    "harness|ko_mmlu_machine_learning|5": {"acc": 0.33035714285714285, "acc_stderr": 0.04464285714285713, "acc_norm": 0.33035714285714285, "acc_norm_stderr": 0.04464285714285713},
-    "harness|ko_mmlu_high_school_statistics|5": {"acc": 0.35648148148148145, "acc_stderr": 0.032664783315272714, "acc_norm": 0.35648148148148145, "acc_norm_stderr": 0.032664783315272714},
-    "harness|ko_mmlu_moral_scenarios|5": {"acc": 0.27039106145251396, "acc_stderr": 0.014854993938010085, "acc_norm": 0.27039106145251396, "acc_norm_stderr": 0.014854993938010085},
-    "harness|ko_mmlu_college_computer_science|5": {"acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975},
-    "harness|ko_mmlu_high_school_computer_science|5": {"acc": 0.56, "acc_stderr": 0.04988876515698589, "acc_norm": 0.56, "acc_norm_stderr": 0.04988876515698589},
-    "harness|ko_mmlu_professional_medicine|5": {"acc": 0.33455882352941174, "acc_stderr": 0.028661996202335303, "acc_norm": 0.33455882352941174, "acc_norm_stderr": 0.028661996202335303},
-    "harness|ko_mmlu_security_studies|5": {"acc": 0.42448979591836733, "acc_stderr": 0.03164209487942941, "acc_norm": 0.42448979591836733, "acc_norm_stderr": 0.03164209487942941},
-    "harness|ko_mmlu_high_school_world_history|5": {"acc": 0.5189873417721519, "acc_stderr": 0.03252375148090448, "acc_norm": 0.5189873417721519, "acc_norm_stderr": 0.03252375148090448},
-    "harness|ko_mmlu_professional_law|5": {"acc": 0.3005215123859192, "acc_stderr": 0.011709918883039124, "acc_norm": 0.3005215123859192, "acc_norm_stderr": 0.011709918883039124},
-    "harness|ko_mmlu_high_school_us_history|5": {"acc": 0.4264705882352941, "acc_stderr": 0.03471157907953425, "acc_norm": 0.4264705882352941, "acc_norm_stderr": 0.03471157907953425},
-    "harness|ko_mmlu_high_school_european_history|5": {"acc": 0.41818181818181815, "acc_stderr": 0.03851716319398396, "acc_norm": 0.41818181818181815, "acc_norm_stderr": 0.03851716319398396},
-    "harness|ko_truthfulqa_mc|0": {"mc1": 0.3084455324357405, "mc1_stderr": 0.01616803938315687, "mc2": 0.5146617602010907, "mc2_stderr": 0.0165702140232135},
-    "harness|ko_commongen_v2|2": {"acc": 0.4179456906729634, "acc_stderr": 0.016957292005279723, "acc_norm": 0.4592680047225502, "acc_norm_stderr": 0.017133218276537677}
-  },
-  "versions": {
-    "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0,
-    "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1,
-    "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1,
-    "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1,
-    "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1,
-    "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1,
-    "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1,
-    "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1,
-    "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1,
-    "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1,
-    "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1,
-    "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1,
-    "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1,
-    "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1,
-    "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1,
-    "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1,
-    "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1,
-    "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1,
-    "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1,
-    "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1,
-    "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1
-  },
-  "config_general": {
-    "model_name": "StatPan/SinGung7B-DPO-v0.1-12600c", "model_sha": "ce6e4ccd22d2ad3904ec1486456c22d5c4edb088",
-    "model_dtype": "torch.float16", "lighteval_sha": "",
-    "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null
-  }
-}
|
StatPan/all-you-need-is/result_2024-01-04 01:51:48.json DELETED
@@ -1,444 +0,0 @@
{
    "results": {
        "harness|ko_arc_challenge|25": {"acc": 0.431740614334471, "acc_stderr": 0.014474591427196206, "acc_norm": 0.48976109215017066, "acc_norm_stderr": 0.01460832690628502},
        "harness|ko_hellaswag|10": {"acc": 0.4331806413065126, "acc_stderr": 0.004945023657032277, "acc_norm": 0.5859390559649472, "acc_norm_stderr": 0.004915524600627963},
        "harness|ko_mmlu_world_religions|5": {"acc": 0.5964912280701754, "acc_stderr": 0.03762738699917057, "acc_norm": 0.5964912280701754, "acc_norm_stderr": 0.03762738699917057},
        "harness|ko_mmlu_management|5": {"acc": 0.6893203883495146, "acc_stderr": 0.0458212416016155, "acc_norm": 0.6893203883495146, "acc_norm_stderr": 0.0458212416016155},
        "harness|ko_mmlu_miscellaneous|5": {"acc": 0.6538952745849298, "acc_stderr": 0.01701196526641208, "acc_norm": 0.6538952745849298, "acc_norm_stderr": 0.01701196526641208},
        "harness|ko_mmlu_anatomy|5": {"acc": 0.45925925925925926, "acc_stderr": 0.04304979692464244, "acc_norm": 0.45925925925925926, "acc_norm_stderr": 0.04304979692464244},
        "harness|ko_mmlu_abstract_algebra|5": {"acc": 0.2, "acc_stderr": 0.04020151261036845, "acc_norm": 0.2, "acc_norm_stderr": 0.04020151261036845},
        "harness|ko_mmlu_conceptual_physics|5": {"acc": 0.5063829787234042, "acc_stderr": 0.03268335899936335, "acc_norm": 0.5063829787234042, "acc_norm_stderr": 0.03268335899936335},
        "harness|ko_mmlu_virology|5": {"acc": 0.4879518072289157, "acc_stderr": 0.0389136449583582, "acc_norm": 0.4879518072289157, "acc_norm_stderr": 0.0389136449583582},
        "harness|ko_mmlu_philosophy|5": {"acc": 0.5980707395498392, "acc_stderr": 0.027846476005930473, "acc_norm": 0.5980707395498392, "acc_norm_stderr": 0.027846476005930473},
        "harness|ko_mmlu_human_aging|5": {"acc": 0.5695067264573991, "acc_stderr": 0.033231973029429394, "acc_norm": 0.5695067264573991, "acc_norm_stderr": 0.033231973029429394},
        "harness|ko_mmlu_human_sexuality|5": {"acc": 0.6335877862595419, "acc_stderr": 0.042258754519696386, "acc_norm": 0.6335877862595419, "acc_norm_stderr": 0.042258754519696386},
        "harness|ko_mmlu_medical_genetics|5": {"acc": 0.48, "acc_stderr": 0.05021167315686779, "acc_norm": 0.48, "acc_norm_stderr": 0.05021167315686779},
        "harness|ko_mmlu_high_school_geography|5": {"acc": 0.7373737373737373, "acc_stderr": 0.03135305009533086, "acc_norm": 0.7373737373737373, "acc_norm_stderr": 0.03135305009533086},
        "harness|ko_mmlu_electrical_engineering|5": {"acc": 0.5517241379310345, "acc_stderr": 0.04144311810878151, "acc_norm": 0.5517241379310345, "acc_norm_stderr": 0.04144311810878151},
        "harness|ko_mmlu_college_physics|5": {"acc": 0.3137254901960784, "acc_stderr": 0.04617034827006716, "acc_norm": 0.3137254901960784, "acc_norm_stderr": 0.04617034827006716},
        "harness|ko_mmlu_high_school_microeconomics|5": {"acc": 0.6638655462184874, "acc_stderr": 0.03068473711513536, "acc_norm": 0.6638655462184874, "acc_norm_stderr": 0.03068473711513536},
        "harness|ko_mmlu_high_school_macroeconomics|5": {"acc": 0.5666666666666667, "acc_stderr": 0.0251246535258851, "acc_norm": 0.5666666666666667, "acc_norm_stderr": 0.0251246535258851},
        "harness|ko_mmlu_computer_security|5": {"acc": 0.62, "acc_stderr": 0.048783173121456316, "acc_norm": 0.62, "acc_norm_stderr": 0.048783173121456316},
        "harness|ko_mmlu_global_facts|5": {"acc": 0.36, "acc_stderr": 0.048241815132442176, "acc_norm": 0.36, "acc_norm_stderr": 0.048241815132442176},
        "harness|ko_mmlu_jurisprudence|5": {"acc": 0.6203703703703703, "acc_stderr": 0.04691521224077742, "acc_norm": 0.6203703703703703, "acc_norm_stderr": 0.04691521224077742},
        "harness|ko_mmlu_high_school_chemistry|5": {"acc": 0.37438423645320196, "acc_stderr": 0.03405155380561952, "acc_norm": 0.37438423645320196, "acc_norm_stderr": 0.03405155380561952},
        "harness|ko_mmlu_high_school_biology|5": {"acc": 0.6064516129032258, "acc_stderr": 0.027791878753132264, "acc_norm": 0.6064516129032258, "acc_norm_stderr": 0.027791878753132264},
        "harness|ko_mmlu_marketing|5": {"acc": 0.8247863247863247, "acc_stderr": 0.024904439098918214, "acc_norm": 0.8247863247863247, "acc_norm_stderr": 0.024904439098918214},
        "harness|ko_mmlu_clinical_knowledge|5": {"acc": 0.5433962264150943, "acc_stderr": 0.030656748696739435, "acc_norm": 0.5433962264150943, "acc_norm_stderr": 0.030656748696739435},
        "harness|ko_mmlu_public_relations|5": {"acc": 0.5727272727272728, "acc_stderr": 0.04738198703545483, "acc_norm": 0.5727272727272728, "acc_norm_stderr": 0.04738198703545483},
        "harness|ko_mmlu_high_school_mathematics|5": {"acc": 0.34444444444444444, "acc_stderr": 0.028972648884844267, "acc_norm": 0.34444444444444444, "acc_norm_stderr": 0.028972648884844267},
        "harness|ko_mmlu_high_school_physics|5": {"acc": 0.3973509933774834, "acc_stderr": 0.03995524007681682, "acc_norm": 0.3973509933774834, "acc_norm_stderr": 0.03995524007681682},
        "harness|ko_mmlu_sociology|5": {"acc": 0.6915422885572139, "acc_stderr": 0.03265819588512697, "acc_norm": 0.6915422885572139, "acc_norm_stderr": 0.03265819588512697},
        "harness|ko_mmlu_college_medicine|5": {"acc": 0.5144508670520231, "acc_stderr": 0.03810871630454764, "acc_norm": 0.5144508670520231, "acc_norm_stderr": 0.03810871630454764},
        "harness|ko_mmlu_elementary_mathematics|5": {"acc": 0.43915343915343913, "acc_stderr": 0.02555992055053101, "acc_norm": 0.43915343915343913, "acc_norm_stderr": 0.02555992055053101},
        "harness|ko_mmlu_college_biology|5": {"acc": 0.5, "acc_stderr": 0.04181210050035455, "acc_norm": 0.5, "acc_norm_stderr": 0.04181210050035455},
        "harness|ko_mmlu_college_chemistry|5": {"acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025},
        "harness|ko_mmlu_us_foreign_policy|5": {"acc": 0.72, "acc_stderr": 0.04512608598542126, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542126},
        "harness|ko_mmlu_moral_disputes|5": {"acc": 0.615606936416185, "acc_stderr": 0.026189666966272035, "acc_norm": 0.615606936416185, "acc_norm_stderr": 0.026189666966272035},
        "harness|ko_mmlu_logical_fallacies|5": {"acc": 0.5276073619631901, "acc_stderr": 0.0392237829061099, "acc_norm": 0.5276073619631901, "acc_norm_stderr": 0.0392237829061099},
        "harness|ko_mmlu_prehistory|5": {"acc": 0.5802469135802469, "acc_stderr": 0.02746009955700513, "acc_norm": 0.5802469135802469, "acc_norm_stderr": 0.02746009955700513},
        "harness|ko_mmlu_college_mathematics|5": {"acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045},
        "harness|ko_mmlu_high_school_government_and_politics|5": {"acc": 0.7098445595854922, "acc_stderr": 0.03275264467791516, "acc_norm": 0.7098445595854922, "acc_norm_stderr": 0.03275264467791516},
        "harness|ko_mmlu_econometrics|5": {"acc": 0.5350877192982456, "acc_stderr": 0.046920083813689104, "acc_norm": 0.5350877192982456, "acc_norm_stderr": 0.046920083813689104},
        "harness|ko_mmlu_high_school_psychology|5": {"acc": 0.6954128440366972, "acc_stderr": 0.019732299420354038, "acc_norm": 0.6954128440366972, "acc_norm_stderr": 0.019732299420354038},
        "harness|ko_mmlu_formal_logic|5": {"acc": 0.4365079365079365, "acc_stderr": 0.044359328928514664, "acc_norm": 0.4365079365079365, "acc_norm_stderr": 0.044359328928514664},
        "harness|ko_mmlu_nutrition|5": {"acc": 0.5620915032679739, "acc_stderr": 0.02840830202033269, "acc_norm": 0.5620915032679739, "acc_norm_stderr": 0.02840830202033269},
        "harness|ko_mmlu_business_ethics|5": {"acc": 0.57, "acc_stderr": 0.04975698519562429, "acc_norm": 0.57, "acc_norm_stderr": 0.04975698519562429},
        "harness|ko_mmlu_international_law|5": {"acc": 0.7603305785123967, "acc_stderr": 0.03896878985070417, "acc_norm": 0.7603305785123967, "acc_norm_stderr": 0.03896878985070417},
        "harness|ko_mmlu_astronomy|5": {"acc": 0.5921052631578947, "acc_stderr": 0.039993097127774734, "acc_norm": 0.5921052631578947, "acc_norm_stderr": 0.039993097127774734},
        "harness|ko_mmlu_professional_psychology|5": {"acc": 0.5261437908496732, "acc_stderr": 0.020200164564804585, "acc_norm": 0.5261437908496732, "acc_norm_stderr": 0.020200164564804585},
        "harness|ko_mmlu_professional_accounting|5": {"acc": 0.4148936170212766, "acc_stderr": 0.029392236584612496, "acc_norm": 0.4148936170212766, "acc_norm_stderr": 0.029392236584612496},
        "harness|ko_mmlu_machine_learning|5": {"acc": 0.44642857142857145, "acc_stderr": 0.04718471485219589, "acc_norm": 0.44642857142857145, "acc_norm_stderr": 0.04718471485219589},
        "harness|ko_mmlu_high_school_statistics|5": {"acc": 0.5833333333333334, "acc_stderr": 0.03362277436608044, "acc_norm": 0.5833333333333334, "acc_norm_stderr": 0.03362277436608044},
        "harness|ko_mmlu_moral_scenarios|5": {"acc": 0.23910614525139665, "acc_stderr": 0.014265554192331152, "acc_norm": 0.23910614525139665, "acc_norm_stderr": 0.014265554192331152},
        "harness|ko_mmlu_college_computer_science|5": {"acc": 0.47, "acc_stderr": 0.05016135580465919, "acc_norm": 0.47, "acc_norm_stderr": 0.05016135580465919},
        "harness|ko_mmlu_high_school_computer_science|5": {"acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316},
        "harness|ko_mmlu_professional_medicine|5": {"acc": 0.5294117647058824, "acc_stderr": 0.03032024326500413, "acc_norm": 0.5294117647058824, "acc_norm_stderr": 0.03032024326500413},
        "harness|ko_mmlu_security_studies|5": {"acc": 0.5836734693877551, "acc_stderr": 0.03155782816556165, "acc_norm": 0.5836734693877551, "acc_norm_stderr": 0.03155782816556165},
        "harness|ko_mmlu_high_school_world_history|5": {"acc": 0.7721518987341772, "acc_stderr": 0.027303484599069422, "acc_norm": 0.7721518987341772, "acc_norm_stderr": 0.027303484599069422},
        "harness|ko_mmlu_professional_law|5": {"acc": 0.41590612777053454, "acc_stderr": 0.012588323850313604, "acc_norm": 0.41590612777053454, "acc_norm_stderr": 0.012588323850313604},
        "harness|ko_mmlu_high_school_us_history|5": {"acc": 0.6764705882352942, "acc_stderr": 0.032834720561085606, "acc_norm": 0.6764705882352942, "acc_norm_stderr": 0.032834720561085606},
        "harness|ko_mmlu_high_school_european_history|5": {"acc": 0.696969696969697, "acc_stderr": 0.03588624800091709, "acc_norm": 0.696969696969697, "acc_norm_stderr": 0.03588624800091709},
        "harness|ko_truthfulqa_mc|0": {"mc1": 0.3818849449204406, "mc1_stderr": 0.0170081019391635, "mc2": 0.5429614095843092, "mc2_stderr": 0.015630544483169008},
        "harness|ko_commongen_v2|2": {"acc": 0.6115702479338843, "acc_stderr": 0.01675692157106942, "acc_norm": 0.6304604486422668, "acc_norm_stderr": 0.01659488340568543}
    },
    "versions": {
        "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "StatPan/all-you-need-is", "model_sha": "56dd7571c69019e915bf81bfc69725de6a23ceb1", "model_dtype": "torch.float16", "lighteval_sha": "",
        "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null
    }
}
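Every deleted result file in this commit shares the same lighteval schema shown above: per-task metric objects under "results", task versions under "versions", and run metadata under "config_general". As a reference only, here is a minimal Python sketch (not part of the original commit; the file path is a hypothetical local copy) showing how a file of this shape can be aggregated:

```python
# Minimal sketch: parse one deleted result file and macro-average its scores.
# Assumes a local copy of the JSON; the path below is hypothetical.
import json
from statistics import mean

path = "result_2024-01-04 01:51:48.json"  # hypothetical local copy

with open(path, encoding="utf-8") as f:
    data = json.load(f)

# Each "results" entry carries acc/acc_norm, except ko_truthfulqa_mc,
# which reports mc1/mc2 instead.
scores = {
    task: metrics.get("acc_norm", metrics.get("mc2"))
    for task, metrics in data["results"].items()
}
for task, score in scores.items():
    print(f"{task}: {score:.4f}")
print("macro average:", round(mean(scores.values()), 4))
```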
StatPan/mistral7b-bartending-recipe-v1/result_2023-12-29 07:50:46.json DELETED
@@ -1,444 +0,0 @@
{
    "results": {
        "harness|ko_arc_challenge|25": {"acc": 0.4249146757679181, "acc_stderr": 0.014445698968520769, "acc_norm": 0.5, "acc_norm_stderr": 0.014611390804670088},
        "harness|ko_hellaswag|10": {"acc": 0.41724756024696275, "acc_stderr": 0.004920967192255291, "acc_norm": 0.5492929695279825, "acc_norm_stderr": 0.004965473894646782},
        "harness|ko_mmlu_world_religions|5": {"acc": 0.5730994152046783, "acc_stderr": 0.03793620616529917, "acc_norm": 0.5730994152046783, "acc_norm_stderr": 0.03793620616529917},
        "harness|ko_mmlu_management|5": {"acc": 0.6310679611650486, "acc_stderr": 0.0477761518115674, "acc_norm": 0.6310679611650486, "acc_norm_stderr": 0.0477761518115674},
        "harness|ko_mmlu_miscellaneous|5": {"acc": 0.561941251596424, "acc_stderr": 0.017742232238257258, "acc_norm": 0.561941251596424, "acc_norm_stderr": 0.017742232238257258},
        "harness|ko_mmlu_anatomy|5": {"acc": 0.3851851851851852, "acc_stderr": 0.042039210401562783, "acc_norm": 0.3851851851851852, "acc_norm_stderr": 0.042039210401562783},
        "harness|ko_mmlu_abstract_algebra|5": {"acc": 0.27, "acc_stderr": 0.0446196043338474, "acc_norm": 0.27, "acc_norm_stderr": 0.0446196043338474},
        "harness|ko_mmlu_conceptual_physics|5": {"acc": 0.43829787234042555, "acc_stderr": 0.03243618636108101, "acc_norm": 0.43829787234042555, "acc_norm_stderr": 0.03243618636108101},
        "harness|ko_mmlu_virology|5": {"acc": 0.42771084337349397, "acc_stderr": 0.03851597683718533, "acc_norm": 0.42771084337349397, "acc_norm_stderr": 0.03851597683718533},
        "harness|ko_mmlu_philosophy|5": {"acc": 0.5434083601286174, "acc_stderr": 0.0282908690541976, "acc_norm": 0.5434083601286174, "acc_norm_stderr": 0.0282908690541976},
        "harness|ko_mmlu_human_aging|5": {"acc": 0.5381165919282511, "acc_stderr": 0.03346015011973228, "acc_norm": 0.5381165919282511, "acc_norm_stderr": 0.03346015011973228},
        "harness|ko_mmlu_human_sexuality|5": {"acc": 0.46564885496183206, "acc_stderr": 0.043749285605997376, "acc_norm": 0.46564885496183206, "acc_norm_stderr": 0.043749285605997376},
        "harness|ko_mmlu_medical_genetics|5": {"acc": 0.46, "acc_stderr": 0.05009082659620332, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620332},
        "harness|ko_mmlu_high_school_geography|5": {"acc": 0.6313131313131313, "acc_stderr": 0.034373055019806184, "acc_norm": 0.6313131313131313, "acc_norm_stderr": 0.034373055019806184},
        "harness|ko_mmlu_electrical_engineering|5": {"acc": 0.4482758620689655, "acc_stderr": 0.04144311810878151, "acc_norm": 0.4482758620689655, "acc_norm_stderr": 0.04144311810878151},
        "harness|ko_mmlu_college_physics|5": {"acc": 0.24509803921568626, "acc_stderr": 0.04280105837364397, "acc_norm": 0.24509803921568626, "acc_norm_stderr": 0.04280105837364397},
        "harness|ko_mmlu_high_school_microeconomics|5": {"acc": 0.5546218487394958, "acc_stderr": 0.0322841062671639, "acc_norm": 0.5546218487394958, "acc_norm_stderr": 0.0322841062671639},
        "harness|ko_mmlu_high_school_macroeconomics|5": {"acc": 0.5256410256410257, "acc_stderr": 0.025317649726448677, "acc_norm": 0.5256410256410257, "acc_norm_stderr": 0.025317649726448677},
        "harness|ko_mmlu_computer_security|5": {"acc": 0.61, "acc_stderr": 0.04902071300001974, "acc_norm": 0.61, "acc_norm_stderr": 0.04902071300001974},
        "harness|ko_mmlu_global_facts|5": {"acc": 0.29, "acc_stderr": 0.04560480215720684, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720684},
        "harness|ko_mmlu_jurisprudence|5": {"acc": 0.5648148148148148, "acc_stderr": 0.04792898170907062, "acc_norm": 0.5648148148148148, "acc_norm_stderr": 0.04792898170907062},
        "harness|ko_mmlu_high_school_chemistry|5": {"acc": 0.4187192118226601, "acc_stderr": 0.03471192860518468, "acc_norm": 0.4187192118226601, "acc_norm_stderr": 0.03471192860518468},
        "harness|ko_mmlu_high_school_biology|5": {"acc": 0.535483870967742, "acc_stderr": 0.02837228779796294, "acc_norm": 0.535483870967742, "acc_norm_stderr": 0.02837228779796294},
        "harness|ko_mmlu_marketing|5": {"acc": 0.7478632478632479, "acc_stderr": 0.028447965476231022, "acc_norm": 0.7478632478632479, "acc_norm_stderr": 0.028447965476231022},
        "harness|ko_mmlu_clinical_knowledge|5": {"acc": 0.49056603773584906, "acc_stderr": 0.030767394707808086, "acc_norm": 0.49056603773584906, "acc_norm_stderr": 0.030767394707808086},
        "harness|ko_mmlu_public_relations|5": {"acc": 0.5818181818181818, "acc_stderr": 0.04724577405731572, "acc_norm": 0.5818181818181818, "acc_norm_stderr": 0.04724577405731572},
        "harness|ko_mmlu_high_school_mathematics|5": {"acc": 0.34814814814814815, "acc_stderr": 0.029045600290616255, "acc_norm": 0.34814814814814815, "acc_norm_stderr": 0.029045600290616255},
        "harness|ko_mmlu_high_school_physics|5": {"acc": 0.32450331125827814, "acc_stderr": 0.038227469376587525, "acc_norm": 0.32450331125827814, "acc_norm_stderr": 0.038227469376587525},
        "harness|ko_mmlu_sociology|5": {"acc": 0.6915422885572139, "acc_stderr": 0.03265819588512697, "acc_norm": 0.6915422885572139, "acc_norm_stderr": 0.03265819588512697},
        "harness|ko_mmlu_college_medicine|5": {"acc": 0.3930635838150289, "acc_stderr": 0.0372424959581773, "acc_norm": 0.3930635838150289, "acc_norm_stderr": 0.0372424959581773},
        "harness|ko_mmlu_elementary_mathematics|5": {"acc": 0.38095238095238093, "acc_stderr": 0.025010749116137602, "acc_norm": 0.38095238095238093, "acc_norm_stderr": 0.025010749116137602},
        "harness|ko_mmlu_college_biology|5": {"acc": 0.4027777777777778, "acc_stderr": 0.04101405519842426, "acc_norm": 0.4027777777777778, "acc_norm_stderr": 0.04101405519842426},
        "harness|ko_mmlu_college_chemistry|5": {"acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099},
        "harness|ko_mmlu_us_foreign_policy|5": {"acc": 0.64, "acc_stderr": 0.048241815132442176, "acc_norm": 0.64, "acc_norm_stderr": 0.048241815132442176},
        "harness|ko_mmlu_moral_disputes|5": {"acc": 0.569364161849711, "acc_stderr": 0.02665880027367238, "acc_norm": 0.569364161849711, "acc_norm_stderr": 0.02665880027367238},
        "harness|ko_mmlu_logical_fallacies|5": {"acc": 0.5460122699386503, "acc_stderr": 0.0391170190467718, "acc_norm": 0.5460122699386503, "acc_norm_stderr": 0.0391170190467718},
        "harness|ko_mmlu_prehistory|5": {"acc": 0.5, "acc_stderr": 0.02782074420373286, "acc_norm": 0.5, "acc_norm_stderr": 0.02782074420373286},
        "harness|ko_mmlu_college_mathematics|5": {"acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814},
        "harness|ko_mmlu_high_school_government_and_politics|5": {"acc": 0.5751295336787565, "acc_stderr": 0.035674713352125395, "acc_norm": 0.5751295336787565, "acc_norm_stderr": 0.035674713352125395},
        "harness|ko_mmlu_econometrics|5": {"acc": 0.32456140350877194, "acc_stderr": 0.04404556157374768, "acc_norm": 0.32456140350877194, "acc_norm_stderr": 0.04404556157374768},
        "harness|ko_mmlu_high_school_psychology|5": {"acc": 0.5889908256880734, "acc_stderr": 0.021095050687277656, "acc_norm": 0.5889908256880734, "acc_norm_stderr": 0.021095050687277656},
        "harness|ko_mmlu_formal_logic|5": {"acc": 0.4444444444444444, "acc_stderr": 0.044444444444444495, "acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.044444444444444495},
        "harness|ko_mmlu_nutrition|5": {"acc": 0.5261437908496732, "acc_stderr": 0.028590752958852394, "acc_norm": 0.5261437908496732, "acc_norm_stderr": 0.028590752958852394},
        "harness|ko_mmlu_business_ethics|5": {"acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795},
        "harness|ko_mmlu_international_law|5": {"acc": 0.7272727272727273, "acc_stderr": 0.040655781409087044, "acc_norm": 0.7272727272727273, "acc_norm_stderr": 0.040655781409087044},
        "harness|ko_mmlu_astronomy|5": {"acc": 0.5131578947368421, "acc_stderr": 0.04067533136309172, "acc_norm": 0.5131578947368421, "acc_norm_stderr": 0.04067533136309172},
        "harness|ko_mmlu_professional_psychology|5": {"acc": 0.4362745098039216, "acc_stderr": 0.02006287424353913, "acc_norm": 0.4362745098039216, "acc_norm_stderr": 0.02006287424353913},
        "harness|ko_mmlu_professional_accounting|5": {"acc": 0.36524822695035464, "acc_stderr": 0.02872386385328128, "acc_norm": 0.36524822695035464, "acc_norm_stderr": 0.02872386385328128},
        "harness|ko_mmlu_machine_learning|5": {"acc": 0.5, "acc_stderr": 0.04745789978762494, "acc_norm": 0.5, "acc_norm_stderr": 0.04745789978762494},
        "harness|ko_mmlu_high_school_statistics|5": {"acc": 0.42592592592592593, "acc_stderr": 0.03372343271653062, "acc_norm": 0.42592592592592593, "acc_norm_stderr": 0.03372343271653062},
        "harness|ko_mmlu_moral_scenarios|5": {"acc": 0.23687150837988827, "acc_stderr": 0.014219570788103986, "acc_norm": 0.23687150837988827, "acc_norm_stderr": 0.014219570788103986},
        "harness|ko_mmlu_college_computer_science|5": {"acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836},
        "harness|ko_mmlu_high_school_computer_science|5": {"acc": 0.69, "acc_stderr": 0.046482319871173156, "acc_norm": 0.69, "acc_norm_stderr": 0.046482319871173156},
        "harness|ko_mmlu_professional_medicine|5": {"acc": 0.36764705882352944, "acc_stderr": 0.029289413409403192, "acc_norm": 0.36764705882352944, "acc_norm_stderr": 0.029289413409403192},
        "harness|ko_mmlu_security_studies|5": {"acc": 0.6326530612244898, "acc_stderr": 0.030862144921087565, "acc_norm": 0.6326530612244898, "acc_norm_stderr": 0.030862144921087565},
        "harness|ko_mmlu_high_school_world_history|5": {"acc": 0.6919831223628692, "acc_stderr": 0.03005238933560569, "acc_norm": 0.6919831223628692, "acc_norm_stderr": 0.03005238933560569},
        "harness|ko_mmlu_professional_law|5": {"acc": 0.3546284224250326, "acc_stderr": 0.01221857643909016, "acc_norm": 0.3546284224250326, "acc_norm_stderr": 0.01221857643909016},
        "harness|ko_mmlu_high_school_us_history|5": {"acc": 0.5196078431372549, "acc_stderr": 0.03506612560524866, "acc_norm": 0.5196078431372549, "acc_norm_stderr": 0.03506612560524866},
        "harness|ko_mmlu_high_school_european_history|5": {"acc": 0.5696969696969697, "acc_stderr": 0.03866225962879077, "acc_norm": 0.5696969696969697, "acc_norm_stderr": 0.03866225962879077},
        "harness|ko_truthfulqa_mc|0": {"mc1": 0.3378212974296206, "mc1_stderr": 0.016557167322516896, "mc2": 0.5219156106272662, "mc2_stderr": 0.015613264148505234},
        "harness|ko_commongen_v2|2": {"acc": 0.45218417945690675, "acc_stderr": 0.017111567130916785, "acc_norm": 0.46162927981109797, "acc_norm_stderr": 0.01713966022184556}
    },
    "versions": {
        "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "StatPan/mistral7b-bartending-recipe-v1", "model_sha": "5fc07b540bbec555260205e3a9005f55806703da", "model_dtype": "torch.float16", "lighteval_sha": "",
        "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null
    }
}
StatPan/singung-dpo-v0.1-2200/result_2023-12-26 13:29:32.json DELETED
@@ -1,444 +0,0 @@
{
    "results": {
        "harness|ko_arc_challenge|25": {"acc": 0.3703071672354949, "acc_stderr": 0.014111298751674948, "acc_norm": 0.41552901023890787, "acc_norm_stderr": 0.0144013666412164},
        "harness|ko_hellaswag|10": {"acc": 0.37950607448715395, "acc_stderr": 0.004842723234022034, "acc_norm": 0.48207528380800635, "acc_norm_stderr": 0.004986573992451693},
        "harness|ko_mmlu_world_religions|5": {"acc": 0.4502923976608187, "acc_stderr": 0.03815827365913236, "acc_norm": 0.4502923976608187, "acc_norm_stderr": 0.03815827365913236},
        "harness|ko_mmlu_management|5": {"acc": 0.4854368932038835, "acc_stderr": 0.04948637324026637, "acc_norm": 0.4854368932038835, "acc_norm_stderr": 0.04948637324026637},
        "harness|ko_mmlu_miscellaneous|5": {"acc": 0.4521072796934866, "acc_stderr": 0.017797751493865623, "acc_norm": 0.4521072796934866, "acc_norm_stderr": 0.017797751493865623},
        "harness|ko_mmlu_anatomy|5": {"acc": 0.362962962962963, "acc_stderr": 0.04153948404742398, "acc_norm": 0.362962962962963, "acc_norm_stderr": 0.04153948404742398},
        "harness|ko_mmlu_abstract_algebra|5": {"acc": 0.28, "acc_stderr": 0.045126085985421276, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421276},
        "harness|ko_mmlu_conceptual_physics|5": {"acc": 0.3404255319148936, "acc_stderr": 0.03097669299853443, "acc_norm": 0.3404255319148936, "acc_norm_stderr": 0.03097669299853443},
        "harness|ko_mmlu_virology|5": {"acc": 0.3674698795180723, "acc_stderr": 0.03753267402120574, "acc_norm": 0.3674698795180723, "acc_norm_stderr": 0.03753267402120574},
        "harness|ko_mmlu_philosophy|5": {"acc": 0.43729903536977494, "acc_stderr": 0.028173917761762878, "acc_norm": 0.43729903536977494, "acc_norm_stderr": 0.028173917761762878},
        "harness|ko_mmlu_human_aging|5": {"acc": 0.47533632286995514, "acc_stderr": 0.033516951676526276, "acc_norm": 0.47533632286995514, "acc_norm_stderr": 0.033516951676526276},
        "harness|ko_mmlu_human_sexuality|5": {"acc": 0.37404580152671757, "acc_stderr": 0.042438692422305246, "acc_norm": 0.37404580152671757, "acc_norm_stderr": 0.042438692422305246},
        "harness|ko_mmlu_medical_genetics|5": {"acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025},
        "harness|ko_mmlu_high_school_geography|5": {"acc": 0.5353535353535354, "acc_stderr": 0.035534363688280626, "acc_norm": 0.5353535353535354, "acc_norm_stderr": 0.035534363688280626},
        "harness|ko_mmlu_electrical_engineering|5": {"acc": 0.4, "acc_stderr": 0.040824829046386284, "acc_norm": 0.4, "acc_norm_stderr": 0.040824829046386284},
        "harness|ko_mmlu_college_physics|5": {"acc": 0.13725490196078433, "acc_stderr": 0.03424084669891521, "acc_norm": 0.13725490196078433, "acc_norm_stderr": 0.03424084669891521},
        "harness|ko_mmlu_high_school_microeconomics|5": {"acc": 0.42857142857142855, "acc_stderr": 0.03214536859788639, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.03214536859788639},
        "harness|ko_mmlu_high_school_macroeconomics|5": {"acc": 0.4256410256410256, "acc_stderr": 0.025069094387296546, "acc_norm": 0.4256410256410256, "acc_norm_stderr": 0.025069094387296546},
        "harness|ko_mmlu_computer_security|5": {"acc": 0.52, "acc_stderr": 0.05021167315686779, "acc_norm": 0.52, "acc_norm_stderr": 0.05021167315686779},
        "harness|ko_mmlu_global_facts|5": {"acc": 0.32, "acc_stderr": 0.04688261722621505, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621505},
        "harness|ko_mmlu_jurisprudence|5": {"acc": 0.5277777777777778, "acc_stderr": 0.04826217294139894, "acc_norm": 0.5277777777777778, "acc_norm_stderr": 0.04826217294139894},
        "harness|ko_mmlu_high_school_chemistry|5": {"acc": 0.3842364532019704, "acc_stderr": 0.03422398565657551, "acc_norm": 0.3842364532019704, "acc_norm_stderr": 0.03422398565657551},
        "harness|ko_mmlu_high_school_biology|5": {"acc": 0.4096774193548387, "acc_stderr": 0.027976054915347357, "acc_norm": 0.4096774193548387, "acc_norm_stderr": 0.027976054915347357},
        "harness|ko_mmlu_marketing|5": {"acc": 0.7008547008547008, "acc_stderr": 0.02999695185834948, "acc_norm": 0.7008547008547008, "acc_norm_stderr": 0.02999695185834948},
        "harness|ko_mmlu_clinical_knowledge|5": {"acc": 0.4679245283018868, "acc_stderr": 0.030709486992556538, "acc_norm": 0.4679245283018868, "acc_norm_stderr": 0.030709486992556538},
        "harness|ko_mmlu_public_relations|5": {"acc": 0.4636363636363636, "acc_stderr": 0.047764491623961985, "acc_norm": 0.4636363636363636, "acc_norm_stderr": 0.047764491623961985},
        "harness|ko_mmlu_high_school_mathematics|5": {"acc": 0.35185185185185186, "acc_stderr": 0.029116617606083015, "acc_norm": 0.35185185185185186, "acc_norm_stderr": 0.029116617606083015},
        "harness|ko_mmlu_high_school_physics|5": {"acc": 0.23178807947019867, "acc_stderr": 0.03445406271987053, "acc_norm": 0.23178807947019867, "acc_norm_stderr": 0.03445406271987053},
        "harness|ko_mmlu_sociology|5": {"acc": 0.4925373134328358, "acc_stderr": 0.035351400842767194, "acc_norm": 0.4925373134328358, "acc_norm_stderr": 0.035351400842767194},
        "harness|ko_mmlu_college_medicine|5": {"acc": 0.3699421965317919, "acc_stderr": 0.03681229633394319, "acc_norm": 0.3699421965317919, "acc_norm_stderr": 0.03681229633394319},
        "harness|ko_mmlu_elementary_mathematics|5": {"acc": 0.34656084656084657, "acc_stderr": 0.02450877752102841, "acc_norm": 0.34656084656084657, "acc_norm_stderr": 0.02450877752102841},
        "harness|ko_mmlu_college_biology|5": {"acc": 0.3541666666666667, "acc_stderr": 0.039994111357535424, "acc_norm": 0.3541666666666667, "acc_norm_stderr": 0.039994111357535424},
        "harness|ko_mmlu_college_chemistry|5": {"acc": 0.27, "acc_stderr": 0.044619604333847394, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394},
        "harness|ko_mmlu_us_foreign_policy|5": {"acc": 0.58, "acc_stderr": 0.04960449637488584, "acc_norm": 0.58, "acc_norm_stderr": 0.04960449637488584},
        "harness|ko_mmlu_moral_disputes|5": {"acc": 0.4797687861271676, "acc_stderr": 0.026897049996382875, "acc_norm": 0.4797687861271676, "acc_norm_stderr": 0.026897049996382875},
        "harness|ko_mmlu_logical_fallacies|5": {"acc": 0.44171779141104295, "acc_stderr": 0.03901591825836184, "acc_norm": 0.44171779141104295, "acc_norm_stderr": 0.03901591825836184},
        "harness|ko_mmlu_prehistory|5": {"acc": 0.41358024691358025, "acc_stderr": 0.027402042040269952, "acc_norm": 0.41358024691358025, "acc_norm_stderr": 0.027402042040269952},
        "harness|ko_mmlu_college_mathematics|5": {"acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045},
        "harness|ko_mmlu_high_school_government_and_politics|5": {"acc": 0.48704663212435234, "acc_stderr": 0.03607228061047749, "acc_norm": 0.48704663212435234, "acc_norm_stderr": 0.03607228061047749},
        "harness|ko_mmlu_econometrics|5": {"acc": 0.2631578947368421, "acc_stderr": 0.04142439719489362, "acc_norm": 0.2631578947368421, "acc_norm_stderr": 0.04142439719489362},
        "harness|ko_mmlu_high_school_psychology|5": {"acc": 0.46055045871559636, "acc_stderr": 0.02137049460999509, "acc_norm": 0.46055045871559636, "acc_norm_stderr": 0.02137049460999509},
        "harness|ko_mmlu_formal_logic|5": {"acc": 0.3492063492063492, "acc_stderr": 0.04263906892795133, "acc_norm": 0.3492063492063492, "acc_norm_stderr": 0.04263906892795133},
        "harness|ko_mmlu_nutrition|5": {"acc": 0.45098039215686275, "acc_stderr": 0.02849199358617157, "acc_norm": 0.45098039215686275, "acc_norm_stderr": 0.02849199358617157},
        "harness|ko_mmlu_business_ethics|5": {"acc": 0.45, "acc_stderr": 0.049999999999999996, "acc_norm": 0.45, "acc_norm_stderr": 0.049999999999999996},
        "harness|ko_mmlu_international_law|5": {"acc": 0.6446280991735537, "acc_stderr": 0.04369236326573981, "acc_norm": 0.6446280991735537, "acc_norm_stderr": 0.04369236326573981},
        "harness|ko_mmlu_astronomy|5": {"acc": 0.3815789473684211, "acc_stderr": 0.039531733777491945, "acc_norm": 0.3815789473684211, "acc_norm_stderr": 0.039531733777491945},
        "harness|ko_mmlu_professional_psychology|5": {"acc": 0.3741830065359477, "acc_stderr": 0.019576953122088844, "acc_norm": 0.3741830065359477, "acc_norm_stderr": 0.019576953122088844},
        "harness|ko_mmlu_professional_accounting|5": {"acc": 0.32269503546099293, "acc_stderr": 0.02788913930053478, "acc_norm": 0.32269503546099293, "acc_norm_stderr": 0.02788913930053478},
        "harness|ko_mmlu_machine_learning|5": {"acc": 0.3392857142857143, "acc_stderr": 0.04493949068613539, "acc_norm": 0.3392857142857143, "acc_norm_stderr": 0.04493949068613539},
        "harness|ko_mmlu_high_school_statistics|5": {"acc": 0.3055555555555556, "acc_stderr": 0.03141554629402544, "acc_norm": 0.3055555555555556, "acc_norm_stderr": 0.03141554629402544},
        "harness|ko_mmlu_moral_scenarios|5": {"acc": 0.25251396648044694, "acc_stderr": 0.014530330201468648, "acc_norm": 0.25251396648044694, "acc_norm_stderr": 0.014530330201468648},
        "harness|ko_mmlu_college_computer_science|5": {"acc": 0.39, "acc_stderr": 0.049020713000019756, "acc_norm": 0.39, "acc_norm_stderr": 0.049020713000019756},
        "harness|ko_mmlu_high_school_computer_science|5": {"acc": 0.54, "acc_stderr": 0.05009082659620332, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620332},
        "harness|ko_mmlu_professional_medicine|5": {"acc": 0.3713235294117647, "acc_stderr": 0.02934980313976587, "acc_norm": 0.3713235294117647, "acc_norm_stderr": 0.02934980313976587},
        "harness|ko_mmlu_security_studies|5": {"acc": 0.4204081632653061, "acc_stderr": 0.03160106993449604, "acc_norm": 0.4204081632653061, "acc_norm_stderr": 0.03160106993449604},
        "harness|ko_mmlu_high_school_world_history|5": {"acc": 0.5232067510548524, "acc_stderr": 0.032512152011410174, "acc_norm": 0.5232067510548524, "acc_norm_stderr": 0.032512152011410174},
        "harness|ko_mmlu_professional_law|5": {"acc": 0.32529335071707954, "acc_stderr": 0.01196531153657153, "acc_norm": 0.32529335071707954, "acc_norm_stderr": 0.01196531153657153},
        "harness|ko_mmlu_high_school_us_history|5": {"acc": 0.44607843137254904, "acc_stderr": 0.03488845451304974, "acc_norm": 0.44607843137254904, "acc_norm_stderr": 0.03488845451304974},
        "harness|ko_mmlu_high_school_european_history|5": {"acc": 0.44242424242424244, "acc_stderr": 0.03878372113711274, "acc_norm": 0.44242424242424244, "acc_norm_stderr": 0.03878372113711274},
        "harness|ko_truthfulqa_mc|0": {"mc1": 0.2668298653610771, "mc1_stderr": 0.015483691939237258, "mc2": 0.45912122204156075, "mc2_stderr": 0.016449709945328097},
        "harness|ko_commongen_v2|2": {"acc": 0.448642266824085, "acc_stderr": 0.017099430514725785, "acc_norm": 0.4970484061393152, "acc_norm_stderr": 0.017190054580194694}
    },
    "versions": {
        "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "StatPan/singung-dpo-v0.1-2200", "model_sha": "cb02b9bf247ef8597485d49647c8d91675609fa2", "model_dtype": "torch.float16", "lighteval_sha": "",
        "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null
    }
}
SuperPowerMz/SON_Mistral-7B-QLoRA-Peft/result_2024-04-17 02:06:08.json
DELETED
@@ -1,444 +0,0 @@
-{
-  "results": {
-    "harness|ko_arc_challenge|25": { "acc": 0.28242320819112626, "acc_stderr": 0.01315545688409722, "acc_norm": 0.33276450511945393, "acc_norm_stderr": 0.013769863046192309 },
-    "harness|ko_hellaswag|10": { "acc": 0.3316072495518821, "acc_stderr": 0.004698285350019223, "acc_norm": 0.41724756024696275, "acc_norm_stderr": 0.0049209671922553 },
-    "harness|ko_mmlu_world_religions|5": { "acc": 0.45614035087719296, "acc_stderr": 0.03820042586602967, "acc_norm": 0.45614035087719296, "acc_norm_stderr": 0.03820042586602967 },
-    "harness|ko_mmlu_management|5": { "acc": 0.5048543689320388, "acc_stderr": 0.04950504382128921, "acc_norm": 0.5048543689320388, "acc_norm_stderr": 0.04950504382128921 },
-    "harness|ko_mmlu_miscellaneous|5": { "acc": 0.388250319284802, "acc_stderr": 0.017427673295544354, "acc_norm": 0.388250319284802, "acc_norm_stderr": 0.017427673295544354 },
-    "harness|ko_mmlu_anatomy|5": { "acc": 0.3111111111111111, "acc_stderr": 0.03999262876617723, "acc_norm": 0.3111111111111111, "acc_norm_stderr": 0.03999262876617723 },
-    "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 },
-    "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.3446808510638298, "acc_stderr": 0.03106898596312215, "acc_norm": 0.3446808510638298, "acc_norm_stderr": 0.03106898596312215 },
-    "harness|ko_mmlu_virology|5": { "acc": 0.3674698795180723, "acc_stderr": 0.03753267402120574, "acc_norm": 0.3674698795180723, "acc_norm_stderr": 0.03753267402120574 },
-    "harness|ko_mmlu_philosophy|5": { "acc": 0.43086816720257237, "acc_stderr": 0.02812534098397271, "acc_norm": 0.43086816720257237, "acc_norm_stderr": 0.02812534098397271 },
-    "harness|ko_mmlu_human_aging|5": { "acc": 0.33183856502242154, "acc_stderr": 0.03160295143776679, "acc_norm": 0.33183856502242154, "acc_norm_stderr": 0.03160295143776679 },
-    "harness|ko_mmlu_human_sexuality|5": { "acc": 0.45038167938931295, "acc_stderr": 0.04363643698524779, "acc_norm": 0.45038167938931295, "acc_norm_stderr": 0.04363643698524779 },
-    "harness|ko_mmlu_medical_genetics|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 },
-    "harness|ko_mmlu_high_school_geography|5": { "acc": 0.45454545454545453, "acc_stderr": 0.03547601494006936, "acc_norm": 0.45454545454545453, "acc_norm_stderr": 0.03547601494006936 },
-    "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.3586206896551724, "acc_stderr": 0.039966295748767186, "acc_norm": 0.3586206896551724, "acc_norm_stderr": 0.039966295748767186 },
-    "harness|ko_mmlu_college_physics|5": { "acc": 0.28431372549019607, "acc_stderr": 0.04488482852329017, "acc_norm": 0.28431372549019607, "acc_norm_stderr": 0.04488482852329017 },
-    "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.4369747899159664, "acc_stderr": 0.03221943636566196, "acc_norm": 0.4369747899159664, "acc_norm_stderr": 0.03221943636566196 },
-    "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.4, "acc_stderr": 0.02483881198803316, "acc_norm": 0.4, "acc_norm_stderr": 0.02483881198803316 },
-    "harness|ko_mmlu_computer_security|5": { "acc": 0.57, "acc_stderr": 0.049756985195624284, "acc_norm": 0.57, "acc_norm_stderr": 0.049756985195624284 },
-    "harness|ko_mmlu_global_facts|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 },
-    "harness|ko_mmlu_jurisprudence|5": { "acc": 0.42592592592592593, "acc_stderr": 0.0478034362693679, "acc_norm": 0.42592592592592593, "acc_norm_stderr": 0.0478034362693679 },
-    "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.3793103448275862, "acc_stderr": 0.034139638059062345, "acc_norm": 0.3793103448275862, "acc_norm_stderr": 0.034139638059062345 },
-    "harness|ko_mmlu_high_school_biology|5": { "acc": 0.36129032258064514, "acc_stderr": 0.02732754844795754, "acc_norm": 0.36129032258064514, "acc_norm_stderr": 0.02732754844795754 },
-    "harness|ko_mmlu_marketing|5": { "acc": 0.6623931623931624, "acc_stderr": 0.030980296992618558, "acc_norm": 0.6623931623931624, "acc_norm_stderr": 0.030980296992618558 },
-    "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.3471698113207547, "acc_stderr": 0.029300101705549655, "acc_norm": 0.3471698113207547, "acc_norm_stderr": 0.029300101705549655 },
-    "harness|ko_mmlu_public_relations|5": { "acc": 0.45454545454545453, "acc_stderr": 0.04769300568972743, "acc_norm": 0.45454545454545453, "acc_norm_stderr": 0.04769300568972743 },
-    "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.28888888888888886, "acc_stderr": 0.027634907264178544, "acc_norm": 0.28888888888888886, "acc_norm_stderr": 0.027634907264178544 },
-    "harness|ko_mmlu_high_school_physics|5": { "acc": 0.2582781456953642, "acc_stderr": 0.035737053147634576, "acc_norm": 0.2582781456953642, "acc_norm_stderr": 0.035737053147634576 },
-    "harness|ko_mmlu_sociology|5": { "acc": 0.5074626865671642, "acc_stderr": 0.03535140084276719, "acc_norm": 0.5074626865671642, "acc_norm_stderr": 0.03535140084276719 },
-    "harness|ko_mmlu_college_medicine|5": { "acc": 0.3352601156069364, "acc_stderr": 0.03599586301247078, "acc_norm": 0.3352601156069364, "acc_norm_stderr": 0.03599586301247078 },
-    "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.3253968253968254, "acc_stderr": 0.024130158299762613, "acc_norm": 0.3253968253968254, "acc_norm_stderr": 0.024130158299762613 },
-    "harness|ko_mmlu_college_biology|5": { "acc": 0.2986111111111111, "acc_stderr": 0.03827052357950756, "acc_norm": 0.2986111111111111, "acc_norm_stderr": 0.03827052357950756 },
-    "harness|ko_mmlu_college_chemistry|5": { "acc": 0.43, "acc_stderr": 0.049756985195624284, "acc_norm": 0.43, "acc_norm_stderr": 0.049756985195624284 },
-    "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.56, "acc_stderr": 0.049888765156985884, "acc_norm": 0.56, "acc_norm_stderr": 0.049888765156985884 },
-    "harness|ko_mmlu_moral_disputes|5": { "acc": 0.4190751445086705, "acc_stderr": 0.026564178111422622, "acc_norm": 0.4190751445086705, "acc_norm_stderr": 0.026564178111422622 },
-    "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.3803680981595092, "acc_stderr": 0.03814269893261837, "acc_norm": 0.3803680981595092, "acc_norm_stderr": 0.03814269893261837 },
-    "harness|ko_mmlu_prehistory|5": { "acc": 0.39814814814814814, "acc_stderr": 0.02723741509459247, "acc_norm": 0.39814814814814814, "acc_norm_stderr": 0.02723741509459247 },
-    "harness|ko_mmlu_college_mathematics|5": { "acc": 0.33, "acc_stderr": 0.04725815626252606, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252606 },
-    "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.46632124352331605, "acc_stderr": 0.036002440698671784, "acc_norm": 0.46632124352331605, "acc_norm_stderr": 0.036002440698671784 },
-    "harness|ko_mmlu_econometrics|5": { "acc": 0.2719298245614035, "acc_stderr": 0.04185774424022057, "acc_norm": 0.2719298245614035, "acc_norm_stderr": 0.04185774424022057 },
-    "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.44220183486238535, "acc_stderr": 0.02129361320752021, "acc_norm": 0.44220183486238535, "acc_norm_stderr": 0.02129361320752021 },
-    "harness|ko_mmlu_formal_logic|5": { "acc": 0.35714285714285715, "acc_stderr": 0.042857142857142816, "acc_norm": 0.35714285714285715, "acc_norm_stderr": 0.042857142857142816 },
-    "harness|ko_mmlu_nutrition|5": { "acc": 0.46078431372549017, "acc_stderr": 0.028541722692618874, "acc_norm": 0.46078431372549017, "acc_norm_stderr": 0.028541722692618874 },
-    "harness|ko_mmlu_business_ethics|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 },
-    "harness|ko_mmlu_international_law|5": { "acc": 0.5619834710743802, "acc_stderr": 0.045291468044357915, "acc_norm": 0.5619834710743802, "acc_norm_stderr": 0.045291468044357915 },
-    "harness|ko_mmlu_astronomy|5": { "acc": 0.3355263157894737, "acc_stderr": 0.038424985593952694, "acc_norm": 0.3355263157894737, "acc_norm_stderr": 0.038424985593952694 },
-    "harness|ko_mmlu_professional_psychology|5": { "acc": 0.35947712418300654, "acc_stderr": 0.019412539242032165, "acc_norm": 0.35947712418300654, "acc_norm_stderr": 0.019412539242032165 },
-    "harness|ko_mmlu_professional_accounting|5": { "acc": 0.32978723404255317, "acc_stderr": 0.028045946942042398, "acc_norm": 0.32978723404255317, "acc_norm_stderr": 0.028045946942042398 },
-    "harness|ko_mmlu_machine_learning|5": { "acc": 0.2857142857142857, "acc_stderr": 0.04287858751340456, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.04287858751340456 },
-    "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.4212962962962963, "acc_stderr": 0.03367462138896078, "acc_norm": 0.4212962962962963, "acc_norm_stderr": 0.03367462138896078 },
-    "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2860335195530726, "acc_stderr": 0.015113972129062132, "acc_norm": 0.2860335195530726, "acc_norm_stderr": 0.015113972129062132 },
-    "harness|ko_mmlu_college_computer_science|5": { "acc": 0.41, "acc_stderr": 0.04943110704237102, "acc_norm": 0.41, "acc_norm_stderr": 0.04943110704237102 },
-    "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.49, "acc_stderr": 0.05024183937956912, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956912 },
-    "harness|ko_mmlu_professional_medicine|5": { "acc": 0.41544117647058826, "acc_stderr": 0.029935342707877746, "acc_norm": 0.41544117647058826, "acc_norm_stderr": 0.029935342707877746 },
-    "harness|ko_mmlu_security_studies|5": { "acc": 0.4, "acc_stderr": 0.031362502409358915, "acc_norm": 0.4, "acc_norm_stderr": 0.031362502409358915 },
-    "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.4092827004219409, "acc_stderr": 0.032007041833595914, "acc_norm": 0.4092827004219409, "acc_norm_stderr": 0.032007041833595914 },
-    "harness|ko_mmlu_professional_law|5": { "acc": 0.29595827900912647, "acc_stderr": 0.011658518525277047, "acc_norm": 0.29595827900912647, "acc_norm_stderr": 0.011658518525277047 },
-    "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.2549019607843137, "acc_stderr": 0.030587591351604246, "acc_norm": 0.2549019607843137, "acc_norm_stderr": 0.030587591351604246 },
-    "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.21818181818181817, "acc_stderr": 0.03225078108306289, "acc_norm": 0.21818181818181817, "acc_norm_stderr": 0.03225078108306289 },
-    "harness|ko_truthfulqa_mc|0": { "mc1": 0.2521419828641371, "mc1_stderr": 0.015201522246299953, "mc2": 0.4160580138975093, "mc2_stderr": 0.015283612333533092 },
-    "harness|ko_commongen_v2|2": { "acc": 0.3695395513577332, "acc_stderr": 0.01659488340568542, "acc_norm": 0.4297520661157025, "acc_norm_stderr": 0.01701984753597221 }
-  },
-  "versions": {
-    "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0,
-    "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1,
-    "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1,
-    "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1,
-    "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1,
-    "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1,
-    "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1,
-    "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1,
-    "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1,
-    "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1,
-    "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1,
-    "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1,
-    "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1,
-    "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1,
-    "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1,
-    "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1,
-    "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1,
-    "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1,
-    "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1,
-    "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1,
-    "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1
-  },
-  "config_general": {
-    "model_name": "SuperPowerMz/SON_Mistral-7B-QLoRA-Peft", "model_sha": "f5332637596dc9b925087cfb5cbfda05a6b58b85",
-    "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1,
-    "override_batch_size": 1, "max_samples": null
-  }
-}
Surromind/Solar_v0.1/result_2024-03-28 23:58:36.json
DELETED
@@ -1,444 +0,0 @@
-{
-  "results": {
-    "harness|ko_arc_challenge|25": { "acc": 0.44368600682593856, "acc_stderr": 0.014518421825670444, "acc_norm": 0.507679180887372, "acc_norm_stderr": 0.014609667440892577 },
-    "harness|ko_hellaswag|10": { "acc": 0.434973112925712, "acc_stderr": 0.004947402907996247, "acc_norm": 0.5925114519020116, "acc_norm_stderr": 0.004903628887264533 },
-    "harness|ko_mmlu_world_religions|5": { "acc": 0.6666666666666666, "acc_stderr": 0.036155076303109344, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.036155076303109344 },
-    "harness|ko_mmlu_management|5": { "acc": 0.7669902912621359, "acc_stderr": 0.041858325989283164, "acc_norm": 0.7669902912621359, "acc_norm_stderr": 0.041858325989283164 },
-    "harness|ko_mmlu_miscellaneous|5": { "acc": 0.7292464878671775, "acc_stderr": 0.01588988836256049, "acc_norm": 0.7292464878671775, "acc_norm_stderr": 0.01588988836256049 },
-    "harness|ko_mmlu_anatomy|5": { "acc": 0.4740740740740741, "acc_stderr": 0.04313531696750574, "acc_norm": 0.4740740740740741, "acc_norm_stderr": 0.04313531696750574 },
-    "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.33, "acc_stderr": 0.04725815626252604, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252604 },
-    "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.46382978723404256, "acc_stderr": 0.03260038511835772, "acc_norm": 0.46382978723404256, "acc_norm_stderr": 0.03260038511835772 },
-    "harness|ko_mmlu_virology|5": { "acc": 0.39156626506024095, "acc_stderr": 0.03799857454479636, "acc_norm": 0.39156626506024095, "acc_norm_stderr": 0.03799857454479636 },
-    "harness|ko_mmlu_philosophy|5": { "acc": 0.5852090032154341, "acc_stderr": 0.027982680459759563, "acc_norm": 0.5852090032154341, "acc_norm_stderr": 0.027982680459759563 },
-    "harness|ko_mmlu_human_aging|5": { "acc": 0.6098654708520179, "acc_stderr": 0.03273766725459156, "acc_norm": 0.6098654708520179, "acc_norm_stderr": 0.03273766725459156 },
-    "harness|ko_mmlu_human_sexuality|5": { "acc": 0.5190839694656488, "acc_stderr": 0.04382094705550988, "acc_norm": 0.5190839694656488, "acc_norm_stderr": 0.04382094705550988 },
-    "harness|ko_mmlu_medical_genetics|5": { "acc": 0.64, "acc_stderr": 0.048241815132442176, "acc_norm": 0.64, "acc_norm_stderr": 0.048241815132442176 },
-    "harness|ko_mmlu_high_school_geography|5": { "acc": 0.7626262626262627, "acc_stderr": 0.030313710538198892, "acc_norm": 0.7626262626262627, "acc_norm_stderr": 0.030313710538198892 },
-    "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.4206896551724138, "acc_stderr": 0.0411391498118926, "acc_norm": 0.4206896551724138, "acc_norm_stderr": 0.0411391498118926 },
-    "harness|ko_mmlu_college_physics|5": { "acc": 0.30392156862745096, "acc_stderr": 0.04576665403207762, "acc_norm": 0.30392156862745096, "acc_norm_stderr": 0.04576665403207762 },
-    "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.592436974789916, "acc_stderr": 0.03191863374478467, "acc_norm": 0.592436974789916, "acc_norm_stderr": 0.03191863374478467 },
-    "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.6230769230769231, "acc_stderr": 0.024570975364225995, "acc_norm": 0.6230769230769231, "acc_norm_stderr": 0.024570975364225995 },
-    "harness|ko_mmlu_computer_security|5": { "acc": 0.71, "acc_stderr": 0.04560480215720685, "acc_norm": 0.71, "acc_norm_stderr": 0.04560480215720685 },
-    "harness|ko_mmlu_global_facts|5": { "acc": 0.36, "acc_stderr": 0.048241815132442176, "acc_norm": 0.36, "acc_norm_stderr": 0.048241815132442176 },
-    "harness|ko_mmlu_jurisprudence|5": { "acc": 0.6666666666666666, "acc_stderr": 0.04557239513497751, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.04557239513497751 },
-    "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.4187192118226601, "acc_stderr": 0.034711928605184676, "acc_norm": 0.4187192118226601, "acc_norm_stderr": 0.034711928605184676 },
-    "harness|ko_mmlu_high_school_biology|5": { "acc": 0.6419354838709678, "acc_stderr": 0.02727389059430063, "acc_norm": 0.6419354838709678, "acc_norm_stderr": 0.02727389059430063 },
-    "harness|ko_mmlu_marketing|5": { "acc": 0.8205128205128205, "acc_stderr": 0.025140935950335445, "acc_norm": 0.8205128205128205, "acc_norm_stderr": 0.025140935950335445 },
-    "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.5245283018867924, "acc_stderr": 0.030735822206205615, "acc_norm": 0.5245283018867924, "acc_norm_stderr": 0.030735822206205615 },
-    "harness|ko_mmlu_public_relations|5": { "acc": 0.6090909090909091, "acc_stderr": 0.04673752333670238, "acc_norm": 0.6090909090909091, "acc_norm_stderr": 0.04673752333670238 },
-    "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.0287420409039485, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.0287420409039485 },
-    "harness|ko_mmlu_high_school_physics|5": { "acc": 0.304635761589404, "acc_stderr": 0.03757949922943342, "acc_norm": 0.304635761589404, "acc_norm_stderr": 0.03757949922943342 },
-    "harness|ko_mmlu_sociology|5": { "acc": 0.6766169154228856, "acc_stderr": 0.03307615947979035, "acc_norm": 0.6766169154228856, "acc_norm_stderr": 0.03307615947979035 },
-    "harness|ko_mmlu_college_medicine|5": { "acc": 0.47398843930635837, "acc_stderr": 0.038073017265045125, "acc_norm": 0.47398843930635837, "acc_norm_stderr": 0.038073017265045125 },
-    "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.3915343915343915, "acc_stderr": 0.025138091388851105, "acc_norm": 0.3915343915343915, "acc_norm_stderr": 0.025138091388851105 },
-    "harness|ko_mmlu_college_biology|5": { "acc": 0.5833333333333334, "acc_stderr": 0.04122728707651282, "acc_norm": 0.5833333333333334, "acc_norm_stderr": 0.04122728707651282 },
-    "harness|ko_mmlu_college_chemistry|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 },
-    "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.74, "acc_stderr": 0.0440844002276808, "acc_norm": 0.74, "acc_norm_stderr": 0.0440844002276808 },
-    "harness|ko_mmlu_moral_disputes|5": { "acc": 0.5606936416184971, "acc_stderr": 0.026720034380514995, "acc_norm": 0.5606936416184971, "acc_norm_stderr": 0.026720034380514995 },
-    "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.5950920245398773, "acc_stderr": 0.038566721635489125, "acc_norm": 0.5950920245398773, "acc_norm_stderr": 0.038566721635489125 },
-    "harness|ko_mmlu_prehistory|5": { "acc": 0.5802469135802469, "acc_stderr": 0.027460099557005135, "acc_norm": 0.5802469135802469, "acc_norm_stderr": 0.027460099557005135 },
-    "harness|ko_mmlu_college_mathematics|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 },
-    "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.7253886010362695, "acc_stderr": 0.032210245080411544, "acc_norm": 0.7253886010362695, "acc_norm_stderr": 0.032210245080411544 },
-    "harness|ko_mmlu_econometrics|5": { "acc": 0.43859649122807015, "acc_stderr": 0.04668000738510455, "acc_norm": 0.43859649122807015, "acc_norm_stderr": 0.04668000738510455 },
-    "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.7137614678899082, "acc_stderr": 0.01937943662891996, "acc_norm": 0.7137614678899082, "acc_norm_stderr": 0.01937943662891996 },
-    "harness|ko_mmlu_formal_logic|5": { "acc": 0.3968253968253968, "acc_stderr": 0.04375888492727061, "acc_norm": 0.3968253968253968, "acc_norm_stderr": 0.04375888492727061 },
-    "harness|ko_mmlu_nutrition|5": { "acc": 0.5784313725490197, "acc_stderr": 0.02827549015679146, "acc_norm": 0.5784313725490197, "acc_norm_stderr": 0.02827549015679146 },
-    "harness|ko_mmlu_business_ethics|5": { "acc": 0.62, "acc_stderr": 0.048783173121456316, "acc_norm": 0.62, "acc_norm_stderr": 0.048783173121456316 },
-    "harness|ko_mmlu_international_law|5": { "acc": 0.7355371900826446, "acc_stderr": 0.04026187527591207, "acc_norm": 0.7355371900826446, "acc_norm_stderr": 0.04026187527591207 },
-    "harness|ko_mmlu_astronomy|5": { "acc": 0.6381578947368421, "acc_stderr": 0.03910525752849724, "acc_norm": 0.6381578947368421, "acc_norm_stderr": 0.03910525752849724 },
-    "harness|ko_mmlu_professional_psychology|5": { "acc": 0.5408496732026143, "acc_stderr": 0.020160213617222516, "acc_norm": 0.5408496732026143, "acc_norm_stderr": 0.020160213617222516 },
-    "harness|ko_mmlu_professional_accounting|5": { "acc": 0.3546099290780142, "acc_stderr": 0.028538650028878634, "acc_norm": 0.3546099290780142, "acc_norm_stderr": 0.028538650028878634 },
-    "harness|ko_mmlu_machine_learning|5": { "acc": 0.375, "acc_stderr": 0.04595091388086298, "acc_norm": 0.375, "acc_norm_stderr": 0.04595091388086298 },
-    "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.4212962962962963, "acc_stderr": 0.033674621388960775, "acc_norm": 0.4212962962962963, "acc_norm_stderr": 0.033674621388960775 },
-    "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.22681564245810057, "acc_stderr": 0.014005843570897882, "acc_norm": 0.22681564245810057, "acc_norm_stderr": 0.014005843570897882 },
-    "harness|ko_mmlu_college_computer_science|5": { "acc": 0.45, "acc_stderr": 0.04999999999999999, "acc_norm": 0.45, "acc_norm_stderr": 0.04999999999999999 },
-    "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.64, "acc_stderr": 0.04824181513244218, "acc_norm": 0.64, "acc_norm_stderr": 0.04824181513244218 },
-    "harness|ko_mmlu_professional_medicine|5": { "acc": 0.4632352941176471, "acc_stderr": 0.03029061918048569, "acc_norm": 0.4632352941176471, "acc_norm_stderr": 0.03029061918048569 },
-    "harness|ko_mmlu_security_studies|5": { "acc": 0.4816326530612245, "acc_stderr": 0.031987615467631264, "acc_norm": 0.4816326530612245, "acc_norm_stderr": 0.031987615467631264 },
-    "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.7510548523206751, "acc_stderr": 0.028146970599422644, "acc_norm": 0.7510548523206751, "acc_norm_stderr": 0.028146970599422644 },
-    "harness|ko_mmlu_professional_law|5": { "acc": 0.38070404172099087, "acc_stderr": 0.012401430654645882, "acc_norm": 0.38070404172099087, "acc_norm_stderr": 0.012401430654645882 },
-    "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.6470588235294118, "acc_stderr": 0.03354092437591518, "acc_norm": 0.6470588235294118, "acc_norm_stderr": 0.03354092437591518 },
-    "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.6484848484848484, "acc_stderr": 0.037282069986826503, "acc_norm": 0.6484848484848484, "acc_norm_stderr": 0.037282069986826503 },
-    "harness|ko_truthfulqa_mc|0": { "mc1": 0.27050183598531213, "mc1_stderr": 0.015550778332842892, "mc2": 0.4263566669862956, "mc2_stderr": 0.014879291441628228 },
-    "harness|ko_commongen_v2|2": { "acc": 0.5289256198347108, "acc_stderr": 0.017161563949916345, "acc_norm": 0.5489964580873672, "acc_norm_stderr": 0.017107618859549346 }
-  },
-  "versions": {
-    "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0,
-    "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1,
-    "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1,
-    "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1,
-    "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1,
-    "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1,
-    "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1,
-    "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1,
-    "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1,
-    "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1,
-    "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1,
-    "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1,
-    "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1,
-    "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1,
-    "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1,
-    "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1,
-    "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1,
-    "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1,
-    "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1,
-    "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1,
-    "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1
-  },
-  "config_general": {
-    "model_name": "Surromind/Solar_v0.1", "model_sha": "cf07c936b24f06ee31148e5dfc3b2f755b42c64a",
-    "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1,
-    "override_batch_size": 1, "max_samples": null
-  }
-}
Surromind/gemma-2b-v0.1/result_2024-03-29 02:10:12.json
DELETED
@@ -1,444 +0,0 @@
-{
-  "results": {
-    "harness|ko_arc_challenge|25": { "acc": 0.25, "acc_stderr": 0.012653835621466646, "acc_norm": 0.30119453924914674, "acc_norm_stderr": 0.013406741767847629 },
-    "harness|ko_hellaswag|10": { "acc": 0.30860386377215693, "acc_stderr": 0.004609731925736885, "acc_norm": 0.3572993427604063, "acc_norm_stderr": 0.0047822469311949965 },
-    "harness|ko_mmlu_world_religions|5": { "acc": 0.2982456140350877, "acc_stderr": 0.03508771929824564, "acc_norm": 0.2982456140350877, "acc_norm_stderr": 0.03508771929824564 },
-    "harness|ko_mmlu_management|5": { "acc": 0.34951456310679613, "acc_stderr": 0.04721188506097172, "acc_norm": 0.34951456310679613, "acc_norm_stderr": 0.04721188506097172 },
-    "harness|ko_mmlu_miscellaneous|5": { "acc": 0.34610472541507026, "acc_stderr": 0.017011965266412066, "acc_norm": 0.34610472541507026, "acc_norm_stderr": 0.017011965266412066 },
-    "harness|ko_mmlu_anatomy|5": { "acc": 0.37037037037037035, "acc_stderr": 0.04171654161354543, "acc_norm": 0.37037037037037035, "acc_norm_stderr": 0.04171654161354543 },
-    "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.28, "acc_stderr": 0.045126085985421276, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421276 },
-    "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.3617021276595745, "acc_stderr": 0.03141082197596239, "acc_norm": 0.3617021276595745, "acc_norm_stderr": 0.03141082197596239 },
-    "harness|ko_mmlu_virology|5": { "acc": 0.21686746987951808, "acc_stderr": 0.03208284450356365, "acc_norm": 0.21686746987951808, "acc_norm_stderr": 0.03208284450356365 },
-    "harness|ko_mmlu_philosophy|5": { "acc": 0.33440514469453375, "acc_stderr": 0.026795422327893944, "acc_norm": 0.33440514469453375, "acc_norm_stderr": 0.026795422327893944 },
-    "harness|ko_mmlu_human_aging|5": { "acc": 0.3452914798206278, "acc_stderr": 0.031911001928357954, "acc_norm": 0.3452914798206278, "acc_norm_stderr": 0.031911001928357954 },
-    "harness|ko_mmlu_human_sexuality|5": { "acc": 0.33587786259541985, "acc_stderr": 0.04142313771996664, "acc_norm": 0.33587786259541985, "acc_norm_stderr": 0.04142313771996664 },
-    "harness|ko_mmlu_medical_genetics|5": { "acc": 0.35, "acc_stderr": 0.04793724854411019, "acc_norm": 0.35, "acc_norm_stderr": 0.04793724854411019 },
-    "harness|ko_mmlu_high_school_geography|5": { "acc": 0.3181818181818182, "acc_stderr": 0.03318477333845331, "acc_norm": 0.3181818181818182, "acc_norm_stderr": 0.03318477333845331 },
-    "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.3103448275862069, "acc_stderr": 0.038552896163789485, "acc_norm": 0.3103448275862069, "acc_norm_stderr": 0.038552896163789485 },
-    "harness|ko_mmlu_college_physics|5": { "acc": 0.11764705882352941, "acc_stderr": 0.03205907733144528, "acc_norm": 0.11764705882352941, "acc_norm_stderr": 0.03205907733144528 },
-    "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.3445378151260504, "acc_stderr": 0.03086868260412163, "acc_norm": 0.3445378151260504, "acc_norm_stderr": 0.03086868260412163 },
-    "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.28974358974358977, "acc_stderr": 0.023000628243687943, "acc_norm": 0.28974358974358977, "acc_norm_stderr": 0.023000628243687943 },
-    "harness|ko_mmlu_computer_security|5": { "acc": 0.47, "acc_stderr": 0.050161355804659205, "acc_norm": 0.47, "acc_norm_stderr": 0.050161355804659205 },
-    "harness|ko_mmlu_global_facts|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 },
-    "harness|ko_mmlu_jurisprudence|5": { "acc": 0.3888888888888889, "acc_stderr": 0.0471282125742677, "acc_norm": 0.3888888888888889, "acc_norm_stderr": 0.0471282125742677 },
-    "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.3054187192118227, "acc_stderr": 0.03240661565868408, "acc_norm": 0.3054187192118227, "acc_norm_stderr": 0.03240661565868408 },
-    "harness|ko_mmlu_high_school_biology|5": { "acc": 0.35161290322580646, "acc_stderr": 0.027162537826948458, "acc_norm": 0.35161290322580646, "acc_norm_stderr": 0.027162537826948458 },
-    "harness|ko_mmlu_marketing|5": { "acc": 0.5341880341880342, "acc_stderr": 0.03267942734081228, "acc_norm": 0.5341880341880342, "acc_norm_stderr": 0.03267942734081228 },
-    "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.3433962264150943, "acc_stderr": 0.029224526469124792, "acc_norm": 0.3433962264150943, "acc_norm_stderr": 0.029224526469124792 },
-    "harness|ko_mmlu_public_relations|5": { "acc": 0.39090909090909093, "acc_stderr": 0.04673752333670237, "acc_norm": 0.39090909090909093, "acc_norm_stderr": 0.04673752333670237 },
-    "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.24444444444444444, "acc_stderr": 0.026202766534652148, "acc_norm": 0.24444444444444444, "acc_norm_stderr": 0.026202766534652148 },
-    "harness|ko_mmlu_high_school_physics|5": { "acc": 0.23841059602649006, "acc_stderr": 0.0347918557259966, "acc_norm": 0.23841059602649006, "acc_norm_stderr": 0.0347918557259966 },
-    "harness|ko_mmlu_sociology|5": { "acc": 0.3781094527363184, "acc_stderr": 0.034288678487786564, "acc_norm": 0.3781094527363184, "acc_norm_stderr": 0.034288678487786564 },
-    "harness|ko_mmlu_college_medicine|5": { "acc": 0.28901734104046245, "acc_stderr": 0.03456425745087, "acc_norm": 0.28901734104046245, "acc_norm_stderr": 0.03456425745087 },
-    "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.29894179894179895, "acc_stderr": 0.023577604791655795, "acc_norm": 0.29894179894179895, "acc_norm_stderr": 0.023577604791655795 },
-    "harness|ko_mmlu_college_biology|5": { "acc": 0.2638888888888889, "acc_stderr": 0.03685651095897532, "acc_norm": 0.2638888888888889, "acc_norm_stderr": 0.03685651095897532 },
-    "harness|ko_mmlu_college_chemistry|5": { "acc": 0.23, "acc_stderr": 0.04229525846816506, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816506 },
-    "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 },
-    "harness|ko_mmlu_moral_disputes|5": { "acc": 0.2976878612716763, "acc_stderr": 0.024617055388677003, "acc_norm": 0.2976878612716763, "acc_norm_stderr": 0.024617055388677003 },
-    "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.34355828220858897, "acc_stderr": 0.03731133519673894, "acc_norm": 0.34355828220858897, "acc_norm_stderr": 0.03731133519673894 },
-    "harness|ko_mmlu_prehistory|5": { "acc": 0.2962962962962963, "acc_stderr": 0.02540719779889016, "acc_norm": 0.2962962962962963, "acc_norm_stderr": 0.02540719779889016 },
-    "harness|ko_mmlu_college_mathematics|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 },
-    "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.30569948186528495, "acc_stderr": 0.033248379397581594, "acc_norm": 0.30569948186528495, "acc_norm_stderr": 0.033248379397581594 },
-    "harness|ko_mmlu_econometrics|5": { "acc": 0.3508771929824561, "acc_stderr": 0.044895393502706986, "acc_norm": 0.3508771929824561, "acc_norm_stderr": 0.044895393502706986 },
-    "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.3743119266055046, "acc_stderr": 0.020748959408988306, "acc_norm": 0.3743119266055046, "acc_norm_stderr": 0.020748959408988306 },
-    "harness|ko_mmlu_formal_logic|5": { "acc": 0.2698412698412698, "acc_stderr": 0.039701582732351734, "acc_norm": 0.2698412698412698, "acc_norm_stderr": 0.039701582732351734 },
-    "harness|ko_mmlu_nutrition|5": { "acc": 0.3660130718954248, "acc_stderr": 0.02758281141515961, "acc_norm": 0.3660130718954248, "acc_norm_stderr": 0.02758281141515961 },
-    "harness|ko_mmlu_business_ethics|5": { "acc": 0.4, "acc_stderr": 0.049236596391733084, "acc_norm": 0.4, "acc_norm_stderr": 0.049236596391733084 },
-    "harness|ko_mmlu_international_law|5": { "acc": 0.5041322314049587, "acc_stderr": 0.04564198767432754, "acc_norm": 0.5041322314049587, "acc_norm_stderr": 0.04564198767432754 },
-    "harness|ko_mmlu_astronomy|5": { "acc": 0.26973684210526316, "acc_stderr": 0.036117805602848975, "acc_norm": 0.26973684210526316, "acc_norm_stderr": 0.036117805602848975 },
-    "harness|ko_mmlu_professional_psychology|5": { "acc": 0.3137254901960784, "acc_stderr": 0.018771683893528176, "acc_norm": 0.3137254901960784, "acc_norm_stderr": 0.018771683893528176 },
-    "harness|ko_mmlu_professional_accounting|5": { "acc": 0.2978723404255319, "acc_stderr": 0.02728160834446941, "acc_norm": 0.2978723404255319, "acc_norm_stderr": 0.02728160834446941 },
-    "harness|ko_mmlu_machine_learning|5": { "acc": 0.39285714285714285, "acc_stderr": 0.046355501356099754, "acc_norm": 0.39285714285714285, "acc_norm_stderr": 0.046355501356099754 },
-    "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.2361111111111111, "acc_stderr": 0.028963702570791026, "acc_norm": 0.2361111111111111, "acc_norm_stderr": 0.028963702570791026 },
-    "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.23575418994413408, "acc_stderr": 0.014196375686290804, "acc_norm": 0.23575418994413408, "acc_norm_stderr": 0.014196375686290804 },
-    "harness|ko_mmlu_college_computer_science|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 },
-    "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 },
-    "harness|ko_mmlu_professional_medicine|5": { "acc": 0.27205882352941174, "acc_stderr": 0.027033041151681456, "acc_norm": 0.27205882352941174, "acc_norm_stderr": 0.027033041151681456 },
-    "harness|ko_mmlu_security_studies|5": { "acc": 0.2816326530612245, "acc_stderr": 0.028795185574291282, "acc_norm": 0.2816326530612245, "acc_norm_stderr": 0.028795185574291282 },
-    "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.38396624472573837, "acc_stderr": 0.031658678064106674, "acc_norm": 0.38396624472573837, "acc_norm_stderr": 0.031658678064106674 },
-    "harness|ko_mmlu_professional_law|5": { "acc": 0.29595827900912647, "acc_stderr": 0.011658518525277039, "acc_norm": 0.29595827900912647, "acc_norm_stderr": 0.011658518525277039 },
-    "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.3333333333333333, "acc_stderr": 0.03308611113236434, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.03308611113236434 },
-    "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.41818181818181815, "acc_stderr": 0.038517163193983954, "acc_norm": 0.41818181818181815, "acc_norm_stderr": 0.038517163193983954 },
-    "harness|ko_truthfulqa_mc|0": { "mc1": 0.26560587515299877, "mc1_stderr": 0.015461027627253602, "mc2": 0.42603644196671103, "mc2_stderr": 0.015812506803842018 },
-    "harness|ko_commongen_v2|2": { "acc": 0.12750885478158205, "acc_stderr": 0.011467414350410928, "acc_norm": 0.22668240850059032, "acc_norm_stderr": 0.014394701800505892 }
-  },
-  "versions": {
-    "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0,
-    "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1,
-    "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1,
-    "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1,
-    "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1,
-    "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1,
-    "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1,
-    "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1,
-    "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1,
-    "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1,
-    "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1,
-    "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1,
-    "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1,
-    "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1,
-    "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1,
-    "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1,
-    "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1,
-    "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1,
-    "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1,
-    "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1,
|
430 |
-
"harness|ko_mmlu_high_school_european_history|5": 1,
|
431 |
-
"harness|ko_truthfulqa_mc|0": 0,
|
432 |
-
"harness|ko_commongen_v2|2": 1
|
433 |
-
},
|
434 |
-
"config_general": {
|
435 |
-
"model_name": "Surromind/gemma-2b-v0.1",
|
436 |
-
"model_sha": "3f9bdfea5688f36d91d07a991741875333b0f338",
|
437 |
-
"model_dtype": "torch.float16",
|
438 |
-
"lighteval_sha": "",
|
439 |
-
"num_few_shot_default": 0,
|
440 |
-
"num_fewshot_seeds": 1,
|
441 |
-
"override_batch_size": 1,
|
442 |
-
"max_samples": null
|
443 |
-
}
|
444 |
-
}
|
T3Q-LLM/T3Q-LLM-sft1.0-dpo1.0/result_2024-04-17 12:40:55.json
DELETED
@@ -1,444 +0,0 @@
-{
-    "results": {
-        "harness|ko_arc_challenge|25": {
-            "acc": 0.5204778156996587,
-            "acc_stderr": 0.014599131353034998,
-            "acc_norm": 0.5631399317406144,
-            "acc_norm_stderr": 0.01449442158425652
-        },
-        "harness|ko_hellaswag|10": {
-            "acc": 0.49880501892053375,
-            "acc_stderr": 0.00498976716081135,
-            "acc_norm": 0.6731726747659829,
-            "acc_norm_stderr": 0.004680949283855316
-        },
-        "harness|ko_mmlu_world_religions|5": {
-            "acc": 0.6842105263157895,
-            "acc_stderr": 0.035650796707083106,
-            "acc_norm": 0.6842105263157895,
-            "acc_norm_stderr": 0.035650796707083106
-        },
-        "harness|ko_mmlu_management|5": {
-            "acc": 0.6796116504854369,
-            "acc_stderr": 0.04620284082280041,
-            "acc_norm": 0.6796116504854369,
-            "acc_norm_stderr": 0.04620284082280041
-        },
-        "harness|ko_mmlu_miscellaneous|5": {
-            "acc": 0.7394636015325671,
-            "acc_stderr": 0.015696008563807123,
-            "acc_norm": 0.7394636015325671,
-            "acc_norm_stderr": 0.015696008563807123
-        },
-        "harness|ko_mmlu_anatomy|5": {
-            "acc": 0.4962962962962963,
-            "acc_stderr": 0.04319223625811331,
-            "acc_norm": 0.4962962962962963,
-            "acc_norm_stderr": 0.04319223625811331
-        },
-        "harness|ko_mmlu_abstract_algebra|5": {
-            "acc": 0.22,
-            "acc_stderr": 0.041633319989322695,
-            "acc_norm": 0.22,
-            "acc_norm_stderr": 0.041633319989322695
-        },
-        "harness|ko_mmlu_conceptual_physics|5": {
-            "acc": 0.5319148936170213,
-            "acc_stderr": 0.03261936918467381,
-            "acc_norm": 0.5319148936170213,
-            "acc_norm_stderr": 0.03261936918467381
-        },
-        "harness|ko_mmlu_virology|5": {
-            "acc": 0.5120481927710844,
-            "acc_stderr": 0.038913644958358175,
-            "acc_norm": 0.5120481927710844,
-            "acc_norm_stderr": 0.038913644958358175
-        },
-        "harness|ko_mmlu_philosophy|5": {
-            "acc": 0.6077170418006431,
-            "acc_stderr": 0.027731258647011998,
-            "acc_norm": 0.6077170418006431,
-            "acc_norm_stderr": 0.027731258647011998
-        },
-        "harness|ko_mmlu_human_aging|5": {
-            "acc": 0.6367713004484304,
-            "acc_stderr": 0.032277904428505,
-            "acc_norm": 0.6367713004484304,
-            "acc_norm_stderr": 0.032277904428505
-        },
-        "harness|ko_mmlu_human_sexuality|5": {
-            "acc": 0.5954198473282443,
-            "acc_stderr": 0.043046937953806645,
-            "acc_norm": 0.5954198473282443,
-            "acc_norm_stderr": 0.043046937953806645
-        },
-        "harness|ko_mmlu_medical_genetics|5": {
-            "acc": 0.51,
-            "acc_stderr": 0.05024183937956911,
-            "acc_norm": 0.51,
-            "acc_norm_stderr": 0.05024183937956911
-        },
-        "harness|ko_mmlu_high_school_geography|5": {
-            "acc": 0.7676767676767676,
-            "acc_stderr": 0.030088629490217487,
-            "acc_norm": 0.7676767676767676,
-            "acc_norm_stderr": 0.030088629490217487
-        },
-        "harness|ko_mmlu_electrical_engineering|5": {
-            "acc": 0.5103448275862069,
-            "acc_stderr": 0.04165774775728763,
-            "acc_norm": 0.5103448275862069,
-            "acc_norm_stderr": 0.04165774775728763
-        },
-        "harness|ko_mmlu_college_physics|5": {
-            "acc": 0.3431372549019608,
-            "acc_stderr": 0.04724007352383888,
-            "acc_norm": 0.3431372549019608,
-            "acc_norm_stderr": 0.04724007352383888
-        },
-        "harness|ko_mmlu_high_school_microeconomics|5": {
-            "acc": 0.6134453781512605,
-            "acc_stderr": 0.031631458075523776,
-            "acc_norm": 0.6134453781512605,
-            "acc_norm_stderr": 0.031631458075523776
-        },
-        "harness|ko_mmlu_high_school_macroeconomics|5": {
-            "acc": 0.5615384615384615,
-            "acc_stderr": 0.025158266016868606,
-            "acc_norm": 0.5615384615384615,
-            "acc_norm_stderr": 0.025158266016868606
-        },
-        "harness|ko_mmlu_computer_security|5": {
-            "acc": 0.62,
-            "acc_stderr": 0.048783173121456316,
-            "acc_norm": 0.62,
-            "acc_norm_stderr": 0.048783173121456316
-        },
-        "harness|ko_mmlu_global_facts|5": {
-            "acc": 0.35,
-            "acc_stderr": 0.04793724854411019,
-            "acc_norm": 0.35,
-            "acc_norm_stderr": 0.04793724854411019
-        },
-        "harness|ko_mmlu_jurisprudence|5": {
-            "acc": 0.6388888888888888,
-            "acc_stderr": 0.04643454608906275,
-            "acc_norm": 0.6388888888888888,
-            "acc_norm_stderr": 0.04643454608906275
-        },
-        "harness|ko_mmlu_high_school_chemistry|5": {
-            "acc": 0.3793103448275862,
-            "acc_stderr": 0.03413963805906235,
-            "acc_norm": 0.3793103448275862,
-            "acc_norm_stderr": 0.03413963805906235
-        },
-        "harness|ko_mmlu_high_school_biology|5": {
-            "acc": 0.635483870967742,
-            "acc_stderr": 0.02737987122994324,
-            "acc_norm": 0.635483870967742,
-            "acc_norm_stderr": 0.02737987122994324
-        },
-        "harness|ko_mmlu_marketing|5": {
-            "acc": 0.8034188034188035,
-            "acc_stderr": 0.02603538609895129,
-            "acc_norm": 0.8034188034188035,
-            "acc_norm_stderr": 0.02603538609895129
-        },
-        "harness|ko_mmlu_clinical_knowledge|5": {
-            "acc": 0.5773584905660377,
-            "acc_stderr": 0.03040233144576954,
-            "acc_norm": 0.5773584905660377,
-            "acc_norm_stderr": 0.03040233144576954
-        },
-        "harness|ko_mmlu_public_relations|5": {
-            "acc": 0.6090909090909091,
-            "acc_stderr": 0.04673752333670239,
-            "acc_norm": 0.6090909090909091,
-            "acc_norm_stderr": 0.04673752333670239
-        },
-        "harness|ko_mmlu_high_school_mathematics|5": {
-            "acc": 0.34074074074074073,
-            "acc_stderr": 0.028897748741131143,
-            "acc_norm": 0.34074074074074073,
-            "acc_norm_stderr": 0.028897748741131143
-        },
-        "harness|ko_mmlu_high_school_physics|5": {
-            "acc": 0.33774834437086093,
-            "acc_stderr": 0.038615575462551684,
-            "acc_norm": 0.33774834437086093,
-            "acc_norm_stderr": 0.038615575462551684
-        },
-        "harness|ko_mmlu_sociology|5": {
-            "acc": 0.7114427860696517,
-            "acc_stderr": 0.03203841040213321,
-            "acc_norm": 0.7114427860696517,
-            "acc_norm_stderr": 0.03203841040213321
-        },
-        "harness|ko_mmlu_college_medicine|5": {
-            "acc": 0.5028901734104047,
-            "acc_stderr": 0.038124005659748335,
-            "acc_norm": 0.5028901734104047,
-            "acc_norm_stderr": 0.038124005659748335
-        },
-        "harness|ko_mmlu_elementary_mathematics|5": {
-            "acc": 0.3941798941798942,
-            "acc_stderr": 0.025167982333894143,
-            "acc_norm": 0.3941798941798942,
-            "acc_norm_stderr": 0.025167982333894143
-        },
-        "harness|ko_mmlu_college_biology|5": {
-            "acc": 0.5763888888888888,
-            "acc_stderr": 0.041321250197233685,
-            "acc_norm": 0.5763888888888888,
-            "acc_norm_stderr": 0.041321250197233685
-        },
-        "harness|ko_mmlu_college_chemistry|5": {
-            "acc": 0.3,
-            "acc_stderr": 0.046056618647183814,
-            "acc_norm": 0.3,
-            "acc_norm_stderr": 0.046056618647183814
-        },
-        "harness|ko_mmlu_us_foreign_policy|5": {
-            "acc": 0.74,
-            "acc_stderr": 0.04408440022768077,
-            "acc_norm": 0.74,
-            "acc_norm_stderr": 0.04408440022768077
-        },
-        "harness|ko_mmlu_moral_disputes|5": {
-            "acc": 0.5895953757225434,
-            "acc_stderr": 0.026483392042098177,
-            "acc_norm": 0.5895953757225434,
-            "acc_norm_stderr": 0.026483392042098177
-        },
-        "harness|ko_mmlu_logical_fallacies|5": {
-            "acc": 0.6073619631901841,
-            "acc_stderr": 0.0383674090783103,
-            "acc_norm": 0.6073619631901841,
-            "acc_norm_stderr": 0.0383674090783103
-        },
-        "harness|ko_mmlu_prehistory|5": {
-            "acc": 0.6141975308641975,
-            "acc_stderr": 0.027085401226132146,
-            "acc_norm": 0.6141975308641975,
-            "acc_norm_stderr": 0.027085401226132146
-        },
-        "harness|ko_mmlu_college_mathematics|5": {
-            "acc": 0.35,
-            "acc_stderr": 0.0479372485441102,
-            "acc_norm": 0.35,
-            "acc_norm_stderr": 0.0479372485441102
-        },
-        "harness|ko_mmlu_high_school_government_and_politics|5": {
-            "acc": 0.7875647668393783,
-            "acc_stderr": 0.02951928261681723,
-            "acc_norm": 0.7875647668393783,
-            "acc_norm_stderr": 0.02951928261681723
-        },
-        "harness|ko_mmlu_econometrics|5": {
-            "acc": 0.4473684210526316,
-            "acc_stderr": 0.046774730044912,
-            "acc_norm": 0.4473684210526316,
-            "acc_norm_stderr": 0.046774730044912
-        },
-        "harness|ko_mmlu_high_school_psychology|5": {
-            "acc": 0.7321100917431193,
-            "acc_stderr": 0.01898746225797865,
-            "acc_norm": 0.7321100917431193,
-            "acc_norm_stderr": 0.01898746225797865
-        },
-        "harness|ko_mmlu_formal_logic|5": {
-            "acc": 0.38095238095238093,
-            "acc_stderr": 0.043435254289490965,
-            "acc_norm": 0.38095238095238093,
-            "acc_norm_stderr": 0.043435254289490965
-        },
-        "harness|ko_mmlu_nutrition|5": {
-            "acc": 0.5816993464052288,
-            "acc_stderr": 0.028245134024387292,
-            "acc_norm": 0.5816993464052288,
-            "acc_norm_stderr": 0.028245134024387292
-        },
-        "harness|ko_mmlu_business_ethics|5": {
-            "acc": 0.63,
-            "acc_stderr": 0.04852365870939099,
-            "acc_norm": 0.63,
-            "acc_norm_stderr": 0.04852365870939099
-        },
-        "harness|ko_mmlu_international_law|5": {
-            "acc": 0.71900826446281,
-            "acc_stderr": 0.04103203830514512,
-            "acc_norm": 0.71900826446281,
-            "acc_norm_stderr": 0.04103203830514512
-        },
-        "harness|ko_mmlu_astronomy|5": {
-            "acc": 0.631578947368421,
-            "acc_stderr": 0.03925523381052932,
-            "acc_norm": 0.631578947368421,
-            "acc_norm_stderr": 0.03925523381052932
-        },
-        "harness|ko_mmlu_professional_psychology|5": {
-            "acc": 0.5375816993464052,
-            "acc_stderr": 0.020170614974969765,
-            "acc_norm": 0.5375816993464052,
-            "acc_norm_stderr": 0.020170614974969765
-        },
-        "harness|ko_mmlu_professional_accounting|5": {
-            "acc": 0.425531914893617,
-            "acc_stderr": 0.02949482760014437,
-            "acc_norm": 0.425531914893617,
-            "acc_norm_stderr": 0.02949482760014437
-        },
-        "harness|ko_mmlu_machine_learning|5": {
-            "acc": 0.4375,
-            "acc_stderr": 0.04708567521880525,
-            "acc_norm": 0.4375,
-            "acc_norm_stderr": 0.04708567521880525
-        },
-        "harness|ko_mmlu_high_school_statistics|5": {
-            "acc": 0.4305555555555556,
-            "acc_stderr": 0.03376922151252335,
-            "acc_norm": 0.4305555555555556,
-            "acc_norm_stderr": 0.03376922151252335
-        },
-        "harness|ko_mmlu_moral_scenarios|5": {
-            "acc": 0.3005586592178771,
-            "acc_stderr": 0.015334566806251159,
-            "acc_norm": 0.3005586592178771,
-            "acc_norm_stderr": 0.015334566806251159
-        },
-        "harness|ko_mmlu_college_computer_science|5": {
-            "acc": 0.47,
-            "acc_stderr": 0.05016135580465919,
-            "acc_norm": 0.47,
-            "acc_norm_stderr": 0.05016135580465919
-        },
-        "harness|ko_mmlu_high_school_computer_science|5": {
-            "acc": 0.72,
-            "acc_stderr": 0.045126085985421296,
-            "acc_norm": 0.72,
-            "acc_norm_stderr": 0.045126085985421296
-        },
-        "harness|ko_mmlu_professional_medicine|5": {
-            "acc": 0.5404411764705882,
-            "acc_stderr": 0.03027332507734576,
-            "acc_norm": 0.5404411764705882,
-            "acc_norm_stderr": 0.03027332507734576
-        },
-        "harness|ko_mmlu_security_studies|5": {
-            "acc": 0.6408163265306123,
-            "acc_stderr": 0.03071356045510849,
-            "acc_norm": 0.6408163265306123,
-            "acc_norm_stderr": 0.03071356045510849
-        },
-        "harness|ko_mmlu_high_school_world_history|5": {
-            "acc": 0.7721518987341772,
-            "acc_stderr": 0.02730348459906942,
-            "acc_norm": 0.7721518987341772,
-            "acc_norm_stderr": 0.02730348459906942
-        },
-        "harness|ko_mmlu_professional_law|5": {
-            "acc": 0.4041720990873533,
-            "acc_stderr": 0.012533504046491367,
-            "acc_norm": 0.4041720990873533,
-            "acc_norm_stderr": 0.012533504046491367
-        },
-        "harness|ko_mmlu_high_school_us_history|5": {
-            "acc": 0.7549019607843137,
-            "acc_stderr": 0.03019028245350195,
-            "acc_norm": 0.7549019607843137,
-            "acc_norm_stderr": 0.03019028245350195
-        },
-        "harness|ko_mmlu_high_school_european_history|5": {
-            "acc": 0.7393939393939394,
-            "acc_stderr": 0.03427743175816524,
-            "acc_norm": 0.7393939393939394,
-            "acc_norm_stderr": 0.03427743175816524
-        },
-        "harness|ko_truthfulqa_mc|0": {
-            "mc1": 0.3843329253365973,
-            "mc1_stderr": 0.017028707301245196,
-            "mc2": 0.5419514957059137,
-            "mc2_stderr": 0.01593235153874721
-        },
-        "harness|ko_commongen_v2|2": {
-            "acc": 0.6340023612750886,
-            "acc_stderr": 0.016561489664895703,
-            "acc_norm": 0.6399055489964581,
-            "acc_norm_stderr": 0.016503686720440076
-        }
-    },
-    "versions": {
-        "all": 0,
-        "harness|ko_arc_challenge|25": 0,
-        "harness|ko_hellaswag|10": 0,
-        "harness|ko_mmlu_world_religions|5": 1,
-        "harness|ko_mmlu_management|5": 1,
-        "harness|ko_mmlu_miscellaneous|5": 1,
-        "harness|ko_mmlu_anatomy|5": 1,
-        "harness|ko_mmlu_abstract_algebra|5": 1,
-        "harness|ko_mmlu_conceptual_physics|5": 1,
-        "harness|ko_mmlu_virology|5": 1,
-        "harness|ko_mmlu_philosophy|5": 1,
-        "harness|ko_mmlu_human_aging|5": 1,
-        "harness|ko_mmlu_human_sexuality|5": 1,
-        "harness|ko_mmlu_medical_genetics|5": 1,
-        "harness|ko_mmlu_high_school_geography|5": 1,
-        "harness|ko_mmlu_electrical_engineering|5": 1,
-        "harness|ko_mmlu_college_physics|5": 1,
-        "harness|ko_mmlu_high_school_microeconomics|5": 1,
-        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
-        "harness|ko_mmlu_computer_security|5": 1,
-        "harness|ko_mmlu_global_facts|5": 1,
-        "harness|ko_mmlu_jurisprudence|5": 1,
-        "harness|ko_mmlu_high_school_chemistry|5": 1,
-        "harness|ko_mmlu_high_school_biology|5": 1,
-        "harness|ko_mmlu_marketing|5": 1,
-        "harness|ko_mmlu_clinical_knowledge|5": 1,
-        "harness|ko_mmlu_public_relations|5": 1,
-        "harness|ko_mmlu_high_school_mathematics|5": 1,
-        "harness|ko_mmlu_high_school_physics|5": 1,
-        "harness|ko_mmlu_sociology|5": 1,
-        "harness|ko_mmlu_college_medicine|5": 1,
-        "harness|ko_mmlu_elementary_mathematics|5": 1,
-        "harness|ko_mmlu_college_biology|5": 1,
-        "harness|ko_mmlu_college_chemistry|5": 1,
-        "harness|ko_mmlu_us_foreign_policy|5": 1,
-        "harness|ko_mmlu_moral_disputes|5": 1,
-        "harness|ko_mmlu_logical_fallacies|5": 1,
-        "harness|ko_mmlu_prehistory|5": 1,
-        "harness|ko_mmlu_college_mathematics|5": 1,
-        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
-        "harness|ko_mmlu_econometrics|5": 1,
-        "harness|ko_mmlu_high_school_psychology|5": 1,
-        "harness|ko_mmlu_formal_logic|5": 1,
-        "harness|ko_mmlu_nutrition|5": 1,
-        "harness|ko_mmlu_business_ethics|5": 1,
-        "harness|ko_mmlu_international_law|5": 1,
-        "harness|ko_mmlu_astronomy|5": 1,
-        "harness|ko_mmlu_professional_psychology|5": 1,
-        "harness|ko_mmlu_professional_accounting|5": 1,
-        "harness|ko_mmlu_machine_learning|5": 1,
-        "harness|ko_mmlu_high_school_statistics|5": 1,
-        "harness|ko_mmlu_moral_scenarios|5": 1,
-        "harness|ko_mmlu_college_computer_science|5": 1,
-        "harness|ko_mmlu_high_school_computer_science|5": 1,
-        "harness|ko_mmlu_professional_medicine|5": 1,
-        "harness|ko_mmlu_security_studies|5": 1,
-        "harness|ko_mmlu_high_school_world_history|5": 1,
-        "harness|ko_mmlu_professional_law|5": 1,
-        "harness|ko_mmlu_high_school_us_history|5": 1,
-        "harness|ko_mmlu_high_school_european_history|5": 1,
-        "harness|ko_truthfulqa_mc|0": 0,
-        "harness|ko_commongen_v2|2": 1
-    },
-    "config_general": {
-        "model_name": "T3Q-LLM/T3Q-LLM-sft1.0-dpo1.0",
-        "model_sha": "9b553f5547f7208f7c1ca5fdb9b50b332edbf945",
-        "model_dtype": "torch.float16",
-        "lighteval_sha": "",
-        "num_few_shot_default": 0,
-        "num_fewshot_seeds": 1,
-        "override_batch_size": 1,
-        "max_samples": null
-    }
-}
T3Q-LLM/T3Q-LLM-solar10.8-sft-v1.0/result_2024-04-16 10:11:27.json
DELETED
@@ -1,444 +0,0 @@
-{
-    "results": {
-        "harness|ko_arc_challenge|25": {
-            "acc": 0.4872013651877133,
-            "acc_stderr": 0.014606603181012544,
-            "acc_norm": 0.5409556313993175,
-            "acc_norm_stderr": 0.01456229107360123
-        },
-        "harness|ko_hellaswag|10": {
-            "acc": 0.4826727743477395,
-            "acc_stderr": 0.004986784319771786,
-            "acc_norm": 0.6543517227643896,
-            "acc_norm_stderr": 0.00474607219107258
-        },
-        "harness|ko_mmlu_world_religions|5": {
-            "acc": 0.6900584795321637,
-            "acc_stderr": 0.035469769593931624,
-            "acc_norm": 0.6900584795321637,
-            "acc_norm_stderr": 0.035469769593931624
-        },
-        "harness|ko_mmlu_management|5": {
-            "acc": 0.6504854368932039,
-            "acc_stderr": 0.047211885060971716,
-            "acc_norm": 0.6504854368932039,
-            "acc_norm_stderr": 0.047211885060971716
-        },
-        "harness|ko_mmlu_miscellaneous|5": {
-            "acc": 0.7420178799489144,
-            "acc_stderr": 0.01564583018834895,
-            "acc_norm": 0.7420178799489144,
-            "acc_norm_stderr": 0.01564583018834895
-        },
-        "harness|ko_mmlu_anatomy|5": {
-            "acc": 0.48148148148148145,
-            "acc_stderr": 0.043163785995113245,
-            "acc_norm": 0.48148148148148145,
-            "acc_norm_stderr": 0.043163785995113245
-        },
-        "harness|ko_mmlu_abstract_algebra|5": {
-            "acc": 0.24,
-            "acc_stderr": 0.04292346959909283,
-            "acc_norm": 0.24,
-            "acc_norm_stderr": 0.04292346959909283
-        },
-        "harness|ko_mmlu_conceptual_physics|5": {
-            "acc": 0.548936170212766,
-            "acc_stderr": 0.03252909619613197,
-            "acc_norm": 0.548936170212766,
-            "acc_norm_stderr": 0.03252909619613197
-        },
-        "harness|ko_mmlu_virology|5": {
-            "acc": 0.5,
-            "acc_stderr": 0.03892494720807614,
-            "acc_norm": 0.5,
-            "acc_norm_stderr": 0.03892494720807614
-        },
-        "harness|ko_mmlu_philosophy|5": {
-            "acc": 0.6077170418006431,
-            "acc_stderr": 0.027731258647012005,
-            "acc_norm": 0.6077170418006431,
-            "acc_norm_stderr": 0.027731258647012005
-        },
-        "harness|ko_mmlu_human_aging|5": {
-            "acc": 0.6053811659192825,
-            "acc_stderr": 0.03280400504755291,
-            "acc_norm": 0.6053811659192825,
-            "acc_norm_stderr": 0.03280400504755291
-        },
-        "harness|ko_mmlu_human_sexuality|5": {
-            "acc": 0.6183206106870229,
-            "acc_stderr": 0.04260735157644561,
-            "acc_norm": 0.6183206106870229,
-            "acc_norm_stderr": 0.04260735157644561
-        },
-        "harness|ko_mmlu_medical_genetics|5": {
-            "acc": 0.49,
-            "acc_stderr": 0.05024183937956912,
-            "acc_norm": 0.49,
-            "acc_norm_stderr": 0.05024183937956912
-        },
-        "harness|ko_mmlu_high_school_geography|5": {
-            "acc": 0.7474747474747475,
-            "acc_stderr": 0.030954055470365907,
-            "acc_norm": 0.7474747474747475,
-            "acc_norm_stderr": 0.030954055470365907
-        },
-        "harness|ko_mmlu_electrical_engineering|5": {
-            "acc": 0.5103448275862069,
-            "acc_stderr": 0.04165774775728763,
-            "acc_norm": 0.5103448275862069,
-            "acc_norm_stderr": 0.04165774775728763
-        },
-        "harness|ko_mmlu_college_physics|5": {
-            "acc": 0.3235294117647059,
-            "acc_stderr": 0.046550104113196177,
-            "acc_norm": 0.3235294117647059,
-            "acc_norm_stderr": 0.046550104113196177
-        },
-        "harness|ko_mmlu_high_school_microeconomics|5": {
-            "acc": 0.6050420168067226,
-            "acc_stderr": 0.031753678460966245,
-            "acc_norm": 0.6050420168067226,
-            "acc_norm_stderr": 0.031753678460966245
-        },
-        "harness|ko_mmlu_high_school_macroeconomics|5": {
-            "acc": 0.5564102564102564,
-            "acc_stderr": 0.025189149894764208,
-            "acc_norm": 0.5564102564102564,
-            "acc_norm_stderr": 0.025189149894764208
-        },
-        "harness|ko_mmlu_computer_security|5": {
-            "acc": 0.64,
-            "acc_stderr": 0.04824181513244218,
-            "acc_norm": 0.64,
-            "acc_norm_stderr": 0.04824181513244218
-        },
-        "harness|ko_mmlu_global_facts|5": {
-            "acc": 0.38,
-            "acc_stderr": 0.048783173121456316,
-            "acc_norm": 0.38,
-            "acc_norm_stderr": 0.048783173121456316
-        },
-        "harness|ko_mmlu_jurisprudence|5": {
-            "acc": 0.6574074074074074,
-            "acc_stderr": 0.045879047413018105,
-            "acc_norm": 0.6574074074074074,
-            "acc_norm_stderr": 0.045879047413018105
-        },
-        "harness|ko_mmlu_high_school_chemistry|5": {
-            "acc": 0.3842364532019704,
-            "acc_stderr": 0.034223985656575494,
-            "acc_norm": 0.3842364532019704,
-            "acc_norm_stderr": 0.034223985656575494
-        },
-        "harness|ko_mmlu_high_school_biology|5": {
-            "acc": 0.6258064516129033,
-            "acc_stderr": 0.027528904299845704,
-            "acc_norm": 0.6258064516129033,
-            "acc_norm_stderr": 0.027528904299845704
-        },
-        "harness|ko_mmlu_marketing|5": {
-            "acc": 0.8205128205128205,
-            "acc_stderr": 0.02514093595033544,
-            "acc_norm": 0.8205128205128205,
-            "acc_norm_stderr": 0.02514093595033544
-        },
-        "harness|ko_mmlu_clinical_knowledge|5": {
-            "acc": 0.5735849056603773,
-            "acc_stderr": 0.030437794342983052,
-            "acc_norm": 0.5735849056603773,
-            "acc_norm_stderr": 0.030437794342983052
-        },
-        "harness|ko_mmlu_public_relations|5": {
-            "acc": 0.6090909090909091,
-            "acc_stderr": 0.04673752333670239,
-            "acc_norm": 0.6090909090909091,
-            "acc_norm_stderr": 0.04673752333670239
-        },
-        "harness|ko_mmlu_high_school_mathematics|5": {
-            "acc": 0.3333333333333333,
-            "acc_stderr": 0.028742040903948492,
-            "acc_norm": 0.3333333333333333,
-            "acc_norm_stderr": 0.028742040903948492
-        },
-        "harness|ko_mmlu_high_school_physics|5": {
-            "acc": 0.31125827814569534,
-            "acc_stderr": 0.03780445850526732,
-            "acc_norm": 0.31125827814569534,
-            "acc_norm_stderr": 0.03780445850526732
-        },
-        "harness|ko_mmlu_sociology|5": {
-            "acc": 0.7064676616915423,
-            "acc_stderr": 0.032200241045342054,
-            "acc_norm": 0.7064676616915423,
-            "acc_norm_stderr": 0.032200241045342054
-        },
-        "harness|ko_mmlu_college_medicine|5": {
-            "acc": 0.48554913294797686,
-            "acc_stderr": 0.03810871630454764,
-            "acc_norm": 0.48554913294797686,
-            "acc_norm_stderr": 0.03810871630454764
-        },
-        "harness|ko_mmlu_elementary_mathematics|5": {
-            "acc": 0.3888888888888889,
-            "acc_stderr": 0.02510742548113727,
-            "acc_norm": 0.3888888888888889,
-            "acc_norm_stderr": 0.02510742548113727
-        },
-        "harness|ko_mmlu_college_biology|5": {
-            "acc": 0.5763888888888888,
-            "acc_stderr": 0.041321250197233685,
-            "acc_norm": 0.5763888888888888,
-            "acc_norm_stderr": 0.041321250197233685
-        },
-        "harness|ko_mmlu_college_chemistry|5": {
-            "acc": 0.3,
-            "acc_stderr": 0.046056618647183814,
-            "acc_norm": 0.3,
-            "acc_norm_stderr": 0.046056618647183814
-        },
-        "harness|ko_mmlu_us_foreign_policy|5": {
-            "acc": 0.75,
-            "acc_stderr": 0.04351941398892446,
-            "acc_norm": 0.75,
-            "acc_norm_stderr": 0.04351941398892446
-        },
-        "harness|ko_mmlu_moral_disputes|5": {
-            "acc": 0.5809248554913294,
-            "acc_stderr": 0.026564178111422622,
-            "acc_norm": 0.5809248554913294,
-            "acc_norm_stderr": 0.026564178111422622
-        },
-        "harness|ko_mmlu_logical_fallacies|5": {
-            "acc": 0.6012269938650306,
-            "acc_stderr": 0.03847021420456022,
-            "acc_norm": 0.6012269938650306,
-            "acc_norm_stderr": 0.03847021420456022
-        },
-        "harness|ko_mmlu_prehistory|5": {
-            "acc": 0.5925925925925926,
-            "acc_stderr": 0.02733954664066274,
-            "acc_norm": 0.5925925925925926,
-            "acc_norm_stderr": 0.02733954664066274
-        },
-        "harness|ko_mmlu_college_mathematics|5": {
-            "acc": 0.34,
-            "acc_stderr": 0.047609522856952365,
-            "acc_norm": 0.34,
-            "acc_norm_stderr": 0.047609522856952365
-        },
-        "harness|ko_mmlu_high_school_government_and_politics|5": {
-            "acc": 0.7772020725388601,
-            "acc_stderr": 0.03003114797764154,
-            "acc_norm": 0.7772020725388601,
-            "acc_norm_stderr": 0.03003114797764154
-        },
-        "harness|ko_mmlu_econometrics|5": {
-            "acc": 0.42105263157894735,
-            "acc_stderr": 0.046446020912223177,
-            "acc_norm": 0.42105263157894735,
-            "acc_norm_stderr": 0.046446020912223177
-        },
-        "harness|ko_mmlu_high_school_psychology|5": {
-            "acc": 0.7321100917431193,
-            "acc_stderr": 0.01898746225797865,
-            "acc_norm": 0.7321100917431193,
-            "acc_norm_stderr": 0.01898746225797865
-        },
-        "harness|ko_mmlu_formal_logic|5": {
-            "acc": 0.3968253968253968,
-            "acc_stderr": 0.04375888492727062,
-            "acc_norm": 0.3968253968253968,
-            "acc_norm_stderr": 0.04375888492727062
-        },
-        "harness|ko_mmlu_nutrition|5": {
-            "acc": 0.5915032679738562,
-            "acc_stderr": 0.028146405993096358,
-            "acc_norm": 0.5915032679738562,
-            "acc_norm_stderr": 0.028146405993096358
-        },
-        "harness|ko_mmlu_business_ethics|5": {
-            "acc": 0.61,
-            "acc_stderr": 0.04902071300001975,
-            "acc_norm": 0.61,
-            "acc_norm_stderr": 0.04902071300001975
-        },
-        "harness|ko_mmlu_international_law|5": {
-            "acc": 0.7272727272727273,
-            "acc_stderr": 0.04065578140908705,
-            "acc_norm": 0.7272727272727273,
-            "acc_norm_stderr": 0.04065578140908705
-        },
-        "harness|ko_mmlu_astronomy|5": {
-            "acc": 0.6052631578947368,
-            "acc_stderr": 0.039777499346220734,
-            "acc_norm": 0.6052631578947368,
-            "acc_norm_stderr": 0.039777499346220734
-        },
-        "harness|ko_mmlu_professional_psychology|5": {
-            "acc": 0.5310457516339869,
-            "acc_stderr": 0.020188804456361887,
-            "acc_norm": 0.5310457516339869,
-            "acc_norm_stderr": 0.020188804456361887
-        },
-        "harness|ko_mmlu_professional_accounting|5": {
-            "acc": 0.39361702127659576,
-            "acc_stderr": 0.029144544781596154,
-            "acc_norm": 0.39361702127659576,
-            "acc_norm_stderr": 0.029144544781596154
-        },
-        "harness|ko_mmlu_machine_learning|5": {
-            "acc": 0.4107142857142857,
-            "acc_stderr": 0.04669510663875192,
-            "acc_norm": 0.4107142857142857,
-            "acc_norm_stderr": 0.04669510663875192
-        },
-        "harness|ko_mmlu_high_school_statistics|5": {
-            "acc": 0.44907407407407407,
-            "acc_stderr": 0.03392238405321617,
-            "acc_norm": 0.44907407407407407,
-            "acc_norm_stderr": 0.03392238405321617
-        },
-        "harness|ko_mmlu_moral_scenarios|5": {
-            "acc": 0.17318435754189945,
-            "acc_stderr": 0.012655809068644823,
-            "acc_norm": 0.17318435754189945,
-            "acc_norm_stderr": 0.012655809068644823
-        },
-        "harness|ko_mmlu_college_computer_science|5": {
-            "acc": 0.5,
-            "acc_stderr": 0.050251890762960605,
-            "acc_norm": 0.5,
-            "acc_norm_stderr": 0.050251890762960605
-        },
-        "harness|ko_mmlu_high_school_computer_science|5": {
-            "acc": 0.69,
-            "acc_stderr": 0.046482319871173156,
-            "acc_norm": 0.69,
-            "acc_norm_stderr": 0.046482319871173156
-        },
-        "harness|ko_mmlu_professional_medicine|5": {
-            "acc": 0.5257352941176471,
-            "acc_stderr": 0.030332578094555026,
-            "acc_norm": 0.5257352941176471,
-            "acc_norm_stderr": 0.030332578094555026
-        },
-        "harness|ko_mmlu_security_studies|5": {
-            "acc": 0.636734693877551,
-            "acc_stderr": 0.030789051139030802,
-            "acc_norm": 0.636734693877551,
-            "acc_norm_stderr": 0.030789051139030802
-        },
-        "harness|ko_mmlu_high_school_world_history|5": {
-            "acc": 0.7763713080168776,
-            "acc_stderr": 0.027123298205229962,
-            "acc_norm": 0.7763713080168776,
-            "acc_norm_stderr": 0.027123298205229962
-        },
-        "harness|ko_mmlu_professional_law|5": {
-            "acc": 0.41590612777053454,
-            "acc_stderr": 0.012588323850313596,
-            "acc_norm": 0.41590612777053454,
-            "acc_norm_stderr": 0.012588323850313596
-        },
-        "harness|ko_mmlu_high_school_us_history|5": {
-            "acc": 0.75,
-            "acc_stderr": 0.03039153369274154,
-            "acc_norm": 0.75,
-            "acc_norm_stderr": 0.03039153369274154
-        },
-        "harness|ko_mmlu_high_school_european_history|5": {
-            "acc": 0.7454545454545455,
-            "acc_stderr": 0.03401506715249039,
-            "acc_norm": 0.7454545454545455,
-            "acc_norm_stderr": 0.03401506715249039
-        },
-        "harness|ko_truthfulqa_mc|0": {
-            "mc1": 0.33414932680538556,
-            "mc1_stderr": 0.01651253067715052,
-            "mc2": 0.4886051214791807,
-            "mc2_stderr": 0.0156636395522276
-        },
-        "harness|ko_commongen_v2|2": {
-            "acc": 0.6481700118063755,
-            "acc_stderr": 0.016418206451218054,
-            "acc_norm": 0.6564344746162928,
-            "acc_norm_stderr": 0.016327334806429134
-        }
-    },
-    "versions": {
-        "all": 0,
-        "harness|ko_arc_challenge|25": 0,
-        "harness|ko_hellaswag|10": 0,
-        "harness|ko_mmlu_world_religions|5": 1,
-        "harness|ko_mmlu_management|5": 1,
-        "harness|ko_mmlu_miscellaneous|5": 1,
-        "harness|ko_mmlu_anatomy|5": 1,
-        "harness|ko_mmlu_abstract_algebra|5": 1,
-        "harness|ko_mmlu_conceptual_physics|5": 1,
-        "harness|ko_mmlu_virology|5": 1,
-        "harness|ko_mmlu_philosophy|5": 1,
-        "harness|ko_mmlu_human_aging|5": 1,
-        "harness|ko_mmlu_human_sexuality|5": 1,
-        "harness|ko_mmlu_medical_genetics|5": 1,
-        "harness|ko_mmlu_high_school_geography|5": 1,
-        "harness|ko_mmlu_electrical_engineering|5": 1,
-        "harness|ko_mmlu_college_physics|5": 1,
-        "harness|ko_mmlu_high_school_microeconomics|5": 1,
-        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
-        "harness|ko_mmlu_computer_security|5": 1,
-        "harness|ko_mmlu_global_facts|5": 1,
-        "harness|ko_mmlu_jurisprudence|5": 1,
-        "harness|ko_mmlu_high_school_chemistry|5": 1,
-        "harness|ko_mmlu_high_school_biology|5": 1,
-        "harness|ko_mmlu_marketing|5": 1,
-        "harness|ko_mmlu_clinical_knowledge|5": 1,
-        "harness|ko_mmlu_public_relations|5": 1,
-        "harness|ko_mmlu_high_school_mathematics|5": 1,
-        "harness|ko_mmlu_high_school_physics|5": 1,
-        "harness|ko_mmlu_sociology|5": 1,
-        "harness|ko_mmlu_college_medicine|5": 1,
-        "harness|ko_mmlu_elementary_mathematics|5": 1,
-        "harness|ko_mmlu_college_biology|5": 1,
-        "harness|ko_mmlu_college_chemistry|5": 1,
-        "harness|ko_mmlu_us_foreign_policy|5": 1,
-        "harness|ko_mmlu_moral_disputes|5": 1,
-        "harness|ko_mmlu_logical_fallacies|5": 1,
-        "harness|ko_mmlu_prehistory|5": 1,
-        "harness|ko_mmlu_college_mathematics|5": 1,
-        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
-        "harness|ko_mmlu_econometrics|5": 1,
-        "harness|ko_mmlu_high_school_psychology|5": 1,
-        "harness|ko_mmlu_formal_logic|5": 1,
-        "harness|ko_mmlu_nutrition|5": 1,
-        "harness|ko_mmlu_business_ethics|5": 1,
-        "harness|ko_mmlu_international_law|5": 1,
-        "harness|ko_mmlu_astronomy|5": 1,
-        "harness|ko_mmlu_professional_psychology|5": 1,
-        "harness|ko_mmlu_professional_accounting|5": 1,
-        "harness|ko_mmlu_machine_learning|5": 1,
-        "harness|ko_mmlu_high_school_statistics|5": 1,
-        "harness|ko_mmlu_moral_scenarios|5": 1,
-        "harness|ko_mmlu_college_computer_science|5": 1,
-        "harness|ko_mmlu_high_school_computer_science|5": 1,
-        "harness|ko_mmlu_professional_medicine|5": 1,
-        "harness|ko_mmlu_security_studies|5": 1,
-        "harness|ko_mmlu_high_school_world_history|5": 1,
-        "harness|ko_mmlu_professional_law|5": 1,
-        "harness|ko_mmlu_high_school_us_history|5": 1,
-        "harness|ko_mmlu_high_school_european_history|5": 1,
-        "harness|ko_truthfulqa_mc|0": 0,
-        "harness|ko_commongen_v2|2": 1
-    },
-    "config_general": {
-        "model_name": "T3Q-LLM/T3Q-LLM-solar10.8-sft-v1.0",
-        "model_sha": "e9ad89a994e794eb0af2d3a0a5f5cb723a4bcb0c",
-        "model_dtype": "torch.float16",
-        "lighteval_sha": "",
-        "num_few_shot_default": 0,
-        "num_fewshot_seeds": 1,
-        "override_batch_size": 1,
-        "max_samples": null
-    }
-}
T3Q-LLM/T3Q-LLM1-CV-v1.0/result_2024-05-07 12:24:30.json
DELETED
@@ -1,444 +0,0 @@
-{
-    "results": {
-        "harness|ko_arc_challenge|25": {
-            "acc": 0.4462457337883959,
-            "acc_stderr": 0.014526705548539985,
-            "acc_norm": 0.5136518771331058,
-            "acc_norm_stderr": 0.01460594342986095
-        },
-        "harness|ko_hellaswag|10": {
-            "acc": 0.44652459669388567,
-            "acc_stderr": 0.0049611615892284164,
-            "acc_norm": 0.6050587532364071,
-            "acc_norm_stderr": 0.004878390226591714
-        },
-        "harness|ko_mmlu_world_religions|5": {
-            "acc": 0.6257309941520468,
-            "acc_stderr": 0.03711601185389482,
-            "acc_norm": 0.6257309941520468,
-            "acc_norm_stderr": 0.03711601185389482
-        },
-        "harness|ko_mmlu_management|5": {
-            "acc": 0.6796116504854369,
-            "acc_stderr": 0.04620284082280041,
-            "acc_norm": 0.6796116504854369,
-            "acc_norm_stderr": 0.04620284082280041
-        },
-        "harness|ko_mmlu_miscellaneous|5": {
-            "acc": 0.6807151979565773,
-            "acc_stderr": 0.016671261749538743,
-            "acc_norm": 0.6807151979565773,
-            "acc_norm_stderr": 0.016671261749538743
-        },
-        "harness|ko_mmlu_anatomy|5": {
-            "acc": 0.4222222222222222,
-            "acc_stderr": 0.042667634040995814,
-            "acc_norm": 0.4222222222222222,
-            "acc_norm_stderr": 0.042667634040995814
-        },
-        "harness|ko_mmlu_abstract_algebra|5": {
-            "acc": 0.19,
-            "acc_stderr": 0.03942772444036623,
-            "acc_norm": 0.19,
-            "acc_norm_stderr": 0.03942772444036623
-        },
-        "harness|ko_mmlu_conceptual_physics|5": {
-            "acc": 0.46808510638297873,
-            "acc_stderr": 0.03261936918467383,
-            "acc_norm": 0.46808510638297873,
-            "acc_norm_stderr": 0.03261936918467383
-        },
-        "harness|ko_mmlu_virology|5": {
-            "acc": 0.4457831325301205,
-            "acc_stderr": 0.03869543323472101,
-            "acc_norm": 0.4457831325301205,
-            "acc_norm_stderr": 0.03869543323472101
-        },
-        "harness|ko_mmlu_philosophy|5": {
-            "acc": 0.5659163987138264,
-            "acc_stderr": 0.0281502322445356,
-            "acc_norm": 0.5659163987138264,
-            "acc_norm_stderr": 0.0281502322445356
-        },
-        "harness|ko_mmlu_human_aging|5": {
-            "acc": 0.57847533632287,
-            "acc_stderr": 0.03314190222110657,
-            "acc_norm": 0.57847533632287,
-            "acc_norm_stderr": 0.03314190222110657
-        },
-        "harness|ko_mmlu_human_sexuality|5": {
-            "acc": 0.5343511450381679,
-            "acc_stderr": 0.04374928560599738,
-            "acc_norm": 0.5343511450381679,
-            "acc_norm_stderr": 0.04374928560599738
-        },
-        "harness|ko_mmlu_medical_genetics|5": {
-            "acc": 0.43,
-            "acc_stderr": 0.049756985195624284,
-            "acc_norm": 0.43,
-            "acc_norm_stderr": 0.049756985195624284
-        },
-        "harness|ko_mmlu_high_school_geography|5": {
-            "acc": 0.6515151515151515,
-            "acc_stderr": 0.033948539651564025,
-            "acc_norm": 0.6515151515151515,
-            "acc_norm_stderr": 0.033948539651564025
-        },
-        "harness|ko_mmlu_electrical_engineering|5": {
-            "acc": 0.43448275862068964,
-            "acc_stderr": 0.04130740879555497,
-            "acc_norm": 0.43448275862068964,
-            "acc_norm_stderr": 0.04130740879555497
-        },
-        "harness|ko_mmlu_college_physics|5": {
-            "acc": 0.2549019607843137,
-            "acc_stderr": 0.04336432707993176,
-            "acc_norm": 0.2549019607843137,
-            "acc_norm_stderr": 0.04336432707993176
-        },
-        "harness|ko_mmlu_high_school_microeconomics|5": {
-            "acc": 0.5630252100840336,
-            "acc_stderr": 0.03221943636566196,
-            "acc_norm": 0.5630252100840336,
-            "acc_norm_stderr": 0.03221943636566196
-        },
-        "harness|ko_mmlu_high_school_macroeconomics|5": {
-            "acc": 0.5256410256410257,
-            "acc_stderr": 0.025317649726448673,
-            "acc_norm": 0.5256410256410257,
-            "acc_norm_stderr": 0.025317649726448673
-        },
-        "harness|ko_mmlu_computer_security|5": {
-            "acc": 0.59,
-            "acc_stderr": 0.049431107042371025,
-            "acc_norm": 0.59,
-            "acc_norm_stderr": 0.049431107042371025
-        },
-        "harness|ko_mmlu_global_facts|5": {
-            "acc": 0.31,
-            "acc_stderr": 0.04648231987117316,
-            "acc_norm": 0.31,
-            "acc_norm_stderr": 0.04648231987117316
-        },
-        "harness|ko_mmlu_jurisprudence|5": {
-            "acc": 0.6851851851851852,
-            "acc_stderr": 0.04489931073591312,
-            "acc_norm": 0.6851851851851852,
-            "acc_norm_stderr": 0.04489931073591312
-        },
-        "harness|ko_mmlu_high_school_chemistry|5": {
-            "acc": 0.33497536945812806,
-            "acc_stderr": 0.033208527423483104,
-            "acc_norm": 0.33497536945812806,
-            "acc_norm_stderr": 0.033208527423483104
-        },
-        "harness|ko_mmlu_high_school_biology|5": {
-            "acc": 0.5741935483870968,
-            "acc_stderr": 0.028129112709165904,
-            "acc_norm": 0.5741935483870968,
-            "acc_norm_stderr": 0.028129112709165904
-        },
-        "harness|ko_mmlu_marketing|5": {
-            "acc": 0.7735042735042735,
-            "acc_stderr": 0.027421007295392926,
-            "acc_norm": 0.7735042735042735,
-            "acc_norm_stderr": 0.027421007295392926
-        },
-        "harness|ko_mmlu_clinical_knowledge|5": {
-            "acc": 0.5320754716981132,
-            "acc_stderr": 0.030709486992556538,
-            "acc_norm": 0.5320754716981132,
-            "acc_norm_stderr": 0.030709486992556538
-        },
-        "harness|ko_mmlu_public_relations|5": {
-            "acc": 0.5363636363636364,
-            "acc_stderr": 0.047764491623961985,
-            "acc_norm": 0.5363636363636364,
-            "acc_norm_stderr": 0.047764491623961985
-        },
-        "harness|ko_mmlu_high_school_mathematics|5": {
-            "acc": 0.29259259259259257,
-            "acc_stderr": 0.027738969632176088,
-            "acc_norm": 0.29259259259259257,
-            "acc_norm_stderr": 0.027738969632176088
-        },
-        "harness|ko_mmlu_high_school_physics|5": {
-            "acc": 0.33112582781456956,
-            "acc_stderr": 0.038425817186598696,
-            "acc_norm": 0.33112582781456956,
-            "acc_norm_stderr": 0.038425817186598696
-        },
-        "harness|ko_mmlu_sociology|5": {
-            "acc": 0.6318407960199005,
-            "acc_stderr": 0.03410410565495302,
-            "acc_norm": 0.6318407960199005,
-            "acc_norm_stderr": 0.03410410565495302
-        },
-        "harness|ko_mmlu_college_medicine|5": {
-            "acc": 0.45664739884393063,
-            "acc_stderr": 0.03798106566014498,
-            "acc_norm": 0.45664739884393063,
-            "acc_norm_stderr": 0.03798106566014498
-        },
-        "harness|ko_mmlu_elementary_mathematics|5": {
-            "acc": 0.3994708994708995,
-            "acc_stderr": 0.02522545028406788,
-            "acc_norm": 0.3994708994708995,
-            "acc_norm_stderr": 0.02522545028406788
-        },
-        "harness|ko_mmlu_college_biology|5": {
-            "acc": 0.5208333333333334,
-            "acc_stderr": 0.041775789507399935,
-            "acc_norm": 0.5208333333333334,
-            "acc_norm_stderr": 0.041775789507399935
-        },
-        "harness|ko_mmlu_college_chemistry|5": {
-            "acc": 0.3,
-            "acc_stderr": 0.046056618647183814,
-            "acc_norm": 0.3,
-            "acc_norm_stderr": 0.046056618647183814
-        },
-        "harness|ko_mmlu_us_foreign_policy|5": {
-            "acc": 0.67,
-            "acc_stderr": 0.047258156262526066,
-            "acc_norm": 0.67,
-            "acc_norm_stderr": 0.047258156262526066
-        },
-        "harness|ko_mmlu_moral_disputes|5": {
-            "acc": 0.5202312138728323,
-            "acc_stderr": 0.026897049996382868,
-            "acc_norm": 0.5202312138728323,
-            "acc_norm_stderr": 0.026897049996382868
-        },
-        "harness|ko_mmlu_logical_fallacies|5": {
-            "acc": 0.5276073619631901,
-            "acc_stderr": 0.0392237829061099,
-            "acc_norm": 0.5276073619631901,
-            "acc_norm_stderr": 0.0392237829061099
-        },
-        "harness|ko_mmlu_prehistory|5": {
-            "acc": 0.5524691358024691,
-            "acc_stderr": 0.027667138569422708,
-            "acc_norm": 0.5524691358024691,
-            "acc_norm_stderr": 0.027667138569422708
-        },
-        "harness|ko_mmlu_college_mathematics|5": {
-            "acc": 0.38,
-            "acc_stderr": 0.048783173121456316,
-            "acc_norm": 0.38,
-            "acc_norm_stderr": 0.048783173121456316
-        },
-        "harness|ko_mmlu_high_school_government_and_politics|5": {
-            "acc": 0.7409326424870466,
-            "acc_stderr": 0.031618779179354094,
-            "acc_norm": 0.7409326424870466,
-            "acc_norm_stderr": 0.031618779179354094
-        },
-        "harness|ko_mmlu_econometrics|5": {
-            "acc": 0.41228070175438597,
-            "acc_stderr": 0.04630653203366597,
-            "acc_norm": 0.41228070175438597,
-            "acc_norm_stderr": 0.04630653203366597
-        },
-        "harness|ko_mmlu_high_school_psychology|5": {
-            "acc": 0.6880733944954128,
-            "acc_stderr": 0.01986296797670724,
-            "acc_norm": 0.6880733944954128,
-            "acc_norm_stderr": 0.01986296797670724
-        },
-        "harness|ko_mmlu_formal_logic|5": {
-            "acc": 0.3888888888888889,
-            "acc_stderr": 0.04360314860077459,
-            "acc_norm": 0.3888888888888889,
-            "acc_norm_stderr": 0.04360314860077459
-        },
-        "harness|ko_mmlu_nutrition|5": {
-            "acc": 0.5490196078431373,
-            "acc_stderr": 0.02849199358617156,
-            "acc_norm": 0.5490196078431373,
-            "acc_norm_stderr": 0.02849199358617156
-        },
-        "harness|ko_mmlu_business_ethics|5": {
-            "acc": 0.56,
-            "acc_stderr": 0.04988876515698589,
-            "acc_norm": 0.56,
-            "acc_norm_stderr": 0.04988876515698589
-        },
-        "harness|ko_mmlu_international_law|5": {
-            "acc": 0.6942148760330579,
-            "acc_stderr": 0.04205953933884121,
-            "acc_norm": 0.6942148760330579,
-            "acc_norm_stderr": 0.04205953933884121
-        },
-        "harness|ko_mmlu_astronomy|5": {
-            "acc": 0.5855263157894737,
-            "acc_stderr": 0.04008973785779206,
-            "acc_norm": 0.5855263157894737,
-            "acc_norm_stderr": 0.04008973785779206
-        },
-        "harness|ko_mmlu_professional_psychology|5": {
-            "acc": 0.48856209150326796,
-            "acc_stderr": 0.020222541515610874,
-            "acc_norm": 0.48856209150326796,
-            "acc_norm_stderr": 0.020222541515610874
-        },
-        "harness|ko_mmlu_professional_accounting|5": {
-            "acc": 0.3262411347517731,
-            "acc_stderr": 0.02796845304356317,
-            "acc_norm": 0.3262411347517731,
-            "acc_norm_stderr": 0.02796845304356317
-        },
-        "harness|ko_mmlu_machine_learning|5": {
-            "acc": 0.35714285714285715,
-            "acc_stderr": 0.04547960999764376,
-            "acc_norm": 0.35714285714285715,
-            "acc_norm_stderr": 0.04547960999764376
-        },
-        "harness|ko_mmlu_high_school_statistics|5": {
-            "acc": 0.4722222222222222,
-            "acc_stderr": 0.0340470532865388,
-            "acc_norm": 0.4722222222222222,
-            "acc_norm_stderr": 0.0340470532865388
-        },
-        "harness|ko_mmlu_moral_scenarios|5": {
-            "acc": 0.19776536312849163,
-            "acc_stderr": 0.013321620594050948,
-            "acc_norm": 0.19776536312849163,
-            "acc_norm_stderr": 0.013321620594050948
-        },
-        "harness|ko_mmlu_college_computer_science|5": {
-            "acc": 0.38,
-            "acc_stderr": 0.048783173121456316,
-            "acc_norm": 0.38,
-            "acc_norm_stderr": 0.048783173121456316
-        },
-        "harness|ko_mmlu_high_school_computer_science|5": {
-            "acc": 0.6,
-            "acc_stderr": 0.04923659639173309,
-            "acc_norm": 0.6,
-            "acc_norm_stderr": 0.04923659639173309
-        },
-        "harness|ko_mmlu_professional_medicine|5": {
-            "acc": 0.5073529411764706,
-            "acc_stderr": 0.030369552523902173,
-            "acc_norm": 0.5073529411764706,
-            "acc_norm_stderr": 0.030369552523902173
-        },
-        "harness|ko_mmlu_security_studies|5": {
-            "acc": 0.5591836734693878,
-            "acc_stderr": 0.03178419114175363,
-            "acc_norm": 0.5591836734693878,
-            "acc_norm_stderr": 0.03178419114175363
-        },
-        "harness|ko_mmlu_high_school_world_history|5": {
-            "acc": 0.7341772151898734,
-            "acc_stderr": 0.028756799629658335,
-            "acc_norm": 0.7341772151898734,
-            "acc_norm_stderr": 0.028756799629658335
-        },
-        "harness|ko_mmlu_professional_law|5": {
-            "acc": 0.38722294654498046,
-            "acc_stderr": 0.012441155326854933,
-            "acc_norm": 0.38722294654498046,
-            "acc_norm_stderr": 0.012441155326854933
-        },
-        "harness|ko_mmlu_high_school_us_history|5": {
-            "acc": 0.6617647058823529,
-            "acc_stderr": 0.03320574612945431,
-            "acc_norm": 0.6617647058823529,
-            "acc_norm_stderr": 0.03320574612945431
-        },
-        "harness|ko_mmlu_high_school_european_history|5": {
-            "acc": 0.696969696969697,
-            "acc_stderr": 0.03588624800091708,
-            "acc_norm": 0.696969696969697,
-            "acc_norm_stderr": 0.03588624800091708
-        },
-        "harness|ko_truthfulqa_mc|0": {
-            "mc1": 0.3671970624235006,
-            "mc1_stderr": 0.01687480500145318,
-            "mc2": 0.5519207261004673,
-            "mc2_stderr": 0.01619848088035427
-        },
-        "harness|ko_commongen_v2|2": {
-            "acc": 0.5808736717827627,
-            "acc_stderr": 0.016963995010862796,
-            "acc_norm": 0.5997638724911453,
-            "acc_norm_stderr": 0.01684469351050504
-        }
-    },
-    "versions": {
-        "all": 0,
-        "harness|ko_arc_challenge|25": 0,
-        "harness|ko_hellaswag|10": 0,
-        "harness|ko_mmlu_world_religions|5": 1,
-        "harness|ko_mmlu_management|5": 1,
-        "harness|ko_mmlu_miscellaneous|5": 1,
-        "harness|ko_mmlu_anatomy|5": 1,
-        "harness|ko_mmlu_abstract_algebra|5": 1,
-        "harness|ko_mmlu_conceptual_physics|5": 1,
-        "harness|ko_mmlu_virology|5": 1,
-        "harness|ko_mmlu_philosophy|5": 1,
-        "harness|ko_mmlu_human_aging|5": 1,
-        "harness|ko_mmlu_human_sexuality|5": 1,
-        "harness|ko_mmlu_medical_genetics|5": 1,
-        "harness|ko_mmlu_high_school_geography|5": 1,
-        "harness|ko_mmlu_electrical_engineering|5": 1,
-        "harness|ko_mmlu_college_physics|5": 1,
-        "harness|ko_mmlu_high_school_microeconomics|5": 1,
-        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
-        "harness|ko_mmlu_computer_security|5": 1,
-        "harness|ko_mmlu_global_facts|5": 1,
-        "harness|ko_mmlu_jurisprudence|5": 1,
-        "harness|ko_mmlu_high_school_chemistry|5": 1,
-        "harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
|
396 |
-
"harness|ko_mmlu_clinical_knowledge|5": 1,
|
397 |
-
"harness|ko_mmlu_public_relations|5": 1,
|
398 |
-
"harness|ko_mmlu_high_school_mathematics|5": 1,
|
399 |
-
"harness|ko_mmlu_high_school_physics|5": 1,
|
400 |
-
"harness|ko_mmlu_sociology|5": 1,
|
401 |
-
"harness|ko_mmlu_college_medicine|5": 1,
|
402 |
-
"harness|ko_mmlu_elementary_mathematics|5": 1,
|
403 |
-
"harness|ko_mmlu_college_biology|5": 1,
|
404 |
-
"harness|ko_mmlu_college_chemistry|5": 1,
|
405 |
-
"harness|ko_mmlu_us_foreign_policy|5": 1,
|
406 |
-
"harness|ko_mmlu_moral_disputes|5": 1,
|
407 |
-
"harness|ko_mmlu_logical_fallacies|5": 1,
|
408 |
-
"harness|ko_mmlu_prehistory|5": 1,
|
409 |
-
"harness|ko_mmlu_college_mathematics|5": 1,
|
410 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
|
411 |
-
"harness|ko_mmlu_econometrics|5": 1,
|
412 |
-
"harness|ko_mmlu_high_school_psychology|5": 1,
|
413 |
-
"harness|ko_mmlu_formal_logic|5": 1,
|
414 |
-
"harness|ko_mmlu_nutrition|5": 1,
|
415 |
-
"harness|ko_mmlu_business_ethics|5": 1,
|
416 |
-
"harness|ko_mmlu_international_law|5": 1,
|
417 |
-
"harness|ko_mmlu_astronomy|5": 1,
|
418 |
-
"harness|ko_mmlu_professional_psychology|5": 1,
|
419 |
-
"harness|ko_mmlu_professional_accounting|5": 1,
|
420 |
-
"harness|ko_mmlu_machine_learning|5": 1,
|
421 |
-
"harness|ko_mmlu_high_school_statistics|5": 1,
|
422 |
-
"harness|ko_mmlu_moral_scenarios|5": 1,
|
423 |
-
"harness|ko_mmlu_college_computer_science|5": 1,
|
424 |
-
"harness|ko_mmlu_high_school_computer_science|5": 1,
|
425 |
-
"harness|ko_mmlu_professional_medicine|5": 1,
|
426 |
-
"harness|ko_mmlu_security_studies|5": 1,
|
427 |
-
"harness|ko_mmlu_high_school_world_history|5": 1,
|
428 |
-
"harness|ko_mmlu_professional_law|5": 1,
|
429 |
-
"harness|ko_mmlu_high_school_us_history|5": 1,
|
430 |
-
"harness|ko_mmlu_high_school_european_history|5": 1,
|
431 |
-
"harness|ko_truthfulqa_mc|0": 0,
|
432 |
-
"harness|ko_commongen_v2|2": 1
|
433 |
-
},
|
434 |
-
"config_general": {
|
435 |
-
"model_name": "T3Q-LLM/T3Q-LLM1-CV-v1.0",
|
436 |
-
"model_sha": "523fab4d827e2f74acb7d809806a104fda8a325d",
|
437 |
-
"model_dtype": "torch.float16",
|
438 |
-
"lighteval_sha": "",
|
439 |
-
"num_few_shot_default": 0,
|
440 |
-
"num_fewshot_seeds": 1,
|
441 |
-
"override_batch_size": 1,
|
442 |
-
"max_samples": null
|
443 |
-
}
|
444 |
-
}
T3Q-LLM/T3Q-LLM1-CV-v2.0/result_2024-05-09 22:59:00.json
DELETED
@@ -1,444 +0,0 @@
-{
-  "results": {
-    "harness|ko_arc_challenge|25": {
-      "acc": 0.5435153583617748,
-      "acc_stderr": 0.014555949760496435,
-      "acc_norm": 0.6083617747440273,
-      "acc_norm_stderr": 0.01426412212493822
-    },
-    "harness|ko_hellaswag|10": {
-      "acc": 0.5240987851025692,
-      "acc_stderr": 0.004983982396187368,
-      "acc_norm": 0.6967735510854411,
-      "acc_norm_stderr": 0.004587128273935065
-    },
-    "harness|ko_mmlu_world_religions|5": {
-      "acc": 0.6549707602339181,
-      "acc_stderr": 0.03645981377388806,
-      "acc_norm": 0.6549707602339181,
-      "acc_norm_stderr": 0.03645981377388806
-    },
-    "harness|ko_mmlu_management|5": {
-      "acc": 0.6796116504854369,
-      "acc_stderr": 0.04620284082280041,
-      "acc_norm": 0.6796116504854369,
-      "acc_norm_stderr": 0.04620284082280041
-    },
-    "harness|ko_mmlu_miscellaneous|5": {
-      "acc": 0.7075351213282248,
-      "acc_stderr": 0.016267000684598652,
-      "acc_norm": 0.7075351213282248,
-      "acc_norm_stderr": 0.016267000684598652
-    },
-    "harness|ko_mmlu_anatomy|5": {
-      "acc": 0.4444444444444444,
-      "acc_stderr": 0.04292596718256981,
-      "acc_norm": 0.4444444444444444,
-      "acc_norm_stderr": 0.04292596718256981
-    },
-    "harness|ko_mmlu_abstract_algebra|5": {
-      "acc": 0.25,
-      "acc_stderr": 0.04351941398892446,
-      "acc_norm": 0.25,
-      "acc_norm_stderr": 0.04351941398892446
-    },
-    "harness|ko_mmlu_conceptual_physics|5": {
-      "acc": 0.5063829787234042,
-      "acc_stderr": 0.032683358999363345,
-      "acc_norm": 0.5063829787234042,
-      "acc_norm_stderr": 0.032683358999363345
-    },
-    "harness|ko_mmlu_virology|5": {
-      "acc": 0.45180722891566266,
-      "acc_stderr": 0.038743715565879536,
-      "acc_norm": 0.45180722891566266,
-      "acc_norm_stderr": 0.038743715565879536
-    },
-    "harness|ko_mmlu_philosophy|5": {
-      "acc": 0.5980707395498392,
-      "acc_stderr": 0.027846476005930477,
-      "acc_norm": 0.5980707395498392,
-      "acc_norm_stderr": 0.027846476005930477
-    },
-    "harness|ko_mmlu_human_aging|5": {
-      "acc": 0.5874439461883408,
-      "acc_stderr": 0.03304062175449297,
-      "acc_norm": 0.5874439461883408,
-      "acc_norm_stderr": 0.03304062175449297
-    },
-    "harness|ko_mmlu_human_sexuality|5": {
-      "acc": 0.5801526717557252,
-      "acc_stderr": 0.04328577215262971,
-      "acc_norm": 0.5801526717557252,
-      "acc_norm_stderr": 0.04328577215262971
-    },
-    "harness|ko_mmlu_medical_genetics|5": {
-      "acc": 0.51,
-      "acc_stderr": 0.05024183937956911,
-      "acc_norm": 0.51,
-      "acc_norm_stderr": 0.05024183937956911
-    },
-    "harness|ko_mmlu_high_school_geography|5": {
-      "acc": 0.7727272727272727,
-      "acc_stderr": 0.029857515673386414,
-      "acc_norm": 0.7727272727272727,
-      "acc_norm_stderr": 0.029857515673386414
-    },
-    "harness|ko_mmlu_electrical_engineering|5": {
-      "acc": 0.45517241379310347,
-      "acc_stderr": 0.04149886942192117,
-      "acc_norm": 0.45517241379310347,
-      "acc_norm_stderr": 0.04149886942192117
-    },
-    "harness|ko_mmlu_college_physics|5": {
-      "acc": 0.3137254901960784,
-      "acc_stderr": 0.04617034827006717,
-      "acc_norm": 0.3137254901960784,
-      "acc_norm_stderr": 0.04617034827006717
-    },
-    "harness|ko_mmlu_high_school_microeconomics|5": {
-      "acc": 0.6134453781512605,
-      "acc_stderr": 0.031631458075523776,
-      "acc_norm": 0.6134453781512605,
-      "acc_norm_stderr": 0.031631458075523776
-    },
-    "harness|ko_mmlu_high_school_macroeconomics|5": {
-      "acc": 0.5820512820512821,
-      "acc_stderr": 0.025007329882461207,
-      "acc_norm": 0.5820512820512821,
-      "acc_norm_stderr": 0.025007329882461207
-    },
-    "harness|ko_mmlu_computer_security|5": {
-      "acc": 0.6,
-      "acc_stderr": 0.049236596391733084,
-      "acc_norm": 0.6,
-      "acc_norm_stderr": 0.049236596391733084
-    },
-    "harness|ko_mmlu_global_facts|5": {
-      "acc": 0.37,
-      "acc_stderr": 0.048523658709391,
-      "acc_norm": 0.37,
-      "acc_norm_stderr": 0.048523658709391
-    },
-    "harness|ko_mmlu_jurisprudence|5": {
-      "acc": 0.6666666666666666,
-      "acc_stderr": 0.04557239513497751,
-      "acc_norm": 0.6666666666666666,
-      "acc_norm_stderr": 0.04557239513497751
-    },
-    "harness|ko_mmlu_high_school_chemistry|5": {
-      "acc": 0.3399014778325123,
-      "acc_stderr": 0.033327690684107895,
-      "acc_norm": 0.3399014778325123,
-      "acc_norm_stderr": 0.033327690684107895
-    },
-    "harness|ko_mmlu_high_school_biology|5": {
-      "acc": 0.6161290322580645,
-      "acc_stderr": 0.027666182075539635,
-      "acc_norm": 0.6161290322580645,
-      "acc_norm_stderr": 0.027666182075539635
-    },
-    "harness|ko_mmlu_marketing|5": {
-      "acc": 0.782051282051282,
-      "acc_stderr": 0.027046857630716667,
-      "acc_norm": 0.782051282051282,
-      "acc_norm_stderr": 0.027046857630716667
-    },
-    "harness|ko_mmlu_clinical_knowledge|5": {
-      "acc": 0.5547169811320755,
-      "acc_stderr": 0.030588052974270655,
-      "acc_norm": 0.5547169811320755,
-      "acc_norm_stderr": 0.030588052974270655
-    },
-    "harness|ko_mmlu_public_relations|5": {
-      "acc": 0.5636363636363636,
-      "acc_stderr": 0.04750185058907296,
-      "acc_norm": 0.5636363636363636,
-      "acc_norm_stderr": 0.04750185058907296
-    },
-    "harness|ko_mmlu_high_school_mathematics|5": {
-      "acc": 0.29259259259259257,
-      "acc_stderr": 0.02773896963217609,
-      "acc_norm": 0.29259259259259257,
-      "acc_norm_stderr": 0.02773896963217609
-    },
-    "harness|ko_mmlu_high_school_physics|5": {
-      "acc": 0.2913907284768212,
-      "acc_stderr": 0.03710185726119995,
-      "acc_norm": 0.2913907284768212,
-      "acc_norm_stderr": 0.03710185726119995
-    },
-    "harness|ko_mmlu_sociology|5": {
-      "acc": 0.681592039800995,
-      "acc_stderr": 0.03294118479054095,
-      "acc_norm": 0.681592039800995,
-      "acc_norm_stderr": 0.03294118479054095
-    },
-    "harness|ko_mmlu_college_medicine|5": {
-      "acc": 0.4913294797687861,
-      "acc_stderr": 0.038118909889404105,
-      "acc_norm": 0.4913294797687861,
-      "acc_norm_stderr": 0.038118909889404105
-    },
-    "harness|ko_mmlu_elementary_mathematics|5": {
-      "acc": 0.43915343915343913,
-      "acc_stderr": 0.025559920550531013,
-      "acc_norm": 0.43915343915343913,
-      "acc_norm_stderr": 0.025559920550531013
-    },
-    "harness|ko_mmlu_college_biology|5": {
-      "acc": 0.5902777777777778,
-      "acc_stderr": 0.04112490974670787,
-      "acc_norm": 0.5902777777777778,
-      "acc_norm_stderr": 0.04112490974670787
-    },
-    "harness|ko_mmlu_college_chemistry|5": {
-      "acc": 0.33,
-      "acc_stderr": 0.047258156262526045,
-      "acc_norm": 0.33,
-      "acc_norm_stderr": 0.047258156262526045
-    },
-    "harness|ko_mmlu_us_foreign_policy|5": {
-      "acc": 0.71,
-      "acc_stderr": 0.045604802157206845,
-      "acc_norm": 0.71,
-      "acc_norm_stderr": 0.045604802157206845
-    },
-    "harness|ko_mmlu_moral_disputes|5": {
-      "acc": 0.569364161849711,
-      "acc_stderr": 0.026658800273672376,
-      "acc_norm": 0.569364161849711,
-      "acc_norm_stderr": 0.026658800273672376
-    },
-    "harness|ko_mmlu_logical_fallacies|5": {
-      "acc": 0.5950920245398773,
-      "acc_stderr": 0.038566721635489125,
-      "acc_norm": 0.5950920245398773,
-      "acc_norm_stderr": 0.038566721635489125
-    },
-    "harness|ko_mmlu_prehistory|5": {
-      "acc": 0.5987654320987654,
-      "acc_stderr": 0.027272582849839796,
-      "acc_norm": 0.5987654320987654,
-      "acc_norm_stderr": 0.027272582849839796
-    },
-    "harness|ko_mmlu_college_mathematics|5": {
-      "acc": 0.3,
-      "acc_stderr": 0.046056618647183814,
-      "acc_norm": 0.3,
-      "acc_norm_stderr": 0.046056618647183814
-    },
-    "harness|ko_mmlu_high_school_government_and_politics|5": {
-      "acc": 0.7616580310880829,
-      "acc_stderr": 0.030748905363909878,
-      "acc_norm": 0.7616580310880829,
-      "acc_norm_stderr": 0.030748905363909878
-    },
-    "harness|ko_mmlu_econometrics|5": {
-      "acc": 0.41228070175438597,
-      "acc_stderr": 0.04630653203366596,
-      "acc_norm": 0.41228070175438597,
-      "acc_norm_stderr": 0.04630653203366596
-    },
-    "harness|ko_mmlu_high_school_psychology|5": {
-      "acc": 0.7192660550458716,
-      "acc_stderr": 0.01926605504587161,
-      "acc_norm": 0.7192660550458716,
-      "acc_norm_stderr": 0.01926605504587161
-    },
-    "harness|ko_mmlu_formal_logic|5": {
-      "acc": 0.373015873015873,
-      "acc_stderr": 0.04325506042017086,
-      "acc_norm": 0.373015873015873,
-      "acc_norm_stderr": 0.04325506042017086
-    },
-    "harness|ko_mmlu_nutrition|5": {
-      "acc": 0.5751633986928104,
-      "acc_stderr": 0.028304576673141107,
-      "acc_norm": 0.5751633986928104,
-      "acc_norm_stderr": 0.028304576673141107
-    },
-    "harness|ko_mmlu_business_ethics|5": {
-      "acc": 0.58,
-      "acc_stderr": 0.049604496374885836,
-      "acc_norm": 0.58,
-      "acc_norm_stderr": 0.049604496374885836
-    },
-    "harness|ko_mmlu_international_law|5": {
-      "acc": 0.7024793388429752,
-      "acc_stderr": 0.04173349148083499,
-      "acc_norm": 0.7024793388429752,
-      "acc_norm_stderr": 0.04173349148083499
-    },
-    "harness|ko_mmlu_astronomy|5": {
-      "acc": 0.625,
-      "acc_stderr": 0.039397364351956274,
-      "acc_norm": 0.625,
-      "acc_norm_stderr": 0.039397364351956274
-    },
-    "harness|ko_mmlu_professional_psychology|5": {
-      "acc": 0.5179738562091504,
-      "acc_stderr": 0.020214761037872408,
-      "acc_norm": 0.5179738562091504,
-      "acc_norm_stderr": 0.020214761037872408
-    },
-    "harness|ko_mmlu_professional_accounting|5": {
-      "acc": 0.37943262411347517,
-      "acc_stderr": 0.028947338851614105,
-      "acc_norm": 0.37943262411347517,
-      "acc_norm_stderr": 0.028947338851614105
-    },
-    "harness|ko_mmlu_machine_learning|5": {
-      "acc": 0.35714285714285715,
-      "acc_stderr": 0.04547960999764376,
-      "acc_norm": 0.35714285714285715,
-      "acc_norm_stderr": 0.04547960999764376
-    },
-    "harness|ko_mmlu_high_school_statistics|5": {
-      "acc": 0.5,
-      "acc_stderr": 0.034099716973523674,
-      "acc_norm": 0.5,
-      "acc_norm_stderr": 0.034099716973523674
-    },
-    "harness|ko_mmlu_moral_scenarios|5": {
-      "acc": 0.4424581005586592,
-      "acc_stderr": 0.01661139368726858,
-      "acc_norm": 0.4424581005586592,
-      "acc_norm_stderr": 0.01661139368726858
-    },
-    "harness|ko_mmlu_college_computer_science|5": {
-      "acc": 0.45,
-      "acc_stderr": 0.05,
-      "acc_norm": 0.45,
-      "acc_norm_stderr": 0.05
-    },
-    "harness|ko_mmlu_high_school_computer_science|5": {
-      "acc": 0.68,
-      "acc_stderr": 0.046882617226215034,
-      "acc_norm": 0.68,
-      "acc_norm_stderr": 0.046882617226215034
-    },
-    "harness|ko_mmlu_professional_medicine|5": {
-      "acc": 0.5477941176470589,
-      "acc_stderr": 0.030233758551596438,
-      "acc_norm": 0.5477941176470589,
-      "acc_norm_stderr": 0.030233758551596438
-    },
-    "harness|ko_mmlu_security_studies|5": {
-      "acc": 0.5795918367346938,
-      "acc_stderr": 0.03160106993449601,
-      "acc_norm": 0.5795918367346938,
-      "acc_norm_stderr": 0.03160106993449601
-    },
-    "harness|ko_mmlu_high_school_world_history|5": {
-      "acc": 0.7805907172995781,
-      "acc_stderr": 0.026939106581553945,
-      "acc_norm": 0.7805907172995781,
-      "acc_norm_stderr": 0.026939106581553945
-    },
-    "harness|ko_mmlu_professional_law|5": {
-      "acc": 0.4165580182529335,
-      "acc_stderr": 0.01259115324505739,
-      "acc_norm": 0.4165580182529335,
-      "acc_norm_stderr": 0.01259115324505739
-    },
-    "harness|ko_mmlu_high_school_us_history|5": {
-      "acc": 0.7009803921568627,
-      "acc_stderr": 0.03213325717373618,
-      "acc_norm": 0.7009803921568627,
-      "acc_norm_stderr": 0.03213325717373618
-    },
-    "harness|ko_mmlu_high_school_european_history|5": {
-      "acc": 0.6848484848484848,
-      "acc_stderr": 0.0362773057502241,
-      "acc_norm": 0.6848484848484848,
-      "acc_norm_stderr": 0.0362773057502241
-    },
-    "harness|ko_truthfulqa_mc|0": {
-      "mc1": 0.4944920440636475,
-      "mc1_stderr": 0.01750243899045107,
-      "mc2": 0.6608137228678551,
-      "mc2_stderr": 0.015917590211927863
-    },
-    "harness|ko_commongen_v2|2": {
-      "acc": 0.6198347107438017,
-      "acc_stderr": 0.016689333596980098,
-      "acc_norm": 0.6257378984651711,
-      "acc_norm_stderr": 0.016637917789798742
-    }
-  },
-  "versions": {
-    "all": 0,
-    "harness|ko_arc_challenge|25": 0,
-    "harness|ko_hellaswag|10": 0,
-    "harness|ko_mmlu_world_religions|5": 1,
-    "harness|ko_mmlu_management|5": 1,
-    "harness|ko_mmlu_miscellaneous|5": 1,
-    "harness|ko_mmlu_anatomy|5": 1,
-    "harness|ko_mmlu_abstract_algebra|5": 1,
-    "harness|ko_mmlu_conceptual_physics|5": 1,
-    "harness|ko_mmlu_virology|5": 1,
-    "harness|ko_mmlu_philosophy|5": 1,
-    "harness|ko_mmlu_human_aging|5": 1,
-    "harness|ko_mmlu_human_sexuality|5": 1,
-    "harness|ko_mmlu_medical_genetics|5": 1,
-    "harness|ko_mmlu_high_school_geography|5": 1,
-    "harness|ko_mmlu_electrical_engineering|5": 1,
-    "harness|ko_mmlu_college_physics|5": 1,
-    "harness|ko_mmlu_high_school_microeconomics|5": 1,
-    "harness|ko_mmlu_high_school_macroeconomics|5": 1,
-    "harness|ko_mmlu_computer_security|5": 1,
-    "harness|ko_mmlu_global_facts|5": 1,
-    "harness|ko_mmlu_jurisprudence|5": 1,
-    "harness|ko_mmlu_high_school_chemistry|5": 1,
-    "harness|ko_mmlu_high_school_biology|5": 1,
-    "harness|ko_mmlu_marketing|5": 1,
-    "harness|ko_mmlu_clinical_knowledge|5": 1,
-    "harness|ko_mmlu_public_relations|5": 1,
-    "harness|ko_mmlu_high_school_mathematics|5": 1,
-    "harness|ko_mmlu_high_school_physics|5": 1,
-    "harness|ko_mmlu_sociology|5": 1,
-    "harness|ko_mmlu_college_medicine|5": 1,
-    "harness|ko_mmlu_elementary_mathematics|5": 1,
-    "harness|ko_mmlu_college_biology|5": 1,
-    "harness|ko_mmlu_college_chemistry|5": 1,
-    "harness|ko_mmlu_us_foreign_policy|5": 1,
-    "harness|ko_mmlu_moral_disputes|5": 1,
-    "harness|ko_mmlu_logical_fallacies|5": 1,
-    "harness|ko_mmlu_prehistory|5": 1,
-    "harness|ko_mmlu_college_mathematics|5": 1,
-    "harness|ko_mmlu_high_school_government_and_politics|5": 1,
-    "harness|ko_mmlu_econometrics|5": 1,
-    "harness|ko_mmlu_high_school_psychology|5": 1,
-    "harness|ko_mmlu_formal_logic|5": 1,
-    "harness|ko_mmlu_nutrition|5": 1,
-    "harness|ko_mmlu_business_ethics|5": 1,
-    "harness|ko_mmlu_international_law|5": 1,
-    "harness|ko_mmlu_astronomy|5": 1,
-    "harness|ko_mmlu_professional_psychology|5": 1,
-    "harness|ko_mmlu_professional_accounting|5": 1,
-    "harness|ko_mmlu_machine_learning|5": 1,
-    "harness|ko_mmlu_high_school_statistics|5": 1,
-    "harness|ko_mmlu_moral_scenarios|5": 1,
-    "harness|ko_mmlu_college_computer_science|5": 1,
-    "harness|ko_mmlu_high_school_computer_science|5": 1,
-    "harness|ko_mmlu_professional_medicine|5": 1,
-    "harness|ko_mmlu_security_studies|5": 1,
-    "harness|ko_mmlu_high_school_world_history|5": 1,
-    "harness|ko_mmlu_professional_law|5": 1,
-    "harness|ko_mmlu_high_school_us_history|5": 1,
-    "harness|ko_mmlu_high_school_european_history|5": 1,
-    "harness|ko_truthfulqa_mc|0": 0,
-    "harness|ko_commongen_v2|2": 1
-  },
-  "config_general": {
-    "model_name": "T3Q-LLM/T3Q-LLM1-CV-v2.0",
-    "model_sha": "6fb0adabdb4d74852287e8825dfe448cb0ee20c1",
-    "model_dtype": "torch.float16",
-    "lighteval_sha": "",
-    "num_few_shot_default": 0,
-    "num_fewshot_seeds": 1,
-    "override_batch_size": 1,
-    "max_samples": null
-  }
-}
T3Q-LLM/T3Q-LLM1-v2.0/result_2024-05-02 12:31:10.json
DELETED
@@ -1,444 +0,0 @@
-{
-  "results": {
-    "harness|ko_arc_challenge|25": {
-      "acc": 0.4462457337883959,
-      "acc_stderr": 0.014526705548539985,
-      "acc_norm": 0.5136518771331058,
-      "acc_norm_stderr": 0.01460594342986095
-    },
-    "harness|ko_hellaswag|10": {
-      "acc": 0.44652459669388567,
-      "acc_stderr": 0.0049611615892284164,
-      "acc_norm": 0.6050587532364071,
-      "acc_norm_stderr": 0.004878390226591714
-    },
-    "harness|ko_mmlu_world_religions|5": {
-      "acc": 0.6257309941520468,
-      "acc_stderr": 0.03711601185389482,
-      "acc_norm": 0.6257309941520468,
-      "acc_norm_stderr": 0.03711601185389482
-    },
-    "harness|ko_mmlu_management|5": {
-      "acc": 0.6796116504854369,
-      "acc_stderr": 0.04620284082280041,
-      "acc_norm": 0.6796116504854369,
-      "acc_norm_stderr": 0.04620284082280041
-    },
-    "harness|ko_mmlu_miscellaneous|5": {
-      "acc": 0.6807151979565773,
-      "acc_stderr": 0.016671261749538743,
-      "acc_norm": 0.6807151979565773,
-      "acc_norm_stderr": 0.016671261749538743
-    },
-    "harness|ko_mmlu_anatomy|5": {
-      "acc": 0.4222222222222222,
-      "acc_stderr": 0.042667634040995814,
-      "acc_norm": 0.4222222222222222,
-      "acc_norm_stderr": 0.042667634040995814
-    },
-    "harness|ko_mmlu_abstract_algebra|5": {
-      "acc": 0.19,
-      "acc_stderr": 0.03942772444036623,
-      "acc_norm": 0.19,
-      "acc_norm_stderr": 0.03942772444036623
-    },
-    "harness|ko_mmlu_conceptual_physics|5": {
-      "acc": 0.46808510638297873,
-      "acc_stderr": 0.03261936918467383,
-      "acc_norm": 0.46808510638297873,
-      "acc_norm_stderr": 0.03261936918467383
-    },
-    "harness|ko_mmlu_virology|5": {
-      "acc": 0.4457831325301205,
-      "acc_stderr": 0.03869543323472101,
-      "acc_norm": 0.4457831325301205,
-      "acc_norm_stderr": 0.03869543323472101
-    },
-    "harness|ko_mmlu_philosophy|5": {
-      "acc": 0.5659163987138264,
-      "acc_stderr": 0.0281502322445356,
-      "acc_norm": 0.5659163987138264,
-      "acc_norm_stderr": 0.0281502322445356
-    },
-    "harness|ko_mmlu_human_aging|5": {
-      "acc": 0.57847533632287,
-      "acc_stderr": 0.03314190222110657,
-      "acc_norm": 0.57847533632287,
-      "acc_norm_stderr": 0.03314190222110657
-    },
-    "harness|ko_mmlu_human_sexuality|5": {
-      "acc": 0.5343511450381679,
-      "acc_stderr": 0.04374928560599738,
-      "acc_norm": 0.5343511450381679,
-      "acc_norm_stderr": 0.04374928560599738
-    },
-    "harness|ko_mmlu_medical_genetics|5": {
-      "acc": 0.43,
-      "acc_stderr": 0.049756985195624284,
-      "acc_norm": 0.43,
-      "acc_norm_stderr": 0.049756985195624284
-    },
-    "harness|ko_mmlu_high_school_geography|5": {
-      "acc": 0.6515151515151515,
-      "acc_stderr": 0.033948539651564025,
-      "acc_norm": 0.6515151515151515,
-      "acc_norm_stderr": 0.033948539651564025
-    },
-    "harness|ko_mmlu_electrical_engineering|5": {
-      "acc": 0.43448275862068964,
-      "acc_stderr": 0.04130740879555497,
-      "acc_norm": 0.43448275862068964,
-      "acc_norm_stderr": 0.04130740879555497
-    },
-    "harness|ko_mmlu_college_physics|5": {
-      "acc": 0.2549019607843137,
-      "acc_stderr": 0.04336432707993176,
-      "acc_norm": 0.2549019607843137,
-      "acc_norm_stderr": 0.04336432707993176
-    },
-    "harness|ko_mmlu_high_school_microeconomics|5": {
-      "acc": 0.5630252100840336,
-      "acc_stderr": 0.03221943636566196,
-      "acc_norm": 0.5630252100840336,
-      "acc_norm_stderr": 0.03221943636566196
-    },
-    "harness|ko_mmlu_high_school_macroeconomics|5": {
-      "acc": 0.5256410256410257,
-      "acc_stderr": 0.025317649726448673,
-      "acc_norm": 0.5256410256410257,
-      "acc_norm_stderr": 0.025317649726448673
-    },
-    "harness|ko_mmlu_computer_security|5": {
-      "acc": 0.59,
-      "acc_stderr": 0.049431107042371025,
-      "acc_norm": 0.59,
-      "acc_norm_stderr": 0.049431107042371025
-    },
-    "harness|ko_mmlu_global_facts|5": {
-      "acc": 0.31,
-      "acc_stderr": 0.04648231987117316,
-      "acc_norm": 0.31,
-      "acc_norm_stderr": 0.04648231987117316
-    },
-    "harness|ko_mmlu_jurisprudence|5": {
-      "acc": 0.6851851851851852,
-      "acc_stderr": 0.04489931073591312,
-      "acc_norm": 0.6851851851851852,
-      "acc_norm_stderr": 0.04489931073591312
-    },
-    "harness|ko_mmlu_high_school_chemistry|5": {
-      "acc": 0.33497536945812806,
-      "acc_stderr": 0.033208527423483104,
-      "acc_norm": 0.33497536945812806,
-      "acc_norm_stderr": 0.033208527423483104
-    },
-    "harness|ko_mmlu_high_school_biology|5": {
-      "acc": 0.5741935483870968,
-      "acc_stderr": 0.028129112709165904,
-      "acc_norm": 0.5741935483870968,
-      "acc_norm_stderr": 0.028129112709165904
-    },
-    "harness|ko_mmlu_marketing|5": {
-      "acc": 0.7735042735042735,
-      "acc_stderr": 0.027421007295392926,
-      "acc_norm": 0.7735042735042735,
-      "acc_norm_stderr": 0.027421007295392926
-    },
-    "harness|ko_mmlu_clinical_knowledge|5": {
-      "acc": 0.5320754716981132,
-      "acc_stderr": 0.030709486992556538,
-      "acc_norm": 0.5320754716981132,
-      "acc_norm_stderr": 0.030709486992556538
-    },
-    "harness|ko_mmlu_public_relations|5": {
-      "acc": 0.5363636363636364,
-      "acc_stderr": 0.047764491623961985,
-      "acc_norm": 0.5363636363636364,
-      "acc_norm_stderr": 0.047764491623961985
-    },
-    "harness|ko_mmlu_high_school_mathematics|5": {
-      "acc": 0.29259259259259257,
-      "acc_stderr": 0.027738969632176088,
-      "acc_norm": 0.29259259259259257,
-      "acc_norm_stderr": 0.027738969632176088
-    },
-    "harness|ko_mmlu_high_school_physics|5": {
-      "acc": 0.33112582781456956,
-      "acc_stderr": 0.038425817186598696,
-      "acc_norm": 0.33112582781456956,
-      "acc_norm_stderr": 0.038425817186598696
-    },
-    "harness|ko_mmlu_sociology|5": {
-      "acc": 0.6318407960199005,
-      "acc_stderr": 0.03410410565495302,
-      "acc_norm": 0.6318407960199005,
-      "acc_norm_stderr": 0.03410410565495302
-    },
-    "harness|ko_mmlu_college_medicine|5": {
-      "acc": 0.45664739884393063,
-      "acc_stderr": 0.03798106566014498,
-      "acc_norm": 0.45664739884393063,
-      "acc_norm_stderr": 0.03798106566014498
-    },
-    "harness|ko_mmlu_elementary_mathematics|5": {
-      "acc": 0.3994708994708995,
-      "acc_stderr": 0.02522545028406788,
-      "acc_norm": 0.3994708994708995,
-      "acc_norm_stderr": 0.02522545028406788
-    },
-    "harness|ko_mmlu_college_biology|5": {
-      "acc": 0.5208333333333334,
-      "acc_stderr": 0.041775789507399935,
-      "acc_norm": 0.5208333333333334,
-      "acc_norm_stderr": 0.041775789507399935
-    },
-    "harness|ko_mmlu_college_chemistry|5": {
-      "acc": 0.3,
-      "acc_stderr": 0.046056618647183814,
-      "acc_norm": 0.3,
-      "acc_norm_stderr": 0.046056618647183814
-    },
-    "harness|ko_mmlu_us_foreign_policy|5": {
-      "acc": 0.67,
-      "acc_stderr": 0.047258156262526066,
-      "acc_norm": 0.67,
-      "acc_norm_stderr": 0.047258156262526066
-    },
-    "harness|ko_mmlu_moral_disputes|5": {
-      "acc": 0.5202312138728323,
-      "acc_stderr": 0.026897049996382868,
-      "acc_norm": 0.5202312138728323,
-      "acc_norm_stderr": 0.026897049996382868
-    },
-    "harness|ko_mmlu_logical_fallacies|5": {
-      "acc": 0.5276073619631901,
-      "acc_stderr": 0.0392237829061099,
-      "acc_norm": 0.5276073619631901,
-      "acc_norm_stderr": 0.0392237829061099
-    },
-    "harness|ko_mmlu_prehistory|5": {
-      "acc": 0.5524691358024691,
-      "acc_stderr": 0.027667138569422708,
-      "acc_norm": 0.5524691358024691,
-      "acc_norm_stderr": 0.027667138569422708
-    },
-    "harness|ko_mmlu_college_mathematics|5": {
-      "acc": 0.38,
-      "acc_stderr": 0.048783173121456316,
-      "acc_norm": 0.38,
-      "acc_norm_stderr": 0.048783173121456316
-    },
-    "harness|ko_mmlu_high_school_government_and_politics|5": {
-      "acc": 0.7409326424870466,
-      "acc_stderr": 0.031618779179354094,
-      "acc_norm": 0.7409326424870466,
-      "acc_norm_stderr": 0.031618779179354094
-    },
-    "harness|ko_mmlu_econometrics|5": {
-      "acc": 0.41228070175438597,
-      "acc_stderr": 0.04630653203366597,
-      "acc_norm": 0.41228070175438597,
-      "acc_norm_stderr": 0.04630653203366597
-    },
-    "harness|ko_mmlu_high_school_psychology|5": {
-      "acc": 0.6880733944954128,
-      "acc_stderr": 0.01986296797670724,
-      "acc_norm": 0.6880733944954128,
-      "acc_norm_stderr": 0.01986296797670724
-    },
-    "harness|ko_mmlu_formal_logic|5": {
-      "acc": 0.3888888888888889,
-      "acc_stderr": 0.04360314860077459,
-      "acc_norm": 0.3888888888888889,
-      "acc_norm_stderr": 0.04360314860077459
-    },
-    "harness|ko_mmlu_nutrition|5": {
-      "acc": 0.5490196078431373,
-      "acc_stderr": 0.02849199358617156,
-      "acc_norm": 0.5490196078431373,
-      "acc_norm_stderr": 0.02849199358617156
-    },
-    "harness|ko_mmlu_business_ethics|5": {
-      "acc": 0.56,
-      "acc_stderr": 0.04988876515698589,
-      "acc_norm": 0.56,
-      "acc_norm_stderr": 0.04988876515698589
-    },
-    "harness|ko_mmlu_international_law|5": {
-      "acc": 0.6942148760330579,
-      "acc_stderr": 0.04205953933884121,
-      "acc_norm": 0.6942148760330579,
-      "acc_norm_stderr": 0.04205953933884121
-    },
-    "harness|ko_mmlu_astronomy|5": {
-      "acc": 0.5855263157894737,
-      "acc_stderr": 0.04008973785779206,
-      "acc_norm": 0.5855263157894737,
-      "acc_norm_stderr": 0.04008973785779206
-    },
-    "harness|ko_mmlu_professional_psychology|5": {
-      "acc": 0.48856209150326796,
-      "acc_stderr": 0.020222541515610874,
-      "acc_norm": 0.48856209150326796,
-      "acc_norm_stderr": 0.020222541515610874
-    },
-    "harness|ko_mmlu_professional_accounting|5": {
-      "acc": 0.3262411347517731,
-      "acc_stderr": 0.02796845304356317,
-      "acc_norm": 0.3262411347517731,
-      "acc_norm_stderr": 0.02796845304356317
-    },
-    "harness|ko_mmlu_machine_learning|5": {
-      "acc": 0.35714285714285715,
-      "acc_stderr": 0.04547960999764376,
-      "acc_norm": 0.35714285714285715,
-      "acc_norm_stderr": 0.04547960999764376
-    },
-    "harness|ko_mmlu_high_school_statistics|5": {
-      "acc": 0.4722222222222222,
-      "acc_stderr": 0.0340470532865388,
-      "acc_norm": 0.4722222222222222,
-      "acc_norm_stderr": 0.0340470532865388
-    },
-    "harness|ko_mmlu_moral_scenarios|5": {
-      "acc": 0.19776536312849163,
-      "acc_stderr": 0.013321620594050948,
-      "acc_norm": 0.19776536312849163,
-      "acc_norm_stderr": 0.013321620594050948
-    },
-    "harness|ko_mmlu_college_computer_science|5": {
-      "acc": 0.38,
-      "acc_stderr": 0.048783173121456316,
-      "acc_norm": 0.38,
-      "acc_norm_stderr": 0.048783173121456316
-    },
-    "harness|ko_mmlu_high_school_computer_science|5": {
-      "acc": 0.6,
-      "acc_stderr": 0.04923659639173309,
-      "acc_norm": 0.6,
-      "acc_norm_stderr": 0.04923659639173309
-    },
-    "harness|ko_mmlu_professional_medicine|5": {
-      "acc": 0.5073529411764706,
-      "acc_stderr": 0.030369552523902173,
-      "acc_norm": 0.5073529411764706,
-      "acc_norm_stderr": 0.030369552523902173
-    },
-    "harness|ko_mmlu_security_studies|5": {
-      "acc": 0.5591836734693878,
-      "acc_stderr": 0.03178419114175363,
-      "acc_norm": 0.5591836734693878,
-      "acc_norm_stderr": 0.03178419114175363
-    },
-    "harness|ko_mmlu_high_school_world_history|5": {
-      "acc": 0.7341772151898734,
-      "acc_stderr": 0.028756799629658335,
-      "acc_norm": 0.7341772151898734,
-      "acc_norm_stderr": 0.028756799629658335
-    },
-    "harness|ko_mmlu_professional_law|5": {
-      "acc": 0.38722294654498046,
-      "acc_stderr": 0.012441155326854933,
-      "acc_norm": 0.38722294654498046,
-      "acc_norm_stderr": 0.012441155326854933
-    },
-    "harness|ko_mmlu_high_school_us_history|5": {
-      "acc": 0.6617647058823529,
-      "acc_stderr": 0.03320574612945431,
-      "acc_norm": 0.6617647058823529,
-      "acc_norm_stderr": 0.03320574612945431
-    },
-    "harness|ko_mmlu_high_school_european_history|5": {
-      "acc": 0.696969696969697,
-      "acc_stderr": 0.03588624800091708,
-      "acc_norm": 0.696969696969697,
-      "acc_norm_stderr": 0.03588624800091708
-    },
-    "harness|ko_truthfulqa_mc|0": {
-      "mc1": 0.3671970624235006,
-      "mc1_stderr": 0.01687480500145318,
-      "mc2": 0.5519207261004673,
-      "mc2_stderr": 0.01619848088035427
-    },
-    "harness|ko_commongen_v2|2": {
-      "acc": 0.5808736717827627,
-      "acc_stderr": 0.016963995010862796,
-      "acc_norm": 0.5997638724911453,
-      "acc_norm_stderr": 0.01684469351050504
-    }
-  },
-  "versions": {
-    "all": 0,
-    "harness|ko_arc_challenge|25": 0,
-    "harness|ko_hellaswag|10": 0,
-    "harness|ko_mmlu_world_religions|5": 1,
-    "harness|ko_mmlu_management|5": 1,
-    "harness|ko_mmlu_miscellaneous|5": 1,
-    "harness|ko_mmlu_anatomy|5": 1,
-    "harness|ko_mmlu_abstract_algebra|5": 1,
-    "harness|ko_mmlu_conceptual_physics|5": 1,
-    "harness|ko_mmlu_virology|5": 1,
-    "harness|ko_mmlu_philosophy|5": 1,
-    "harness|ko_mmlu_human_aging|5": 1,
-    "harness|ko_mmlu_human_sexuality|5": 1,
-    "harness|ko_mmlu_medical_genetics|5": 1,
-    "harness|ko_mmlu_high_school_geography|5": 1,
-    "harness|ko_mmlu_electrical_engineering|5": 1,
-    "harness|ko_mmlu_college_physics|5": 1,
-    "harness|ko_mmlu_high_school_microeconomics|5": 1,
-    "harness|ko_mmlu_high_school_macroeconomics|5": 1,
-    "harness|ko_mmlu_computer_security|5": 1,
-    "harness|ko_mmlu_global_facts|5": 1,
-    "harness|ko_mmlu_jurisprudence|5": 1,
-    "harness|ko_mmlu_high_school_chemistry|5": 1,
-    "harness|ko_mmlu_high_school_biology|5": 1,
-    "harness|ko_mmlu_marketing|5": 1,
-    "harness|ko_mmlu_clinical_knowledge|5": 1,
-    "harness|ko_mmlu_public_relations|5": 1,
-    "harness|ko_mmlu_high_school_mathematics|5": 1,
-    "harness|ko_mmlu_high_school_physics|5": 1,
-    "harness|ko_mmlu_sociology|5": 1,
-    "harness|ko_mmlu_college_medicine|5": 1,
-    "harness|ko_mmlu_elementary_mathematics|5": 1,
-    "harness|ko_mmlu_college_biology|5": 1,
-    "harness|ko_mmlu_college_chemistry|5": 1,
-    "harness|ko_mmlu_us_foreign_policy|5": 1,
-    "harness|ko_mmlu_moral_disputes|5": 1,
-    "harness|ko_mmlu_logical_fallacies|5": 1,
-    "harness|ko_mmlu_prehistory|5": 1,
-    "harness|ko_mmlu_college_mathematics|5": 1,
-    "harness|ko_mmlu_high_school_government_and_politics|5": 1,
-    "harness|ko_mmlu_econometrics|5": 1,
-    "harness|ko_mmlu_high_school_psychology|5": 1,
-    "harness|ko_mmlu_formal_logic|5": 1,
-    "harness|ko_mmlu_nutrition|5": 1,
-    "harness|ko_mmlu_business_ethics|5": 1,
-    "harness|ko_mmlu_international_law|5": 1,
-    "harness|ko_mmlu_astronomy|5": 1,
-    "harness|ko_mmlu_professional_psychology|5": 1,
-    "harness|ko_mmlu_professional_accounting|5": 1,
-    "harness|ko_mmlu_machine_learning|5": 1,
-    "harness|ko_mmlu_high_school_statistics|5": 1,
-    "harness|ko_mmlu_moral_scenarios|5": 1,
-    "harness|ko_mmlu_college_computer_science|5": 1,
-    "harness|ko_mmlu_high_school_computer_science|5": 1,
-    "harness|ko_mmlu_professional_medicine|5": 1,
-    "harness|ko_mmlu_security_studies|5": 1,
-    "harness|ko_mmlu_high_school_world_history|5": 1,
-    "harness|ko_mmlu_professional_law|5": 1,
-    "harness|ko_mmlu_high_school_us_history|5": 1,
-    "harness|ko_mmlu_high_school_european_history|5": 1,
-    "harness|ko_truthfulqa_mc|0": 0,
-    "harness|ko_commongen_v2|2": 1
-  },
-  "config_general": {
-    "model_name": "T3Q-LLM/T3Q-LLM1-v2.0",
-    "model_sha": "523fab4d827e2f74acb7d809806a104fda8a325d",
-    "model_dtype": "torch.float16",
-    "lighteval_sha": "",
-    "num_few_shot_default": 0,
-    "num_fewshot_seeds": 1,
-    "override_batch_size": 1,
-    "max_samples": null
-  }
-}
T3Q-LLM/T3Q-LLM2-FP-v1.0/result_2024-05-08 00:08:39.json
DELETED
@@ -1,444 +0,0 @@
-{
-  "results": {
-    "harness|ko_arc_challenge|25": {
-      "acc": 0.7107508532423208,
-      "acc_stderr": 0.013250012579393443,
-      "acc_norm": 0.7525597269624573,
-      "acc_norm_stderr": 0.01261035266329267
-    },
-    "harness|ko_hellaswag|10": {
-      "acc": 0.6019717187811193,
-      "acc_stderr": 0.004884909544477079,
-      "acc_norm": 0.7401911969727146,
-      "acc_norm_stderr": 0.004376333451909809
-    },
-    "harness|ko_mmlu_world_religions|5": {
-      "acc": 0.6608187134502924,
-      "acc_stderr": 0.03631053496488905,
-      "acc_norm": 0.6608187134502924,
-      "acc_norm_stderr": 0.03631053496488905
-    },
-    "harness|ko_mmlu_management|5": {
-      "acc": 0.6893203883495146,
-      "acc_stderr": 0.045821241601615506,
-      "acc_norm": 0.6893203883495146,
-      "acc_norm_stderr": 0.045821241601615506
-    },
-    "harness|ko_mmlu_miscellaneous|5": {
-      "acc": 0.6602809706257982,
-      "acc_stderr": 0.016936394114301624,
-      "acc_norm": 0.6602809706257982,
-      "acc_norm_stderr": 0.016936394114301624
-    },
-    "harness|ko_mmlu_anatomy|5": {
-      "acc": 0.45185185185185184,
-      "acc_stderr": 0.04299268905480863,
-      "acc_norm": 0.45185185185185184,
-      "acc_norm_stderr": 0.04299268905480863
-    },
-    "harness|ko_mmlu_abstract_algebra|5": {
-      "acc": 0.32,
-      "acc_stderr": 0.04688261722621504,
-      "acc_norm": 0.32,
-      "acc_norm_stderr": 0.04688261722621504
-    },
-    "harness|ko_mmlu_conceptual_physics|5": {
-      "acc": 0.46808510638297873,
-      "acc_stderr": 0.03261936918467383,
-      "acc_norm": 0.46808510638297873,
-      "acc_norm_stderr": 0.03261936918467383
-    },
-    "harness|ko_mmlu_virology|5": {
-      "acc": 0.5,
-      "acc_stderr": 0.03892494720807614,
-      "acc_norm": 0.5,
-      "acc_norm_stderr": 0.03892494720807614
-    },
-    "harness|ko_mmlu_philosophy|5": {
-      "acc": 0.5787781350482315,
-      "acc_stderr": 0.028043399858210635,
-      "acc_norm": 0.5787781350482315,
-      "acc_norm_stderr": 0.028043399858210635
-    },
-    "harness|ko_mmlu_human_aging|5": {
-      "acc": 0.6322869955156951,
-      "acc_stderr": 0.03236198350928275,
-      "acc_norm": 0.6322869955156951,
-      "acc_norm_stderr": 0.03236198350928275
-    },
-    "harness|ko_mmlu_human_sexuality|5": {
-      "acc": 0.5114503816793893,
-      "acc_stderr": 0.04384140024078016,
-      "acc_norm": 0.5114503816793893,
-      "acc_norm_stderr": 0.04384140024078016
-    },
-    "harness|ko_mmlu_medical_genetics|5": {
-      "acc": 0.54,
-      "acc_stderr": 0.05009082659620333,
-      "acc_norm": 0.54,
-      "acc_norm_stderr": 0.05009082659620333
-    },
-    "harness|ko_mmlu_high_school_geography|5": {
-      "acc": 0.7121212121212122,
-      "acc_stderr": 0.03225883512300993,
-      "acc_norm": 0.7121212121212122,
-      "acc_norm_stderr": 0.03225883512300993
-    },
-    "harness|ko_mmlu_electrical_engineering|5": {
-      "acc": 0.503448275862069,
-      "acc_stderr": 0.04166567577101579,
-      "acc_norm": 0.503448275862069,
-      "acc_norm_stderr": 0.04166567577101579
-    },
-    "harness|ko_mmlu_college_physics|5": {
-      "acc": 0.29411764705882354,
-      "acc_stderr": 0.04533838195929776,
-      "acc_norm": 0.29411764705882354,
-      "acc_norm_stderr": 0.04533838195929776
-    },
-    "harness|ko_mmlu_high_school_microeconomics|5": {
-      "acc": 0.5966386554621849,
-      "acc_stderr": 0.031866081214088314,
-      "acc_norm": 0.5966386554621849,
-      "acc_norm_stderr": 0.031866081214088314
-    },
-    "harness|ko_mmlu_high_school_macroeconomics|5": {
-      "acc": 0.5923076923076923,
-      "acc_stderr": 0.024915243985987854,
-      "acc_norm": 0.5923076923076923,
-      "acc_norm_stderr": 0.024915243985987854
-    },
-    "harness|ko_mmlu_computer_security|5": {
-      "acc": 0.67,
-      "acc_stderr": 0.047258156262526094,
-      "acc_norm": 0.67,
-      "acc_norm_stderr": 0.047258156262526094
-    },
-    "harness|ko_mmlu_global_facts|5": {
-      "acc": 0.27,
-      "acc_stderr": 0.0446196043338474,
-      "acc_norm": 0.27,
-      "acc_norm_stderr": 0.0446196043338474
-    },
-    "harness|ko_mmlu_jurisprudence|5": {
-      "acc": 0.6111111111111112,
-      "acc_stderr": 0.0471282125742677,
-      "acc_norm": 0.6111111111111112,
-      "acc_norm_stderr": 0.0471282125742677
-    },
-    "harness|ko_mmlu_high_school_chemistry|5": {
-      "acc": 0.4088669950738916,
-      "acc_stderr": 0.03459058815883233,
-      "acc_norm": 0.4088669950738916,
-      "acc_norm_stderr": 0.03459058815883233
-    },
-    "harness|ko_mmlu_high_school_biology|5": {
-      "acc": 0.632258064516129,
-      "acc_stderr": 0.027430866579973467,
-      "acc_norm": 0.632258064516129,
-      "acc_norm_stderr": 0.027430866579973467
-    },
-    "harness|ko_mmlu_marketing|5": {
-      "acc": 0.7564102564102564,
-      "acc_stderr": 0.028120966503914394,
-      "acc_norm": 0.7564102564102564,
-      "acc_norm_stderr": 0.028120966503914394
-    },
-    "harness|ko_mmlu_clinical_knowledge|5": {
-      "acc": 0.569811320754717,
-      "acc_stderr": 0.030471445867183238,
-      "acc_norm": 0.569811320754717,
-      "acc_norm_stderr": 0.030471445867183238
-    },
-    "harness|ko_mmlu_public_relations|5": {
-      "acc": 0.5818181818181818,
-      "acc_stderr": 0.047245774057315726,
-      "acc_norm": 0.5818181818181818,
-      "acc_norm_stderr": 0.047245774057315726
-    },
-    "harness|ko_mmlu_high_school_mathematics|5": {
-      "acc": 0.34444444444444444,
-      "acc_stderr": 0.028972648884844267,
-      "acc_norm": 0.34444444444444444,
-      "acc_norm_stderr": 0.028972648884844267
-    },
-    "harness|ko_mmlu_high_school_physics|5": {
-      "acc": 0.33112582781456956,
-      "acc_stderr": 0.038425817186598696,
-      "acc_norm": 0.33112582781456956,
-      "acc_norm_stderr": 0.038425817186598696
-    },
-    "harness|ko_mmlu_sociology|5": {
-      "acc": 0.7114427860696517,
-      "acc_stderr": 0.03203841040213322,
-      "acc_norm": 0.7114427860696517,
-      "acc_norm_stderr": 0.03203841040213322
-    },
-    "harness|ko_mmlu_college_medicine|5": {
-      "acc": 0.5144508670520231,
-      "acc_stderr": 0.03810871630454764,
-      "acc_norm": 0.5144508670520231,
-      "acc_norm_stderr": 0.03810871630454764
-    },
-    "harness|ko_mmlu_elementary_mathematics|5": {
-      "acc": 0.4312169312169312,
-      "acc_stderr": 0.025506481698138208,
-      "acc_norm": 0.4312169312169312,
-      "acc_norm_stderr": 0.025506481698138208
-    },
-    "harness|ko_mmlu_college_biology|5": {
-      "acc": 0.5347222222222222,
-      "acc_stderr": 0.04171115858181618,
-      "acc_norm": 0.5347222222222222,
-      "acc_norm_stderr": 0.04171115858181618
-    },
-    "harness|ko_mmlu_college_chemistry|5": {
-      "acc": 0.38,
-      "acc_stderr": 0.04878317312145633,
-      "acc_norm": 0.38,
-      "acc_norm_stderr": 0.04878317312145633
-    },
-    "harness|ko_mmlu_us_foreign_policy|5": {
-      "acc": 0.77,
-      "acc_stderr": 0.04229525846816507,
-      "acc_norm": 0.77,
-      "acc_norm_stderr": 0.04229525846816507
-    },
-    "harness|ko_mmlu_moral_disputes|5": {
-      "acc": 0.5722543352601156,
-      "acc_stderr": 0.026636539741116072,
-      "acc_norm": 0.5722543352601156,
-      "acc_norm_stderr": 0.026636539741116072
-    },
-    "harness|ko_mmlu_logical_fallacies|5": {
-      "acc": 0.588957055214724,
-      "acc_stderr": 0.038656978537853624,
-      "acc_norm": 0.588957055214724,
-      "acc_norm_stderr": 0.038656978537853624
-    },
-    "harness|ko_mmlu_prehistory|5": {
-      "acc": 0.6358024691358025,
-      "acc_stderr": 0.02677492989972234,
-      "acc_norm": 0.6358024691358025,
-      "acc_norm_stderr": 0.02677492989972234
-    },
-    "harness|ko_mmlu_college_mathematics|5": {
-      "acc": 0.35,
-      "acc_stderr": 0.0479372485441102,
-      "acc_norm": 0.35,
-      "acc_norm_stderr": 0.0479372485441102
-    },
-    "harness|ko_mmlu_high_school_government_and_politics|5": {
-      "acc": 0.772020725388601,
-      "acc_stderr": 0.030276909945178256,
-      "acc_norm": 0.772020725388601,
-      "acc_norm_stderr": 0.030276909945178256
-    },
-    "harness|ko_mmlu_econometrics|5": {
-      "acc": 0.42105263157894735,
-      "acc_stderr": 0.046446020912223177,
-      "acc_norm": 0.42105263157894735,
-      "acc_norm_stderr": 0.046446020912223177
-    },
-    "harness|ko_mmlu_high_school_psychology|5": {
-      "acc": 0.7045871559633028,
-      "acc_stderr": 0.019560619182976,
-      "acc_norm": 0.7045871559633028,
-      "acc_norm_stderr": 0.019560619182976
-    },
-    "harness|ko_mmlu_formal_logic|5": {
-      "acc": 0.4365079365079365,
-      "acc_stderr": 0.04435932892851466,
-      "acc_norm": 0.4365079365079365,
-      "acc_norm_stderr": 0.04435932892851466
-    },
-    "harness|ko_mmlu_nutrition|5": {
-      "acc": 0.5784313725490197,
-      "acc_stderr": 0.02827549015679145,
-      "acc_norm": 0.5784313725490197,
-      "acc_norm_stderr": 0.02827549015679145
-    },
-    "harness|ko_mmlu_business_ethics|5": {
-      "acc": 0.64,
-      "acc_stderr": 0.048241815132442176,
-      "acc_norm": 0.64,
-      "acc_norm_stderr": 0.048241815132442176
-    },
-    "harness|ko_mmlu_international_law|5": {
-      "acc": 0.768595041322314,
-      "acc_stderr": 0.03849856098794088,
-      "acc_norm": 0.768595041322314,
-      "acc_norm_stderr": 0.03849856098794088
-    },
-    "harness|ko_mmlu_astronomy|5": {
-      "acc": 0.6513157894736842,
-      "acc_stderr": 0.03878139888797611,
-      "acc_norm": 0.6513157894736842,
-      "acc_norm_stderr": 0.03878139888797611
-    },
-    "harness|ko_mmlu_professional_psychology|5": {
-      "acc": 0.5408496732026143,
-      "acc_stderr": 0.020160213617222516,
-      "acc_norm": 0.5408496732026143,
-      "acc_norm_stderr": 0.020160213617222516
-    },
-    "harness|ko_mmlu_professional_accounting|5": {
-      "acc": 0.37943262411347517,
-      "acc_stderr": 0.028947338851614098,
-      "acc_norm": 0.37943262411347517,
-      "acc_norm_stderr": 0.028947338851614098
-    },
-    "harness|ko_mmlu_machine_learning|5": {
-      "acc": 0.36607142857142855,
-      "acc_stderr": 0.0457237235873743,
-      "acc_norm": 0.36607142857142855,
-      "acc_norm_stderr": 0.0457237235873743
-    },
-    "harness|ko_mmlu_high_school_statistics|5": {
-      "acc": 0.5138888888888888,
-      "acc_stderr": 0.03408655867977748,
-      "acc_norm": 0.5138888888888888,
-      "acc_norm_stderr": 0.03408655867977748
-    },
-    "harness|ko_mmlu_moral_scenarios|5": {
-      "acc": 0.32625698324022345,
-      "acc_stderr": 0.01568044151888918,
-      "acc_norm": 0.32625698324022345,
-      "acc_norm_stderr": 0.01568044151888918
-    },
-    "harness|ko_mmlu_college_computer_science|5": {
-      "acc": 0.48,
-      "acc_stderr": 0.05021167315686779,
-      "acc_norm": 0.48,
-      "acc_norm_stderr": 0.05021167315686779
-    },
-    "harness|ko_mmlu_high_school_computer_science|5": {
-      "acc": 0.71,
-      "acc_stderr": 0.045604802157206845,
-      "acc_norm": 0.71,
-      "acc_norm_stderr": 0.045604802157206845
-    },
-    "harness|ko_mmlu_professional_medicine|5": {
-      "acc": 0.5477941176470589,
-      "acc_stderr": 0.03023375855159644,
-      "acc_norm": 0.5477941176470589,
-      "acc_norm_stderr": 0.03023375855159644
-    },
-    "harness|ko_mmlu_security_studies|5": {
-      "acc": 0.6448979591836734,
-      "acc_stderr": 0.030635655150387638,
-      "acc_norm": 0.6448979591836734,
-      "acc_norm_stderr": 0.030635655150387638
-    },
-    "harness|ko_mmlu_high_school_world_history|5": {
-      "acc": 0.7257383966244726,
-      "acc_stderr": 0.029041333510598025,
-      "acc_norm": 0.7257383966244726,
-      "acc_norm_stderr": 0.029041333510598025
-    },
-    "harness|ko_mmlu_professional_law|5": {
-      "acc": 0.42242503259452413,
-      "acc_stderr": 0.01261560047573493,
-      "acc_norm": 0.42242503259452413,
-      "acc_norm_stderr": 0.01261560047573493
-    },
-    "harness|ko_mmlu_high_school_us_history|5": {
-      "acc": 0.6519607843137255,
-      "acc_stderr": 0.03343311240488418,
-      "acc_norm": 0.6519607843137255,
-      "acc_norm_stderr": 0.03343311240488418
-    },
-    "harness|ko_mmlu_high_school_european_history|5": {
-      "acc": 0.6666666666666666,
-      "acc_stderr": 0.036810508691615486,
-      "acc_norm": 0.6666666666666666,
|
355 |
-
"acc_norm_stderr": 0.036810508691615486
|
356 |
-
},
|
357 |
-
"harness|ko_truthfulqa_mc|0": {
|
358 |
-
"mc1": 0.6903304773561811,
|
359 |
-
"mc1_stderr": 0.016185744355144922,
|
360 |
-
"mc2": 0.7746340762004879,
|
361 |
-
"mc2_stderr": 0.013932769514766515
|
362 |
-
},
|
363 |
-
"harness|ko_commongen_v2|2": {
|
364 |
-
"acc": 0.4982290436835891,
|
365 |
-
"acc_stderr": 0.017190246276231863,
|
366 |
-
"acc_norm": 0.51357733175915,
|
367 |
-
"acc_norm_stderr": 0.01718401506040145
|
368 |
-
}
|
369 |
-
},
|
370 |
-
"versions": {
|
371 |
-
"all": 0,
|
372 |
-
"harness|ko_arc_challenge|25": 0,
|
373 |
-
"harness|ko_hellaswag|10": 0,
|
374 |
-
"harness|ko_mmlu_world_religions|5": 1,
|
375 |
-
"harness|ko_mmlu_management|5": 1,
|
376 |
-
"harness|ko_mmlu_miscellaneous|5": 1,
|
377 |
-
"harness|ko_mmlu_anatomy|5": 1,
|
378 |
-
"harness|ko_mmlu_abstract_algebra|5": 1,
|
379 |
-
"harness|ko_mmlu_conceptual_physics|5": 1,
|
380 |
-
"harness|ko_mmlu_virology|5": 1,
|
381 |
-
"harness|ko_mmlu_philosophy|5": 1,
|
382 |
-
"harness|ko_mmlu_human_aging|5": 1,
|
383 |
-
"harness|ko_mmlu_human_sexuality|5": 1,
|
384 |
-
"harness|ko_mmlu_medical_genetics|5": 1,
|
385 |
-
"harness|ko_mmlu_high_school_geography|5": 1,
|
386 |
-
"harness|ko_mmlu_electrical_engineering|5": 1,
|
387 |
-
"harness|ko_mmlu_college_physics|5": 1,
|
388 |
-
"harness|ko_mmlu_high_school_microeconomics|5": 1,
|
389 |
-
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
|
390 |
-
"harness|ko_mmlu_computer_security|5": 1,
|
391 |
-
"harness|ko_mmlu_global_facts|5": 1,
|
392 |
-
"harness|ko_mmlu_jurisprudence|5": 1,
|
393 |
-
"harness|ko_mmlu_high_school_chemistry|5": 1,
|
394 |
-
"harness|ko_mmlu_high_school_biology|5": 1,
|
395 |
-
"harness|ko_mmlu_marketing|5": 1,
|
396 |
-
"harness|ko_mmlu_clinical_knowledge|5": 1,
|
397 |
-
"harness|ko_mmlu_public_relations|5": 1,
|
398 |
-
"harness|ko_mmlu_high_school_mathematics|5": 1,
|
399 |
-
"harness|ko_mmlu_high_school_physics|5": 1,
|
400 |
-
"harness|ko_mmlu_sociology|5": 1,
|
401 |
-
"harness|ko_mmlu_college_medicine|5": 1,
|
402 |
-
"harness|ko_mmlu_elementary_mathematics|5": 1,
|
403 |
-
"harness|ko_mmlu_college_biology|5": 1,
|
404 |
-
"harness|ko_mmlu_college_chemistry|5": 1,
|
405 |
-
"harness|ko_mmlu_us_foreign_policy|5": 1,
|
406 |
-
"harness|ko_mmlu_moral_disputes|5": 1,
|
407 |
-
"harness|ko_mmlu_logical_fallacies|5": 1,
|
408 |
-
"harness|ko_mmlu_prehistory|5": 1,
|
409 |
-
"harness|ko_mmlu_college_mathematics|5": 1,
|
410 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
|
411 |
-
"harness|ko_mmlu_econometrics|5": 1,
|
412 |
-
"harness|ko_mmlu_high_school_psychology|5": 1,
|
413 |
-
"harness|ko_mmlu_formal_logic|5": 1,
|
414 |
-
"harness|ko_mmlu_nutrition|5": 1,
|
415 |
-
"harness|ko_mmlu_business_ethics|5": 1,
|
416 |
-
"harness|ko_mmlu_international_law|5": 1,
|
417 |
-
"harness|ko_mmlu_astronomy|5": 1,
|
418 |
-
"harness|ko_mmlu_professional_psychology|5": 1,
|
419 |
-
"harness|ko_mmlu_professional_accounting|5": 1,
|
420 |
-
"harness|ko_mmlu_machine_learning|5": 1,
|
421 |
-
"harness|ko_mmlu_high_school_statistics|5": 1,
|
422 |
-
"harness|ko_mmlu_moral_scenarios|5": 1,
|
423 |
-
"harness|ko_mmlu_college_computer_science|5": 1,
|
424 |
-
"harness|ko_mmlu_high_school_computer_science|5": 1,
|
425 |
-
"harness|ko_mmlu_professional_medicine|5": 1,
|
426 |
-
"harness|ko_mmlu_security_studies|5": 1,
|
427 |
-
"harness|ko_mmlu_high_school_world_history|5": 1,
|
428 |
-
"harness|ko_mmlu_professional_law|5": 1,
|
429 |
-
"harness|ko_mmlu_high_school_us_history|5": 1,
|
430 |
-
"harness|ko_mmlu_high_school_european_history|5": 1,
|
431 |
-
"harness|ko_truthfulqa_mc|0": 0,
|
432 |
-
"harness|ko_commongen_v2|2": 1
|
433 |
-
},
|
434 |
-
"config_general": {
|
435 |
-
"model_name": "T3Q-LLM/T3Q-LLM2-FP-v1.0",
|
436 |
-
"model_sha": "290d9ccce695529a194958ce348f9d4182f7fe81",
|
437 |
-
"model_dtype": "torch.float16",
|
438 |
-
"lighteval_sha": "",
|
439 |
-
"num_few_shot_default": 0,
|
440 |
-
"num_fewshot_seeds": 1,
|
441 |
-
"override_batch_size": 1,
|
442 |
-
"max_samples": null
|
443 |
-
}
|
444 |
-
}
|
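Each deleted file follows the same lighteval-style layout seen above: a "results" map keyed by task name, a "versions" map, and a "config_general" block. As a minimal sketch (not part of this commit), assuming a local copy of one of the deleted files still exists under the path shown in this diff, the macro-average ko_mmlu accuracy could be recomputed like this:

```python
import json

# Minimal sketch (an assumption, not part of this repo): recompute the
# macro-average ko_mmlu accuracy from one of the deleted result files,
# assuming a local copy exists at the path shown in this diff.
path = "T3Q-LLM/T3Q-LLM2-FP-v2.0/result_2024-05-12 04:44:25.json"

with open(path, encoding="utf-8") as f:
    data = json.load(f)

# Every ko_mmlu subtask entry carries "acc"/"acc_norm" plus their stderrs.
mmlu_accs = [
    scores["acc"]
    for task, scores in data["results"].items()
    if task.startswith("harness|ko_mmlu_")
]
print(data["config_general"]["model_name"])
print(f"ko_mmlu macro-average acc: {sum(mmlu_accs) / len(mmlu_accs):.4f}")
```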
T3Q-LLM/T3Q-LLM2-FP-v2.0/result_2024-05-12 04:44:25.json
DELETED
@@ -1,444 +0,0 @@
{
    "results": {
        "harness|ko_arc_challenge|25": { "acc": 0.6860068259385665, "acc_stderr": 0.013562691224726297, "acc_norm": 0.7303754266211604, "acc_norm_stderr": 0.012968040686869159 },
        "harness|ko_hellaswag|10": { "acc": 0.522405895239992, "acc_stderr": 0.004984768912326951, "acc_norm": 0.6799442342162916, "acc_norm_stderr": 0.004655442766599439 },
        "harness|ko_mmlu_world_religions|5": { "acc": 0.6374269005847953, "acc_stderr": 0.0368713061556206, "acc_norm": 0.6374269005847953, "acc_norm_stderr": 0.0368713061556206 },
        "harness|ko_mmlu_management|5": { "acc": 0.7378640776699029, "acc_stderr": 0.043546310772605956, "acc_norm": 0.7378640776699029, "acc_norm_stderr": 0.043546310772605956 },
        "harness|ko_mmlu_miscellaneous|5": { "acc": 0.6845466155810983, "acc_stderr": 0.0166175017387634, "acc_norm": 0.6845466155810983, "acc_norm_stderr": 0.0166175017387634 },
        "harness|ko_mmlu_anatomy|5": { "acc": 0.45185185185185184, "acc_stderr": 0.04299268905480863, "acc_norm": 0.45185185185185184, "acc_norm_stderr": 0.04299268905480863 },
        "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.4, "acc_stderr": 0.04923659639173309, "acc_norm": 0.4, "acc_norm_stderr": 0.04923659639173309 },
        "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.5106382978723404, "acc_stderr": 0.03267862331014063, "acc_norm": 0.5106382978723404, "acc_norm_stderr": 0.03267862331014063 },
        "harness|ko_mmlu_virology|5": { "acc": 0.4939759036144578, "acc_stderr": 0.03892212195333047, "acc_norm": 0.4939759036144578, "acc_norm_stderr": 0.03892212195333047 },
        "harness|ko_mmlu_philosophy|5": { "acc": 0.6302250803858521, "acc_stderr": 0.027417996705630998, "acc_norm": 0.6302250803858521, "acc_norm_stderr": 0.027417996705630998 },
        "harness|ko_mmlu_human_aging|5": { "acc": 0.6636771300448431, "acc_stderr": 0.031708824268455, "acc_norm": 0.6636771300448431, "acc_norm_stderr": 0.031708824268455 },
        "harness|ko_mmlu_human_sexuality|5": { "acc": 0.5419847328244275, "acc_stderr": 0.04369802690578757, "acc_norm": 0.5419847328244275, "acc_norm_stderr": 0.04369802690578757 },
        "harness|ko_mmlu_medical_genetics|5": { "acc": 0.55, "acc_stderr": 0.05, "acc_norm": 0.55, "acc_norm_stderr": 0.05 },
        "harness|ko_mmlu_high_school_geography|5": { "acc": 0.7171717171717171, "acc_stderr": 0.03208779558786752, "acc_norm": 0.7171717171717171, "acc_norm_stderr": 0.03208779558786752 },
        "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.5379310344827586, "acc_stderr": 0.04154659671707548, "acc_norm": 0.5379310344827586, "acc_norm_stderr": 0.04154659671707548 },
        "harness|ko_mmlu_college_physics|5": { "acc": 0.2647058823529412, "acc_stderr": 0.04389869956808778, "acc_norm": 0.2647058823529412, "acc_norm_stderr": 0.04389869956808778 },
        "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.6092436974789915, "acc_stderr": 0.031693802357129965, "acc_norm": 0.6092436974789915, "acc_norm_stderr": 0.031693802357129965 },
        "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.6128205128205129, "acc_stderr": 0.024697216930878937, "acc_norm": 0.6128205128205129, "acc_norm_stderr": 0.024697216930878937 },
        "harness|ko_mmlu_computer_security|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 },
        "harness|ko_mmlu_global_facts|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 },
        "harness|ko_mmlu_jurisprudence|5": { "acc": 0.6018518518518519, "acc_stderr": 0.04732332615978813, "acc_norm": 0.6018518518518519, "acc_norm_stderr": 0.04732332615978813 },
        "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.41379310344827586, "acc_stderr": 0.034653044884067945, "acc_norm": 0.41379310344827586, "acc_norm_stderr": 0.034653044884067945 },
        "harness|ko_mmlu_high_school_biology|5": { "acc": 0.632258064516129, "acc_stderr": 0.02743086657997347, "acc_norm": 0.632258064516129, "acc_norm_stderr": 0.02743086657997347 },
        "harness|ko_mmlu_marketing|5": { "acc": 0.7991452991452992, "acc_stderr": 0.02624677294689048, "acc_norm": 0.7991452991452992, "acc_norm_stderr": 0.02624677294689048 },
        "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.5773584905660377, "acc_stderr": 0.03040233144576954, "acc_norm": 0.5773584905660377, "acc_norm_stderr": 0.03040233144576954 },
        "harness|ko_mmlu_public_relations|5": { "acc": 0.5909090909090909, "acc_stderr": 0.04709306978661895, "acc_norm": 0.5909090909090909, "acc_norm_stderr": 0.04709306978661895 },
        "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.34074074074074073, "acc_stderr": 0.028897748741131147, "acc_norm": 0.34074074074074073, "acc_norm_stderr": 0.028897748741131147 },
        "harness|ko_mmlu_high_school_physics|5": { "acc": 0.3576158940397351, "acc_stderr": 0.03913453431177258, "acc_norm": 0.3576158940397351, "acc_norm_stderr": 0.03913453431177258 },
        "harness|ko_mmlu_sociology|5": { "acc": 0.736318407960199, "acc_stderr": 0.03115715086935556, "acc_norm": 0.736318407960199, "acc_norm_stderr": 0.03115715086935556 },
        "harness|ko_mmlu_college_medicine|5": { "acc": 0.5549132947976878, "acc_stderr": 0.03789401760283647, "acc_norm": 0.5549132947976878, "acc_norm_stderr": 0.03789401760283647 },
        "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.4021164021164021, "acc_stderr": 0.02525303255499769, "acc_norm": 0.4021164021164021, "acc_norm_stderr": 0.02525303255499769 },
        "harness|ko_mmlu_college_biology|5": { "acc": 0.5416666666666666, "acc_stderr": 0.04166666666666665, "acc_norm": 0.5416666666666666, "acc_norm_stderr": 0.04166666666666665 },
        "harness|ko_mmlu_college_chemistry|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 },
        "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.76, "acc_stderr": 0.042923469599092816, "acc_norm": 0.76, "acc_norm_stderr": 0.042923469599092816 },
        "harness|ko_mmlu_moral_disputes|5": { "acc": 0.5924855491329479, "acc_stderr": 0.026454578146931505, "acc_norm": 0.5924855491329479, "acc_norm_stderr": 0.026454578146931505 },
        "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.6319018404907976, "acc_stderr": 0.03789213935838396, "acc_norm": 0.6319018404907976, "acc_norm_stderr": 0.03789213935838396 },
        "harness|ko_mmlu_prehistory|5": { "acc": 0.6450617283950617, "acc_stderr": 0.026624152478845853, "acc_norm": 0.6450617283950617, "acc_norm_stderr": 0.026624152478845853 },
        "harness|ko_mmlu_college_mathematics|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 },
        "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.772020725388601, "acc_stderr": 0.030276909945178256, "acc_norm": 0.772020725388601, "acc_norm_stderr": 0.030276909945178256 },
        "harness|ko_mmlu_econometrics|5": { "acc": 0.43859649122807015, "acc_stderr": 0.04668000738510455, "acc_norm": 0.43859649122807015, "acc_norm_stderr": 0.04668000738510455 },
        "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.7174311926605504, "acc_stderr": 0.01930424349770715, "acc_norm": 0.7174311926605504, "acc_norm_stderr": 0.01930424349770715 },
        "harness|ko_mmlu_formal_logic|5": { "acc": 0.4523809523809524, "acc_stderr": 0.044518079590553275, "acc_norm": 0.4523809523809524, "acc_norm_stderr": 0.044518079590553275 },
        "harness|ko_mmlu_nutrition|5": { "acc": 0.6111111111111112, "acc_stderr": 0.027914055510468, "acc_norm": 0.6111111111111112, "acc_norm_stderr": 0.027914055510468 },
        "harness|ko_mmlu_business_ethics|5": { "acc": 0.68, "acc_stderr": 0.04688261722621505, "acc_norm": 0.68, "acc_norm_stderr": 0.04688261722621505 },
        "harness|ko_mmlu_international_law|5": { "acc": 0.8099173553719008, "acc_stderr": 0.03581796951709282, "acc_norm": 0.8099173553719008, "acc_norm_stderr": 0.03581796951709282 },
        "harness|ko_mmlu_astronomy|5": { "acc": 0.618421052631579, "acc_stderr": 0.03953173377749194, "acc_norm": 0.618421052631579, "acc_norm_stderr": 0.03953173377749194 },
        "harness|ko_mmlu_professional_psychology|5": { "acc": 0.5506535947712419, "acc_stderr": 0.020123766528027266, "acc_norm": 0.5506535947712419, "acc_norm_stderr": 0.020123766528027266 },
        "harness|ko_mmlu_professional_accounting|5": { "acc": 0.36879432624113473, "acc_stderr": 0.028782227561347247, "acc_norm": 0.36879432624113473, "acc_norm_stderr": 0.028782227561347247 },
        "harness|ko_mmlu_machine_learning|5": { "acc": 0.4017857142857143, "acc_stderr": 0.04653333146973646, "acc_norm": 0.4017857142857143, "acc_norm_stderr": 0.04653333146973646 },
        "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.5138888888888888, "acc_stderr": 0.03408655867977748, "acc_norm": 0.5138888888888888, "acc_norm_stderr": 0.03408655867977748 },
        "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.30502793296089387, "acc_stderr": 0.015398723510916715, "acc_norm": 0.30502793296089387, "acc_norm_stderr": 0.015398723510916715 },
        "harness|ko_mmlu_college_computer_science|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795 },
        "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.67, "acc_stderr": 0.047258156262526094, "acc_norm": 0.67, "acc_norm_stderr": 0.047258156262526094 },
        "harness|ko_mmlu_professional_medicine|5": { "acc": 0.5514705882352942, "acc_stderr": 0.030211479609121596, "acc_norm": 0.5514705882352942, "acc_norm_stderr": 0.030211479609121596 },
        "harness|ko_mmlu_security_studies|5": { "acc": 0.6938775510204082, "acc_stderr": 0.02950489645459597, "acc_norm": 0.6938775510204082, "acc_norm_stderr": 0.02950489645459597 },
        "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.7172995780590717, "acc_stderr": 0.029312814153955934, "acc_norm": 0.7172995780590717, "acc_norm_stderr": 0.029312814153955934 },
        "harness|ko_mmlu_professional_law|5": { "acc": 0.4348109517601043, "acc_stderr": 0.01266123380561627, "acc_norm": 0.4348109517601043, "acc_norm_stderr": 0.01266123380561627 },
        "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.6666666666666666, "acc_stderr": 0.03308611113236437, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.03308611113236437 },
        "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.6666666666666666, "acc_stderr": 0.036810508691615486, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.036810508691615486 },
        "harness|ko_truthfulqa_mc|0": { "mc1": 0.6181150550795593, "mc1_stderr": 0.01700810193916349, "mc2": 0.7245345695347405, "mc2_stderr": 0.014413813713662276 },
        "harness|ko_commongen_v2|2": { "acc": 0.51357733175915, "acc_stderr": 0.01718401506040145, "acc_norm": 0.5360094451003542, "acc_norm_stderr": 0.017145715365486664 }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "T3Q-LLM/T3Q-LLM2-FP-v2.0",
        "model_sha": "fbd1c247b84b94689838632148824c6cba4c645d",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}
T3Q-LLM/T3Q-LLM3-NC-v1.0/result_2024-05-09 08:08:07.json
DELETED
@@ -1,444 +0,0 @@
{
    "results": {
        "harness|ko_arc_challenge|25": { "acc": 0.3225255972696246, "acc_stderr": 0.013659980894277371, "acc_norm": 0.34982935153583616, "acc_norm_stderr": 0.01393680921215828 },
        "harness|ko_hellaswag|10": { "acc": 0.3381796454889464, "acc_stderr": 0.004721231637092728, "acc_norm": 0.4182433778131846, "acc_norm_stderr": 0.004922624636945241 },
        "harness|ko_mmlu_world_religions|5": { "acc": 0.391812865497076, "acc_stderr": 0.037439798259264016, "acc_norm": 0.391812865497076, "acc_norm_stderr": 0.037439798259264016 },
        "harness|ko_mmlu_management|5": { "acc": 0.2912621359223301, "acc_stderr": 0.04498676320572924, "acc_norm": 0.2912621359223301, "acc_norm_stderr": 0.04498676320572924 },
        "harness|ko_mmlu_miscellaneous|5": { "acc": 0.3652618135376756, "acc_stderr": 0.017218530028838636, "acc_norm": 0.3652618135376756, "acc_norm_stderr": 0.017218530028838636 },
        "harness|ko_mmlu_anatomy|5": { "acc": 0.35555555555555557, "acc_stderr": 0.04135176749720386, "acc_norm": 0.35555555555555557, "acc_norm_stderr": 0.04135176749720386 },
        "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.24, "acc_stderr": 0.04292346959909283, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909283 },
        "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.28936170212765955, "acc_stderr": 0.02964400657700962, "acc_norm": 0.28936170212765955, "acc_norm_stderr": 0.02964400657700962 },
        "harness|ko_mmlu_virology|5": { "acc": 0.3132530120481928, "acc_stderr": 0.03610805018031023, "acc_norm": 0.3132530120481928, "acc_norm_stderr": 0.03610805018031023 },
        "harness|ko_mmlu_philosophy|5": { "acc": 0.3633440514469453, "acc_stderr": 0.027316847674192717, "acc_norm": 0.3633440514469453, "acc_norm_stderr": 0.027316847674192717 },
        "harness|ko_mmlu_human_aging|5": { "acc": 0.3901345291479821, "acc_stderr": 0.03273766725459156, "acc_norm": 0.3901345291479821, "acc_norm_stderr": 0.03273766725459156 },
        "harness|ko_mmlu_human_sexuality|5": { "acc": 0.3511450381679389, "acc_stderr": 0.04186445163013751, "acc_norm": 0.3511450381679389, "acc_norm_stderr": 0.04186445163013751 },
        "harness|ko_mmlu_medical_genetics|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 },
        "harness|ko_mmlu_high_school_geography|5": { "acc": 0.2676767676767677, "acc_stderr": 0.031544498882702866, "acc_norm": 0.2676767676767677, "acc_norm_stderr": 0.031544498882702866 },
        "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.3586206896551724, "acc_stderr": 0.03996629574876718, "acc_norm": 0.3586206896551724, "acc_norm_stderr": 0.03996629574876718 },
        "harness|ko_mmlu_college_physics|5": { "acc": 0.21568627450980393, "acc_stderr": 0.04092563958237657, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.04092563958237657 },
        "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.31512605042016806, "acc_stderr": 0.03017680828897434, "acc_norm": 0.31512605042016806, "acc_norm_stderr": 0.03017680828897434 },
        "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.3, "acc_stderr": 0.023234581088428498, "acc_norm": 0.3, "acc_norm_stderr": 0.023234581088428498 },
        "harness|ko_mmlu_computer_security|5": { "acc": 0.36, "acc_stderr": 0.048241815132442176, "acc_norm": 0.36, "acc_norm_stderr": 0.048241815132442176 },
        "harness|ko_mmlu_global_facts|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 },
        "harness|ko_mmlu_jurisprudence|5": { "acc": 0.37962962962962965, "acc_stderr": 0.04691521224077742, "acc_norm": 0.37962962962962965, "acc_norm_stderr": 0.04691521224077742 },
        "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.27586206896551724, "acc_stderr": 0.031447125816782405, "acc_norm": 0.27586206896551724, "acc_norm_stderr": 0.031447125816782405 },
        "harness|ko_mmlu_high_school_biology|5": { "acc": 0.32903225806451614, "acc_stderr": 0.026729499068349965, "acc_norm": 0.32903225806451614, "acc_norm_stderr": 0.026729499068349965 },
        "harness|ko_mmlu_marketing|5": { "acc": 0.49145299145299143, "acc_stderr": 0.032751303000970296, "acc_norm": 0.49145299145299143, "acc_norm_stderr": 0.032751303000970296 },
        "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.3169811320754717, "acc_stderr": 0.02863723563980092, "acc_norm": 0.3169811320754717, "acc_norm_stderr": 0.02863723563980092 },
        "harness|ko_mmlu_public_relations|5": { "acc": 0.3181818181818182, "acc_stderr": 0.04461272175910507, "acc_norm": 0.3181818181818182, "acc_norm_stderr": 0.04461272175910507 },
        "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.23703703703703705, "acc_stderr": 0.025928876132766114, "acc_norm": 0.23703703703703705, "acc_norm_stderr": 0.025928876132766114 },
        "harness|ko_mmlu_high_school_physics|5": { "acc": 0.31125827814569534, "acc_stderr": 0.03780445850526732, "acc_norm": 0.31125827814569534, "acc_norm_stderr": 0.03780445850526732 },
        "harness|ko_mmlu_sociology|5": { "acc": 0.36318407960199006, "acc_stderr": 0.034005985055990146, "acc_norm": 0.36318407960199006, "acc_norm_stderr": 0.034005985055990146 },
        "harness|ko_mmlu_college_medicine|5": { "acc": 0.31213872832369943, "acc_stderr": 0.035331333893236574, "acc_norm": 0.31213872832369943, "acc_norm_stderr": 0.035331333893236574 },
        "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.2671957671957672, "acc_stderr": 0.022789673145776564, "acc_norm": 0.2671957671957672, "acc_norm_stderr": 0.022789673145776564 },
        "harness|ko_mmlu_college_biology|5": { "acc": 0.3125, "acc_stderr": 0.038760854559127644, "acc_norm": 0.3125, "acc_norm_stderr": 0.038760854559127644 },
        "harness|ko_mmlu_college_chemistry|5": { "acc": 0.23, "acc_stderr": 0.042295258468165065, "acc_norm": 0.23, "acc_norm_stderr": 0.042295258468165065 },
        "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.43, "acc_stderr": 0.049756985195624284, "acc_norm": 0.43, "acc_norm_stderr": 0.049756985195624284 },
        "harness|ko_mmlu_moral_disputes|5": { "acc": 0.315028901734104, "acc_stderr": 0.02500931379006971, "acc_norm": 0.315028901734104, "acc_norm_stderr": 0.02500931379006971 },
        "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.3067484662576687, "acc_stderr": 0.03623089915724147, "acc_norm": 0.3067484662576687, "acc_norm_stderr": 0.03623089915724147 },
        "harness|ko_mmlu_prehistory|5": { "acc": 0.345679012345679, "acc_stderr": 0.026462487777001893, "acc_norm": 0.345679012345679, "acc_norm_stderr": 0.026462487777001893 },
        "harness|ko_mmlu_college_mathematics|5": { "acc": 0.28, "acc_stderr": 0.04512608598542127, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542127 },
        "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.32124352331606215, "acc_stderr": 0.033699508685490674, "acc_norm": 0.32124352331606215, "acc_norm_stderr": 0.033699508685490674 },
        "harness|ko_mmlu_econometrics|5": { "acc": 0.2807017543859649, "acc_stderr": 0.04227054451232199, "acc_norm": 0.2807017543859649, "acc_norm_stderr": 0.04227054451232199 },
        "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.27889908256880735, "acc_stderr": 0.01922746887646352, "acc_norm": 0.27889908256880735, "acc_norm_stderr": 0.01922746887646352 },
        "harness|ko_mmlu_formal_logic|5": { "acc": 0.15873015873015872, "acc_stderr": 0.03268454013011744, "acc_norm": 0.15873015873015872, "acc_norm_stderr": 0.03268454013011744 },
        "harness|ko_mmlu_nutrition|5": { "acc": 0.2908496732026144, "acc_stderr": 0.026004800363952113, "acc_norm": 0.2908496732026144, "acc_norm_stderr": 0.026004800363952113 },
        "harness|ko_mmlu_business_ethics|5": { "acc": 0.4, "acc_stderr": 0.049236596391733084, "acc_norm": 0.4, "acc_norm_stderr": 0.049236596391733084 },
        "harness|ko_mmlu_international_law|5": { "acc": 0.4214876033057851, "acc_stderr": 0.04507732278775094, "acc_norm": 0.4214876033057851, "acc_norm_stderr": 0.04507732278775094 },
        "harness|ko_mmlu_astronomy|5": { "acc": 0.2236842105263158, "acc_stderr": 0.03391160934343602, "acc_norm": 0.2236842105263158, "acc_norm_stderr": 0.03391160934343602 },
        "harness|ko_mmlu_professional_psychology|5": { "acc": 0.2973856209150327, "acc_stderr": 0.018492596536396955, "acc_norm": 0.2973856209150327, "acc_norm_stderr": 0.018492596536396955 },
        "harness|ko_mmlu_professional_accounting|5": { "acc": 0.3049645390070922, "acc_stderr": 0.027464708442022128, "acc_norm": 0.3049645390070922, "acc_norm_stderr": 0.027464708442022128 },
        "harness|ko_mmlu_machine_learning|5": { "acc": 0.3482142857142857, "acc_stderr": 0.045218299028335865, "acc_norm": 0.3482142857142857, "acc_norm_stderr": 0.045218299028335865 },
        "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.17592592592592593, "acc_stderr": 0.025967420958258533, "acc_norm": 0.17592592592592593, "acc_norm_stderr": 0.025967420958258533 },
        "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.23016759776536314, "acc_stderr": 0.014078339253425809, "acc_norm": 0.23016759776536314, "acc_norm_stderr": 0.014078339253425809 },
        "harness|ko_mmlu_college_computer_science|5": { "acc": 0.24, "acc_stderr": 0.04292346959909282, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909282 },
        "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.27, "acc_stderr": 0.044619604333847394, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394 },
        "harness|ko_mmlu_professional_medicine|5": { "acc": 0.20588235294117646, "acc_stderr": 0.024562204314142314, "acc_norm": 0.20588235294117646, "acc_norm_stderr": 0.024562204314142314 },
        "harness|ko_mmlu_security_studies|5": { "acc": 0.20816326530612245, "acc_stderr": 0.025991117672813292, "acc_norm": 0.20816326530612245, "acc_norm_stderr": 0.025991117672813292 },
        "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.4177215189873418, "acc_stderr": 0.032103530322412685, "acc_norm": 0.4177215189873418, "acc_norm_stderr": 0.032103530322412685 },
        "harness|ko_mmlu_professional_law|5": { "acc": 0.27183833116036504, "acc_stderr": 0.011363135278651418, "acc_norm": 0.27183833116036504, "acc_norm_stderr": 0.011363135278651418 },
        "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.2647058823529412, "acc_stderr": 0.030964517926923403, "acc_norm": 0.2647058823529412, "acc_norm_stderr": 0.030964517926923403 },
        "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.28484848484848485, "acc_stderr": 0.035243908445117836, "acc_norm": 0.28484848484848485, "acc_norm_stderr": 0.035243908445117836 },
        "harness|ko_truthfulqa_mc|0": { "mc1": 0.28518971848225216, "mc1_stderr": 0.015805827874454895, "mc2": 0.4632805233224405, "mc2_stderr": 0.01565468325038931 },
        "harness|ko_commongen_v2|2": { "acc": 0.282172373081464, "acc_stderr": 0.01547327158398843, "acc_norm": 0.3482880755608028, "acc_norm_stderr": 0.016379926739148037 }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "T3Q-LLM/T3Q-LLM3-NC-v1.0",
        "model_sha": "6e7affbc1ca332e68bac7425eeff26363bfa914f",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}
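Because every result file shares this schema, two runs can be diffed task by task. A hypothetical sketch (not part of this commit), assuming local copies of the two files named in this diff are available:

```python
import json

# Hypothetical sketch: per-task accuracy delta between two deleted result
# files (both filenames appear in this diff), assuming local copies exist.
paths = [
    "T3Q-LLM/T3Q-LLM2-FP-v2.0/result_2024-05-12 04:44:25.json",
    "T3Q-LLM/T3Q-LLM3-NC-v1.0/result_2024-05-09 08:08:07.json",
]

runs = []
for p in paths:
    with open(p, encoding="utf-8") as f:
        runs.append(json.load(f)["results"])

a, b = runs
for task in sorted(set(a) & set(b)):
    # ko_truthfulqa_mc reports mc1/mc2 rather than acc, so fall back to mc2.
    acc_a = a[task].get("acc", a[task].get("mc2"))
    acc_b = b[task].get("acc", b[task].get("mc2"))
    print(f"{task}: {acc_b - acc_a:+.4f}")
```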
TIGER-Lab/MAmmoTH2-7B-Plus/result_2024-05-13 17:04:51.json
DELETED
@@ -1,444 +0,0 @@
{
    "results": {
        "harness|ko_arc_challenge|25": { "acc": 0.3148464163822526, "acc_stderr": 0.01357265770308495, "acc_norm": 0.378839590443686, "acc_norm_stderr": 0.014175915490000319 },
        "harness|ko_hellaswag|10": { "acc": 0.3392750448117905, "acc_stderr": 0.004724956665879975, "acc_norm": 0.40928101971718783, "acc_norm_stderr": 0.004906962980328287 },
        "harness|ko_mmlu_world_religions|5": { "acc": 0.39766081871345027, "acc_stderr": 0.0375363895576169, "acc_norm": 0.39766081871345027, "acc_norm_stderr": 0.0375363895576169 },
        "harness|ko_mmlu_management|5": { "acc": 0.5339805825242718, "acc_stderr": 0.04939291447273482, "acc_norm": 0.5339805825242718, "acc_norm_stderr": 0.04939291447273482 },
        "harness|ko_mmlu_miscellaneous|5": { "acc": 0.44699872286079184, "acc_stderr": 0.01777922523339421, "acc_norm": 0.44699872286079184, "acc_norm_stderr": 0.01777922523339421 },
        "harness|ko_mmlu_anatomy|5": { "acc": 0.32592592592592595, "acc_stderr": 0.040491220417025055, "acc_norm": 0.32592592592592595, "acc_norm_stderr": 0.040491220417025055 },
        "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 },
        "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.40425531914893614, "acc_stderr": 0.03208115750788684, "acc_norm": 0.40425531914893614, "acc_norm_stderr": 0.03208115750788684 },
        "harness|ko_mmlu_virology|5": { "acc": 0.40963855421686746, "acc_stderr": 0.03828401115079022, "acc_norm": 0.40963855421686746, "acc_norm_stderr": 0.03828401115079022 },
        "harness|ko_mmlu_philosophy|5": { "acc": 0.44694533762057875, "acc_stderr": 0.02823776942208533, "acc_norm": 0.44694533762057875, "acc_norm_stderr": 0.02823776942208533 },
        "harness|ko_mmlu_human_aging|5": { "acc": 0.4663677130044843, "acc_stderr": 0.033481800170603065, "acc_norm": 0.4663677130044843, "acc_norm_stderr": 0.033481800170603065 },
        "harness|ko_mmlu_human_sexuality|5": { "acc": 0.4198473282442748, "acc_stderr": 0.043285772152629715, "acc_norm": 0.4198473282442748, "acc_norm_stderr": 0.043285772152629715 },
        "harness|ko_mmlu_medical_genetics|5": { "acc": 0.39, "acc_stderr": 0.04902071300001974, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001974 },
        "harness|ko_mmlu_high_school_geography|5": { "acc": 0.5353535353535354, "acc_stderr": 0.035534363688280626, "acc_norm": 0.5353535353535354, "acc_norm_stderr": 0.035534363688280626 },
        "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.45517241379310347, "acc_stderr": 0.04149886942192117, "acc_norm": 0.45517241379310347, "acc_norm_stderr": 0.04149886942192117 },
        "harness|ko_mmlu_college_physics|5": { "acc": 0.2549019607843137, "acc_stderr": 0.043364327079931785, "acc_norm": 0.2549019607843137, "acc_norm_stderr": 0.043364327079931785 },
        "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.592436974789916, "acc_stderr": 0.03191863374478465, "acc_norm": 0.592436974789916, "acc_norm_stderr": 0.03191863374478465 },
        "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.43333333333333335, "acc_stderr": 0.02512465352588513, "acc_norm": 0.43333333333333335, "acc_norm_stderr": 0.02512465352588513 },
        "harness|ko_mmlu_computer_security|5": { "acc": 0.6, "acc_stderr": 0.049236596391733084, "acc_norm": 0.6, "acc_norm_stderr": 0.049236596391733084 },
        "harness|ko_mmlu_global_facts|5": { "acc": 0.24, "acc_stderr": 0.042923469599092816, "acc_norm": 0.24, "acc_norm_stderr": 0.042923469599092816 },
        "harness|ko_mmlu_jurisprudence|5": { "acc": 0.5092592592592593, "acc_stderr": 0.04832853553437056, "acc_norm": 0.5092592592592593, "acc_norm_stderr": 0.04832853553437056 },
        "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.39408866995073893, "acc_stderr": 0.03438157967036543, "acc_norm": 0.39408866995073893, "acc_norm_stderr": 0.03438157967036543 },
        "harness|ko_mmlu_high_school_biology|5": { "acc": 0.4161290322580645, "acc_stderr": 0.028040981380761554, "acc_norm": 0.4161290322580645, "acc_norm_stderr": 0.028040981380761554 },
        "harness|ko_mmlu_marketing|5": { "acc": 0.6965811965811965, "acc_stderr": 0.03011821010694263, "acc_norm": 0.6965811965811965, "acc_norm_stderr": 0.03011821010694263 },
        "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.49056603773584906, "acc_stderr": 0.03076739470780808, "acc_norm": 0.49056603773584906, "acc_norm_stderr": 0.03076739470780808 },
        "harness|ko_mmlu_public_relations|5": { "acc": 0.5, "acc_stderr": 0.04789131426105757, "acc_norm": 0.5, "acc_norm_stderr": 0.04789131426105757 },
        "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.31851851851851853, "acc_stderr": 0.028406533090608463, "acc_norm": 0.31851851851851853, "acc_norm_stderr": 0.028406533090608463 },
        "harness|ko_mmlu_high_school_physics|5": { "acc": 0.3509933774834437, "acc_stderr": 0.03896981964257374, "acc_norm": 0.3509933774834437, "acc_norm_stderr": 0.03896981964257374 },
        "harness|ko_mmlu_sociology|5": { "acc": 0.5522388059701493, "acc_stderr": 0.03516184772952167, "acc_norm": 0.5522388059701493, "acc_norm_stderr": 0.03516184772952167 },
        "harness|ko_mmlu_college_medicine|5": { "acc": 0.3583815028901734, "acc_stderr": 0.036563436533531585, "acc_norm": 0.3583815028901734, "acc_norm_stderr": 0.036563436533531585 },
        "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.3994708994708995, "acc_stderr": 0.025225450284067873, "acc_norm": 0.3994708994708995, "acc_norm_stderr": 0.025225450284067873 },
        "harness|ko_mmlu_college_biology|5": { "acc": 0.3680555555555556, "acc_stderr": 0.040329990539607195, "acc_norm": 0.3680555555555556, "acc_norm_stderr": 0.040329990539607195 },
        "harness|ko_mmlu_college_chemistry|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 },
        "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.55, "acc_stderr": 0.05, "acc_norm": 0.55, "acc_norm_stderr": 0.05 },
        "harness|ko_mmlu_moral_disputes|5": { "acc": 0.48554913294797686, "acc_stderr": 0.026907849856282532, "acc_norm": 0.48554913294797686, "acc_norm_stderr": 0.026907849856282532 },
        "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.5030674846625767, "acc_stderr": 0.03928297078179663, "acc_norm": 0.5030674846625767, "acc_norm_stderr": 0.03928297078179663 },
        "harness|ko_mmlu_prehistory|5": { "acc": 0.4228395061728395, "acc_stderr": 0.027487472980871598, "acc_norm": 0.4228395061728395, "acc_norm_stderr": 0.027487472980871598 },
        "harness|ko_mmlu_college_mathematics|5": { "acc": 0.35, "acc_stderr": 0.047937248544110175, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110175 },
        "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.42487046632124353, "acc_stderr": 0.035674713352125395, "acc_norm": 0.42487046632124353, "acc_norm_stderr": 0.035674713352125395 },
        "harness|ko_mmlu_econometrics|5": { "acc": 0.3157894736842105, "acc_stderr": 0.04372748290278007, "acc_norm": 0.3157894736842105, "acc_norm_stderr": 0.04372748290278007 },
        "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.43853211009174314, "acc_stderr": 0.021274713073954565, "acc_norm": 0.43853211009174314, "acc_norm_stderr": 0.021274713073954565 },
        "harness|ko_mmlu_formal_logic|5": { "acc": 0.42857142857142855, "acc_stderr": 0.0442626668137991, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.0442626668137991 },
        "harness|ko_mmlu_nutrition|5": { "acc": 0.47058823529411764, "acc_stderr": 0.028580341065138296, "acc_norm": 0.47058823529411764, "acc_norm_stderr": 0.028580341065138296 },
        "harness|ko_mmlu_business_ethics|5": { "acc": 0.46, "acc_stderr": 0.05009082659620332, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620332 },
        "harness|ko_mmlu_international_law|5": { "acc": 0.6115702479338843, "acc_stderr": 0.044492703500683815, "acc_norm": 0.6115702479338843, "acc_norm_stderr": 0.044492703500683815 },
        "harness|ko_mmlu_astronomy|5": { "acc": 0.3881578947368421, "acc_stderr": 0.03965842097512744, "acc_norm": 0.3881578947368421, "acc_norm_stderr": 0.03965842097512744 },
        "harness|ko_mmlu_professional_psychology|5": { "acc": 0.3660130718954248, "acc_stderr": 0.01948802574552966, "acc_norm": 0.3660130718954248, "acc_norm_stderr": 0.01948802574552966 },
        "harness|ko_mmlu_professional_accounting|5": { "acc": 0.3971631205673759, "acc_stderr": 0.029189805673587088, "acc_norm": 0.3971631205673759, "acc_norm_stderr": 0.029189805673587088 },
        "harness|ko_mmlu_machine_learning|5": { "acc": 0.4017857142857143, "acc_stderr": 0.04653333146973646, "acc_norm": 0.4017857142857143, "acc_norm_stderr": 0.04653333146973646 },
        "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.37962962962962965, "acc_stderr": 0.03309682581119035, "acc_norm": 0.37962962962962965, "acc_norm_stderr": 0.03309682581119035 },
        "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.23016759776536314, "acc_stderr": 0.014078339253425819, "acc_norm": 0.23016759776536314, "acc_norm_stderr": 0.014078339253425819 },
        "harness|ko_mmlu_college_computer_science|5": {
|
310 |
-
"acc": 0.37,
|
311 |
-
"acc_stderr": 0.048523658709391,
|
312 |
-
"acc_norm": 0.37,
|
313 |
-
"acc_norm_stderr": 0.048523658709391
|
314 |
-
},
|
315 |
-
"harness|ko_mmlu_high_school_computer_science|5": {
|
316 |
-
"acc": 0.6,
|
317 |
-
"acc_stderr": 0.04923659639173309,
|
318 |
-
"acc_norm": 0.6,
|
319 |
-
"acc_norm_stderr": 0.04923659639173309
|
320 |
-
},
|
321 |
-
"harness|ko_mmlu_professional_medicine|5": {
|
322 |
-
"acc": 0.26838235294117646,
|
323 |
-
"acc_stderr": 0.0269174812243772,
|
324 |
-
"acc_norm": 0.26838235294117646,
|
325 |
-
"acc_norm_stderr": 0.0269174812243772
|
326 |
-
},
|
327 |
-
"harness|ko_mmlu_security_studies|5": {
|
328 |
-
"acc": 0.5265306122448979,
|
329 |
-
"acc_stderr": 0.031964127345232726,
|
330 |
-
"acc_norm": 0.5265306122448979,
|
331 |
-
"acc_norm_stderr": 0.031964127345232726
|
332 |
-
},
|
333 |
-
"harness|ko_mmlu_high_school_world_history|5": {
|
334 |
-
"acc": 0.569620253164557,
|
335 |
-
"acc_stderr": 0.03223017195937599,
|
336 |
-
"acc_norm": 0.569620253164557,
|
337 |
-
"acc_norm_stderr": 0.03223017195937599
|
338 |
-
},
|
339 |
-
"harness|ko_mmlu_professional_law|5": {
|
340 |
-
"acc": 0.32073011734028684,
|
341 |
-
"acc_stderr": 0.01192119999178262,
|
342 |
-
"acc_norm": 0.32073011734028684,
|
343 |
-
"acc_norm_stderr": 0.01192119999178262
|
344 |
-
},
|
345 |
-
"harness|ko_mmlu_high_school_us_history|5": {
|
346 |
-
"acc": 0.3137254901960784,
|
347 |
-
"acc_stderr": 0.032566854844603886,
|
348 |
-
"acc_norm": 0.3137254901960784,
|
349 |
-
"acc_norm_stderr": 0.032566854844603886
|
350 |
-
},
|
351 |
-
"harness|ko_mmlu_high_school_european_history|5": {
|
352 |
-
"acc": 0.3393939393939394,
|
353 |
-
"acc_stderr": 0.03697442205031595,
|
354 |
-
"acc_norm": 0.3393939393939394,
|
355 |
-
"acc_norm_stderr": 0.03697442205031595
|
356 |
-
},
|
357 |
-
"harness|ko_truthfulqa_mc|0": {
|
358 |
-
"mc1": 0.3157894736842105,
|
359 |
-
"mc1_stderr": 0.01627228795791693,
|
360 |
-
"mc2": 0.5013882170125173,
|
361 |
-
"mc2_stderr": 0.016079558760905017
|
362 |
-
},
|
363 |
-
"harness|ko_commongen_v2|2": {
|
364 |
-
"acc": 0.3907910271546635,
|
365 |
-
"acc_stderr": 0.016775298465108245,
|
366 |
-
"acc_norm": 0.42621015348288077,
|
367 |
-
"acc_norm_stderr": 0.017002122609489263
|
368 |
-
}
|
369 |
-
},
|
370 |
-
"versions": {
|
371 |
-
"all": 0,
|
372 |
-
"harness|ko_arc_challenge|25": 0,
|
373 |
-
"harness|ko_hellaswag|10": 0,
|
374 |
-
"harness|ko_mmlu_world_religions|5": 1,
|
375 |
-
"harness|ko_mmlu_management|5": 1,
|
376 |
-
"harness|ko_mmlu_miscellaneous|5": 1,
|
377 |
-
"harness|ko_mmlu_anatomy|5": 1,
|
378 |
-
"harness|ko_mmlu_abstract_algebra|5": 1,
|
379 |
-
"harness|ko_mmlu_conceptual_physics|5": 1,
|
380 |
-
"harness|ko_mmlu_virology|5": 1,
|
381 |
-
"harness|ko_mmlu_philosophy|5": 1,
|
382 |
-
"harness|ko_mmlu_human_aging|5": 1,
|
383 |
-
"harness|ko_mmlu_human_sexuality|5": 1,
|
384 |
-
"harness|ko_mmlu_medical_genetics|5": 1,
|
385 |
-
"harness|ko_mmlu_high_school_geography|5": 1,
|
386 |
-
"harness|ko_mmlu_electrical_engineering|5": 1,
|
387 |
-
"harness|ko_mmlu_college_physics|5": 1,
|
388 |
-
"harness|ko_mmlu_high_school_microeconomics|5": 1,
|
389 |
-
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
|
390 |
-
"harness|ko_mmlu_computer_security|5": 1,
|
391 |
-
"harness|ko_mmlu_global_facts|5": 1,
|
392 |
-
"harness|ko_mmlu_jurisprudence|5": 1,
|
393 |
-
"harness|ko_mmlu_high_school_chemistry|5": 1,
|
394 |
-
"harness|ko_mmlu_high_school_biology|5": 1,
|
395 |
-
"harness|ko_mmlu_marketing|5": 1,
|
396 |
-
"harness|ko_mmlu_clinical_knowledge|5": 1,
|
397 |
-
"harness|ko_mmlu_public_relations|5": 1,
|
398 |
-
"harness|ko_mmlu_high_school_mathematics|5": 1,
|
399 |
-
"harness|ko_mmlu_high_school_physics|5": 1,
|
400 |
-
"harness|ko_mmlu_sociology|5": 1,
|
401 |
-
"harness|ko_mmlu_college_medicine|5": 1,
|
402 |
-
"harness|ko_mmlu_elementary_mathematics|5": 1,
|
403 |
-
"harness|ko_mmlu_college_biology|5": 1,
|
404 |
-
"harness|ko_mmlu_college_chemistry|5": 1,
|
405 |
-
"harness|ko_mmlu_us_foreign_policy|5": 1,
|
406 |
-
"harness|ko_mmlu_moral_disputes|5": 1,
|
407 |
-
"harness|ko_mmlu_logical_fallacies|5": 1,
|
408 |
-
"harness|ko_mmlu_prehistory|5": 1,
|
409 |
-
"harness|ko_mmlu_college_mathematics|5": 1,
|
410 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
|
411 |
-
"harness|ko_mmlu_econometrics|5": 1,
|
412 |
-
"harness|ko_mmlu_high_school_psychology|5": 1,
|
413 |
-
"harness|ko_mmlu_formal_logic|5": 1,
|
414 |
-
"harness|ko_mmlu_nutrition|5": 1,
|
415 |
-
"harness|ko_mmlu_business_ethics|5": 1,
|
416 |
-
"harness|ko_mmlu_international_law|5": 1,
|
417 |
-
"harness|ko_mmlu_astronomy|5": 1,
|
418 |
-
"harness|ko_mmlu_professional_psychology|5": 1,
|
419 |
-
"harness|ko_mmlu_professional_accounting|5": 1,
|
420 |
-
"harness|ko_mmlu_machine_learning|5": 1,
|
421 |
-
"harness|ko_mmlu_high_school_statistics|5": 1,
|
422 |
-
"harness|ko_mmlu_moral_scenarios|5": 1,
|
423 |
-
"harness|ko_mmlu_college_computer_science|5": 1,
|
424 |
-
"harness|ko_mmlu_high_school_computer_science|5": 1,
|
425 |
-
"harness|ko_mmlu_professional_medicine|5": 1,
|
426 |
-
"harness|ko_mmlu_security_studies|5": 1,
|
427 |
-
"harness|ko_mmlu_high_school_world_history|5": 1,
|
428 |
-
"harness|ko_mmlu_professional_law|5": 1,
|
429 |
-
"harness|ko_mmlu_high_school_us_history|5": 1,
|
430 |
-
"harness|ko_mmlu_high_school_european_history|5": 1,
|
431 |
-
"harness|ko_truthfulqa_mc|0": 0,
|
432 |
-
"harness|ko_commongen_v2|2": 1
|
433 |
-
},
|
434 |
-
"config_general": {
|
435 |
-
"model_name": "TIGER-Lab/MAmmoTH2-7B-Plus",
|
436 |
-
"model_sha": "dd37ba4789b04ba59116fa9b0b90bcc0c5f3df15",
|
437 |
-
"model_dtype": "torch.float16",
|
438 |
-
"lighteval_sha": "",
|
439 |
-
"num_few_shot_default": 0,
|
440 |
-
"num_fewshot_seeds": 1,
|
441 |
-
"override_batch_size": 1,
|
442 |
-
"max_samples": null
|
443 |
-
}
|
444 |
-
}
|

TIGER-Lab/MAmmoTH2-8B-Plus/result_2024-05-13 17:04:55.json
DELETED
@@ -1,444 +0,0 @@
-{
-    "results": {
-        "harness|ko_arc_challenge|25": { "acc": 0.3873720136518771, "acc_stderr": 0.014235872487909869, "acc_norm": 0.44795221843003413, "acc_norm_stderr": 0.014532011498211672 },
-        "harness|ko_hellaswag|10": { "acc": 0.3767177853017327, "acc_stderr": 0.004835728903731406, "acc_norm": 0.4856602270464051, "acc_norm_stderr": 0.004987728900897592 },
-        "harness|ko_mmlu_world_religions|5": { "acc": 0.5380116959064327, "acc_stderr": 0.038237270928823064, "acc_norm": 0.5380116959064327, "acc_norm_stderr": 0.038237270928823064 },
-        "harness|ko_mmlu_management|5": { "acc": 0.6213592233009708, "acc_stderr": 0.048026946982589726, "acc_norm": 0.6213592233009708, "acc_norm_stderr": 0.048026946982589726 },
-        "harness|ko_mmlu_miscellaneous|5": { "acc": 0.5146871008939975, "acc_stderr": 0.01787224802442913, "acc_norm": 0.5146871008939975, "acc_norm_stderr": 0.01787224802442913 },
-        "harness|ko_mmlu_anatomy|5": { "acc": 0.3851851851851852, "acc_stderr": 0.042039210401562783, "acc_norm": 0.3851851851851852, "acc_norm_stderr": 0.042039210401562783 },
-        "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 },
-        "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.46382978723404256, "acc_stderr": 0.03260038511835772, "acc_norm": 0.46382978723404256, "acc_norm_stderr": 0.03260038511835772 },
-        "harness|ko_mmlu_virology|5": { "acc": 0.3855421686746988, "acc_stderr": 0.037891344246115496, "acc_norm": 0.3855421686746988, "acc_norm_stderr": 0.037891344246115496 },
-        "harness|ko_mmlu_philosophy|5": { "acc": 0.5080385852090032, "acc_stderr": 0.02839442137098453, "acc_norm": 0.5080385852090032, "acc_norm_stderr": 0.02839442137098453 },
-        "harness|ko_mmlu_human_aging|5": { "acc": 0.5201793721973094, "acc_stderr": 0.033530461674123005, "acc_norm": 0.5201793721973094, "acc_norm_stderr": 0.033530461674123005 },
-        "harness|ko_mmlu_human_sexuality|5": { "acc": 0.4198473282442748, "acc_stderr": 0.04328577215262972, "acc_norm": 0.4198473282442748, "acc_norm_stderr": 0.04328577215262972 },
-        "harness|ko_mmlu_medical_genetics|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 },
-        "harness|ko_mmlu_high_school_geography|5": { "acc": 0.5555555555555556, "acc_stderr": 0.03540294377095368, "acc_norm": 0.5555555555555556, "acc_norm_stderr": 0.03540294377095368 },
-        "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.45517241379310347, "acc_stderr": 0.04149886942192117, "acc_norm": 0.45517241379310347, "acc_norm_stderr": 0.04149886942192117 },
-        "harness|ko_mmlu_college_physics|5": { "acc": 0.28431372549019607, "acc_stderr": 0.04488482852329017, "acc_norm": 0.28431372549019607, "acc_norm_stderr": 0.04488482852329017 },
-        "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.5462184873949579, "acc_stderr": 0.032339434681820885, "acc_norm": 0.5462184873949579, "acc_norm_stderr": 0.032339434681820885 },
-        "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.47692307692307695, "acc_stderr": 0.025323990861736125, "acc_norm": 0.47692307692307695, "acc_norm_stderr": 0.025323990861736125 },
-        "harness|ko_mmlu_computer_security|5": { "acc": 0.52, "acc_stderr": 0.05021167315686779, "acc_norm": 0.52, "acc_norm_stderr": 0.05021167315686779 },
-        "harness|ko_mmlu_global_facts|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 },
-        "harness|ko_mmlu_jurisprudence|5": { "acc": 0.5277777777777778, "acc_stderr": 0.04826217294139894, "acc_norm": 0.5277777777777778, "acc_norm_stderr": 0.04826217294139894 },
-        "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.4482758620689655, "acc_stderr": 0.03499113137676744, "acc_norm": 0.4482758620689655, "acc_norm_stderr": 0.03499113137676744 },
-        "harness|ko_mmlu_high_school_biology|5": { "acc": 0.5, "acc_stderr": 0.028444006199428714, "acc_norm": 0.5, "acc_norm_stderr": 0.028444006199428714 },
-        "harness|ko_mmlu_marketing|5": { "acc": 0.7393162393162394, "acc_stderr": 0.02876034895652341, "acc_norm": 0.7393162393162394, "acc_norm_stderr": 0.02876034895652341 },
-        "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.5132075471698113, "acc_stderr": 0.030762134874500476, "acc_norm": 0.5132075471698113, "acc_norm_stderr": 0.030762134874500476 },
-        "harness|ko_mmlu_public_relations|5": { "acc": 0.5181818181818182, "acc_stderr": 0.04785964010794916, "acc_norm": 0.5181818181818182, "acc_norm_stderr": 0.04785964010794916 },
-        "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.36666666666666664, "acc_stderr": 0.029381620726465076, "acc_norm": 0.36666666666666664, "acc_norm_stderr": 0.029381620726465076 },
-        "harness|ko_mmlu_high_school_physics|5": { "acc": 0.3509933774834437, "acc_stderr": 0.03896981964257375, "acc_norm": 0.3509933774834437, "acc_norm_stderr": 0.03896981964257375 },
-        "harness|ko_mmlu_sociology|5": { "acc": 0.5671641791044776, "acc_stderr": 0.03503490923673281, "acc_norm": 0.5671641791044776, "acc_norm_stderr": 0.03503490923673281 },
-        "harness|ko_mmlu_college_medicine|5": { "acc": 0.42196531791907516, "acc_stderr": 0.03765746693865151, "acc_norm": 0.42196531791907516, "acc_norm_stderr": 0.03765746693865151 },
-        "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.455026455026455, "acc_stderr": 0.02564692836104939, "acc_norm": 0.455026455026455, "acc_norm_stderr": 0.02564692836104939 },
-        "harness|ko_mmlu_college_biology|5": { "acc": 0.4513888888888889, "acc_stderr": 0.04161402398403279, "acc_norm": 0.4513888888888889, "acc_norm_stderr": 0.04161402398403279 },
-        "harness|ko_mmlu_college_chemistry|5": { "acc": 0.33, "acc_stderr": 0.04725815626252604, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252604 },
-        "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.65, "acc_stderr": 0.047937248544110175, "acc_norm": 0.65, "acc_norm_stderr": 0.047937248544110175 },
-        "harness|ko_mmlu_moral_disputes|5": { "acc": 0.5057803468208093, "acc_stderr": 0.026917296179149123, "acc_norm": 0.5057803468208093, "acc_norm_stderr": 0.026917296179149123 },
-        "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.49079754601226994, "acc_stderr": 0.039277056007874414, "acc_norm": 0.49079754601226994, "acc_norm_stderr": 0.039277056007874414 },
-        "harness|ko_mmlu_prehistory|5": { "acc": 0.5246913580246914, "acc_stderr": 0.02778680093142745, "acc_norm": 0.5246913580246914, "acc_norm_stderr": 0.02778680093142745 },
-        "harness|ko_mmlu_college_mathematics|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 },
-        "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.5544041450777202, "acc_stderr": 0.03587014986075658, "acc_norm": 0.5544041450777202, "acc_norm_stderr": 0.03587014986075658 },
-        "harness|ko_mmlu_econometrics|5": { "acc": 0.3508771929824561, "acc_stderr": 0.044895393502706986, "acc_norm": 0.3508771929824561, "acc_norm_stderr": 0.044895393502706986 },
-        "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.5944954128440367, "acc_stderr": 0.02105099799189684, "acc_norm": 0.5944954128440367, "acc_norm_stderr": 0.02105099799189684 },
-        "harness|ko_mmlu_formal_logic|5": { "acc": 0.40476190476190477, "acc_stderr": 0.04390259265377561, "acc_norm": 0.40476190476190477, "acc_norm_stderr": 0.04390259265377561 },
-        "harness|ko_mmlu_nutrition|5": { "acc": 0.5, "acc_stderr": 0.028629916715693413, "acc_norm": 0.5, "acc_norm_stderr": 0.028629916715693413 },
-        "harness|ko_mmlu_business_ethics|5": { "acc": 0.58, "acc_stderr": 0.049604496374885836, "acc_norm": 0.58, "acc_norm_stderr": 0.049604496374885836 },
-        "harness|ko_mmlu_international_law|5": { "acc": 0.6363636363636364, "acc_stderr": 0.043913262867240704, "acc_norm": 0.6363636363636364, "acc_norm_stderr": 0.043913262867240704 },
-        "harness|ko_mmlu_astronomy|5": { "acc": 0.4868421052631579, "acc_stderr": 0.04067533136309174, "acc_norm": 0.4868421052631579, "acc_norm_stderr": 0.04067533136309174 },
-        "harness|ko_mmlu_professional_psychology|5": { "acc": 0.3888888888888889, "acc_stderr": 0.019722058939618065, "acc_norm": 0.3888888888888889, "acc_norm_stderr": 0.019722058939618065 },
-        "harness|ko_mmlu_professional_accounting|5": { "acc": 0.3404255319148936, "acc_stderr": 0.028267657482650133, "acc_norm": 0.3404255319148936, "acc_norm_stderr": 0.028267657482650133 },
-        "harness|ko_mmlu_machine_learning|5": { "acc": 0.375, "acc_stderr": 0.04595091388086298, "acc_norm": 0.375, "acc_norm_stderr": 0.04595091388086298 },
-        "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.4537037037037037, "acc_stderr": 0.033953227263757976, "acc_norm": 0.4537037037037037, "acc_norm_stderr": 0.033953227263757976 },
-        "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.19776536312849163, "acc_stderr": 0.013321620594050948, "acc_norm": 0.19776536312849163, "acc_norm_stderr": 0.013321620594050948 },
-        "harness|ko_mmlu_college_computer_science|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 },
-        "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.58, "acc_stderr": 0.049604496374885836, "acc_norm": 0.58, "acc_norm_stderr": 0.049604496374885836 },
-        "harness|ko_mmlu_professional_medicine|5": { "acc": 0.35294117647058826, "acc_stderr": 0.0290294228156814, "acc_norm": 0.35294117647058826, "acc_norm_stderr": 0.0290294228156814 },
-        "harness|ko_mmlu_security_studies|5": { "acc": 0.5551020408163265, "acc_stderr": 0.031814251181977865, "acc_norm": 0.5551020408163265, "acc_norm_stderr": 0.031814251181977865 },
-        "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.6582278481012658, "acc_stderr": 0.030874537537553617, "acc_norm": 0.6582278481012658, "acc_norm_stderr": 0.030874537537553617 },
-        "harness|ko_mmlu_professional_law|5": { "acc": 0.35919165580182527, "acc_stderr": 0.012253386187584259, "acc_norm": 0.35919165580182527, "acc_norm_stderr": 0.012253386187584259 },
-        "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.6029411764705882, "acc_stderr": 0.03434131164719131, "acc_norm": 0.6029411764705882, "acc_norm_stderr": 0.03434131164719131 },
-        "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.6181818181818182, "acc_stderr": 0.03793713171165633, "acc_norm": 0.6181818181818182, "acc_norm_stderr": 0.03793713171165633 },
-        "harness|ko_truthfulqa_mc|0": { "mc1": 0.3353733170134639, "mc1_stderr": 0.016527534039668987, "mc2": 0.49886776378301484, "mc2_stderr": 0.01588273337036278 },
-        "harness|ko_commongen_v2|2": { "acc": 0.5076741440377804, "acc_stderr": 0.01718832921965428, "acc_norm": 0.5442739079102715, "acc_norm_stderr": 0.01712282914329265 }
-    },
-    "versions": {
-        "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0,
-        "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1,
-        "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1,
-        "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1,
-        "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1,
-        "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1,
-        "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1,
-        "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1,
-        "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1,
-        "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1,
-        "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1,
-        "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1,
-        "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1,
-        "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1,
-        "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1,
-        "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1,
-        "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1,
-        "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1,
-        "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1,
-        "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1,
-        "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1
-    },
-    "config_general": {
-        "model_name": "TIGER-Lab/MAmmoTH2-8B-Plus",
-        "model_sha": "a49b8a9dc80a42745201002ecbfeac12f4c696dc",
-        "model_dtype": "torch.float16",
-        "lighteval_sha": "",
-        "num_few_shot_default": 0,
-        "num_fewshot_seeds": 1,
-        "override_batch_size": 1,
-        "max_samples": null
-    }
-}

Taekyoon/llama2-org-koen-7b/result_2023-11-17 08:17:26.json
DELETED
@@ -1,444 +0,0 @@
-{
-    "results": {
-        "harness|ko_arc_challenge|25": { "acc": 0.3370307167235495, "acc_stderr": 0.013813476652902279, "acc_norm": 0.38993174061433444, "acc_norm_stderr": 0.014252959848892893 },
-        "harness|ko_hellaswag|10": { "acc": 0.3997211710814579, "acc_stderr": 0.004888398535520494, "acc_norm": 0.5370444134634534, "acc_norm_stderr": 0.0049760677264325615 },
-        "harness|ko_mmlu_world_religions|5": { "acc": 0.3391812865497076, "acc_stderr": 0.036310534964889056, "acc_norm": 0.3391812865497076, "acc_norm_stderr": 0.036310534964889056 },
-        "harness|ko_mmlu_management|5": { "acc": 0.2912621359223301, "acc_stderr": 0.044986763205729245, "acc_norm": 0.2912621359223301, "acc_norm_stderr": 0.044986763205729245 },
-        "harness|ko_mmlu_miscellaneous|5": { "acc": 0.3665389527458493, "acc_stderr": 0.01723124462679703, "acc_norm": 0.3665389527458493, "acc_norm_stderr": 0.01723124462679703 },
-        "harness|ko_mmlu_anatomy|5": { "acc": 0.34814814814814815, "acc_stderr": 0.041153246103369526, "acc_norm": 0.34814814814814815, "acc_norm_stderr": 0.041153246103369526 },
-        "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.34, "acc_stderr": 0.04760952285695236, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695236 },
-        "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.33191489361702126, "acc_stderr": 0.030783736757745664, "acc_norm": 0.33191489361702126, "acc_norm_stderr": 0.030783736757745664 },
-        "harness|ko_mmlu_virology|5": { "acc": 0.25903614457831325, "acc_stderr": 0.03410646614071857, "acc_norm": 0.25903614457831325, "acc_norm_stderr": 0.03410646614071857 },
-        "harness|ko_mmlu_philosophy|5": { "acc": 0.34726688102893893, "acc_stderr": 0.027040745502307333, "acc_norm": 0.34726688102893893, "acc_norm_stderr": 0.027040745502307333 },
-        "harness|ko_mmlu_human_aging|5": { "acc": 0.3721973094170404, "acc_stderr": 0.032443052830087304, "acc_norm": 0.3721973094170404, "acc_norm_stderr": 0.032443052830087304 },
-        "harness|ko_mmlu_human_sexuality|5": { "acc": 0.32061068702290074, "acc_stderr": 0.040933292298342784, "acc_norm": 0.32061068702290074, "acc_norm_stderr": 0.040933292298342784 },
-        "harness|ko_mmlu_medical_genetics|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 },
-        "harness|ko_mmlu_high_school_geography|5": { "acc": 0.29797979797979796, "acc_stderr": 0.03258630383836554, "acc_norm": 0.29797979797979796, "acc_norm_stderr": 0.03258630383836554 },
-        "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.31724137931034485, "acc_stderr": 0.038783523721386215, "acc_norm": 0.31724137931034485, "acc_norm_stderr": 0.038783523721386215 },
-        "harness|ko_mmlu_college_physics|5": { "acc": 0.21568627450980393, "acc_stderr": 0.04092563958237655, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.04092563958237655 },
-        "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.24369747899159663, "acc_stderr": 0.027886828078380558, "acc_norm": 0.24369747899159663, "acc_norm_stderr": 0.027886828078380558 },
-        "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.2230769230769231, "acc_stderr": 0.02110773012724398, "acc_norm": 0.2230769230769231, "acc_norm_stderr": 0.02110773012724398 },
-        "harness|ko_mmlu_computer_security|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 },
-        "harness|ko_mmlu_global_facts|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 },
-        "harness|ko_mmlu_jurisprudence|5": { "acc": 0.2777777777777778, "acc_stderr": 0.04330043749650742, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.04330043749650742 },
-        "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.35960591133004927, "acc_stderr": 0.03376458246509567, "acc_norm": 0.35960591133004927, "acc_norm_stderr": 0.03376458246509567 },
-        "harness|ko_mmlu_high_school_biology|5": { "acc": 0.3258064516129032, "acc_stderr": 0.026662010578567104, "acc_norm": 0.3258064516129032, "acc_norm_stderr": 0.026662010578567104 },
-        "harness|ko_mmlu_marketing|5": { "acc": 0.4230769230769231, "acc_stderr": 0.032366121762202014, "acc_norm": 0.4230769230769231, "acc_norm_stderr": 0.032366121762202014 },
-        "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.27169811320754716, "acc_stderr": 0.027377706624670713, "acc_norm": 0.27169811320754716, "acc_norm_stderr": 0.027377706624670713 },
-        "harness|ko_mmlu_public_relations|5": { "acc": 0.36363636363636365, "acc_stderr": 0.04607582090719976, "acc_norm": 0.36363636363636365, "acc_norm_stderr": 0.04607582090719976 },
-        "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.27037037037037037, "acc_stderr": 0.027080372815145668, "acc_norm": 0.27037037037037037, "acc_norm_stderr": 0.027080372815145668 },
-        "harness|ko_mmlu_high_school_physics|5": { "acc": 0.2781456953642384, "acc_stderr": 0.03658603262763743, "acc_norm": 0.2781456953642384, "acc_norm_stderr": 0.03658603262763743 },
-        "harness|ko_mmlu_sociology|5": { "acc": 0.34328358208955223, "acc_stderr": 0.03357379665433431, "acc_norm": 0.34328358208955223, "acc_norm_stderr": 0.03357379665433431 },
-        "harness|ko_mmlu_college_medicine|5": { "acc": 0.3063583815028902, "acc_stderr": 0.035149425512674394, "acc_norm": 0.3063583815028902, "acc_norm_stderr": 0.035149425512674394 },
-        "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.25925925925925924, "acc_stderr": 0.022569897074918407, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.022569897074918407 },
-        "harness|ko_mmlu_college_biology|5": { "acc": 0.2708333333333333, "acc_stderr": 0.037161774375660164, "acc_norm": 0.2708333333333333, "acc_norm_stderr": 0.037161774375660164 },
-        "harness|ko_mmlu_college_chemistry|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 },
-        "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 },
-        "harness|ko_mmlu_moral_disputes|5": { "acc": 0.34104046242774566, "acc_stderr": 0.025522474632121615, "acc_norm": 0.34104046242774566, "acc_norm_stderr": 0.025522474632121615 },
-        "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.3006134969325153, "acc_stderr": 0.03602511318806771, "acc_norm": 0.3006134969325153, "acc_norm_stderr": 0.03602511318806771 },
-        "harness|ko_mmlu_prehistory|5": { "acc": 0.345679012345679, "acc_stderr": 0.026462487777001886, "acc_norm": 0.345679012345679, "acc_norm_stderr": 0.026462487777001886 },
-        "harness|ko_mmlu_college_mathematics|5": { "acc": 0.33, "acc_stderr": 0.04725815626252605, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252605 },
-        "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.25906735751295334, "acc_stderr": 0.031618779179354094, "acc_norm": 0.25906735751295334, "acc_norm_stderr": 0.031618779179354094 },
-        "harness|ko_mmlu_econometrics|5": { "acc": 0.2631578947368421, "acc_stderr": 0.041424397194893624, "acc_norm": 0.2631578947368421, "acc_norm_stderr": 0.041424397194893624 },
-        "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.28440366972477066, "acc_stderr": 0.01934203658770259, "acc_norm": 0.28440366972477066, "acc_norm_stderr": 0.01934203658770259 },
-        "harness|ko_mmlu_formal_logic|5": { "acc": 0.18253968253968253, "acc_stderr": 0.03455071019102147, "acc_norm": 0.18253968253968253, "acc_norm_stderr": 0.03455071019102147 },
-        "harness|ko_mmlu_nutrition|5": { "acc": 0.3464052287581699, "acc_stderr": 0.027245613047215365, "acc_norm": 0.3464052287581699, "acc_norm_stderr": 0.027245613047215365 },
-        "harness|ko_mmlu_business_ethics|5": { "acc": 0.39, "acc_stderr": 0.04902071300001974, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001974 },
-        "harness|ko_mmlu_international_law|5": { "acc": 0.371900826446281, "acc_stderr": 0.044120158066245044, "acc_norm": 0.371900826446281, "acc_norm_stderr": 0.044120158066245044 },
-        "harness|ko_mmlu_astronomy|5": { "acc": 0.29605263157894735, "acc_stderr": 0.03715062154998904, "acc_norm": 0.29605263157894735, "acc_norm_stderr": 0.03715062154998904 },
-        "harness|ko_mmlu_professional_psychology|5": { "acc": 0.2761437908496732, "acc_stderr": 0.018087276935663137, "acc_norm": 0.2761437908496732, "acc_norm_stderr": 0.018087276935663137 },
-        "harness|ko_mmlu_professional_accounting|5": { "acc": 0.2624113475177305, "acc_stderr": 0.02624492034984301, "acc_norm": 0.2624113475177305, "acc_norm_stderr": 0.02624492034984301 },
-        "harness|ko_mmlu_machine_learning|5": { "acc": 0.35714285714285715, "acc_stderr": 0.04547960999764376, "acc_norm": 0.35714285714285715, "acc_norm_stderr": 0.04547960999764376 },
-        "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.032149521478027486, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.032149521478027486 },
-        "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2424581005586592, "acc_stderr": 0.01433352205921789, "acc_norm": 0.2424581005586592, "acc_norm_stderr": 0.01433352205921789 },
-        "harness|ko_mmlu_college_computer_science|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 },
-        "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 },
-        "harness|ko_mmlu_professional_medicine|5": { "acc": 0.17647058823529413, "acc_stderr": 0.02315746830855936, "acc_norm": 0.17647058823529413, "acc_norm_stderr": 0.02315746830855936 },
-        "harness|ko_mmlu_security_studies|5": { "acc": 0.3224489795918367, "acc_stderr": 0.029923100563683906, "acc_norm": 0.3224489795918367, "acc_norm_stderr": 0.029923100563683906 },
-        "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.27848101265822783, "acc_stderr": 0.02917868230484255, "acc_norm": 0.27848101265822783, "acc_norm_stderr": 0.02917868230484255 },
-        "harness|ko_mmlu_professional_law|5": { "acc": 0.2848761408083442, "acc_stderr": 0.011527830846368999, "acc_norm": 0.2848761408083442, "acc_norm_stderr": 0.011527830846368999 },
-        "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.28431372549019607, "acc_stderr": 0.03166009679399813, "acc_norm": 0.28431372549019607, "acc_norm_stderr": 0.03166009679399813 },
-        "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.3515151515151515, "acc_stderr": 0.037282069986826503, "acc_norm": 0.3515151515151515, "acc_norm_stderr": 0.037282069986826503 },
-        "harness|ko_truthfulqa_mc|0": { "mc1": 0.28151774785801714, "mc1_stderr": 0.01574402724825605, "mc2": 0.4237266628764529, "mc2_stderr": 0.01496751362237835 },
-        "harness|ko_commongen_v2|2": { "acc": 0.2727272727272727, "acc_stderr": 0.01531185311030035, "acc_norm": 0.44391971664698937, "acc_norm_stderr": 0.017081884623542543 }
-    },
-    "versions": {
-        "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0,
-        "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1,
-        "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1,
-        "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1,
-        "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1,
-        "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1,
-        "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1,
-        "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1,
-        "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1,
-        "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1,
-        "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1,
-        "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1,
-        "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1,
-        "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1,
-        "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1,
-        "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1,
-        "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1,
-        "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1,
-        "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1,
-        "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1,
-        "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1
-    },
-    "config_general": {
-        "model_name": "Taekyoon/llama2-org-koen-7b",
-        "model_sha": "869813335f48ec6a8af01c793c0e8705886d3b89",
-        "model_dtype": "torch.float16",
-        "lighteval_sha": "",
-        "num_few_shot_default": 0,
-        "num_fewshot_seeds": 1,
-        "override_batch_size": 1,
-        "max_samples": null
-    }
-}

TeamUNIVA/Komodo_6B_v1.0.0/result_2024-01-30 12:17:31.json
DELETED
@@ -1,444 +0,0 @@
-{
-    "results": {
-        "harness|ko_arc_challenge|25": { "acc": 0.4325938566552901, "acc_stderr": 0.014478005694182526, "acc_norm": 0.5059726962457338, "acc_norm_stderr": 0.01461034830025579 },
-        "harness|ko_hellaswag|10": { "acc": 0.465345548695479, "acc_stderr": 0.004977782217582458, "acc_norm": 0.625771758613822, "acc_norm_stderr": 0.004829339926388327 },
-        "harness|ko_mmlu_world_religions|5": { "acc": 0.5847953216374269, "acc_stderr": 0.03779275945503201, "acc_norm": 0.5847953216374269, "acc_norm_stderr": 0.03779275945503201 },
-        "harness|ko_mmlu_management|5": { "acc": 0.6116504854368932, "acc_stderr": 0.0482572933735639, "acc_norm": 0.6116504854368932, "acc_norm_stderr": 0.0482572933735639 },
-        "harness|ko_mmlu_miscellaneous|5": { "acc": 0.5874840357598978, "acc_stderr": 0.01760414910867193, "acc_norm": 0.5874840357598978, "acc_norm_stderr": 0.01760414910867193 },
-        "harness|ko_mmlu_anatomy|5": { "acc": 0.4444444444444444, "acc_stderr": 0.042925967182569816, "acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.042925967182569816 },
-        "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 },
-        "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.41702127659574467, "acc_stderr": 0.03223276266711712, "acc_norm": 0.41702127659574467, "acc_norm_stderr": 0.03223276266711712 },
-        "harness|ko_mmlu_virology|5": { "acc": 0.463855421686747, "acc_stderr": 0.03882310850890593, "acc_norm": 0.463855421686747, "acc_norm_stderr": 0.03882310850890593 },
-        "harness|ko_mmlu_philosophy|5": { "acc": 0.5498392282958199, "acc_stderr": 0.028256660723360177, "acc_norm": 0.5498392282958199, "acc_norm_stderr": 0.028256660723360177 },
-        "harness|ko_mmlu_human_aging|5": { "acc": 0.5426008968609866, "acc_stderr": 0.033435777055830646, "acc_norm": 0.5426008968609866, "acc_norm_stderr": 0.033435777055830646 },
-        "harness|ko_mmlu_human_sexuality|5": { "acc": 0.549618320610687, "acc_stderr": 0.04363643698524779, "acc_norm": 0.549618320610687, "acc_norm_stderr": 0.04363643698524779 },
-        "harness|ko_mmlu_medical_genetics|5": { "acc": 0.49, "acc_stderr": 0.05024183937956913, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956913 },
-        "harness|ko_mmlu_high_school_geography|5": { "acc": 0.6767676767676768, "acc_stderr": 0.033322999210706444, "acc_norm": 0.6767676767676768, "acc_norm_stderr": 0.033322999210706444 },
-        "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.46206896551724136, "acc_stderr": 0.041546596717075474, "acc_norm": 0.46206896551724136, "acc_norm_stderr": 0.041546596717075474 },
-        "harness|ko_mmlu_college_physics|5": { "acc": 0.3235294117647059, "acc_stderr": 0.046550104113196177, "acc_norm": 0.3235294117647059, "acc_norm_stderr": 0.046550104113196177 },
-        "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.5504201680672269, "acc_stderr": 0.03231293497137707, "acc_norm": 0.5504201680672269, "acc_norm_stderr": 0.03231293497137707 },
-        "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.5025641025641026, "acc_stderr": 0.025350672979412184, "acc_norm": 0.5025641025641026, "acc_norm_stderr": 0.025350672979412184 },
-        "harness|ko_mmlu_computer_security|5": { "acc": 0.64, "acc_stderr": 0.04824181513244218, "acc_norm": 0.64, "acc_norm_stderr": 0.04824181513244218 },
-        "harness|ko_mmlu_global_facts|5": { "acc": 0.27, "acc_stderr": 0.044619604333847394, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394 },
-        "harness|ko_mmlu_jurisprudence|5": { "acc": 0.5555555555555556, "acc_stderr": 0.04803752235190192, "acc_norm": 0.5555555555555556, "acc_norm_stderr": 0.04803752235190192 },
-        "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.4088669950738916, "acc_stderr": 0.034590588158832314, "acc_norm": 0.4088669950738916, "acc_norm_stderr": 0.034590588158832314 },
-        "harness|ko_mmlu_high_school_biology|5": { "acc": 0.5387096774193548, "acc_stderr": 0.028358634859836942, "acc_norm": 0.5387096774193548, "acc_norm_stderr": 0.028358634859836942 },
-        "harness|ko_mmlu_marketing|5": { "acc": 0.7350427350427351, "acc_stderr": 0.028911208802749465, "acc_norm": 0.7350427350427351, "acc_norm_stderr": 0.028911208802749465 },
-        "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.47547169811320755, "acc_stderr": 0.030735822206205608, "acc_norm": 0.47547169811320755, "acc_norm_stderr": 0.030735822206205608 },
-        "harness|ko_mmlu_public_relations|5": { "acc": 0.5363636363636364, "acc_stderr": 0.04776449162396197, "acc_norm": 0.5363636363636364, "acc_norm_stderr": 0.04776449162396197 },
-        "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.3037037037037037, "acc_stderr": 0.028037929969114993, "acc_norm": 0.3037037037037037, "acc_norm_stderr": 0.028037929969114993 },
-        "harness|ko_mmlu_high_school_physics|5": { "acc": 0.32450331125827814, "acc_stderr": 0.038227469376587525, "acc_norm": 0.32450331125827814, "acc_norm_stderr": 0.038227469376587525 },
-        "harness|ko_mmlu_sociology|5": { "acc": 0.6766169154228856, "acc_stderr": 0.033076159479790354, "acc_norm": 0.6766169154228856, "acc_norm_stderr": 0.033076159479790354 },
-        "harness|ko_mmlu_college_medicine|5": { "acc": 0.4624277456647399, "acc_stderr": 0.038016851045244604, "acc_norm": 0.4624277456647399, "acc_norm_stderr": 0.038016851045244604 },
-        "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.3253968253968254, "acc_stderr": 0.024130158299762623, "acc_norm": 0.3253968253968254, "acc_norm_stderr": 0.024130158299762623 },
-        "harness|ko_mmlu_college_biology|5": { "acc": 0.4930555555555556, "acc_stderr": 0.04180806750294938, "acc_norm": 0.4930555555555556, "acc_norm_stderr": 0.04180806750294938 },
-        "harness|ko_mmlu_college_chemistry|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 },
-        "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 },
-        "harness|ko_mmlu_moral_disputes|5": { "acc": 0.546242774566474, "acc_stderr": 0.02680372058320617, "acc_norm": 0.546242774566474, "acc_norm_stderr": 0.02680372058320617 },
-        "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.5337423312883436, "acc_stderr": 0.03919415545048412, "acc_norm": 0.5337423312883436, "acc_norm_stderr": 0.03919415545048412 },
-        "harness|ko_mmlu_prehistory|5": { "acc": 0.5277777777777778, "acc_stderr": 0.027777777777777804, "acc_norm": 0.5277777777777778, "acc_norm_stderr": 0.027777777777777804 },
-        "harness|ko_mmlu_college_mathematics|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 },
-        "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.6269430051813472, "acc_stderr": 0.034902055920485744, "acc_norm": 0.6269430051813472, "acc_norm_stderr": 0.034902055920485744 },
-        "harness|ko_mmlu_econometrics|5": { "acc": 0.2894736842105263, "acc_stderr": 0.04266339443159394, "acc_norm": 0.2894736842105263, "acc_norm_stderr": 0.04266339443159394 },
-        "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.6660550458715596, "acc_stderr": 0.020220554196736407, "acc_norm": 0.6660550458715596, "acc_norm_stderr": 0.020220554196736407 },
-        "harness|ko_mmlu_formal_logic|5": { "acc": 0.30158730158730157, "acc_stderr": 0.04104947269903394, "acc_norm": 0.30158730158730157, "acc_norm_stderr": 0.04104947269903394 },
-        "harness|ko_mmlu_nutrition|5": { "acc": 0.5294117647058824, "acc_stderr": 0.0285803410651383, "acc_norm": 0.5294117647058824, "acc_norm_stderr": 0.0285803410651383 },
-        "harness|ko_mmlu_business_ethics|5": {
|
262 |
-
"acc": 0.62,
|
263 |
-
"acc_stderr": 0.04878317312145633,
|
264 |
-
"acc_norm": 0.62,
|
265 |
-
"acc_norm_stderr": 0.04878317312145633
|
266 |
-
},
|
267 |
-
"harness|ko_mmlu_international_law|5": {
|
268 |
-
"acc": 0.628099173553719,
|
269 |
-
"acc_stderr": 0.044120158066245044,
|
270 |
-
"acc_norm": 0.628099173553719,
|
271 |
-
"acc_norm_stderr": 0.044120158066245044
|
272 |
-
},
|
273 |
-
"harness|ko_mmlu_astronomy|5": {
|
274 |
-
"acc": 0.4868421052631579,
|
275 |
-
"acc_stderr": 0.04067533136309174,
|
276 |
-
"acc_norm": 0.4868421052631579,
|
277 |
-
"acc_norm_stderr": 0.04067533136309174
|
278 |
-
},
|
279 |
-
"harness|ko_mmlu_professional_psychology|5": {
|
280 |
-
"acc": 0.477124183006536,
|
281 |
-
"acc_stderr": 0.020206653187884782,
|
282 |
-
"acc_norm": 0.477124183006536,
|
283 |
-
"acc_norm_stderr": 0.020206653187884782
|
284 |
-
},
|
285 |
-
"harness|ko_mmlu_professional_accounting|5": {
|
286 |
-
"acc": 0.36524822695035464,
|
287 |
-
"acc_stderr": 0.028723863853281295,
|
288 |
-
"acc_norm": 0.36524822695035464,
|
289 |
-
"acc_norm_stderr": 0.028723863853281295
|
290 |
-
},
|
291 |
-
"harness|ko_mmlu_machine_learning|5": {
|
292 |
-
"acc": 0.35714285714285715,
|
293 |
-
"acc_stderr": 0.04547960999764376,
|
294 |
-
"acc_norm": 0.35714285714285715,
|
295 |
-
"acc_norm_stderr": 0.04547960999764376
|
296 |
-
},
|
297 |
-
"harness|ko_mmlu_high_school_statistics|5": {
|
298 |
-
"acc": 0.3287037037037037,
|
299 |
-
"acc_stderr": 0.032036140846700596,
|
300 |
-
"acc_norm": 0.3287037037037037,
|
301 |
-
"acc_norm_stderr": 0.032036140846700596
|
302 |
-
},
|
303 |
-
"harness|ko_mmlu_moral_scenarios|5": {
|
304 |
-
"acc": 0.29832402234636873,
|
305 |
-
"acc_stderr": 0.015301840045129272,
|
306 |
-
"acc_norm": 0.29832402234636873,
|
307 |
-
"acc_norm_stderr": 0.015301840045129272
|
308 |
-
},
|
309 |
-
"harness|ko_mmlu_college_computer_science|5": {
|
310 |
-
"acc": 0.49,
|
311 |
-
"acc_stderr": 0.05024183937956912,
|
312 |
-
"acc_norm": 0.49,
|
313 |
-
"acc_norm_stderr": 0.05024183937956912
|
314 |
-
},
|
315 |
-
"harness|ko_mmlu_high_school_computer_science|5": {
|
316 |
-
"acc": 0.53,
|
317 |
-
"acc_stderr": 0.05016135580465919,
|
318 |
-
"acc_norm": 0.53,
|
319 |
-
"acc_norm_stderr": 0.05016135580465919
|
320 |
-
},
|
321 |
-
"harness|ko_mmlu_professional_medicine|5": {
|
322 |
-
"acc": 0.46691176470588236,
|
323 |
-
"acc_stderr": 0.030306257722468317,
|
324 |
-
"acc_norm": 0.46691176470588236,
|
325 |
-
"acc_norm_stderr": 0.030306257722468317
|
326 |
-
},
|
327 |
-
"harness|ko_mmlu_security_studies|5": {
|
328 |
-
"acc": 0.6204081632653061,
|
329 |
-
"acc_stderr": 0.03106721126287246,
|
330 |
-
"acc_norm": 0.6204081632653061,
|
331 |
-
"acc_norm_stderr": 0.03106721126287246
|
332 |
-
},
|
333 |
-
"harness|ko_mmlu_high_school_world_history|5": {
|
334 |
-
"acc": 0.6286919831223629,
|
335 |
-
"acc_stderr": 0.03145068600744859,
|
336 |
-
"acc_norm": 0.6286919831223629,
|
337 |
-
"acc_norm_stderr": 0.03145068600744859
|
338 |
-
},
|
339 |
-
"harness|ko_mmlu_professional_law|5": {
|
340 |
-
"acc": 0.3409387222946545,
|
341 |
-
"acc_stderr": 0.01210681720306721,
|
342 |
-
"acc_norm": 0.3409387222946545,
|
343 |
-
"acc_norm_stderr": 0.01210681720306721
|
344 |
-
},
|
345 |
-
"harness|ko_mmlu_high_school_us_history|5": {
|
346 |
-
"acc": 0.5588235294117647,
|
347 |
-
"acc_stderr": 0.034849415144292316,
|
348 |
-
"acc_norm": 0.5588235294117647,
|
349 |
-
"acc_norm_stderr": 0.034849415144292316
|
350 |
-
},
|
351 |
-
"harness|ko_mmlu_high_school_european_history|5": {
|
352 |
-
"acc": 0.6181818181818182,
|
353 |
-
"acc_stderr": 0.03793713171165634,
|
354 |
-
"acc_norm": 0.6181818181818182,
|
355 |
-
"acc_norm_stderr": 0.03793713171165634
|
356 |
-
},
|
357 |
-
"harness|ko_truthfulqa_mc|0": {
|
358 |
-
"mc1": 0.3157894736842105,
|
359 |
-
"mc1_stderr": 0.016272287957916933,
|
360 |
-
"mc2": 0.47900645996414987,
|
361 |
-
"mc2_stderr": 0.015981859090450398
|
362 |
-
},
|
363 |
-
"harness|ko_commongen_v2|2": {
|
364 |
-
"acc": 0.5997638724911453,
|
365 |
-
"acc_stderr": 0.016844693510505045,
|
366 |
-
"acc_norm": 0.5985832349468713,
|
367 |
-
"acc_norm_stderr": 0.01685290785872906
|
368 |
-
}
|
369 |
-
},
|
370 |
-
"versions": {
|
371 |
-
"all": 0,
|
372 |
-
"harness|ko_arc_challenge|25": 0,
|
373 |
-
"harness|ko_hellaswag|10": 0,
|
374 |
-
"harness|ko_mmlu_world_religions|5": 1,
|
375 |
-
"harness|ko_mmlu_management|5": 1,
|
376 |
-
"harness|ko_mmlu_miscellaneous|5": 1,
|
377 |
-
"harness|ko_mmlu_anatomy|5": 1,
|
378 |
-
"harness|ko_mmlu_abstract_algebra|5": 1,
|
379 |
-
"harness|ko_mmlu_conceptual_physics|5": 1,
|
380 |
-
"harness|ko_mmlu_virology|5": 1,
|
381 |
-
"harness|ko_mmlu_philosophy|5": 1,
|
382 |
-
"harness|ko_mmlu_human_aging|5": 1,
|
383 |
-
"harness|ko_mmlu_human_sexuality|5": 1,
|
384 |
-
"harness|ko_mmlu_medical_genetics|5": 1,
|
385 |
-
"harness|ko_mmlu_high_school_geography|5": 1,
|
386 |
-
"harness|ko_mmlu_electrical_engineering|5": 1,
|
387 |
-
"harness|ko_mmlu_college_physics|5": 1,
|
388 |
-
"harness|ko_mmlu_high_school_microeconomics|5": 1,
|
389 |
-
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
|
390 |
-
"harness|ko_mmlu_computer_security|5": 1,
|
391 |
-
"harness|ko_mmlu_global_facts|5": 1,
|
392 |
-
"harness|ko_mmlu_jurisprudence|5": 1,
|
393 |
-
"harness|ko_mmlu_high_school_chemistry|5": 1,
|
394 |
-
"harness|ko_mmlu_high_school_biology|5": 1,
|
395 |
-
"harness|ko_mmlu_marketing|5": 1,
|
396 |
-
"harness|ko_mmlu_clinical_knowledge|5": 1,
|
397 |
-
"harness|ko_mmlu_public_relations|5": 1,
|
398 |
-
"harness|ko_mmlu_high_school_mathematics|5": 1,
|
399 |
-
"harness|ko_mmlu_high_school_physics|5": 1,
|
400 |
-
"harness|ko_mmlu_sociology|5": 1,
|
401 |
-
"harness|ko_mmlu_college_medicine|5": 1,
|
402 |
-
"harness|ko_mmlu_elementary_mathematics|5": 1,
|
403 |
-
"harness|ko_mmlu_college_biology|5": 1,
|
404 |
-
"harness|ko_mmlu_college_chemistry|5": 1,
|
405 |
-
"harness|ko_mmlu_us_foreign_policy|5": 1,
|
406 |
-
"harness|ko_mmlu_moral_disputes|5": 1,
|
407 |
-
"harness|ko_mmlu_logical_fallacies|5": 1,
|
408 |
-
"harness|ko_mmlu_prehistory|5": 1,
|
409 |
-
"harness|ko_mmlu_college_mathematics|5": 1,
|
410 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
|
411 |
-
"harness|ko_mmlu_econometrics|5": 1,
|
412 |
-
"harness|ko_mmlu_high_school_psychology|5": 1,
|
413 |
-
"harness|ko_mmlu_formal_logic|5": 1,
|
414 |
-
"harness|ko_mmlu_nutrition|5": 1,
|
415 |
-
"harness|ko_mmlu_business_ethics|5": 1,
|
416 |
-
"harness|ko_mmlu_international_law|5": 1,
|
417 |
-
"harness|ko_mmlu_astronomy|5": 1,
|
418 |
-
"harness|ko_mmlu_professional_psychology|5": 1,
|
419 |
-
"harness|ko_mmlu_professional_accounting|5": 1,
|
420 |
-
"harness|ko_mmlu_machine_learning|5": 1,
|
421 |
-
"harness|ko_mmlu_high_school_statistics|5": 1,
|
422 |
-
"harness|ko_mmlu_moral_scenarios|5": 1,
|
423 |
-
"harness|ko_mmlu_college_computer_science|5": 1,
|
424 |
-
"harness|ko_mmlu_high_school_computer_science|5": 1,
|
425 |
-
"harness|ko_mmlu_professional_medicine|5": 1,
|
426 |
-
"harness|ko_mmlu_security_studies|5": 1,
|
427 |
-
"harness|ko_mmlu_high_school_world_history|5": 1,
|
428 |
-
"harness|ko_mmlu_professional_law|5": 1,
|
429 |
-
"harness|ko_mmlu_high_school_us_history|5": 1,
|
430 |
-
"harness|ko_mmlu_high_school_european_history|5": 1,
|
431 |
-
"harness|ko_truthfulqa_mc|0": 0,
|
432 |
-
"harness|ko_commongen_v2|2": 1
|
433 |
-
},
|
434 |
-
"config_general": {
|
435 |
-
"model_name": "TeamUNIVA/Komodo_6B_v1.0.0",
|
436 |
-
"model_sha": "d5b2219e4d6645c89e686bc989db42afe420ba4d",
|
437 |
-
"model_dtype": "torch.float16",
|
438 |
-
"lighteval_sha": "",
|
439 |
-
"num_few_shot_default": 0,
|
440 |
-
"num_fewshot_seeds": 1,
|
441 |
-
"override_batch_size": 1,
|
442 |
-
"max_samples": null
|
443 |
-
}
|
444 |
-
}
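Each removed result file follows the same schema: a "results" map keyed by harness task name (each entry holding acc, acc_stderr, acc_norm, acc_norm_stderr, or mc1/mc2 for the TruthfulQA task), a "versions" map, and a "config_general" block. A minimal post-processing sketch in Python, assuming a local copy of one such file saved as result.json (hypothetical path, not part of this commit):

import json
from statistics import mean

# Load one of the removed result files (hypothetical local copy).
with open("result.json", encoding="utf-8") as f:
    data = json.load(f)

# Average acc_norm over the ko_mmlu subtasks present in the file.
mmlu = [
    v["acc_norm"]
    for k, v in data["results"].items()
    if k.startswith("harness|ko_mmlu_")
]
print(data["config_general"]["model_name"], f"ko_mmlu mean acc_norm: {mean(mmlu):.4f}")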
TeamUNIVA/Komodo_6B_v2.0.0/result_2024-02-09 17:20:47.json
DELETED
@@ -1,444 +0,0 @@
-{
-  "results": {
-    "harness|ko_arc_challenge|25": {"acc": 0.42662116040955633, "acc_stderr": 0.014453185592920293, "acc_norm": 0.48890784982935154, "acc_norm_stderr": 0.01460779491401305},
-    "harness|ko_hellaswag|10": {"acc": 0.4684325831507668, "acc_stderr": 0.0049798268294007604, "acc_norm": 0.6255725951005776, "acc_norm_stderr": 0.00482985605860357},
-    "harness|ko_mmlu_world_religions|5": {"acc": 0.5497076023391813, "acc_stderr": 0.038158273659132366, "acc_norm": 0.5497076023391813, "acc_norm_stderr": 0.038158273659132366},
-    "harness|ko_mmlu_management|5": {"acc": 0.6893203883495146, "acc_stderr": 0.045821241601615506, "acc_norm": 0.6893203883495146, "acc_norm_stderr": 0.045821241601615506},
-    "harness|ko_mmlu_miscellaneous|5": {"acc": 0.5862068965517241, "acc_stderr": 0.017612204084663765, "acc_norm": 0.5862068965517241, "acc_norm_stderr": 0.017612204084663765},
-    "harness|ko_mmlu_anatomy|5": {"acc": 0.45185185185185184, "acc_stderr": 0.04299268905480863, "acc_norm": 0.45185185185185184, "acc_norm_stderr": 0.04299268905480863},
-    "harness|ko_mmlu_abstract_algebra|5": {"acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814},
-    "harness|ko_mmlu_conceptual_physics|5": {"acc": 0.44680851063829785, "acc_stderr": 0.032500536843658404, "acc_norm": 0.44680851063829785, "acc_norm_stderr": 0.032500536843658404},
-    "harness|ko_mmlu_virology|5": {"acc": 0.4397590361445783, "acc_stderr": 0.03864139923699121, "acc_norm": 0.4397590361445783, "acc_norm_stderr": 0.03864139923699121},
-    "harness|ko_mmlu_philosophy|5": {"acc": 0.5530546623794212, "acc_stderr": 0.028237769422085352, "acc_norm": 0.5530546623794212, "acc_norm_stderr": 0.028237769422085352},
-    "harness|ko_mmlu_human_aging|5": {"acc": 0.5336322869955157, "acc_stderr": 0.033481800170603065, "acc_norm": 0.5336322869955157, "acc_norm_stderr": 0.033481800170603065},
-    "harness|ko_mmlu_human_sexuality|5": {"acc": 0.549618320610687, "acc_stderr": 0.04363643698524779, "acc_norm": 0.549618320610687, "acc_norm_stderr": 0.04363643698524779},
-    "harness|ko_mmlu_medical_genetics|5": {"acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795},
-    "harness|ko_mmlu_high_school_geography|5": {"acc": 0.6313131313131313, "acc_stderr": 0.03437305501980619, "acc_norm": 0.6313131313131313, "acc_norm_stderr": 0.03437305501980619},
-    "harness|ko_mmlu_electrical_engineering|5": {"acc": 0.5655172413793104, "acc_stderr": 0.04130740879555497, "acc_norm": 0.5655172413793104, "acc_norm_stderr": 0.04130740879555497},
-    "harness|ko_mmlu_college_physics|5": {"acc": 0.28431372549019607, "acc_stderr": 0.04488482852329017, "acc_norm": 0.28431372549019607, "acc_norm_stderr": 0.04488482852329017},
-    "harness|ko_mmlu_high_school_microeconomics|5": {"acc": 0.5210084033613446, "acc_stderr": 0.03244980849990028, "acc_norm": 0.5210084033613446, "acc_norm_stderr": 0.03244980849990028},
-    "harness|ko_mmlu_high_school_macroeconomics|5": {"acc": 0.5128205128205128, "acc_stderr": 0.02534267129380725, "acc_norm": 0.5128205128205128, "acc_norm_stderr": 0.02534267129380725},
-    "harness|ko_mmlu_computer_security|5": {"acc": 0.62, "acc_stderr": 0.048783173121456316, "acc_norm": 0.62, "acc_norm_stderr": 0.048783173121456316},
-    "harness|ko_mmlu_global_facts|5": {"acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814},
-    "harness|ko_mmlu_jurisprudence|5": {"acc": 0.5740740740740741, "acc_stderr": 0.0478034362693679, "acc_norm": 0.5740740740740741, "acc_norm_stderr": 0.0478034362693679},
-    "harness|ko_mmlu_high_school_chemistry|5": {"acc": 0.4433497536945813, "acc_stderr": 0.03495334582162934, "acc_norm": 0.4433497536945813, "acc_norm_stderr": 0.03495334582162934},
-    "harness|ko_mmlu_high_school_biology|5": {"acc": 0.5387096774193548, "acc_stderr": 0.02835863485983695, "acc_norm": 0.5387096774193548, "acc_norm_stderr": 0.02835863485983695},
-    "harness|ko_mmlu_marketing|5": {"acc": 0.7264957264957265, "acc_stderr": 0.029202540153431183, "acc_norm": 0.7264957264957265, "acc_norm_stderr": 0.029202540153431183},
-    "harness|ko_mmlu_clinical_knowledge|5": {"acc": 0.4830188679245283, "acc_stderr": 0.030755120364119905, "acc_norm": 0.4830188679245283, "acc_norm_stderr": 0.030755120364119905},
-    "harness|ko_mmlu_public_relations|5": {"acc": 0.5636363636363636, "acc_stderr": 0.04750185058907296, "acc_norm": 0.5636363636363636, "acc_norm_stderr": 0.04750185058907296},
-    "harness|ko_mmlu_high_school_mathematics|5": {"acc": 0.3111111111111111, "acc_stderr": 0.028226446749683515, "acc_norm": 0.3111111111111111, "acc_norm_stderr": 0.028226446749683515},
-    "harness|ko_mmlu_high_school_physics|5": {"acc": 0.31125827814569534, "acc_stderr": 0.03780445850526733, "acc_norm": 0.31125827814569534, "acc_norm_stderr": 0.03780445850526733},
-    "harness|ko_mmlu_sociology|5": {"acc": 0.6567164179104478, "acc_stderr": 0.03357379665433432, "acc_norm": 0.6567164179104478, "acc_norm_stderr": 0.03357379665433432},
-    "harness|ko_mmlu_college_medicine|5": {"acc": 0.42196531791907516, "acc_stderr": 0.037657466938651483, "acc_norm": 0.42196531791907516, "acc_norm_stderr": 0.037657466938651483},
-    "harness|ko_mmlu_elementary_mathematics|5": {"acc": 0.36772486772486773, "acc_stderr": 0.024833839825562424, "acc_norm": 0.36772486772486773, "acc_norm_stderr": 0.024833839825562424},
-    "harness|ko_mmlu_college_biology|5": {"acc": 0.4722222222222222, "acc_stderr": 0.04174752578923185, "acc_norm": 0.4722222222222222, "acc_norm_stderr": 0.04174752578923185},
-    "harness|ko_mmlu_college_chemistry|5": {"acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446},
-    "harness|ko_mmlu_us_foreign_policy|5": {"acc": 0.65, "acc_stderr": 0.04793724854411019, "acc_norm": 0.65, "acc_norm_stderr": 0.04793724854411019},
-    "harness|ko_mmlu_moral_disputes|5": {"acc": 0.5549132947976878, "acc_stderr": 0.026756255129663765, "acc_norm": 0.5549132947976878, "acc_norm_stderr": 0.026756255129663765},
-    "harness|ko_mmlu_logical_fallacies|5": {"acc": 0.5030674846625767, "acc_stderr": 0.03928297078179663, "acc_norm": 0.5030674846625767, "acc_norm_stderr": 0.03928297078179663},
-    "harness|ko_mmlu_prehistory|5": {"acc": 0.5462962962962963, "acc_stderr": 0.027701228468542602, "acc_norm": 0.5462962962962963, "acc_norm_stderr": 0.027701228468542602},
-    "harness|ko_mmlu_college_mathematics|5": {"acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814},
-    "harness|ko_mmlu_high_school_government_and_politics|5": {"acc": 0.6321243523316062, "acc_stderr": 0.03480175668466036, "acc_norm": 0.6321243523316062, "acc_norm_stderr": 0.03480175668466036},
-    "harness|ko_mmlu_econometrics|5": {"acc": 0.3157894736842105, "acc_stderr": 0.04372748290278007, "acc_norm": 0.3157894736842105, "acc_norm_stderr": 0.04372748290278007},
-    "harness|ko_mmlu_high_school_psychology|5": {"acc": 0.653211009174312, "acc_stderr": 0.020406097104093027, "acc_norm": 0.653211009174312, "acc_norm_stderr": 0.020406097104093027},
-    "harness|ko_mmlu_formal_logic|5": {"acc": 0.23809523809523808, "acc_stderr": 0.038095238095238126, "acc_norm": 0.23809523809523808, "acc_norm_stderr": 0.038095238095238126},
-    "harness|ko_mmlu_nutrition|5": {"acc": 0.5392156862745098, "acc_stderr": 0.028541722692618877, "acc_norm": 0.5392156862745098, "acc_norm_stderr": 0.028541722692618877},
-    "harness|ko_mmlu_business_ethics|5": {"acc": 0.6, "acc_stderr": 0.049236596391733084, "acc_norm": 0.6, "acc_norm_stderr": 0.049236596391733084},
-    "harness|ko_mmlu_international_law|5": {"acc": 0.6115702479338843, "acc_stderr": 0.04449270350068383, "acc_norm": 0.6115702479338843, "acc_norm_stderr": 0.04449270350068383},
-    "harness|ko_mmlu_astronomy|5": {"acc": 0.5263157894736842, "acc_stderr": 0.04063302731486671, "acc_norm": 0.5263157894736842, "acc_norm_stderr": 0.04063302731486671},
-    "harness|ko_mmlu_professional_psychology|5": {"acc": 0.49019607843137253, "acc_stderr": 0.0202239460050743, "acc_norm": 0.49019607843137253, "acc_norm_stderr": 0.0202239460050743},
-    "harness|ko_mmlu_professional_accounting|5": {"acc": 0.33687943262411346, "acc_stderr": 0.02819553487396673, "acc_norm": 0.33687943262411346, "acc_norm_stderr": 0.02819553487396673},
-    "harness|ko_mmlu_machine_learning|5": {"acc": 0.33035714285714285, "acc_stderr": 0.04464285714285712, "acc_norm": 0.33035714285714285, "acc_norm_stderr": 0.04464285714285712},
-    "harness|ko_mmlu_high_school_statistics|5": {"acc": 0.3333333333333333, "acc_stderr": 0.032149521478027486, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.032149521478027486},
-    "harness|ko_mmlu_moral_scenarios|5": {"acc": 0.2737430167597765, "acc_stderr": 0.014912413096372434, "acc_norm": 0.2737430167597765, "acc_norm_stderr": 0.014912413096372434},
-    "harness|ko_mmlu_college_computer_science|5": {"acc": 0.41, "acc_stderr": 0.04943110704237102, "acc_norm": 0.41, "acc_norm_stderr": 0.04943110704237102},
-    "harness|ko_mmlu_high_school_computer_science|5": {"acc": 0.6, "acc_stderr": 0.049236596391733084, "acc_norm": 0.6, "acc_norm_stderr": 0.049236596391733084},
-    "harness|ko_mmlu_professional_medicine|5": {"acc": 0.3860294117647059, "acc_stderr": 0.029573269134411124, "acc_norm": 0.3860294117647059, "acc_norm_stderr": 0.029573269134411124},
-    "harness|ko_mmlu_security_studies|5": {"acc": 0.6163265306122448, "acc_stderr": 0.03113088039623595, "acc_norm": 0.6163265306122448, "acc_norm_stderr": 0.03113088039623595},
-    "harness|ko_mmlu_high_school_world_history|5": {"acc": 0.6540084388185654, "acc_stderr": 0.030964810588786706, "acc_norm": 0.6540084388185654, "acc_norm_stderr": 0.030964810588786706},
-    "harness|ko_mmlu_professional_law|5": {"acc": 0.3644067796610169, "acc_stderr": 0.012291694983056477, "acc_norm": 0.3644067796610169, "acc_norm_stderr": 0.012291694983056477},
-    "harness|ko_mmlu_high_school_us_history|5": {"acc": 0.553921568627451, "acc_stderr": 0.034888454513049734, "acc_norm": 0.553921568627451, "acc_norm_stderr": 0.034888454513049734},
-    "harness|ko_mmlu_high_school_european_history|5": {"acc": 0.6242424242424243, "acc_stderr": 0.03781887353205982, "acc_norm": 0.6242424242424243, "acc_norm_stderr": 0.03781887353205982},
-    "harness|ko_truthfulqa_mc|0": {"mc1": 0.3488372093023256, "mc1_stderr": 0.016684419859986883, "mc2": 0.5036147302296147, "mc2_stderr": 0.015965776819730753},
-    "harness|ko_commongen_v2|2": {"acc": 0.6115702479338843, "acc_stderr": 0.01675692157106942, "acc_norm": 0.6139315230224321, "acc_norm_stderr": 0.01673813076032175}
-  },
-  "versions": {
-    "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0,
-    "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1,
-    "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1,
-    "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1,
-    "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1,
-    "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1,
-    "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1,
-    "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1,
-    "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1,
-    "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1,
-    "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1,
-    "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1,
-    "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1,
-    "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1,
-    "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1,
-    "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1
-  },
-  "config_general": {
-    "model_name": "TeamUNIVA/Komodo_6B_v2.0.0", "model_sha": "337ec7305cfd6a931d31ebb1bfad4e4523877c95",
-    "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0,
-    "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null
-  }
-}
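The v1.0.0 and v2.0.0 files above share identical task keys, so per-task deltas fall out directly; a sketch assuming both JSONs were saved locally under hypothetical names:

import json

def load_results(path: str) -> dict:
    # Return just the "results" map of a harness output file.
    with open(path, encoding="utf-8") as f:
        return json.load(f)["results"]

v1 = load_results("Komodo_6B_v1.0.0.json")  # hypothetical local copies
v2 = load_results("Komodo_6B_v2.0.0.json")

# Largest acc_norm movements between the two versions; the guard skips
# ko_truthfulqa_mc, which reports mc1/mc2 instead of acc_norm.
deltas = {
    t: v2[t]["acc_norm"] - v1[t]["acc_norm"]
    for t in v1
    if "acc_norm" in v1[t] and t in v2
}
for t, d in sorted(deltas.items(), key=lambda kv: abs(kv[1]), reverse=True)[:5]:
    print(f"{t}: {d:+.4f}")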
TeamUNIVA/Komodo_6B_v3.0.0/result_2024-03-04 11:29:12.json
DELETED
@@ -1,444 +0,0 @@
-{
-  "results": {
-    "harness|ko_arc_challenge|25": {"acc": 0.43600682593856654, "acc_stderr": 0.014491225699230916, "acc_norm": 0.4931740614334471, "acc_norm_stderr": 0.014610029151379813},
-    "harness|ko_hellaswag|10": {"acc": 0.48137821151165106, "acc_stderr": 0.004986319587524966, "acc_norm": 0.6385182234614618, "acc_norm_stderr": 0.004794478426382608},
-    "harness|ko_mmlu_world_religions|5": {"acc": 0.5380116959064327, "acc_stderr": 0.03823727092882307, "acc_norm": 0.5380116959064327, "acc_norm_stderr": 0.03823727092882307},
-    "harness|ko_mmlu_management|5": {"acc": 0.6601941747572816, "acc_stderr": 0.046897659372781335, "acc_norm": 0.6601941747572816, "acc_norm_stderr": 0.046897659372781335},
-    "harness|ko_mmlu_miscellaneous|5": {"acc": 0.5862068965517241, "acc_stderr": 0.01761220408466376, "acc_norm": 0.5862068965517241, "acc_norm_stderr": 0.01761220408466376},
-    "harness|ko_mmlu_anatomy|5": {"acc": 0.45185185185185184, "acc_stderr": 0.04299268905480863, "acc_norm": 0.45185185185185184, "acc_norm_stderr": 0.04299268905480863},
-    "harness|ko_mmlu_abstract_algebra|5": {"acc": 0.23, "acc_stderr": 0.04229525846816505, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816505},
-    "harness|ko_mmlu_conceptual_physics|5": {"acc": 0.4723404255319149, "acc_stderr": 0.03263597118409769, "acc_norm": 0.4723404255319149, "acc_norm_stderr": 0.03263597118409769},
-    "harness|ko_mmlu_virology|5": {"acc": 0.40963855421686746, "acc_stderr": 0.03828401115079022, "acc_norm": 0.40963855421686746, "acc_norm_stderr": 0.03828401115079022},
-    "harness|ko_mmlu_philosophy|5": {"acc": 0.5369774919614148, "acc_stderr": 0.028320325830105915, "acc_norm": 0.5369774919614148, "acc_norm_stderr": 0.028320325830105915},
-    "harness|ko_mmlu_human_aging|5": {"acc": 0.515695067264574, "acc_stderr": 0.0335412657542081, "acc_norm": 0.515695067264574, "acc_norm_stderr": 0.0335412657542081},
-    "harness|ko_mmlu_human_sexuality|5": {"acc": 0.549618320610687, "acc_stderr": 0.04363643698524779, "acc_norm": 0.549618320610687, "acc_norm_stderr": 0.04363643698524779},
-    "harness|ko_mmlu_medical_genetics|5": {"acc": 0.54, "acc_stderr": 0.05009082659620333, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620333},
-    "harness|ko_mmlu_high_school_geography|5": {"acc": 0.6464646464646465, "acc_stderr": 0.03406086723547155, "acc_norm": 0.6464646464646465, "acc_norm_stderr": 0.03406086723547155},
-    "harness|ko_mmlu_electrical_engineering|5": {"acc": 0.5310344827586206, "acc_stderr": 0.04158632762097828, "acc_norm": 0.5310344827586206, "acc_norm_stderr": 0.04158632762097828},
-    "harness|ko_mmlu_college_physics|5": {"acc": 0.3137254901960784, "acc_stderr": 0.04617034827006717, "acc_norm": 0.3137254901960784, "acc_norm_stderr": 0.04617034827006717},
-    "harness|ko_mmlu_high_school_microeconomics|5": {"acc": 0.5378151260504201, "acc_stderr": 0.0323854694875898, "acc_norm": 0.5378151260504201, "acc_norm_stderr": 0.0323854694875898},
-    "harness|ko_mmlu_high_school_macroeconomics|5": {"acc": 0.5461538461538461, "acc_stderr": 0.025242770987126198, "acc_norm": 0.5461538461538461, "acc_norm_stderr": 0.025242770987126198},
-    "harness|ko_mmlu_computer_security|5": {"acc": 0.58, "acc_stderr": 0.049604496374885836, "acc_norm": 0.58, "acc_norm_stderr": 0.049604496374885836},
-    "harness|ko_mmlu_global_facts|5": {"acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034},
-    "harness|ko_mmlu_jurisprudence|5": {"acc": 0.5740740740740741, "acc_stderr": 0.047803436269367894, "acc_norm": 0.5740740740740741, "acc_norm_stderr": 0.047803436269367894},
-    "harness|ko_mmlu_high_school_chemistry|5": {"acc": 0.4236453201970443, "acc_stderr": 0.034767257476490364, "acc_norm": 0.4236453201970443, "acc_norm_stderr": 0.034767257476490364},
-    "harness|ko_mmlu_high_school_biology|5": {"acc": 0.5225806451612903, "acc_stderr": 0.02841498501970786, "acc_norm": 0.5225806451612903, "acc_norm_stderr": 0.02841498501970786},
-    "harness|ko_mmlu_marketing|5": {"acc": 0.7478632478632479, "acc_stderr": 0.02844796547623102, "acc_norm": 0.7478632478632479, "acc_norm_stderr": 0.02844796547623102},
-    "harness|ko_mmlu_clinical_knowledge|5": {"acc": 0.47547169811320755, "acc_stderr": 0.030735822206205608, "acc_norm": 0.47547169811320755, "acc_norm_stderr": 0.030735822206205608},
-    "harness|ko_mmlu_public_relations|5": {"acc": 0.6090909090909091, "acc_stderr": 0.04673752333670239, "acc_norm": 0.6090909090909091, "acc_norm_stderr": 0.04673752333670239},
-    "harness|ko_mmlu_high_school_mathematics|5": {"acc": 0.31851851851851853, "acc_stderr": 0.02840653309060846, "acc_norm": 0.31851851851851853, "acc_norm_stderr": 0.02840653309060846},
-    "harness|ko_mmlu_high_school_physics|5": {"acc": 0.2847682119205298, "acc_stderr": 0.03684881521389024, "acc_norm": 0.2847682119205298, "acc_norm_stderr": 0.03684881521389024},
-    "harness|ko_mmlu_sociology|5": {"acc": 0.6417910447761194, "acc_stderr": 0.03390393042268814, "acc_norm": 0.6417910447761194, "acc_norm_stderr": 0.03390393042268814},
-    "harness|ko_mmlu_college_medicine|5": {"acc": 0.4277456647398844, "acc_stderr": 0.03772446857518027, "acc_norm": 0.4277456647398844, "acc_norm_stderr": 0.03772446857518027},
-    "harness|ko_mmlu_elementary_mathematics|5": {"acc": 0.30952380952380953, "acc_stderr": 0.023809523809523857, "acc_norm": 0.30952380952380953, "acc_norm_stderr": 0.023809523809523857},
-    "harness|ko_mmlu_college_biology|5": {"acc": 0.4791666666666667, "acc_stderr": 0.041775789507399935, "acc_norm": 0.4791666666666667, "acc_norm_stderr": 0.041775789507399935},
-    "harness|ko_mmlu_college_chemistry|5": {"acc": 0.28, "acc_stderr": 0.045126085985421276, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421276},
-    "harness|ko_mmlu_us_foreign_policy|5": {"acc": 0.65, "acc_stderr": 0.047937248544110196, "acc_norm": 0.65, "acc_norm_stderr": 0.047937248544110196},
-    "harness|ko_mmlu_moral_disputes|5": {"acc": 0.5260115606936416, "acc_stderr": 0.02688264343402289, "acc_norm": 0.5260115606936416, "acc_norm_stderr": 0.02688264343402289},
-    "harness|ko_mmlu_logical_fallacies|5": {"acc": 0.5276073619631901, "acc_stderr": 0.03922378290610991, "acc_norm": 0.5276073619631901, "acc_norm_stderr": 0.03922378290610991},
-    "harness|ko_mmlu_prehistory|5": {"acc": 0.5216049382716049, "acc_stderr": 0.027794760105008736, "acc_norm": 0.5216049382716049, "acc_norm_stderr": 0.027794760105008736},
-    "harness|ko_mmlu_college_mathematics|5": {"acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235},
-    "harness|ko_mmlu_high_school_government_and_politics|5": {"acc": 0.6217616580310881, "acc_stderr": 0.034998072761933376, "acc_norm": 0.6217616580310881, "acc_norm_stderr": 0.034998072761933376},
-    "harness|ko_mmlu_econometrics|5": {"acc": 0.2719298245614035, "acc_stderr": 0.04185774424022056, "acc_norm": 0.2719298245614035, "acc_norm_stderr": 0.04185774424022056},
-    "harness|ko_mmlu_high_school_psychology|5": {"acc": 0.6587155963302752, "acc_stderr": 0.020328612816592435, "acc_norm": 0.6587155963302752, "acc_norm_stderr": 0.020328612816592435},
-    "harness|ko_mmlu_formal_logic|5": {"acc": 0.2777777777777778, "acc_stderr": 0.04006168083848879, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.04006168083848879},
-    "harness|ko_mmlu_nutrition|5": {"acc": 0.5163398692810458, "acc_stderr": 0.028614624752805434, "acc_norm": 0.5163398692810458, "acc_norm_stderr": 0.028614624752805434},
-    "harness|ko_mmlu_business_ethics|5": {"acc": 0.61, "acc_stderr": 0.04902071300001974, "acc_norm": 0.61, "acc_norm_stderr": 0.04902071300001974},
-    "harness|ko_mmlu_international_law|5": {"acc": 0.6033057851239669, "acc_stderr": 0.044658697805310094, "acc_norm": 0.6033057851239669, "acc_norm_stderr": 0.044658697805310094},
-    "harness|ko_mmlu_astronomy|5": {"acc": 0.506578947368421, "acc_stderr": 0.040685900502249704, "acc_norm": 0.506578947368421, "acc_norm_stderr": 0.040685900502249704},
-    "harness|ko_mmlu_professional_psychology|5": {"acc": 0.4803921568627451, "acc_stderr": 0.020212274976302954, "acc_norm": 0.4803921568627451, "acc_norm_stderr": 0.020212274976302954},
-    "harness|ko_mmlu_professional_accounting|5": {"acc": 0.32978723404255317, "acc_stderr": 0.028045946942042398, "acc_norm": 0.32978723404255317, "acc_norm_stderr": 0.028045946942042398},
-    "harness|ko_mmlu_machine_learning|5": {"acc": 0.29464285714285715, "acc_stderr": 0.04327040932578731, "acc_norm": 0.29464285714285715, "acc_norm_stderr": 0.04327040932578731},
-    "harness|ko_mmlu_high_school_statistics|5": {"acc": 0.3611111111111111, "acc_stderr": 0.032757734861009996, "acc_norm": 0.3611111111111111, "acc_norm_stderr": 0.032757734861009996},
-    "harness|ko_mmlu_moral_scenarios|5": {"acc": 0.25251396648044694, "acc_stderr": 0.014530330201468628, "acc_norm": 0.25251396648044694, "acc_norm_stderr": 0.014530330201468628},
-    "harness|ko_mmlu_college_computer_science|5": {"acc": 0.4, "acc_stderr": 0.049236596391733084, "acc_norm": 0.4, "acc_norm_stderr": 0.049236596391733084},
-    "harness|ko_mmlu_high_school_computer_science|5": {"acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795},
-    "harness|ko_mmlu_professional_medicine|5": {"acc": 0.43014705882352944, "acc_stderr": 0.030074971917302875, "acc_norm": 0.43014705882352944, "acc_norm_stderr": 0.030074971917302875},
-    "harness|ko_mmlu_security_studies|5": {"acc": 0.6285714285714286, "acc_stderr": 0.03093285879278986, "acc_norm": 0.6285714285714286, "acc_norm_stderr": 0.03093285879278986},
-    "harness|ko_mmlu_high_school_world_history|5": {"acc": 0.6540084388185654, "acc_stderr": 0.03096481058878671, "acc_norm": 0.6540084388185654, "acc_norm_stderr": 0.03096481058878671},
-    "harness|ko_mmlu_professional_law|5": {"acc": 0.35984354628422427, "acc_stderr": 0.0122582604836898, "acc_norm": 0.35984354628422427, "acc_norm_stderr": 0.0122582604836898},
-    "harness|ko_mmlu_high_school_us_history|5": {"acc": 0.5441176470588235, "acc_stderr": 0.03495624522015478, "acc_norm": 0.5441176470588235, "acc_norm_stderr": 0.03495624522015478},
-    "harness|ko_mmlu_high_school_european_history|5": {"acc": 0.6363636363636364, "acc_stderr": 0.03756335775187896, "acc_norm": 0.6363636363636364, "acc_norm_stderr": 0.03756335775187896},
-    "harness|ko_truthfulqa_mc|0": {"mc1": 0.38310893512851896, "mc1_stderr": 0.01701846167938986, "mc2": 0.5300394533800307, "mc2_stderr": 0.016087134575191925},
-    "harness|ko_commongen_v2|2": {"acc": 0.6446280991735537, "acc_stderr": 0.016455496000314516, "acc_norm": 0.6434474616292798, "acc_norm_stderr": 0.016467706981527445}
-  },
-  "versions": {
-    "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0,
-    "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1,
-    "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1,
-    "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1,
-    "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1,
-    "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1,
-    "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1,
-    "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1,
-    "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1,
-    "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1,
-    "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1,
-    "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1,
-    "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1,
-    "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1,
-    "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1,
-    "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1
-  },
-  "config_general": {
-    "model_name": "TeamUNIVA/Komodo_6B_v3.0.0", "model_sha": "ac7f25d54977bd85ed364cc213bde0877031279f",
-    "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0,
-    "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null
-  }
-}
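Since this commit removes the files outright, their contents stay reachable only at pre-deletion revisions of the dataset repo. A sketch using huggingface_hub, with the repo id and revision left as placeholders (assumptions; fill in the actual dataset repo and any commit that still contains the file):

from huggingface_hub import hf_hub_download

# Download a removed result file from a revision that predates this commit.
path = hf_hub_download(
    repo_id="<leaderboard-results-dataset>",  # placeholder: this dataset repo's id
    filename="TeamUNIVA/Komodo_6B_v3.0.0/result_2024-03-04 11:29:12.json",
    repo_type="dataset",
    revision="<pre-deletion-commit-sha>",  # placeholder revision
)
print(path)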
TeamUNIVA/Komodo_7B_v1.0.0/result_2024-01-30 12:16:24.json
DELETED
@@ -1,444 +0,0 @@
-{
-  "results": {
-    "harness|ko_arc_challenge|25": {"acc": 0.47440273037542663, "acc_stderr": 0.014592230885298959, "acc_norm": 0.5213310580204779, "acc_norm_stderr": 0.014598087973127104},
-    "harness|ko_hellaswag|10": {"acc": 0.5659231228838877, "acc_stderr": 0.004946221512145284, "acc_norm": 0.681736705835491, "acc_norm_stderr": 0.004648503177353943},
-    "harness|ko_mmlu_world_religions|5": {"acc": 0.49122807017543857, "acc_stderr": 0.038342347441649924, "acc_norm": 0.49122807017543857, "acc_norm_stderr": 0.038342347441649924},
-    "harness|ko_mmlu_management|5": {"acc": 0.6019417475728155, "acc_stderr": 0.048467482539772386, "acc_norm": 0.6019417475728155, "acc_norm_stderr": 0.048467482539772386},
-    "harness|ko_mmlu_miscellaneous|5": {"acc": 0.5223499361430396, "acc_stderr": 0.017862091778507855, "acc_norm": 0.5223499361430396, "acc_norm_stderr": 0.017862091778507855},
-    "harness|ko_mmlu_anatomy|5": {"acc": 0.3333333333333333, "acc_stderr": 0.04072314811876837, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.04072314811876837},
-    "harness|ko_mmlu_abstract_algebra|5": {"acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814},
-    "harness|ko_mmlu_conceptual_physics|5": {"acc": 0.3702127659574468, "acc_stderr": 0.031565646822367836, "acc_norm": 0.3702127659574468, "acc_norm_stderr": 0.031565646822367836},
-    "harness|ko_mmlu_virology|5": {"acc": 0.41566265060240964, "acc_stderr": 0.038367221765980515, "acc_norm": 0.41566265060240964, "acc_norm_stderr": 0.038367221765980515},
-    "harness|ko_mmlu_philosophy|5": {"acc": 0.49517684887459806, "acc_stderr": 0.028396770444111288, "acc_norm": 0.49517684887459806, "acc_norm_stderr": 0.028396770444111288},
-    "harness|ko_mmlu_human_aging|5": {"acc": 0.4439461883408072, "acc_stderr": 0.03334625674242728, "acc_norm": 0.4439461883408072, "acc_norm_stderr": 0.03334625674242728},
-    "harness|ko_mmlu_human_sexuality|5": {"acc": 0.4580152671755725, "acc_stderr": 0.04369802690578756, "acc_norm": 0.4580152671755725, "acc_norm_stderr": 0.04369802690578756},
-    "harness|ko_mmlu_medical_genetics|5": {"acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795},
-    "harness|ko_mmlu_high_school_geography|5": {"acc": 0.6161616161616161, "acc_stderr": 0.03464881675016338, "acc_norm": 0.6161616161616161, "acc_norm_stderr": 0.03464881675016338},
-    "harness|ko_mmlu_electrical_engineering|5": {"acc": 0.4482758620689655, "acc_stderr": 0.04144311810878151, "acc_norm": 0.4482758620689655, "acc_norm_stderr": 0.04144311810878151},
-    "harness|ko_mmlu_college_physics|5": {"acc": 0.3431372549019608, "acc_stderr": 0.047240073523838876, "acc_norm": 0.3431372549019608, "acc_norm_stderr": 0.047240073523838876},
-    "harness|ko_mmlu_high_school_microeconomics|5": {"acc": 0.5840336134453782, "acc_stderr": 0.03201650100739611, "acc_norm": 0.5840336134453782, "acc_norm_stderr": 0.03201650100739611},
-    "harness|ko_mmlu_high_school_macroeconomics|5": {"acc": 0.5333333333333333, "acc_stderr": 0.025294608023986462, "acc_norm": 0.5333333333333333, "acc_norm_stderr": 0.025294608023986462},
-    "harness|ko_mmlu_computer_security|5": {"acc": 0.47, "acc_stderr": 0.050161355804659205, "acc_norm": 0.47, "acc_norm_stderr": 0.050161355804659205},
-    "harness|ko_mmlu_global_facts|5": {"acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235},
-    "harness|ko_mmlu_jurisprudence|5": {"acc": 0.5185185185185185, "acc_stderr": 0.0483036602463533, "acc_norm": 0.5185185185185185, "acc_norm_stderr": 0.0483036602463533},
-    "harness|ko_mmlu_high_school_chemistry|5": {"acc": 0.43349753694581283, "acc_stderr": 0.034867317274198714, "acc_norm": 0.43349753694581283, "acc_norm_stderr": 0.034867317274198714},
-    "harness|ko_mmlu_high_school_biology|5": {"acc": 0.4838709677419355, "acc_stderr": 0.028429203176724555, "acc_norm": 0.4838709677419355, "acc_norm_stderr": 0.028429203176724555},
-    "harness|ko_mmlu_marketing|5": {"acc": 0.6623931623931624, "acc_stderr": 0.030980296992618558, "acc_norm": 0.6623931623931624, "acc_norm_stderr": 0.030980296992618558},
-    "harness|ko_mmlu_clinical_knowledge|5": {"acc": 0.4188679245283019, "acc_stderr": 0.0303650508291152, "acc_norm": 0.4188679245283019, "acc_norm_stderr": 0.0303650508291152},
-    "harness|ko_mmlu_public_relations|5": {"acc": 0.5727272727272728, "acc_stderr": 0.04738198703545483, "acc_norm": 0.5727272727272728, "acc_norm_stderr": 0.04738198703545483},
-    "harness|ko_mmlu_high_school_mathematics|5": {"acc": 0.2814814814814815, "acc_stderr": 0.027420019350945277, "acc_norm": 0.2814814814814815, "acc_norm_stderr": 0.027420019350945277},
-    "harness|ko_mmlu_high_school_physics|5": {"acc": 0.32450331125827814, "acc_stderr": 0.03822746937658753, "acc_norm": 0.32450331125827814, "acc_norm_stderr": 0.03822746937658753},
-    "harness|ko_mmlu_sociology|5": {"acc": 0.5870646766169154, "acc_stderr": 0.03481520803367348, "acc_norm": 0.5870646766169154, "acc_norm_stderr": 0.03481520803367348},
-    "harness|ko_mmlu_college_medicine|5": {"acc": 0.37572254335260113, "acc_stderr": 0.03692820767264867, "acc_norm": 0.37572254335260113, "acc_norm_stderr": 0.03692820767264867},
-    "harness|ko_mmlu_elementary_mathematics|5": {"acc": 0.3386243386243386, "acc_stderr": 0.02437319786798306, "acc_norm": 0.3386243386243386, "acc_norm_stderr": 0.02437319786798306},
-    "harness|ko_mmlu_college_biology|5": {"acc": 0.3888888888888889, "acc_stderr": 0.04076663253918567, "acc_norm": 0.3888888888888889, "acc_norm_stderr": 0.04076663253918567},
-    "harness|ko_mmlu_college_chemistry|5": {"acc": 0.39, "acc_stderr": 0.04902071300001974, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001974},
-    "harness|ko_mmlu_us_foreign_policy|5": {"acc": 0.59, "acc_stderr": 0.04943110704237101, "acc_norm": 0.59, "acc_norm_stderr": 0.04943110704237101},
-    "harness|ko_mmlu_moral_disputes|5": {"acc": 0.5115606936416185, "acc_stderr": 0.026911898686377906, "acc_norm": 0.5115606936416185, "acc_norm_stderr": 0.026911898686377906
212 |
-
},
|
213 |
-
"harness|ko_mmlu_logical_fallacies|5": {
|
214 |
-
"acc": 0.48466257668711654,
|
215 |
-
"acc_stderr": 0.03926522378708843,
|
216 |
-
"acc_norm": 0.48466257668711654,
|
217 |
-
"acc_norm_stderr": 0.03926522378708843
|
218 |
-
},
|
219 |
-
"harness|ko_mmlu_prehistory|5": {
|
220 |
-
"acc": 0.4876543209876543,
|
221 |
-
"acc_stderr": 0.027812262269327235,
|
222 |
-
"acc_norm": 0.4876543209876543,
|
223 |
-
"acc_norm_stderr": 0.027812262269327235
|
224 |
-
},
|
225 |
-
"harness|ko_mmlu_college_mathematics|5": {
|
226 |
-
"acc": 0.28,
|
227 |
-
"acc_stderr": 0.045126085985421296,
|
228 |
-
"acc_norm": 0.28,
|
229 |
-
"acc_norm_stderr": 0.045126085985421296
|
230 |
-
},
|
231 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": {
|
232 |
-
"acc": 0.5492227979274611,
|
233 |
-
"acc_stderr": 0.035909109522355244,
|
234 |
-
"acc_norm": 0.5492227979274611,
|
235 |
-
"acc_norm_stderr": 0.035909109522355244
|
236 |
-
},
|
237 |
-
"harness|ko_mmlu_econometrics|5": {
|
238 |
-
"acc": 0.3333333333333333,
|
239 |
-
"acc_stderr": 0.04434600701584926,
|
240 |
-
"acc_norm": 0.3333333333333333,
|
241 |
-
"acc_norm_stderr": 0.04434600701584926
|
242 |
-
},
|
243 |
-
"harness|ko_mmlu_high_school_psychology|5": {
|
244 |
-
"acc": 0.5339449541284403,
|
245 |
-
"acc_stderr": 0.021387863350353996,
|
246 |
-
"acc_norm": 0.5339449541284403,
|
247 |
-
"acc_norm_stderr": 0.021387863350353996
|
248 |
-
},
|
249 |
-
"harness|ko_mmlu_formal_logic|5": {
|
250 |
-
"acc": 0.4126984126984127,
|
251 |
-
"acc_stderr": 0.04403438954768177,
|
252 |
-
"acc_norm": 0.4126984126984127,
|
253 |
-
"acc_norm_stderr": 0.04403438954768177
|
254 |
-
},
|
255 |
-
"harness|ko_mmlu_nutrition|5": {
|
256 |
-
"acc": 0.5032679738562091,
|
257 |
-
"acc_stderr": 0.028629305194003543,
|
258 |
-
"acc_norm": 0.5032679738562091,
|
259 |
-
"acc_norm_stderr": 0.028629305194003543
|
260 |
-
},
|
261 |
-
"harness|ko_mmlu_business_ethics|5": {
|
262 |
-
"acc": 0.49,
|
263 |
-
"acc_stderr": 0.05024183937956911,
|
264 |
-
"acc_norm": 0.49,
|
265 |
-
"acc_norm_stderr": 0.05024183937956911
|
266 |
-
},
|
267 |
-
"harness|ko_mmlu_international_law|5": {
|
268 |
-
"acc": 0.6776859504132231,
|
269 |
-
"acc_stderr": 0.04266416363352168,
|
270 |
-
"acc_norm": 0.6776859504132231,
|
271 |
-
"acc_norm_stderr": 0.04266416363352168
|
272 |
-
},
|
273 |
-
"harness|ko_mmlu_astronomy|5": {
|
274 |
-
"acc": 0.40789473684210525,
|
275 |
-
"acc_stderr": 0.03999309712777472,
|
276 |
-
"acc_norm": 0.40789473684210525,
|
277 |
-
"acc_norm_stderr": 0.03999309712777472
|
278 |
-
},
|
279 |
-
"harness|ko_mmlu_professional_psychology|5": {
|
280 |
-
"acc": 0.4166666666666667,
|
281 |
-
"acc_stderr": 0.019944914136873573,
|
282 |
-
"acc_norm": 0.4166666666666667,
|
283 |
-
"acc_norm_stderr": 0.019944914136873573
|
284 |
-
},
|
285 |
-
"harness|ko_mmlu_professional_accounting|5": {
|
286 |
-
"acc": 0.2907801418439716,
|
287 |
-
"acc_stderr": 0.027090664368353178,
|
288 |
-
"acc_norm": 0.2907801418439716,
|
289 |
-
"acc_norm_stderr": 0.027090664368353178
|
290 |
-
},
|
291 |
-
"harness|ko_mmlu_machine_learning|5": {
|
292 |
-
"acc": 0.32142857142857145,
|
293 |
-
"acc_stderr": 0.0443280405529152,
|
294 |
-
"acc_norm": 0.32142857142857145,
|
295 |
-
"acc_norm_stderr": 0.0443280405529152
|
296 |
-
},
|
297 |
-
"harness|ko_mmlu_high_school_statistics|5": {
|
298 |
-
"acc": 0.41203703703703703,
|
299 |
-
"acc_stderr": 0.03356787758160834,
|
300 |
-
"acc_norm": 0.41203703703703703,
|
301 |
-
"acc_norm_stderr": 0.03356787758160834
|
302 |
-
},
|
303 |
-
"harness|ko_mmlu_moral_scenarios|5": {
|
304 |
-
"acc": 0.24804469273743016,
|
305 |
-
"acc_stderr": 0.014444157808261457,
|
306 |
-
"acc_norm": 0.24804469273743016,
|
307 |
-
"acc_norm_stderr": 0.014444157808261457
|
308 |
-
},
|
309 |
-
"harness|ko_mmlu_college_computer_science|5": {
|
310 |
-
"acc": 0.43,
|
311 |
-
"acc_stderr": 0.04975698519562428,
|
312 |
-
"acc_norm": 0.43,
|
313 |
-
"acc_norm_stderr": 0.04975698519562428
|
314 |
-
},
|
315 |
-
"harness|ko_mmlu_high_school_computer_science|5": {
|
316 |
-
"acc": 0.57,
|
317 |
-
"acc_stderr": 0.049756985195624284,
|
318 |
-
"acc_norm": 0.57,
|
319 |
-
"acc_norm_stderr": 0.049756985195624284
|
320 |
-
},
|
321 |
-
"harness|ko_mmlu_professional_medicine|5": {
|
322 |
-
"acc": 0.41544117647058826,
|
323 |
-
"acc_stderr": 0.029935342707877743,
|
324 |
-
"acc_norm": 0.41544117647058826,
|
325 |
-
"acc_norm_stderr": 0.029935342707877743
|
326 |
-
},
|
327 |
-
"harness|ko_mmlu_security_studies|5": {
|
328 |
-
"acc": 0.563265306122449,
|
329 |
-
"acc_stderr": 0.03175195237583324,
|
330 |
-
"acc_norm": 0.563265306122449,
|
331 |
-
"acc_norm_stderr": 0.03175195237583324
|
332 |
-
},
|
333 |
-
"harness|ko_mmlu_high_school_world_history|5": {
|
334 |
-
"acc": 0.5822784810126582,
|
335 |
-
"acc_stderr": 0.032103530322412685,
|
336 |
-
"acc_norm": 0.5822784810126582,
|
337 |
-
"acc_norm_stderr": 0.032103530322412685
|
338 |
-
},
|
339 |
-
"harness|ko_mmlu_professional_law|5": {
|
340 |
-
"acc": 0.3624511082138201,
|
341 |
-
"acc_stderr": 0.01227751253325249,
|
342 |
-
"acc_norm": 0.3624511082138201,
|
343 |
-
"acc_norm_stderr": 0.01227751253325249
|
344 |
-
},
|
345 |
-
"harness|ko_mmlu_high_school_us_history|5": {
|
346 |
-
"acc": 0.4803921568627451,
|
347 |
-
"acc_stderr": 0.03506612560524866,
|
348 |
-
"acc_norm": 0.4803921568627451,
|
349 |
-
"acc_norm_stderr": 0.03506612560524866
|
350 |
-
},
|
351 |
-
"harness|ko_mmlu_high_school_european_history|5": {
|
352 |
-
"acc": 0.4909090909090909,
|
353 |
-
"acc_stderr": 0.03903698647748441,
|
354 |
-
"acc_norm": 0.4909090909090909,
|
355 |
-
"acc_norm_stderr": 0.03903698647748441
|
356 |
-
},
|
357 |
-
"harness|ko_truthfulqa_mc|0": {
|
358 |
-
"mc1": 0.4394124847001224,
|
359 |
-
"mc1_stderr": 0.01737452048251371,
|
360 |
-
"mc2": 0.5952931693636797,
|
361 |
-
"mc2_stderr": 0.01657492819641639
|
362 |
-
},
|
363 |
-
"harness|ko_commongen_v2|2": {
|
364 |
-
"acc": 0.5159386068476978,
|
365 |
-
"acc_stderr": 0.017181617837190195,
|
366 |
-
"acc_norm": 0.5489964580873672,
|
367 |
-
"acc_norm_stderr": 0.01710761885954935
|
368 |
-
}
|
369 |
-
},
|
370 |
-
"versions": {
|
371 |
-
"all": 0,
|
372 |
-
"harness|ko_arc_challenge|25": 0,
|
373 |
-
"harness|ko_hellaswag|10": 0,
|
374 |
-
"harness|ko_mmlu_world_religions|5": 1,
|
375 |
-
"harness|ko_mmlu_management|5": 1,
|
376 |
-
"harness|ko_mmlu_miscellaneous|5": 1,
|
377 |
-
"harness|ko_mmlu_anatomy|5": 1,
|
378 |
-
"harness|ko_mmlu_abstract_algebra|5": 1,
|
379 |
-
"harness|ko_mmlu_conceptual_physics|5": 1,
|
380 |
-
"harness|ko_mmlu_virology|5": 1,
|
381 |
-
"harness|ko_mmlu_philosophy|5": 1,
|
382 |
-
"harness|ko_mmlu_human_aging|5": 1,
|
383 |
-
"harness|ko_mmlu_human_sexuality|5": 1,
|
384 |
-
"harness|ko_mmlu_medical_genetics|5": 1,
|
385 |
-
"harness|ko_mmlu_high_school_geography|5": 1,
|
386 |
-
"harness|ko_mmlu_electrical_engineering|5": 1,
|
387 |
-
"harness|ko_mmlu_college_physics|5": 1,
|
388 |
-
"harness|ko_mmlu_high_school_microeconomics|5": 1,
|
389 |
-
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
|
390 |
-
"harness|ko_mmlu_computer_security|5": 1,
|
391 |
-
"harness|ko_mmlu_global_facts|5": 1,
|
392 |
-
"harness|ko_mmlu_jurisprudence|5": 1,
|
393 |
-
"harness|ko_mmlu_high_school_chemistry|5": 1,
|
394 |
-
"harness|ko_mmlu_high_school_biology|5": 1,
|
395 |
-
"harness|ko_mmlu_marketing|5": 1,
|
396 |
-
"harness|ko_mmlu_clinical_knowledge|5": 1,
|
397 |
-
"harness|ko_mmlu_public_relations|5": 1,
|
398 |
-
"harness|ko_mmlu_high_school_mathematics|5": 1,
|
399 |
-
"harness|ko_mmlu_high_school_physics|5": 1,
|
400 |
-
"harness|ko_mmlu_sociology|5": 1,
|
401 |
-
"harness|ko_mmlu_college_medicine|5": 1,
|
402 |
-
"harness|ko_mmlu_elementary_mathematics|5": 1,
|
403 |
-
"harness|ko_mmlu_college_biology|5": 1,
|
404 |
-
"harness|ko_mmlu_college_chemistry|5": 1,
|
405 |
-
"harness|ko_mmlu_us_foreign_policy|5": 1,
|
406 |
-
"harness|ko_mmlu_moral_disputes|5": 1,
|
407 |
-
"harness|ko_mmlu_logical_fallacies|5": 1,
|
408 |
-
"harness|ko_mmlu_prehistory|5": 1,
|
409 |
-
"harness|ko_mmlu_college_mathematics|5": 1,
|
410 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
|
411 |
-
"harness|ko_mmlu_econometrics|5": 1,
|
412 |
-
"harness|ko_mmlu_high_school_psychology|5": 1,
|
413 |
-
"harness|ko_mmlu_formal_logic|5": 1,
|
414 |
-
"harness|ko_mmlu_nutrition|5": 1,
|
415 |
-
"harness|ko_mmlu_business_ethics|5": 1,
|
416 |
-
"harness|ko_mmlu_international_law|5": 1,
|
417 |
-
"harness|ko_mmlu_astronomy|5": 1,
|
418 |
-
"harness|ko_mmlu_professional_psychology|5": 1,
|
419 |
-
"harness|ko_mmlu_professional_accounting|5": 1,
|
420 |
-
"harness|ko_mmlu_machine_learning|5": 1,
|
421 |
-
"harness|ko_mmlu_high_school_statistics|5": 1,
|
422 |
-
"harness|ko_mmlu_moral_scenarios|5": 1,
|
423 |
-
"harness|ko_mmlu_college_computer_science|5": 1,
|
424 |
-
"harness|ko_mmlu_high_school_computer_science|5": 1,
|
425 |
-
"harness|ko_mmlu_professional_medicine|5": 1,
|
426 |
-
"harness|ko_mmlu_security_studies|5": 1,
|
427 |
-
"harness|ko_mmlu_high_school_world_history|5": 1,
|
428 |
-
"harness|ko_mmlu_professional_law|5": 1,
|
429 |
-
"harness|ko_mmlu_high_school_us_history|5": 1,
|
430 |
-
"harness|ko_mmlu_high_school_european_history|5": 1,
|
431 |
-
"harness|ko_truthfulqa_mc|0": 0,
|
432 |
-
"harness|ko_commongen_v2|2": 1
|
433 |
-
},
|
434 |
-
"config_general": {
|
435 |
-
"model_name": "TeamUNIVA/Komodo_7B_v1.0.0",
|
436 |
-
"model_sha": "079cadef2c996d4a14365afc3d52f88b911b357e",
|
437 |
-
"model_dtype": "torch.float16",
|
438 |
-
"lighteval_sha": "",
|
439 |
-
"num_few_shot_default": 0,
|
440 |
-
"num_fewshot_seeds": 1,
|
441 |
-
"override_batch_size": 1,
|
442 |
-
"max_samples": null
|
443 |
-
}
|
444 |
-
}
|
TeamUNIVA/Komodo_7B_v1.0.1/result_2024-01-30 12:16:51.json
DELETED
@@ -1,444 +0,0 @@
-{
-    "results": {
-        "harness|ko_arc_challenge|25": {"acc": 0.48293515358361777, "acc_stderr": 0.014602878388536597, "acc_norm": 0.514505119453925, "acc_norm_stderr": 0.01460524108137005},
-        "harness|ko_hellaswag|10": {"acc": 0.5424218283210516, "acc_stderr": 0.004971789638563324, "acc_norm": 0.6623182632941645, "acc_norm_stderr": 0.00471952909991311},
-        "harness|ko_mmlu_world_religions|5": {"acc": 0.4444444444444444, "acc_stderr": 0.03811079669833531, "acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.03811079669833531},
-        "harness|ko_mmlu_management|5": {"acc": 0.5631067961165048, "acc_stderr": 0.04911147107365777, "acc_norm": 0.5631067961165048, "acc_norm_stderr": 0.04911147107365777},
-        "harness|ko_mmlu_miscellaneous|5": {"acc": 0.5376756066411239, "acc_stderr": 0.017829131764287177, "acc_norm": 0.5376756066411239, "acc_norm_stderr": 0.017829131764287177},
-        "harness|ko_mmlu_anatomy|5": {"acc": 0.32592592592592595, "acc_stderr": 0.040491220417025055, "acc_norm": 0.32592592592592595, "acc_norm_stderr": 0.040491220417025055},
-        "harness|ko_mmlu_abstract_algebra|5": {"acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128},
-        "harness|ko_mmlu_conceptual_physics|5": {"acc": 0.34893617021276596, "acc_stderr": 0.031158522131357766, "acc_norm": 0.34893617021276596, "acc_norm_stderr": 0.031158522131357766},
-        "harness|ko_mmlu_virology|5": {"acc": 0.4397590361445783, "acc_stderr": 0.03864139923699122, "acc_norm": 0.4397590361445783, "acc_norm_stderr": 0.03864139923699122},
-        "harness|ko_mmlu_philosophy|5": {"acc": 0.4790996784565916, "acc_stderr": 0.028373270961069414, "acc_norm": 0.4790996784565916, "acc_norm_stderr": 0.028373270961069414},
-        "harness|ko_mmlu_human_aging|5": {"acc": 0.4260089686098655, "acc_stderr": 0.033188332862172806, "acc_norm": 0.4260089686098655, "acc_norm_stderr": 0.033188332862172806},
-        "harness|ko_mmlu_human_sexuality|5": {"acc": 0.45038167938931295, "acc_stderr": 0.04363643698524779, "acc_norm": 0.45038167938931295, "acc_norm_stderr": 0.04363643698524779},
-        "harness|ko_mmlu_medical_genetics|5": {"acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025},
-        "harness|ko_mmlu_high_school_geography|5": {"acc": 0.5757575757575758, "acc_stderr": 0.03521224908841586, "acc_norm": 0.5757575757575758, "acc_norm_stderr": 0.03521224908841586},
-        "harness|ko_mmlu_electrical_engineering|5": {"acc": 0.3448275862068966, "acc_stderr": 0.03960933549451207, "acc_norm": 0.3448275862068966, "acc_norm_stderr": 0.03960933549451207},
-        "harness|ko_mmlu_college_physics|5": {"acc": 0.24509803921568626, "acc_stderr": 0.04280105837364397, "acc_norm": 0.24509803921568626, "acc_norm_stderr": 0.04280105837364397},
-        "harness|ko_mmlu_high_school_microeconomics|5": {"acc": 0.44537815126050423, "acc_stderr": 0.032284106267163895, "acc_norm": 0.44537815126050423, "acc_norm_stderr": 0.032284106267163895},
-        "harness|ko_mmlu_high_school_macroeconomics|5": {"acc": 0.4512820512820513, "acc_stderr": 0.025230381238934833, "acc_norm": 0.4512820512820513, "acc_norm_stderr": 0.025230381238934833},
-        "harness|ko_mmlu_computer_security|5": {"acc": 0.57, "acc_stderr": 0.049756985195624284, "acc_norm": 0.57, "acc_norm_stderr": 0.049756985195624284},
-        "harness|ko_mmlu_global_facts|5": {"acc": 0.28, "acc_stderr": 0.045126085985421276, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421276},
-        "harness|ko_mmlu_jurisprudence|5": {"acc": 0.5092592592592593, "acc_stderr": 0.04832853553437055, "acc_norm": 0.5092592592592593, "acc_norm_stderr": 0.04832853553437055},
-        "harness|ko_mmlu_high_school_chemistry|5": {"acc": 0.3793103448275862, "acc_stderr": 0.034139638059062345, "acc_norm": 0.3793103448275862, "acc_norm_stderr": 0.034139638059062345},
-        "harness|ko_mmlu_high_school_biology|5": {"acc": 0.4, "acc_stderr": 0.027869320571664632, "acc_norm": 0.4, "acc_norm_stderr": 0.027869320571664632},
-        "harness|ko_mmlu_marketing|5": {"acc": 0.6581196581196581, "acc_stderr": 0.03107502852650776, "acc_norm": 0.6581196581196581, "acc_norm_stderr": 0.03107502852650776},
-        "harness|ko_mmlu_clinical_knowledge|5": {"acc": 0.39245283018867927, "acc_stderr": 0.030052580579557845, "acc_norm": 0.39245283018867927, "acc_norm_stderr": 0.030052580579557845},
-        "harness|ko_mmlu_public_relations|5": {"acc": 0.5, "acc_stderr": 0.04789131426105757, "acc_norm": 0.5, "acc_norm_stderr": 0.04789131426105757},
-        "harness|ko_mmlu_high_school_mathematics|5": {"acc": 0.24074074074074073, "acc_stderr": 0.026067159222275794, "acc_norm": 0.24074074074074073, "acc_norm_stderr": 0.026067159222275794},
-        "harness|ko_mmlu_high_school_physics|5": {"acc": 0.2582781456953642, "acc_stderr": 0.035737053147634576, "acc_norm": 0.2582781456953642, "acc_norm_stderr": 0.035737053147634576},
-        "harness|ko_mmlu_sociology|5": {"acc": 0.572139303482587, "acc_stderr": 0.03498541988407795, "acc_norm": 0.572139303482587, "acc_norm_stderr": 0.03498541988407795},
-        "harness|ko_mmlu_college_medicine|5": {"acc": 0.3179190751445087, "acc_stderr": 0.03550683989165581, "acc_norm": 0.3179190751445087, "acc_norm_stderr": 0.03550683989165581},
-        "harness|ko_mmlu_elementary_mathematics|5": {"acc": 0.3439153439153439, "acc_stderr": 0.024464426625596433, "acc_norm": 0.3439153439153439, "acc_norm_stderr": 0.024464426625596433},
-        "harness|ko_mmlu_college_biology|5": {"acc": 0.375, "acc_stderr": 0.04048439222695598, "acc_norm": 0.375, "acc_norm_stderr": 0.04048439222695598},
-        "harness|ko_mmlu_college_chemistry|5": {"acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316},
-        "harness|ko_mmlu_us_foreign_policy|5": {"acc": 0.56, "acc_stderr": 0.04988876515698589, "acc_norm": 0.56, "acc_norm_stderr": 0.04988876515698589},
-        "harness|ko_mmlu_moral_disputes|5": {"acc": 0.45375722543352603, "acc_stderr": 0.026803720583206167, "acc_norm": 0.45375722543352603, "acc_norm_stderr": 0.026803720583206167},
-        "harness|ko_mmlu_logical_fallacies|5": {"acc": 0.44171779141104295, "acc_stderr": 0.03901591825836185, "acc_norm": 0.44171779141104295, "acc_norm_stderr": 0.03901591825836185},
-        "harness|ko_mmlu_prehistory|5": {"acc": 0.4722222222222222, "acc_stderr": 0.027777777777777804, "acc_norm": 0.4722222222222222, "acc_norm_stderr": 0.027777777777777804},
-        "harness|ko_mmlu_college_mathematics|5": {"acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845},
-        "harness|ko_mmlu_high_school_government_and_politics|5": {"acc": 0.5129533678756477, "acc_stderr": 0.03607228061047749, "acc_norm": 0.5129533678756477, "acc_norm_stderr": 0.03607228061047749},
-        "harness|ko_mmlu_econometrics|5": {"acc": 0.3508771929824561, "acc_stderr": 0.044895393502706986, "acc_norm": 0.3508771929824561, "acc_norm_stderr": 0.044895393502706986},
-        "harness|ko_mmlu_high_school_psychology|5": {"acc": 0.5266055045871559, "acc_stderr": 0.02140695268815158, "acc_norm": 0.5266055045871559, "acc_norm_stderr": 0.02140695268815158},
-        "harness|ko_mmlu_formal_logic|5": {"acc": 0.23809523809523808, "acc_stderr": 0.038095238095238126, "acc_norm": 0.23809523809523808, "acc_norm_stderr": 0.038095238095238126},
-        "harness|ko_mmlu_nutrition|5": {"acc": 0.4444444444444444, "acc_stderr": 0.02845263998508801, "acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.02845263998508801},
-        "harness|ko_mmlu_business_ethics|5": {"acc": 0.44, "acc_stderr": 0.049888765156985884, "acc_norm": 0.44, "acc_norm_stderr": 0.049888765156985884},
-        "harness|ko_mmlu_international_law|5": {"acc": 0.5867768595041323, "acc_stderr": 0.04495087843548408, "acc_norm": 0.5867768595041323, "acc_norm_stderr": 0.04495087843548408},
-        "harness|ko_mmlu_astronomy|5": {"acc": 0.3815789473684211, "acc_stderr": 0.03953173377749194, "acc_norm": 0.3815789473684211, "acc_norm_stderr": 0.03953173377749194},
-        "harness|ko_mmlu_professional_psychology|5": {"acc": 0.4477124183006536, "acc_stderr": 0.020116925347422425, "acc_norm": 0.4477124183006536, "acc_norm_stderr": 0.020116925347422425},
-        "harness|ko_mmlu_professional_accounting|5": {"acc": 0.3262411347517731, "acc_stderr": 0.027968453043563168, "acc_norm": 0.3262411347517731, "acc_norm_stderr": 0.027968453043563168},
-        "harness|ko_mmlu_machine_learning|5": {"acc": 0.2857142857142857, "acc_stderr": 0.04287858751340456, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.04287858751340456},
-        "harness|ko_mmlu_high_school_statistics|5": {"acc": 0.2916666666666667, "acc_stderr": 0.030998666304560524, "acc_norm": 0.2916666666666667, "acc_norm_stderr": 0.030998666304560524},
-        "harness|ko_mmlu_moral_scenarios|5": {"acc": 0.24022346368715083, "acc_stderr": 0.014288343803925314, "acc_norm": 0.24022346368715083, "acc_norm_stderr": 0.014288343803925314},
-        "harness|ko_mmlu_college_computer_science|5": {"acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045},
-        "harness|ko_mmlu_high_school_computer_science|5": {"acc": 0.57, "acc_stderr": 0.049756985195624284, "acc_norm": 0.57, "acc_norm_stderr": 0.049756985195624284},
-        "harness|ko_mmlu_professional_medicine|5": {"acc": 0.39338235294117646, "acc_stderr": 0.029674288281311183, "acc_norm": 0.39338235294117646, "acc_norm_stderr": 0.029674288281311183},
-        "harness|ko_mmlu_security_studies|5": {"acc": 0.4489795918367347, "acc_stderr": 0.0318421386668758, "acc_norm": 0.4489795918367347, "acc_norm_stderr": 0.0318421386668758},
-        "harness|ko_mmlu_high_school_world_history|5": {"acc": 0.5949367088607594, "acc_stderr": 0.031955147413706725, "acc_norm": 0.5949367088607594, "acc_norm_stderr": 0.031955147413706725},
-        "harness|ko_mmlu_professional_law|5": {"acc": 0.3376792698826597, "acc_stderr": 0.01207856377714556, "acc_norm": 0.3376792698826597, "acc_norm_stderr": 0.01207856377714556},
-        "harness|ko_mmlu_high_school_us_history|5": {"acc": 0.4215686274509804, "acc_stderr": 0.03465868196380758, "acc_norm": 0.4215686274509804, "acc_norm_stderr": 0.03465868196380758},
-        "harness|ko_mmlu_high_school_european_history|5": {"acc": 0.5212121212121212, "acc_stderr": 0.03900828913737301, "acc_norm": 0.5212121212121212, "acc_norm_stderr": 0.03900828913737301},
-        "harness|ko_truthfulqa_mc|0": {"mc1": 0.423500611995104, "mc1_stderr": 0.01729742144853472, "mc2": 0.5817391312297906, "mc2_stderr": 0.01670758742996612},
-        "harness|ko_commongen_v2|2": {"acc": 0.5430932703659976, "acc_stderr": 0.017126389093086777, "acc_norm": 0.5761511216056671, "acc_norm_stderr": 0.01698981083462825}
-    },
-    "versions": {
-        "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0,
-        "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1,
-        "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1,
-        "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1,
-        "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1,
-        "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1,
-        "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1,
-        "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1,
-        "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1,
-        "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1,
-        "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1,
-        "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1,
-        "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1,
-        "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1,
-        "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1,
-        "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1,
-        "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1,
-        "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1
-    },
-    "config_general": {
-        "model_name": "TeamUNIVA/Komodo_7B_v1.0.1", "model_sha": "f59d7d20552ffe38c8c7bf5c4b14b5c70d5ac820",
-        "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0,
-        "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null
-    }
-}
The-matt/llama2_ko-7b_distinctive-snowflake-182_1060/result_2023-11-20 01:48:08.json
DELETED
@@ -1,444 +0,0 @@
-{
-    "results": {
-        "harness|ko_arc_challenge|25": {"acc": 0.3054607508532423, "acc_stderr": 0.013460080478002494, "acc_norm": 0.3583617747440273, "acc_norm_stderr": 0.014012883334859859},
-        "harness|ko_hellaswag|10": {"acc": 0.3761202947619996, "acc_stderr": 0.004834207964061325, "acc_norm": 0.4910376419040032, "acc_norm_stderr": 0.004988979750014442},
-        "harness|ko_mmlu_world_religions|5": {"acc": 0.28654970760233917, "acc_stderr": 0.03467826685703826, "acc_norm": 0.28654970760233917, "acc_norm_stderr": 0.03467826685703826},
-        "harness|ko_mmlu_management|5": {"acc": 0.2524271844660194, "acc_stderr": 0.04301250399690879, "acc_norm": 0.2524271844660194, "acc_norm_stderr": 0.04301250399690879},
-        "harness|ko_mmlu_miscellaneous|5": {"acc": 0.3282247765006386, "acc_stderr": 0.01679168564019289, "acc_norm": 0.3282247765006386, "acc_norm_stderr": 0.01679168564019289},
-        "harness|ko_mmlu_anatomy|5": {"acc": 0.34074074074074073, "acc_stderr": 0.04094376269996794, "acc_norm": 0.34074074074074073, "acc_norm_stderr": 0.04094376269996794},
-        "harness|ko_mmlu_abstract_algebra|5": {"acc": 0.28, "acc_stderr": 0.045126085985421255, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421255},
-        "harness|ko_mmlu_conceptual_physics|5": {"acc": 0.251063829787234, "acc_stderr": 0.02834696377716245, "acc_norm": 0.251063829787234, "acc_norm_stderr": 0.02834696377716245},
-        "harness|ko_mmlu_virology|5": {"acc": 0.25903614457831325, "acc_stderr": 0.034106466140718564, "acc_norm": 0.25903614457831325, "acc_norm_stderr": 0.034106466140718564},
-        "harness|ko_mmlu_philosophy|5": {"acc": 0.3311897106109325, "acc_stderr": 0.026730620728004917, "acc_norm": 0.3311897106109325, "acc_norm_stderr": 0.026730620728004917},
-        "harness|ko_mmlu_human_aging|5": {"acc": 0.29596412556053814, "acc_stderr": 0.0306365913486998, "acc_norm": 0.29596412556053814, "acc_norm_stderr": 0.0306365913486998},
-        "harness|ko_mmlu_human_sexuality|5": {"acc": 0.24427480916030533, "acc_stderr": 0.037683359597287434, "acc_norm": 0.24427480916030533, "acc_norm_stderr": 0.037683359597287434},
-        "harness|ko_mmlu_medical_genetics|5": {"acc": 0.27, "acc_stderr": 0.044619604333847394, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394},
-        "harness|ko_mmlu_high_school_geography|5": {"acc": 0.24242424242424243, "acc_stderr": 0.030532892233932036, "acc_norm": 0.24242424242424243, "acc_norm_stderr": 0.030532892233932036},
-        "harness|ko_mmlu_electrical_engineering|5": {"acc": 0.3448275862068966, "acc_stderr": 0.03960933549451207, "acc_norm": 0.3448275862068966, "acc_norm_stderr": 0.03960933549451207},
-        "harness|ko_mmlu_college_physics|5": {"acc": 0.21568627450980393, "acc_stderr": 0.04092563958237655, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.04092563958237655},
-        "harness|ko_mmlu_high_school_microeconomics|5": {"acc": 0.23529411764705882, "acc_stderr": 0.02755361446786382, "acc_norm": 0.23529411764705882, "acc_norm_stderr": 0.02755361446786382},
-        "harness|ko_mmlu_high_school_macroeconomics|5": {"acc": 0.2076923076923077, "acc_stderr": 0.0205675395672468, "acc_norm": 0.2076923076923077, "acc_norm_stderr": 0.0205675395672468},
-        "harness|ko_mmlu_computer_security|5": {"acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099},
-        "harness|ko_mmlu_global_facts|5": {"acc": 0.21, "acc_stderr": 0.04093601807403326, "acc_norm": 0.21, "acc_norm_stderr": 0.04093601807403326},
-        "harness|ko_mmlu_jurisprudence|5": {"acc": 0.3888888888888889, "acc_stderr": 0.0471282125742677, "acc_norm": 0.3888888888888889, "acc_norm_stderr": 0.0471282125742677},
-        "harness|ko_mmlu_high_school_chemistry|5": {"acc": 0.32019704433497537, "acc_stderr": 0.032826493853041504, "acc_norm": 0.32019704433497537, "acc_norm_stderr": 0.032826493853041504},
-        "harness|ko_mmlu_high_school_biology|5": {"acc": 0.2645161290322581, "acc_stderr": 0.025091892378859275, "acc_norm": 0.2645161290322581, "acc_norm_stderr": 0.025091892378859275},
-        "harness|ko_mmlu_marketing|5": {"acc": 0.358974358974359, "acc_stderr": 0.03142616993791925, "acc_norm": 0.358974358974359, "acc_norm_stderr": 0.03142616993791925},
-        "harness|ko_mmlu_clinical_knowledge|5": {"acc": 0.27169811320754716, "acc_stderr": 0.027377706624670713, "acc_norm": 0.27169811320754716, "acc_norm_stderr": 0.027377706624670713},
-        "harness|ko_mmlu_public_relations|5": {"acc": 0.32727272727272727, "acc_stderr": 0.04494290866252088, "acc_norm": 0.32727272727272727, "acc_norm_stderr": 0.04494290866252088},
-        "harness|ko_mmlu_high_school_mathematics|5": {"acc": 0.26296296296296295, "acc_stderr": 0.026842057873833706, "acc_norm": 0.26296296296296295, "acc_norm_stderr": 0.026842057873833706},
-        "harness|ko_mmlu_high_school_physics|5": {"acc": 0.26490066225165565, "acc_stderr": 0.03603038545360384, "acc_norm": 0.26490066225165565, "acc_norm_stderr": 0.03603038545360384},
-        "harness|ko_mmlu_sociology|5": {"acc": 0.373134328358209, "acc_stderr": 0.03419832608176007, "acc_norm": 0.373134328358209, "acc_norm_stderr": 0.03419832608176007},
-        "harness|ko_mmlu_college_medicine|5": {"acc": 0.2658959537572254, "acc_stderr": 0.033687629322594316, "acc_norm": 0.2658959537572254, "acc_norm_stderr": 0.033687629322594316},
-        "harness|ko_mmlu_elementary_mathematics|5": {"acc": 0.2671957671957672, "acc_stderr": 0.02278967314577657, "acc_norm": 0.2671957671957672, "acc_norm_stderr": 0.02278967314577657},
-        "harness|ko_mmlu_college_biology|5": {"acc": 0.2222222222222222, "acc_stderr": 0.03476590104304134, "acc_norm": 0.2222222222222222, "acc_norm_stderr": 0.03476590104304134},
-        "harness|ko_mmlu_college_chemistry|5": {"acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256},
-        "harness|ko_mmlu_us_foreign_policy|5": {"acc": 0.36, "acc_stderr": 0.048241815132442176, "acc_norm": 0.36, "acc_norm_stderr": 0.048241815132442176},
-        "harness|ko_mmlu_moral_disputes|5": {"acc": 0.2976878612716763, "acc_stderr": 0.024617055388676992, "acc_norm": 0.2976878612716763, "acc_norm_stderr": 0.024617055388676992},
-        "harness|ko_mmlu_logical_fallacies|5": {"acc": 0.25153374233128833, "acc_stderr": 0.03408997886857529, "acc_norm": 0.25153374233128833, "acc_norm_stderr": 0.03408997886857529},
-        "harness|ko_mmlu_prehistory|5": {"acc": 0.30246913580246915, "acc_stderr": 0.025557653981868052, "acc_norm": 0.30246913580246915, "acc_norm_stderr": 0.025557653981868052},
-        "harness|ko_mmlu_college_mathematics|5": {"acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316},
-        "harness|ko_mmlu_high_school_government_and_politics|5": {"acc": 0.2694300518134715, "acc_stderr": 0.03201867122877795, "acc_norm": 0.2694300518134715, "acc_norm_stderr": 0.03201867122877795},
-        "harness|ko_mmlu_econometrics|5": {"acc": 0.2719298245614035, "acc_stderr": 0.04185774424022056, "acc_norm": 0.2719298245614035, "acc_norm_stderr": 0.04185774424022056},
-        "harness|ko_mmlu_high_school_psychology|5": {"acc": 0.26238532110091745, "acc_stderr": 0.018861885021534738, "acc_norm": 0.26238532110091745, "acc_norm_stderr": 0.018861885021534738},
-        "harness|ko_mmlu_formal_logic|5": {"acc": 0.1349206349206349, "acc_stderr": 0.030557101589417515, "acc_norm": 0.1349206349206349, "acc_norm_stderr": 0.030557101589417515},
-        "harness|ko_mmlu_nutrition|5": {"acc": 0.25163398692810457, "acc_stderr": 0.0248480182638752, "acc_norm": 0.25163398692810457, "acc_norm_stderr": 0.0248480182638752},
-        "harness|ko_mmlu_business_ethics|5": {"acc": 0.26, "acc_stderr": 0.04408440022768079, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768079},
-        "harness|ko_mmlu_international_law|5": {"acc": 0.4049586776859504, "acc_stderr": 0.04481137755942469, "acc_norm": 0.4049586776859504, "acc_norm_stderr": 0.04481137755942469},
-        "harness|ko_mmlu_astronomy|5": {"acc": 0.2894736842105263, "acc_stderr": 0.03690677986137283, "acc_norm": 0.2894736842105263, "acc_norm_stderr": 0.03690677986137283},
-        "harness|ko_mmlu_professional_psychology|5": {"acc": 0.28921568627450983, "acc_stderr": 0.018342529845275908, "acc_norm": 0.28921568627450983, "acc_norm_stderr": 0.018342529845275908},
-        "harness|ko_mmlu_professional_accounting|5": {"acc": 0.2695035460992908, "acc_stderr": 0.026469036818590634, "acc_norm": 0.2695035460992908, "acc_norm_stderr": 0.026469036818590634},
-        "harness|ko_mmlu_machine_learning|5": {"acc": 0.24107142857142858, "acc_stderr": 0.04059867246952689, "acc_norm": 0.24107142857142858, "acc_norm_stderr": 0.04059867246952689},
-        "harness|ko_mmlu_high_school_statistics|5": {"acc": 0.3287037037037037, "acc_stderr": 0.03203614084670058, "acc_norm": 0.3287037037037037, "acc_norm_stderr": 0.03203614084670058},
-        "harness|ko_mmlu_moral_scenarios|5": {"acc": 0.24134078212290502, "acc_stderr": 0.014310999547961441, "acc_norm": 0.24134078212290502, "acc_norm_stderr": 0.014310999547961441},
-        "harness|ko_mmlu_college_computer_science|5": {"acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316},
-        "harness|ko_mmlu_high_school_computer_science|5": {"acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316},
-        "harness|ko_mmlu_professional_medicine|5": {"acc": 0.23529411764705882, "acc_stderr": 0.025767252010855963, "acc_norm": 0.23529411764705882, "acc_norm_stderr": 0.025767252010855963},
-        "harness|ko_mmlu_security_studies|5": {"acc": 0.22857142857142856, "acc_stderr": 0.026882144922307744, "acc_norm": 0.22857142857142856, "acc_norm_stderr": 0.026882144922307744},
-        "harness|ko_mmlu_high_school_world_history|5": {"acc": 0.3291139240506329, "acc_stderr": 0.03058732629470236, "acc_norm": 0.3291139240506329, "acc_norm_stderr": 0.03058732629470236},
-        "harness|ko_mmlu_professional_law|5": {"acc": 0.27835723598435463, "acc_stderr": 0.011446990197380989, "acc_norm": 0.27835723598435463, "acc_norm_stderr": 0.011446990197380989},
-        "harness|ko_mmlu_high_school_us_history|5": {"acc": 0.2549019607843137, "acc_stderr": 0.030587591351604246, "acc_norm": 0.2549019607843137, "acc_norm_stderr": 0.030587591351604246},
-        "harness|ko_mmlu_high_school_european_history|5": {"acc": 0.2545454545454545, "acc_stderr": 0.0340150671524904, "acc_norm": 0.2545454545454545, "acc_norm_stderr": 0.0340150671524904},
-        "harness|ko_truthfulqa_mc|0": {"mc1": 0.2484700122399021, "mc1_stderr": 0.0151274270965207, "mc2": 0.3908977745790188, "mc2_stderr": 0.014711493002685353},
-        "harness|ko_commongen_v2|2": {"acc": 0.3305785123966942, "acc_stderr": 0.0161734232988457, "acc_norm": 0.4604486422668241, "acc_norm_stderr": 0.01713648762604985}
-    },
-    "versions": {
-        "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0,
-        "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1,
-        "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1,
-        "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1,
-        "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1,
-        "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1,
-        "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1,
-        "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1,
-        "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1,
-        "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1,
-        "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1,
-        "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1,
-        "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1,
-        "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1,
-        "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1,
-        "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1,
-        "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1,
-        "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1
-    },
-    "config_general": {
-        "model_name": "The-matt/llama2_ko-7b_distinctive-snowflake-182_1060", "model_sha": "090368cb655024491c0c4dad13f8ac9a8e7d31cc",
-        "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0,
-        "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null
-    }
-}
The-matt/llama2_ko-7b_sandy-fire-170_1530/result_2023-11-13 07:13:52.json
DELETED
@@ -1,444 +0,0 @@
-{
-    "results": {
-        "harness|ko_arc_challenge|25": {
-            "acc": 0.3165529010238908,
-            "acc_stderr": 0.013592431519068084,
-            "acc_norm": 0.3728668941979522,
-            "acc_norm_stderr": 0.014131176760131165
-        },
-        "harness|ko_hellaswag|10": {
-            "acc": 0.37532364070902213,
-            "acc_stderr": 0.004832167854501651,
-            "acc_norm": 0.48994224258115915,
-            "acc_norm_stderr": 0.004988771791854509
-        },
-        "harness|ko_mmlu_world_religions|5": {
-            "acc": 0.25146198830409355,
-            "acc_stderr": 0.033275044238468436,
-            "acc_norm": 0.25146198830409355,
-            "acc_norm_stderr": 0.033275044238468436
-        },
-        "harness|ko_mmlu_management|5": {
-            "acc": 0.2524271844660194,
-            "acc_stderr": 0.04301250399690878,
-            "acc_norm": 0.2524271844660194,
-            "acc_norm_stderr": 0.04301250399690878
-        },
-        "harness|ko_mmlu_miscellaneous|5": {
-            "acc": 0.3333333333333333,
-            "acc_stderr": 0.01685739124747255,
-            "acc_norm": 0.3333333333333333,
-            "acc_norm_stderr": 0.01685739124747255
-        },
-        "harness|ko_mmlu_anatomy|5": {
-            "acc": 0.2962962962962963,
-            "acc_stderr": 0.03944624162501116,
-            "acc_norm": 0.2962962962962963,
-            "acc_norm_stderr": 0.03944624162501116
-        },
-        "harness|ko_mmlu_abstract_algebra|5": {
-            "acc": 0.26,
-            "acc_stderr": 0.04408440022768077,
-            "acc_norm": 0.26,
-            "acc_norm_stderr": 0.04408440022768077
-        },
-        "harness|ko_mmlu_conceptual_physics|5": {
-            "acc": 0.31063829787234043,
-            "acc_stderr": 0.03025123757921317,
-            "acc_norm": 0.31063829787234043,
-            "acc_norm_stderr": 0.03025123757921317
-        },
-        "harness|ko_mmlu_virology|5": {
-            "acc": 0.2891566265060241,
-            "acc_stderr": 0.03529486801511115,
-            "acc_norm": 0.2891566265060241,
-            "acc_norm_stderr": 0.03529486801511115
-        },
-        "harness|ko_mmlu_philosophy|5": {
-            "acc": 0.3729903536977492,
-            "acc_stderr": 0.02746661021314012,
-            "acc_norm": 0.3729903536977492,
-            "acc_norm_stderr": 0.02746661021314012
-        },
-        "harness|ko_mmlu_human_aging|5": {
-            "acc": 0.273542600896861,
-            "acc_stderr": 0.029918586707798834,
-            "acc_norm": 0.273542600896861,
-            "acc_norm_stderr": 0.029918586707798834
-        },
-        "harness|ko_mmlu_human_sexuality|5": {
-            "acc": 0.3435114503816794,
-            "acc_stderr": 0.041649760719448786,
-            "acc_norm": 0.3435114503816794,
-            "acc_norm_stderr": 0.041649760719448786
-        },
-        "harness|ko_mmlu_medical_genetics|5": {
-            "acc": 0.27,
-            "acc_stderr": 0.044619604333847394,
-            "acc_norm": 0.27,
-            "acc_norm_stderr": 0.044619604333847394
-        },
-        "harness|ko_mmlu_high_school_geography|5": {
-            "acc": 0.3484848484848485,
-            "acc_stderr": 0.033948539651564025,
-            "acc_norm": 0.3484848484848485,
-            "acc_norm_stderr": 0.033948539651564025
-        },
-        "harness|ko_mmlu_electrical_engineering|5": {
-            "acc": 0.2896551724137931,
-            "acc_stderr": 0.03780019230438014,
-            "acc_norm": 0.2896551724137931,
-            "acc_norm_stderr": 0.03780019230438014
-        },
-        "harness|ko_mmlu_college_physics|5": {
-            "acc": 0.13725490196078433,
-            "acc_stderr": 0.0342408466989152,
-            "acc_norm": 0.13725490196078433,
-            "acc_norm_stderr": 0.0342408466989152
-        },
-        "harness|ko_mmlu_high_school_microeconomics|5": {
-            "acc": 0.24789915966386555,
-            "acc_stderr": 0.028047967224176892,
-            "acc_norm": 0.24789915966386555,
-            "acc_norm_stderr": 0.028047967224176892
-        },
-        "harness|ko_mmlu_high_school_macroeconomics|5": {
-            "acc": 0.21794871794871795,
-            "acc_stderr": 0.020932445774463182,
-            "acc_norm": 0.21794871794871795,
-            "acc_norm_stderr": 0.020932445774463182
-        },
-        "harness|ko_mmlu_computer_security|5": {
-            "acc": 0.31,
-            "acc_stderr": 0.04648231987117316,
-            "acc_norm": 0.31,
-            "acc_norm_stderr": 0.04648231987117316
-        },
-        "harness|ko_mmlu_global_facts|5": {
-            "acc": 0.35,
-            "acc_stderr": 0.0479372485441102,
-            "acc_norm": 0.35,
-            "acc_norm_stderr": 0.0479372485441102
-        },
-        "harness|ko_mmlu_jurisprudence|5": {
-            "acc": 0.25,
-            "acc_stderr": 0.04186091791394607,
-            "acc_norm": 0.25,
-            "acc_norm_stderr": 0.04186091791394607
-        },
-        "harness|ko_mmlu_high_school_chemistry|5": {
-            "acc": 0.3103448275862069,
-            "acc_stderr": 0.03255086769970103,
-            "acc_norm": 0.3103448275862069,
-            "acc_norm_stderr": 0.03255086769970103
-        },
-        "harness|ko_mmlu_high_school_biology|5": {
-            "acc": 0.2967741935483871,
-            "acc_stderr": 0.02598850079241188,
-            "acc_norm": 0.2967741935483871,
-            "acc_norm_stderr": 0.02598850079241188
-        },
-        "harness|ko_mmlu_marketing|5": {
-            "acc": 0.3247863247863248,
-            "acc_stderr": 0.03067902276549883,
-            "acc_norm": 0.3247863247863248,
-            "acc_norm_stderr": 0.03067902276549883
-        },
-        "harness|ko_mmlu_clinical_knowledge|5": {
-            "acc": 0.35471698113207545,
-            "acc_stderr": 0.029445175328199593,
-            "acc_norm": 0.35471698113207545,
-            "acc_norm_stderr": 0.029445175328199593
-        },
-        "harness|ko_mmlu_public_relations|5": {
-            "acc": 0.33636363636363636,
-            "acc_stderr": 0.04525393596302505,
-            "acc_norm": 0.33636363636363636,
-            "acc_norm_stderr": 0.04525393596302505
-        },
-        "harness|ko_mmlu_high_school_mathematics|5": {
-            "acc": 0.2777777777777778,
-            "acc_stderr": 0.02730914058823018,
-            "acc_norm": 0.2777777777777778,
-            "acc_norm_stderr": 0.02730914058823018
-        },
-        "harness|ko_mmlu_high_school_physics|5": {
-            "acc": 0.2847682119205298,
-            "acc_stderr": 0.03684881521389024,
-            "acc_norm": 0.2847682119205298,
-            "acc_norm_stderr": 0.03684881521389024
-        },
-        "harness|ko_mmlu_sociology|5": {
-            "acc": 0.25870646766169153,
-            "acc_stderr": 0.030965903123573033,
-            "acc_norm": 0.25870646766169153,
-            "acc_norm_stderr": 0.030965903123573033
-        },
-        "harness|ko_mmlu_college_medicine|5": {
-            "acc": 0.24855491329479767,
-            "acc_stderr": 0.03295304696818318,
-            "acc_norm": 0.24855491329479767,
-            "acc_norm_stderr": 0.03295304696818318
-        },
-        "harness|ko_mmlu_elementary_mathematics|5": {
-            "acc": 0.25132275132275134,
-            "acc_stderr": 0.022340482339643898,
-            "acc_norm": 0.25132275132275134,
-            "acc_norm_stderr": 0.022340482339643898
-        },
-        "harness|ko_mmlu_college_biology|5": {
-            "acc": 0.1875,
-            "acc_stderr": 0.032639560491693344,
-            "acc_norm": 0.1875,
-            "acc_norm_stderr": 0.032639560491693344
-        },
-        "harness|ko_mmlu_college_chemistry|5": {
-            "acc": 0.22,
-            "acc_stderr": 0.0416333199893227,
-            "acc_norm": 0.22,
-            "acc_norm_stderr": 0.0416333199893227
-        },
-        "harness|ko_mmlu_us_foreign_policy|5": {
-            "acc": 0.36,
-            "acc_stderr": 0.04824181513244218,
-            "acc_norm": 0.36,
-            "acc_norm_stderr": 0.04824181513244218
-        },
-        "harness|ko_mmlu_moral_disputes|5": {
-            "acc": 0.28901734104046245,
-            "acc_stderr": 0.02440517393578324,
-            "acc_norm": 0.28901734104046245,
-            "acc_norm_stderr": 0.02440517393578324
-        },
-        "harness|ko_mmlu_logical_fallacies|5": {
-            "acc": 0.26380368098159507,
-            "acc_stderr": 0.034624199316156234,
-            "acc_norm": 0.26380368098159507,
-            "acc_norm_stderr": 0.034624199316156234
-        },
-        "harness|ko_mmlu_prehistory|5": {
-            "acc": 0.31790123456790126,
-            "acc_stderr": 0.025910063528240865,
-            "acc_norm": 0.31790123456790126,
-            "acc_norm_stderr": 0.025910063528240865
-        },
-        "harness|ko_mmlu_college_mathematics|5": {
-            "acc": 0.26,
-            "acc_stderr": 0.04408440022768078,
-            "acc_norm": 0.26,
-            "acc_norm_stderr": 0.04408440022768078
-        },
-        "harness|ko_mmlu_high_school_government_and_politics|5": {
-            "acc": 0.2694300518134715,
-            "acc_stderr": 0.03201867122877793,
-            "acc_norm": 0.2694300518134715,
-            "acc_norm_stderr": 0.03201867122877793
-        },
-        "harness|ko_mmlu_econometrics|5": {
-            "acc": 0.2807017543859649,
-            "acc_stderr": 0.04227054451232199,
-            "acc_norm": 0.2807017543859649,
-            "acc_norm_stderr": 0.04227054451232199
-        },
-        "harness|ko_mmlu_high_school_psychology|5": {
-            "acc": 0.3100917431192661,
-            "acc_stderr": 0.01983084968443975,
-            "acc_norm": 0.3100917431192661,
-            "acc_norm_stderr": 0.01983084968443975
-        },
-        "harness|ko_mmlu_formal_logic|5": {
-            "acc": 0.1984126984126984,
-            "acc_stderr": 0.03567016675276863,
-            "acc_norm": 0.1984126984126984,
-            "acc_norm_stderr": 0.03567016675276863
-        },
-        "harness|ko_mmlu_nutrition|5": {
-            "acc": 0.32679738562091504,
-            "acc_stderr": 0.026857294663281413,
-            "acc_norm": 0.32679738562091504,
-            "acc_norm_stderr": 0.026857294663281413
-        },
-        "harness|ko_mmlu_business_ethics|5": {
-            "acc": 0.26,
-            "acc_stderr": 0.0440844002276808,
-            "acc_norm": 0.26,
-            "acc_norm_stderr": 0.0440844002276808
-        },
-        "harness|ko_mmlu_international_law|5": {
-            "acc": 0.4214876033057851,
-            "acc_stderr": 0.045077322787750944,
-            "acc_norm": 0.4214876033057851,
-            "acc_norm_stderr": 0.045077322787750944
-        },
-        "harness|ko_mmlu_astronomy|5": {
-            "acc": 0.28289473684210525,
-            "acc_stderr": 0.03665349695640767,
-            "acc_norm": 0.28289473684210525,
-            "acc_norm_stderr": 0.03665349695640767
-        },
-        "harness|ko_mmlu_professional_psychology|5": {
-            "acc": 0.31209150326797386,
-            "acc_stderr": 0.01874501120127766,
-            "acc_norm": 0.31209150326797386,
-            "acc_norm_stderr": 0.01874501120127766
-        },
-        "harness|ko_mmlu_professional_accounting|5": {
-            "acc": 0.23404255319148937,
-            "acc_stderr": 0.025257861359432403,
-            "acc_norm": 0.23404255319148937,
-            "acc_norm_stderr": 0.025257861359432403
-        },
-        "harness|ko_mmlu_machine_learning|5": {
-            "acc": 0.33035714285714285,
-            "acc_stderr": 0.04464285714285712,
-            "acc_norm": 0.33035714285714285,
-            "acc_norm_stderr": 0.04464285714285712
-        },
-        "harness|ko_mmlu_high_school_statistics|5": {
-            "acc": 0.26851851851851855,
-            "acc_stderr": 0.030225226160012376,
-            "acc_norm": 0.26851851851851855,
-            "acc_norm_stderr": 0.030225226160012376
-        },
-        "harness|ko_mmlu_moral_scenarios|5": {
-            "acc": 0.2424581005586592,
-            "acc_stderr": 0.01433352205921789,
-            "acc_norm": 0.2424581005586592,
-            "acc_norm_stderr": 0.01433352205921789
-        },
-        "harness|ko_mmlu_college_computer_science|5": {
-            "acc": 0.25,
-            "acc_stderr": 0.04351941398892446,
-            "acc_norm": 0.25,
-            "acc_norm_stderr": 0.04351941398892446
-        },
-        "harness|ko_mmlu_high_school_computer_science|5": {
-            "acc": 0.24,
-            "acc_stderr": 0.042923469599092816,
-            "acc_norm": 0.24,
-            "acc_norm_stderr": 0.042923469599092816
-        },
-        "harness|ko_mmlu_professional_medicine|5": {
-            "acc": 0.35661764705882354,
-            "acc_stderr": 0.02909720956841195,
-            "acc_norm": 0.35661764705882354,
-            "acc_norm_stderr": 0.02909720956841195
-        },
-        "harness|ko_mmlu_security_studies|5": {
-            "acc": 0.24489795918367346,
-            "acc_stderr": 0.027529637440174934,
-            "acc_norm": 0.24489795918367346,
-            "acc_norm_stderr": 0.027529637440174934
-        },
-        "harness|ko_mmlu_high_school_world_history|5": {
-            "acc": 0.29535864978902954,
-            "acc_stderr": 0.029696338713422882,
-            "acc_norm": 0.29535864978902954,
-            "acc_norm_stderr": 0.029696338713422882
-        },
-        "harness|ko_mmlu_professional_law|5": {
-            "acc": 0.2653194263363755,
-            "acc_stderr": 0.011276198843958876,
-            "acc_norm": 0.2653194263363755,
-            "acc_norm_stderr": 0.011276198843958876
-        },
-        "harness|ko_mmlu_high_school_us_history|5": {
-            "acc": 0.25,
-            "acc_stderr": 0.03039153369274154,
-            "acc_norm": 0.25,
-            "acc_norm_stderr": 0.03039153369274154
-        },
-        "harness|ko_mmlu_high_school_european_history|5": {
-            "acc": 0.28484848484848485,
-            "acc_stderr": 0.03524390844511785,
-            "acc_norm": 0.28484848484848485,
-            "acc_norm_stderr": 0.03524390844511785
-        },
-        "harness|ko_truthfulqa_mc|0": {
-            "mc1": 0.25458996328029376,
-            "mc1_stderr": 0.015250117079156475,
-            "mc2": 0.3966274374680779,
-            "mc2_stderr": 0.014846518193358589
-        },
-        "harness|ko_commongen_v2|2": {
-            "acc": 0.3695395513577332,
-            "acc_stderr": 0.01659488340568542,
-            "acc_norm": 0.51357733175915,
-            "acc_norm_stderr": 0.01718401506040145
-        }
-    },
-    "versions": {
-        "all": 0,
-        "harness|ko_arc_challenge|25": 0,
-        "harness|ko_hellaswag|10": 0,
-        "harness|ko_mmlu_world_religions|5": 1,
-        "harness|ko_mmlu_management|5": 1,
-        "harness|ko_mmlu_miscellaneous|5": 1,
-        "harness|ko_mmlu_anatomy|5": 1,
-        "harness|ko_mmlu_abstract_algebra|5": 1,
-        "harness|ko_mmlu_conceptual_physics|5": 1,
-        "harness|ko_mmlu_virology|5": 1,
-        "harness|ko_mmlu_philosophy|5": 1,
-        "harness|ko_mmlu_human_aging|5": 1,
-        "harness|ko_mmlu_human_sexuality|5": 1,
-        "harness|ko_mmlu_medical_genetics|5": 1,
-        "harness|ko_mmlu_high_school_geography|5": 1,
-        "harness|ko_mmlu_electrical_engineering|5": 1,
-        "harness|ko_mmlu_college_physics|5": 1,
-        "harness|ko_mmlu_high_school_microeconomics|5": 1,
-        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
-        "harness|ko_mmlu_computer_security|5": 1,
-        "harness|ko_mmlu_global_facts|5": 1,
-        "harness|ko_mmlu_jurisprudence|5": 1,
-        "harness|ko_mmlu_high_school_chemistry|5": 1,
-        "harness|ko_mmlu_high_school_biology|5": 1,
-        "harness|ko_mmlu_marketing|5": 1,
-        "harness|ko_mmlu_clinical_knowledge|5": 1,
-        "harness|ko_mmlu_public_relations|5": 1,
-        "harness|ko_mmlu_high_school_mathematics|5": 1,
-        "harness|ko_mmlu_high_school_physics|5": 1,
-        "harness|ko_mmlu_sociology|5": 1,
-        "harness|ko_mmlu_college_medicine|5": 1,
-        "harness|ko_mmlu_elementary_mathematics|5": 1,
-        "harness|ko_mmlu_college_biology|5": 1,
-        "harness|ko_mmlu_college_chemistry|5": 1,
-        "harness|ko_mmlu_us_foreign_policy|5": 1,
-        "harness|ko_mmlu_moral_disputes|5": 1,
-        "harness|ko_mmlu_logical_fallacies|5": 1,
-        "harness|ko_mmlu_prehistory|5": 1,
-        "harness|ko_mmlu_college_mathematics|5": 1,
-        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
-        "harness|ko_mmlu_econometrics|5": 1,
-        "harness|ko_mmlu_high_school_psychology|5": 1,
-        "harness|ko_mmlu_formal_logic|5": 1,
-        "harness|ko_mmlu_nutrition|5": 1,
-        "harness|ko_mmlu_business_ethics|5": 1,
-        "harness|ko_mmlu_international_law|5": 1,
-        "harness|ko_mmlu_astronomy|5": 1,
-        "harness|ko_mmlu_professional_psychology|5": 1,
-        "harness|ko_mmlu_professional_accounting|5": 1,
-        "harness|ko_mmlu_machine_learning|5": 1,
-        "harness|ko_mmlu_high_school_statistics|5": 1,
-        "harness|ko_mmlu_moral_scenarios|5": 1,
-        "harness|ko_mmlu_college_computer_science|5": 1,
-        "harness|ko_mmlu_high_school_computer_science|5": 1,
-        "harness|ko_mmlu_professional_medicine|5": 1,
-        "harness|ko_mmlu_security_studies|5": 1,
-        "harness|ko_mmlu_high_school_world_history|5": 1,
-        "harness|ko_mmlu_professional_law|5": 1,
-        "harness|ko_mmlu_high_school_us_history|5": 1,
-        "harness|ko_mmlu_high_school_european_history|5": 1,
-        "harness|ko_truthfulqa_mc|0": 0,
-        "harness|ko_commongen_v2|2": 1
-    },
-    "config_general": {
-        "model_name": "The-matt/llama2_ko-7b_sandy-fire-170_1530",
-        "model_sha": "b963fcf8d7249c3f360ccfa5db70c0b20bddeb08",
-        "model_dtype": "torch.float16",
-        "lighteval_sha": "",
-        "num_few_shot_default": 0,
-        "num_fewshot_seeds": 1,
-        "override_batch_size": 1,
-        "max_samples": null
-    }
-}
The-matt/llama2_ko-7b_stilted-lion-205_1530/result_2023-11-23 01:33:10.json
DELETED
@@ -1,444 +0,0 @@
-{
-    "results": {
-        "harness|ko_arc_challenge|25": {
-            "acc": 0.31569965870307165,
-            "acc_stderr": 0.013582571095815291,
-            "acc_norm": 0.36945392491467577,
-            "acc_norm_stderr": 0.014104578366491902
-        },
-        "harness|ko_hellaswag|10": {
-            "acc": 0.3768173670583549,
-            "acc_stderr": 0.004835981632401594,
-            "acc_norm": 0.4987054371639116,
-            "acc_norm_stderr": 0.004989764686738838
-        },
-        "harness|ko_mmlu_world_religions|5": {
-            "acc": 0.2807017543859649,
-            "acc_stderr": 0.034462962170884265,
-            "acc_norm": 0.2807017543859649,
-            "acc_norm_stderr": 0.034462962170884265
-        },
-        "harness|ko_mmlu_management|5": {
-            "acc": 0.27184466019417475,
-            "acc_stderr": 0.044052680241409216,
-            "acc_norm": 0.27184466019417475,
-            "acc_norm_stderr": 0.044052680241409216
-        },
-        "harness|ko_mmlu_miscellaneous|5": {
-            "acc": 0.3128991060025543,
-            "acc_stderr": 0.016580935940304055,
-            "acc_norm": 0.3128991060025543,
-            "acc_norm_stderr": 0.016580935940304055
-        },
-        "harness|ko_mmlu_anatomy|5": {
-            "acc": 0.3037037037037037,
-            "acc_stderr": 0.039725528847851375,
-            "acc_norm": 0.3037037037037037,
-            "acc_norm_stderr": 0.039725528847851375
-        },
-        "harness|ko_mmlu_abstract_algebra|5": {
-            "acc": 0.27,
-            "acc_stderr": 0.0446196043338474,
-            "acc_norm": 0.27,
-            "acc_norm_stderr": 0.0446196043338474
-        },
-        "harness|ko_mmlu_conceptual_physics|5": {
-            "acc": 0.2425531914893617,
-            "acc_stderr": 0.02802022627120022,
-            "acc_norm": 0.2425531914893617,
-            "acc_norm_stderr": 0.02802022627120022
-        },
-        "harness|ko_mmlu_virology|5": {
-            "acc": 0.35542168674698793,
-            "acc_stderr": 0.03726214354322415,
-            "acc_norm": 0.35542168674698793,
-            "acc_norm_stderr": 0.03726214354322415
-        },
-        "harness|ko_mmlu_philosophy|5": {
-            "acc": 0.3247588424437299,
-            "acc_stderr": 0.026596782287697046,
-            "acc_norm": 0.3247588424437299,
-            "acc_norm_stderr": 0.026596782287697046
-        },
-        "harness|ko_mmlu_human_aging|5": {
-            "acc": 0.2914798206278027,
-            "acc_stderr": 0.030500283176545906,
-            "acc_norm": 0.2914798206278027,
-            "acc_norm_stderr": 0.030500283176545906
-        },
-        "harness|ko_mmlu_human_sexuality|5": {
-            "acc": 0.3816793893129771,
-            "acc_stderr": 0.0426073515764456,
-            "acc_norm": 0.3816793893129771,
-            "acc_norm_stderr": 0.0426073515764456
-        },
-        "harness|ko_mmlu_medical_genetics|5": {
-            "acc": 0.25,
-            "acc_stderr": 0.04351941398892446,
-            "acc_norm": 0.25,
-            "acc_norm_stderr": 0.04351941398892446
-        },
-        "harness|ko_mmlu_high_school_geography|5": {
-            "acc": 0.3838383838383838,
-            "acc_stderr": 0.03464881675016338,
-            "acc_norm": 0.3838383838383838,
-            "acc_norm_stderr": 0.03464881675016338
-        },
-        "harness|ko_mmlu_electrical_engineering|5": {
-            "acc": 0.32413793103448274,
-            "acc_stderr": 0.03900432069185555,
-            "acc_norm": 0.32413793103448274,
-            "acc_norm_stderr": 0.03900432069185555
-        },
-        "harness|ko_mmlu_college_physics|5": {
-            "acc": 0.17647058823529413,
-            "acc_stderr": 0.03793281185307811,
-            "acc_norm": 0.17647058823529413,
-            "acc_norm_stderr": 0.03793281185307811
-        },
-        "harness|ko_mmlu_high_school_microeconomics|5": {
-            "acc": 0.2605042016806723,
-            "acc_stderr": 0.028510251512341947,
-            "acc_norm": 0.2605042016806723,
-            "acc_norm_stderr": 0.028510251512341947
-        },
-        "harness|ko_mmlu_high_school_macroeconomics|5": {
-            "acc": 0.22564102564102564,
-            "acc_stderr": 0.021193632525148522,
-            "acc_norm": 0.22564102564102564,
-            "acc_norm_stderr": 0.021193632525148522
-        },
-        "harness|ko_mmlu_computer_security|5": {
-            "acc": 0.32,
-            "acc_stderr": 0.04688261722621505,
-            "acc_norm": 0.32,
-            "acc_norm_stderr": 0.04688261722621505
-        },
-        "harness|ko_mmlu_global_facts|5": {
-            "acc": 0.3,
-            "acc_stderr": 0.046056618647183814,
-            "acc_norm": 0.3,
-            "acc_norm_stderr": 0.046056618647183814
-        },
-        "harness|ko_mmlu_jurisprudence|5": {
-            "acc": 0.25,
-            "acc_stderr": 0.04186091791394607,
-            "acc_norm": 0.25,
-            "acc_norm_stderr": 0.04186091791394607
-        },
-        "harness|ko_mmlu_high_school_chemistry|5": {
-            "acc": 0.3251231527093596,
-            "acc_stderr": 0.032957975663112704,
-            "acc_norm": 0.3251231527093596,
-            "acc_norm_stderr": 0.032957975663112704
-        },
-        "harness|ko_mmlu_high_school_biology|5": {
-            "acc": 0.3064516129032258,
-            "acc_stderr": 0.02622648565255388,
-            "acc_norm": 0.3064516129032258,
-            "acc_norm_stderr": 0.02622648565255388
-        },
-        "harness|ko_mmlu_marketing|5": {
-            "acc": 0.3418803418803419,
-            "acc_stderr": 0.031075028526507755,
-            "acc_norm": 0.3418803418803419,
-            "acc_norm_stderr": 0.031075028526507755
-        },
-        "harness|ko_mmlu_clinical_knowledge|5": {
-            "acc": 0.3320754716981132,
-            "acc_stderr": 0.028985455652334395,
-            "acc_norm": 0.3320754716981132,
-            "acc_norm_stderr": 0.028985455652334395
-        },
-        "harness|ko_mmlu_public_relations|5": {
-            "acc": 0.24545454545454545,
-            "acc_stderr": 0.04122066502878284,
-            "acc_norm": 0.24545454545454545,
-            "acc_norm_stderr": 0.04122066502878284
-        },
-        "harness|ko_mmlu_high_school_mathematics|5": {
-            "acc": 0.25925925925925924,
-            "acc_stderr": 0.026719240783712166,
-            "acc_norm": 0.25925925925925924,
-            "acc_norm_stderr": 0.026719240783712166
-        },
-        "harness|ko_mmlu_high_school_physics|5": {
-            "acc": 0.2980132450331126,
-            "acc_stderr": 0.037345356767871984,
-            "acc_norm": 0.2980132450331126,
-            "acc_norm_stderr": 0.037345356767871984
-        },
-        "harness|ko_mmlu_sociology|5": {
-            "acc": 0.36318407960199006,
-            "acc_stderr": 0.034005985055990146,
-            "acc_norm": 0.36318407960199006,
-            "acc_norm_stderr": 0.034005985055990146
-        },
-        "harness|ko_mmlu_college_medicine|5": {
-            "acc": 0.26011560693641617,
-            "acc_stderr": 0.03345036916788989,
-            "acc_norm": 0.26011560693641617,
-            "acc_norm_stderr": 0.03345036916788989
-        },
-        "harness|ko_mmlu_elementary_mathematics|5": {
-            "acc": 0.25396825396825395,
-            "acc_stderr": 0.022418042891113946,
-            "acc_norm": 0.25396825396825395,
-            "acc_norm_stderr": 0.022418042891113946
-        },
-        "harness|ko_mmlu_college_biology|5": {
-            "acc": 0.2569444444444444,
-            "acc_stderr": 0.03653946969442099,
-            "acc_norm": 0.2569444444444444,
-            "acc_norm_stderr": 0.03653946969442099
-        },
-        "harness|ko_mmlu_college_chemistry|5": {
-            "acc": 0.37,
-            "acc_stderr": 0.04852365870939099,
-            "acc_norm": 0.37,
-            "acc_norm_stderr": 0.04852365870939099
-        },
-        "harness|ko_mmlu_us_foreign_policy|5": {
-            "acc": 0.33,
-            "acc_stderr": 0.04725815626252603,
-            "acc_norm": 0.33,
-            "acc_norm_stderr": 0.04725815626252603
-        },
-        "harness|ko_mmlu_moral_disputes|5": {
-            "acc": 0.2745664739884393,
-            "acc_stderr": 0.024027745155265016,
-            "acc_norm": 0.2745664739884393,
-            "acc_norm_stderr": 0.024027745155265016
-        },
-        "harness|ko_mmlu_logical_fallacies|5": {
-            "acc": 0.26380368098159507,
-            "acc_stderr": 0.03462419931615625,
-            "acc_norm": 0.26380368098159507,
-            "acc_norm_stderr": 0.03462419931615625
-        },
-        "harness|ko_mmlu_prehistory|5": {
-            "acc": 0.2654320987654321,
-            "acc_stderr": 0.024569223600460845,
-            "acc_norm": 0.2654320987654321,
-            "acc_norm_stderr": 0.024569223600460845
-        },
-        "harness|ko_mmlu_college_mathematics|5": {
-            "acc": 0.29,
-            "acc_stderr": 0.045604802157206845,
-            "acc_norm": 0.29,
-            "acc_norm_stderr": 0.045604802157206845
-        },
-        "harness|ko_mmlu_high_school_government_and_politics|5": {
-            "acc": 0.3005181347150259,
-            "acc_stderr": 0.033088185944157494,
-            "acc_norm": 0.3005181347150259,
-            "acc_norm_stderr": 0.033088185944157494
-        },
-        "harness|ko_mmlu_econometrics|5": {
-            "acc": 0.2719298245614035,
-            "acc_stderr": 0.04185774424022056,
-            "acc_norm": 0.2719298245614035,
-            "acc_norm_stderr": 0.04185774424022056
-        },
-        "harness|ko_mmlu_high_school_psychology|5": {
-            "acc": 0.3467889908256881,
-            "acc_stderr": 0.020406097104093027,
-            "acc_norm": 0.3467889908256881,
-            "acc_norm_stderr": 0.020406097104093027
-        },
-        "harness|ko_mmlu_formal_logic|5": {
-            "acc": 0.1746031746031746,
-            "acc_stderr": 0.033954900208561116,
-            "acc_norm": 0.1746031746031746,
-            "acc_norm_stderr": 0.033954900208561116
-        },
-        "harness|ko_mmlu_nutrition|5": {
-            "acc": 0.35947712418300654,
-            "acc_stderr": 0.027475969910660952,
-            "acc_norm": 0.35947712418300654,
-            "acc_norm_stderr": 0.027475969910660952
-        },
-        "harness|ko_mmlu_business_ethics|5": {
-            "acc": 0.27,
-            "acc_stderr": 0.04461960433384741,
-            "acc_norm": 0.27,
-            "acc_norm_stderr": 0.04461960433384741
-        },
-        "harness|ko_mmlu_international_law|5": {
-            "acc": 0.3884297520661157,
-            "acc_stderr": 0.044492703500683815,
-            "acc_norm": 0.3884297520661157,
-            "acc_norm_stderr": 0.044492703500683815
-        },
-        "harness|ko_mmlu_astronomy|5": {
-            "acc": 0.3881578947368421,
-            "acc_stderr": 0.03965842097512744,
-            "acc_norm": 0.3881578947368421,
-            "acc_norm_stderr": 0.03965842097512744
-        },
-        "harness|ko_mmlu_professional_psychology|5": {
-            "acc": 0.23529411764705882,
-            "acc_stderr": 0.01716058723504635,
-            "acc_norm": 0.23529411764705882,
-            "acc_norm_stderr": 0.01716058723504635
-        },
-        "harness|ko_mmlu_professional_accounting|5": {
-            "acc": 0.24822695035460993,
-            "acc_stderr": 0.025770015644290403,
-            "acc_norm": 0.24822695035460993,
-            "acc_norm_stderr": 0.025770015644290403
-        },
-        "harness|ko_mmlu_machine_learning|5": {
-            "acc": 0.30357142857142855,
-            "acc_stderr": 0.04364226155841044,
-            "acc_norm": 0.30357142857142855,
-            "acc_norm_stderr": 0.04364226155841044
-        },
-        "harness|ko_mmlu_high_school_statistics|5": {
-            "acc": 0.3888888888888889,
-            "acc_stderr": 0.033247089118091176,
-            "acc_norm": 0.3888888888888889,
-            "acc_norm_stderr": 0.033247089118091176
-        },
-        "harness|ko_mmlu_moral_scenarios|5": {
-            "acc": 0.2424581005586592,
-            "acc_stderr": 0.01433352205921789,
-            "acc_norm": 0.2424581005586592,
-            "acc_norm_stderr": 0.01433352205921789
-        },
-        "harness|ko_mmlu_college_computer_science|5": {
-            "acc": 0.31,
-            "acc_stderr": 0.04648231987117316,
-            "acc_norm": 0.31,
-            "acc_norm_stderr": 0.04648231987117316
-        },
-        "harness|ko_mmlu_high_school_computer_science|5": {
-            "acc": 0.21,
-            "acc_stderr": 0.040936018074033256,
-            "acc_norm": 0.21,
-            "acc_norm_stderr": 0.040936018074033256
-        },
-        "harness|ko_mmlu_professional_medicine|5": {
-            "acc": 0.4264705882352941,
-            "acc_stderr": 0.03004261583271486,
-            "acc_norm": 0.4264705882352941,
-            "acc_norm_stderr": 0.03004261583271486
-        },
-        "harness|ko_mmlu_security_studies|5": {
-            "acc": 0.39591836734693875,
-            "acc_stderr": 0.03130802899065686,
-            "acc_norm": 0.39591836734693875,
-            "acc_norm_stderr": 0.03130802899065686
-        },
-        "harness|ko_mmlu_high_school_world_history|5": {
-            "acc": 0.31223628691983124,
-            "acc_stderr": 0.03016513786784701,
-            "acc_norm": 0.31223628691983124,
-            "acc_norm_stderr": 0.03016513786784701
-        },
-        "harness|ko_mmlu_professional_law|5": {
-            "acc": 0.2757496740547588,
-            "acc_stderr": 0.011413813609161005,
-            "acc_norm": 0.2757496740547588,
-            "acc_norm_stderr": 0.011413813609161005
-        },
-        "harness|ko_mmlu_high_school_us_history|5": {
-            "acc": 0.29411764705882354,
-            "acc_stderr": 0.03198001660115073,
-            "acc_norm": 0.29411764705882354,
-            "acc_norm_stderr": 0.03198001660115073
-        },
-        "harness|ko_mmlu_high_school_european_history|5": {
-            "acc": 0.30303030303030304,
-            "acc_stderr": 0.035886248000917075,
-            "acc_norm": 0.30303030303030304,
-            "acc_norm_stderr": 0.035886248000917075
-        },
-        "harness|ko_truthfulqa_mc|0": {
-            "mc1": 0.24969400244798043,
-            "mc1_stderr": 0.015152286907148125,
-            "mc2": 0.3905558403820087,
-            "mc2_stderr": 0.014722115029998253
-        },
-        "harness|ko_commongen_v2|2": {
-            "acc": 0.33766233766233766,
-            "acc_stderr": 0.01625907578475496,
-            "acc_norm": 0.4639905548996458,
-            "acc_norm_stderr": 0.017145715365486654
-        }
-    },
-    "versions": {
-        "all": 0,
-        "harness|ko_arc_challenge|25": 0,
-        "harness|ko_hellaswag|10": 0,
-        "harness|ko_mmlu_world_religions|5": 1,
-        "harness|ko_mmlu_management|5": 1,
-        "harness|ko_mmlu_miscellaneous|5": 1,
-        "harness|ko_mmlu_anatomy|5": 1,
-        "harness|ko_mmlu_abstract_algebra|5": 1,
-        "harness|ko_mmlu_conceptual_physics|5": 1,
-        "harness|ko_mmlu_virology|5": 1,
-        "harness|ko_mmlu_philosophy|5": 1,
-        "harness|ko_mmlu_human_aging|5": 1,
-        "harness|ko_mmlu_human_sexuality|5": 1,
-        "harness|ko_mmlu_medical_genetics|5": 1,
-        "harness|ko_mmlu_high_school_geography|5": 1,
-        "harness|ko_mmlu_electrical_engineering|5": 1,
-        "harness|ko_mmlu_college_physics|5": 1,
-        "harness|ko_mmlu_high_school_microeconomics|5": 1,
-        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
-        "harness|ko_mmlu_computer_security|5": 1,
-        "harness|ko_mmlu_global_facts|5": 1,
-        "harness|ko_mmlu_jurisprudence|5": 1,
-        "harness|ko_mmlu_high_school_chemistry|5": 1,
-        "harness|ko_mmlu_high_school_biology|5": 1,
-        "harness|ko_mmlu_marketing|5": 1,
-        "harness|ko_mmlu_clinical_knowledge|5": 1,
-        "harness|ko_mmlu_public_relations|5": 1,
-        "harness|ko_mmlu_high_school_mathematics|5": 1,
-        "harness|ko_mmlu_high_school_physics|5": 1,
-        "harness|ko_mmlu_sociology|5": 1,
-        "harness|ko_mmlu_college_medicine|5": 1,
-        "harness|ko_mmlu_elementary_mathematics|5": 1,
-        "harness|ko_mmlu_college_biology|5": 1,
-        "harness|ko_mmlu_college_chemistry|5": 1,
-        "harness|ko_mmlu_us_foreign_policy|5": 1,
-        "harness|ko_mmlu_moral_disputes|5": 1,
-        "harness|ko_mmlu_logical_fallacies|5": 1,
-        "harness|ko_mmlu_prehistory|5": 1,
-        "harness|ko_mmlu_college_mathematics|5": 1,
-        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
-        "harness|ko_mmlu_econometrics|5": 1,
-        "harness|ko_mmlu_high_school_psychology|5": 1,
-        "harness|ko_mmlu_formal_logic|5": 1,
-        "harness|ko_mmlu_nutrition|5": 1,
-        "harness|ko_mmlu_business_ethics|5": 1,
-        "harness|ko_mmlu_international_law|5": 1,
-        "harness|ko_mmlu_astronomy|5": 1,
-        "harness|ko_mmlu_professional_psychology|5": 1,
-        "harness|ko_mmlu_professional_accounting|5": 1,
-        "harness|ko_mmlu_machine_learning|5": 1,
-        "harness|ko_mmlu_high_school_statistics|5": 1,
-        "harness|ko_mmlu_moral_scenarios|5": 1,
-        "harness|ko_mmlu_college_computer_science|5": 1,
-        "harness|ko_mmlu_high_school_computer_science|5": 1,
-        "harness|ko_mmlu_professional_medicine|5": 1,
-        "harness|ko_mmlu_security_studies|5": 1,
-        "harness|ko_mmlu_high_school_world_history|5": 1,
-        "harness|ko_mmlu_professional_law|5": 1,
-        "harness|ko_mmlu_high_school_us_history|5": 1,
-        "harness|ko_mmlu_high_school_european_history|5": 1,
-        "harness|ko_truthfulqa_mc|0": 0,
-        "harness|ko_commongen_v2|2": 1
-    },
-    "config_general": {
-        "model_name": "The-matt/llama2_ko-7b_stilted-lion-205_1530",
-        "model_sha": "948480784c612e413d857c89d2a343b32c704498",
-        "model_dtype": "torch.float16",
-        "lighteval_sha": "",
-        "num_few_shot_default": 0,
-        "num_fewshot_seeds": 1,
-        "override_batch_size": 1,
-        "max_samples": null
-    }
-}
TheBloke/Llama-2-13B-fp16/result_2023-09-27 04:58:32.json
DELETED
@@ -1,444 +0,0 @@
-{
-    "results": {
-        "harness|ko_arc_challenge|25": {
-            "acc": 0.3191126279863481,
-            "acc_stderr": 0.013621696119173302,
-            "acc_norm": 0.37627986348122866,
-            "acc_norm_stderr": 0.014157022555407166
-        },
-        "harness|ko_hellaswag|10": {
-            "acc": 0.36138219478191597,
-            "acc_stderr": 0.004794191785967945,
-            "acc_norm": 0.46614220274845647,
-            "acc_norm_stderr": 0.004978328190775522
-        },
-        "harness|ko_mmlu_world_religions|5": {
-            "acc": 0.40350877192982454,
-            "acc_stderr": 0.03762738699917055,
-            "acc_norm": 0.40350877192982454,
-            "acc_norm_stderr": 0.03762738699917055
-        },
-        "harness|ko_mmlu_management|5": {
-            "acc": 0.5145631067961165,
-            "acc_stderr": 0.04948637324026637,
-            "acc_norm": 0.5145631067961165,
-            "acc_norm_stderr": 0.04948637324026637
-        },
-        "harness|ko_mmlu_miscellaneous|5": {
-            "acc": 0.4789272030651341,
-            "acc_stderr": 0.0178640767862129,
-            "acc_norm": 0.4789272030651341,
-            "acc_norm_stderr": 0.0178640767862129
-        },
-        "harness|ko_mmlu_anatomy|5": {
-            "acc": 0.37037037037037035,
-            "acc_stderr": 0.04171654161354543,
-            "acc_norm": 0.37037037037037035,
-            "acc_norm_stderr": 0.04171654161354543
-        },
-        "harness|ko_mmlu_abstract_algebra|5": {
-            "acc": 0.35,
-            "acc_stderr": 0.0479372485441102,
-            "acc_norm": 0.35,
-            "acc_norm_stderr": 0.0479372485441102
-        },
-        "harness|ko_mmlu_conceptual_physics|5": {
-            "acc": 0.39148936170212767,
-            "acc_stderr": 0.03190701242326812,
-            "acc_norm": 0.39148936170212767,
-            "acc_norm_stderr": 0.03190701242326812
-        },
-        "harness|ko_mmlu_virology|5": {
-            "acc": 0.39759036144578314,
-            "acc_stderr": 0.038099730845402184,
-            "acc_norm": 0.39759036144578314,
-            "acc_norm_stderr": 0.038099730845402184
-        },
-        "harness|ko_mmlu_philosophy|5": {
-            "acc": 0.4437299035369775,
-            "acc_stderr": 0.02821768355665232,
-            "acc_norm": 0.4437299035369775,
-            "acc_norm_stderr": 0.02821768355665232
-        },
-        "harness|ko_mmlu_human_aging|5": {
-            "acc": 0.3721973094170404,
-            "acc_stderr": 0.03244305283008731,
-            "acc_norm": 0.3721973094170404,
-            "acc_norm_stderr": 0.03244305283008731
-        },
-        "harness|ko_mmlu_human_sexuality|5": {
-            "acc": 0.5419847328244275,
-            "acc_stderr": 0.04369802690578756,
-            "acc_norm": 0.5419847328244275,
-            "acc_norm_stderr": 0.04369802690578756
-        },
-        "harness|ko_mmlu_medical_genetics|5": {
-            "acc": 0.34,
-            "acc_stderr": 0.04760952285695236,
-            "acc_norm": 0.34,
-            "acc_norm_stderr": 0.04760952285695236
-        },
-        "harness|ko_mmlu_high_school_geography|5": {
-            "acc": 0.51010101010101,
-            "acc_stderr": 0.035616254886737454,
-            "acc_norm": 0.51010101010101,
-            "acc_norm_stderr": 0.035616254886737454
-        },
-        "harness|ko_mmlu_electrical_engineering|5": {
-            "acc": 0.4413793103448276,
-            "acc_stderr": 0.04137931034482758,
-            "acc_norm": 0.4413793103448276,
-            "acc_norm_stderr": 0.04137931034482758
-        },
-        "harness|ko_mmlu_college_physics|5": {
-            "acc": 0.21568627450980393,
-            "acc_stderr": 0.04092563958237655,
-            "acc_norm": 0.21568627450980393,
-            "acc_norm_stderr": 0.04092563958237655
-        },
-        "harness|ko_mmlu_high_school_microeconomics|5": {
-            "acc": 0.453781512605042,
-            "acc_stderr": 0.03233943468182088,
-            "acc_norm": 0.453781512605042,
-            "acc_norm_stderr": 0.03233943468182088
-        },
-        "harness|ko_mmlu_high_school_macroeconomics|5": {
-            "acc": 0.3974358974358974,
-            "acc_stderr": 0.024811920017903836,
-            "acc_norm": 0.3974358974358974,
-            "acc_norm_stderr": 0.024811920017903836
-        },
-        "harness|ko_mmlu_computer_security|5": {
-            "acc": 0.47,
-            "acc_stderr": 0.050161355804659205,
-            "acc_norm": 0.47,
-            "acc_norm_stderr": 0.050161355804659205
-        },
-        "harness|ko_mmlu_global_facts|5": {
-            "acc": 0.32,
-            "acc_stderr": 0.04688261722621504,
-            "acc_norm": 0.32,
-            "acc_norm_stderr": 0.04688261722621504
-        },
-        "harness|ko_mmlu_jurisprudence|5": {
-            "acc": 0.5185185185185185,
-            "acc_stderr": 0.04830366024635331,
-            "acc_norm": 0.5185185185185185,
-            "acc_norm_stderr": 0.04830366024635331
-        },
-        "harness|ko_mmlu_high_school_chemistry|5": {
-            "acc": 0.35467980295566504,
-            "acc_stderr": 0.03366124489051449,
-            "acc_norm": 0.35467980295566504,
-            "acc_norm_stderr": 0.03366124489051449
-        },
-        "harness|ko_mmlu_high_school_biology|5": {
-            "acc": 0.47419354838709676,
-            "acc_stderr": 0.028406095057653315,
-            "acc_norm": 0.47419354838709676,
-            "acc_norm_stderr": 0.028406095057653315
-        },
-        "harness|ko_mmlu_marketing|5": {
-            "acc": 0.5897435897435898,
-            "acc_stderr": 0.03222414045241107,
-            "acc_norm": 0.5897435897435898,
-            "acc_norm_stderr": 0.03222414045241107
-        },
-        "harness|ko_mmlu_clinical_knowledge|5": {
-            "acc": 0.3849056603773585,
-            "acc_stderr": 0.029946498567699945,
-            "acc_norm": 0.3849056603773585,
-            "acc_norm_stderr": 0.029946498567699945
-        },
-        "harness|ko_mmlu_public_relations|5": {
-            "acc": 0.4818181818181818,
-            "acc_stderr": 0.04785964010794916,
-            "acc_norm": 0.4818181818181818,
-            "acc_norm_stderr": 0.04785964010794916
-        },
-        "harness|ko_mmlu_high_school_mathematics|5": {
-            "acc": 0.23703703703703705,
-            "acc_stderr": 0.02592887613276611,
-            "acc_norm": 0.23703703703703705,
-            "acc_norm_stderr": 0.02592887613276611
-        },
-        "harness|ko_mmlu_high_school_physics|5": {
-            "acc": 0.2980132450331126,
-            "acc_stderr": 0.037345356767871984,
-            "acc_norm": 0.2980132450331126,
-            "acc_norm_stderr": 0.037345356767871984
-        },
-        "harness|ko_mmlu_sociology|5": {
-            "acc": 0.5422885572139303,
-            "acc_stderr": 0.035228658640995975,
-            "acc_norm": 0.5422885572139303,
-            "acc_norm_stderr": 0.035228658640995975
-        },
-        "harness|ko_mmlu_college_medicine|5": {
-            "acc": 0.37572254335260113,
-            "acc_stderr": 0.036928207672648664,
-            "acc_norm": 0.37572254335260113,
-            "acc_norm_stderr": 0.036928207672648664
-        },
-        "harness|ko_mmlu_elementary_mathematics|5": {
-            "acc": 0.2830687830687831,
-            "acc_stderr": 0.023201392938194978,
-            "acc_norm": 0.2830687830687831,
-            "acc_norm_stderr": 0.023201392938194978
-        },
-        "harness|ko_mmlu_college_biology|5": {
-            "acc": 0.2986111111111111,
-            "acc_stderr": 0.03827052357950756,
-            "acc_norm": 0.2986111111111111,
-            "acc_norm_stderr": 0.03827052357950756
-        },
-        "harness|ko_mmlu_college_chemistry|5": {
-            "acc": 0.37,
-            "acc_stderr": 0.04852365870939099,
-            "acc_norm": 0.37,
-            "acc_norm_stderr": 0.04852365870939099
-        },
-        "harness|ko_mmlu_us_foreign_policy|5": {
-            "acc": 0.54,
-            "acc_stderr": 0.05009082659620333,
-            "acc_norm": 0.54,
-            "acc_norm_stderr": 0.05009082659620333
-        },
-        "harness|ko_mmlu_moral_disputes|5": {
-            "acc": 0.4595375722543353,
-            "acc_stderr": 0.02683080599895224,
-            "acc_norm": 0.4595375722543353,
-            "acc_norm_stderr": 0.02683080599895224
-        },
-        "harness|ko_mmlu_logical_fallacies|5": {
-            "acc": 0.3619631901840491,
-            "acc_stderr": 0.037757007291414416,
-            "acc_norm": 0.3619631901840491,
-            "acc_norm_stderr": 0.037757007291414416
-        },
-        "harness|ko_mmlu_prehistory|5": {
-            "acc": 0.4012345679012346,
-            "acc_stderr": 0.0272725828498398,
-            "acc_norm": 0.4012345679012346,
-            "acc_norm_stderr": 0.0272725828498398
-        },
-        "harness|ko_mmlu_college_mathematics|5": {
-            "acc": 0.32,
-            "acc_stderr": 0.04688261722621504,
-            "acc_norm": 0.32,
-            "acc_norm_stderr": 0.04688261722621504
-        },
-        "harness|ko_mmlu_high_school_government_and_politics|5": {
-            "acc": 0.49740932642487046,
-            "acc_stderr": 0.03608390745384486,
-            "acc_norm": 0.49740932642487046,
-            "acc_norm_stderr": 0.03608390745384486
-        },
-        "harness|ko_mmlu_econometrics|5": {
-            "acc": 0.3333333333333333,
-            "acc_stderr": 0.04434600701584926,
-            "acc_norm": 0.3333333333333333,
-            "acc_norm_stderr": 0.04434600701584926
-        },
-        "harness|ko_mmlu_high_school_psychology|5": {
-            "acc": 0.44770642201834865,
-            "acc_stderr": 0.021319754962425462,
-            "acc_norm": 0.44770642201834865,
-            "acc_norm_stderr": 0.021319754962425462
-        },
-        "harness|ko_mmlu_formal_logic|5": {
-            "acc": 0.373015873015873,
-            "acc_stderr": 0.04325506042017086,
-            "acc_norm": 0.373015873015873,
-            "acc_norm_stderr": 0.04325506042017086
-        },
-        "harness|ko_mmlu_nutrition|5": {
-            "acc": 0.4411764705882353,
-            "acc_stderr": 0.028431095444176643,
-            "acc_norm": 0.4411764705882353,
-            "acc_norm_stderr": 0.028431095444176643
-        },
-        "harness|ko_mmlu_business_ethics|5": {
-            "acc": 0.38,
-            "acc_stderr": 0.04878317312145633,
-            "acc_norm": 0.38,
-            "acc_norm_stderr": 0.04878317312145633
-        },
-        "harness|ko_mmlu_international_law|5": {
-            "acc": 0.5702479338842975,
-            "acc_stderr": 0.04519082021319774,
-            "acc_norm": 0.5702479338842975,
-            "acc_norm_stderr": 0.04519082021319774
-        },
-        "harness|ko_mmlu_astronomy|5": {
-            "acc": 0.48026315789473684,
-            "acc_stderr": 0.040657710025626036,
-            "acc_norm": 0.48026315789473684,
-            "acc_norm_stderr": 0.040657710025626036
-        },
-        "harness|ko_mmlu_professional_psychology|5": {
-            "acc": 0.31699346405228757,
-            "acc_stderr": 0.018824219512706207,
-            "acc_norm": 0.31699346405228757,
-            "acc_norm_stderr": 0.018824219512706207
-        },
-        "harness|ko_mmlu_professional_accounting|5": {
-            "acc": 0.30851063829787234,
-            "acc_stderr": 0.027553366165101373,
-            "acc_norm": 0.30851063829787234,
-            "acc_norm_stderr": 0.027553366165101373
-        },
-        "harness|ko_mmlu_machine_learning|5": {
-            "acc": 0.22321428571428573,
-            "acc_stderr": 0.039523019677025116,
-            "acc_norm": 0.22321428571428573,
-            "acc_norm_stderr": 0.039523019677025116
-        },
-        "harness|ko_mmlu_high_school_statistics|5": {
-            "acc": 0.3888888888888889,
-            "acc_stderr": 0.03324708911809117,
-            "acc_norm": 0.3888888888888889,
-            "acc_norm_stderr": 0.03324708911809117
-        },
-        "harness|ko_mmlu_moral_scenarios|5": {
-            "acc": 0.2424581005586592,
-            "acc_stderr": 0.01433352205921789,
-            "acc_norm": 0.2424581005586592,
-            "acc_norm_stderr": 0.01433352205921789
-        },
-        "harness|ko_mmlu_college_computer_science|5": {
-            "acc": 0.34,
-            "acc_stderr": 0.04760952285695235,
-            "acc_norm": 0.34,
-            "acc_norm_stderr": 0.04760952285695235
-        },
-        "harness|ko_mmlu_high_school_computer_science|5": {
-            "acc": 0.37,
-            "acc_stderr": 0.04852365870939099,
-            "acc_norm": 0.37,
-            "acc_norm_stderr": 0.04852365870939099
-        },
-        "harness|ko_mmlu_professional_medicine|5": {
-            "acc": 0.375,
-            "acc_stderr": 0.029408372932278746,
-            "acc_norm": 0.375,
-            "acc_norm_stderr": 0.029408372932278746
-        },
-        "harness|ko_mmlu_security_studies|5": {
-            "acc": 0.43673469387755104,
-            "acc_stderr": 0.031751952375833226,
-            "acc_norm": 0.43673469387755104,
-            "acc_norm_stderr": 0.031751952375833226
-        },
-        "harness|ko_mmlu_high_school_world_history|5": {
-            "acc": 0.39662447257383965,
-            "acc_stderr": 0.03184399873811226,
-            "acc_norm": 0.39662447257383965,
-            "acc_norm_stderr": 0.03184399873811226
-        },
-        "harness|ko_mmlu_professional_law|5": {
-            "acc": 0.31681877444589307,
-            "acc_stderr": 0.011882349954723015,
-            "acc_norm": 0.31681877444589307,
-            "acc_norm_stderr": 0.011882349954723015
-        },
-        "harness|ko_mmlu_high_school_us_history|5": {
-            "acc": 0.4019607843137255,
-            "acc_stderr": 0.03441190023482466,
-            "acc_norm": 0.4019607843137255,
-            "acc_norm_stderr": 0.03441190023482466
-        },
-        "harness|ko_mmlu_high_school_european_history|5": {
-            "acc": 0.42424242424242425,
-            "acc_stderr": 0.038592681420702615,
-            "acc_norm": 0.42424242424242425,
-            "acc_norm_stderr": 0.038592681420702615
-        },
-        "harness|ko_truthfulqa_mc|0": {
-            "mc1": 0.24357405140758873,
-            "mc1_stderr": 0.015026354824910782,
-            "mc2": 0.41395274449910313,
-            "mc2_stderr": 0.015033140507060082
-        },
-        "harness|ko_commongen_v2|2": {
-            "acc": 0.3825265643447462,
-            "acc_stderr": 0.016709165387228806,
-            "acc_norm": 0.4781582054309327,
-            "acc_norm_stderr": 0.017173944474294378
-        }
-    },
-    "versions": {
-        "all": 0,
-        "harness|ko_arc_challenge|25": 0,
-        "harness|ko_hellaswag|10": 0,
-        "harness|ko_mmlu_world_religions|5": 1,
-        "harness|ko_mmlu_management|5": 1,
-        "harness|ko_mmlu_miscellaneous|5": 1,
-        "harness|ko_mmlu_anatomy|5": 1,
-        "harness|ko_mmlu_abstract_algebra|5": 1,
-        "harness|ko_mmlu_conceptual_physics|5": 1,
-        "harness|ko_mmlu_virology|5": 1,
-        "harness|ko_mmlu_philosophy|5": 1,
-        "harness|ko_mmlu_human_aging|5": 1,
-        "harness|ko_mmlu_human_sexuality|5": 1,
-        "harness|ko_mmlu_medical_genetics|5": 1,
-        "harness|ko_mmlu_high_school_geography|5": 1,
-        "harness|ko_mmlu_electrical_engineering|5": 1,
-        "harness|ko_mmlu_college_physics|5": 1,
-        "harness|ko_mmlu_high_school_microeconomics|5": 1,
-        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
-        "harness|ko_mmlu_computer_security|5": 1,
-        "harness|ko_mmlu_global_facts|5": 1,
-        "harness|ko_mmlu_jurisprudence|5": 1,
-        "harness|ko_mmlu_high_school_chemistry|5": 1,
-        "harness|ko_mmlu_high_school_biology|5": 1,
-        "harness|ko_mmlu_marketing|5": 1,
-        "harness|ko_mmlu_clinical_knowledge|5": 1,
-        "harness|ko_mmlu_public_relations|5": 1,
-        "harness|ko_mmlu_high_school_mathematics|5": 1,
-        "harness|ko_mmlu_high_school_physics|5": 1,
-        "harness|ko_mmlu_sociology|5": 1,
-        "harness|ko_mmlu_college_medicine|5": 1,
-        "harness|ko_mmlu_elementary_mathematics|5": 1,
-        "harness|ko_mmlu_college_biology|5": 1,
-        "harness|ko_mmlu_college_chemistry|5": 1,
-        "harness|ko_mmlu_us_foreign_policy|5": 1,
-        "harness|ko_mmlu_moral_disputes|5": 1,
-        "harness|ko_mmlu_logical_fallacies|5": 1,
-        "harness|ko_mmlu_prehistory|5": 1,
-        "harness|ko_mmlu_college_mathematics|5": 1,
-        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
-        "harness|ko_mmlu_econometrics|5": 1,
-        "harness|ko_mmlu_high_school_psychology|5": 1,
-        "harness|ko_mmlu_formal_logic|5": 1,
-        "harness|ko_mmlu_nutrition|5": 1,
-        "harness|ko_mmlu_business_ethics|5": 1,
-        "harness|ko_mmlu_international_law|5": 1,
-        "harness|ko_mmlu_astronomy|5": 1,
-        "harness|ko_mmlu_professional_psychology|5": 1,
-        "harness|ko_mmlu_professional_accounting|5": 1,
-        "harness|ko_mmlu_machine_learning|5": 1,
-        "harness|ko_mmlu_high_school_statistics|5": 1,
-        "harness|ko_mmlu_moral_scenarios|5": 1,
-        "harness|ko_mmlu_college_computer_science|5": 1,
-        "harness|ko_mmlu_high_school_computer_science|5": 1,
-        "harness|ko_mmlu_professional_medicine|5": 1,
-        "harness|ko_mmlu_security_studies|5": 1,
-        "harness|ko_mmlu_high_school_world_history|5": 1,
-        "harness|ko_mmlu_professional_law|5": 1,
-        "harness|ko_mmlu_high_school_us_history|5": 1,
-        "harness|ko_mmlu_high_school_european_history|5": 1,
-        "harness|ko_truthfulqa_mc|0": 0,
-        "harness|ko_commongen_v2|2": 1
-    },
-    "config_general": {
-        "model_name": "TheBloke/Llama-2-13B-fp16",
-        "model_sha": "b2e65e8ad4bb35e5abaee0170ebd5fc2134a50bb",
-        "model_dtype": "torch.float16",
-        "lighteval_sha": "",
-        "num_few_shot_default": 0,
-        "num_fewshot_seeds": 1,
-        "override_batch_size": 1,
-        "max_samples": null
-    }
-}