MaziyarPanahi committed
Commit 8fe6a22 (1 parent: db8fde5)

Update README.md (#1)


- Update README.md (cc29e273d00bd9e6f218ce2cf9e899f0ebf48a6e)

Files changed (1)
  1. README.md +384 -0
README.md CHANGED
@@ -11,3 +11,387 @@ tags:
 Merge of top 7B models with SLERP method.
 
 > mergekit is a toolkit for merging pre-trained language models. mergekit uses an out-of-core approach to perform unreasonably elaborate merges in resource-constrained situations. Merges can be run entirely on CPU or accelerated with as little as 8 GB of VRAM. Many merging algorithms are supported, with more coming as they catch my attention.
+ >
+ > ## Eval
+ > ```python
+ > {
+ "all": {
+ "acc": 0.6545868511485138,
+ "acc_stderr": 0.031980293841566164,
+ "acc_norm": 0.6542757501692061,
+ "acc_norm_stderr": 0.03263807517879597,
+ "mc1": 0.45165238678090575,
+ "mc1_stderr": 0.017421480300277643,
+ "mc2": 0.6217500644350165,
+ "mc2_stderr": 0.015583825644663436
+ },
+ "harness|arc:challenge|25": {
+ "acc": 0.6723549488054608,
+ "acc_stderr": 0.01371584794071934,
+ "acc_norm": 0.6945392491467577,
+ "acc_norm_stderr": 0.01346008047800251
+ },
+ "harness|hellaswag|10": {
+ "acc": 0.7046405098585939,
+ "acc_stderr": 0.0045527183605131,
+ "acc_norm": 0.871539533957379,
+ "acc_norm_stderr": 0.0033391798350182853
+ },
+ "harness|hendrycksTest-abstract_algebra|5": {
+ "acc": 0.37,
+ "acc_stderr": 0.048523658709391,
+ "acc_norm": 0.37,
+ "acc_norm_stderr": 0.048523658709391
+ },
+ "harness|hendrycksTest-anatomy|5": {
+ "acc": 0.6148148148148148,
+ "acc_stderr": 0.04203921040156279,
+ "acc_norm": 0.6148148148148148,
+ "acc_norm_stderr": 0.04203921040156279
+ },
+ "harness|hendrycksTest-astronomy|5": {
+ "acc": 0.6907894736842105,
+ "acc_stderr": 0.037610708698674805,
+ "acc_norm": 0.6907894736842105,
+ "acc_norm_stderr": 0.037610708698674805
+ },
+ "harness|hendrycksTest-business_ethics|5": {
+ "acc": 0.63,
+ "acc_stderr": 0.04852365870939099,
+ "acc_norm": 0.63,
+ "acc_norm_stderr": 0.04852365870939099
+ },
+ "harness|hendrycksTest-clinical_knowledge|5": {
+ "acc": 0.7132075471698113,
+ "acc_stderr": 0.02783491252754407,
+ "acc_norm": 0.7132075471698113,
+ "acc_norm_stderr": 0.02783491252754407
+ },
+ "harness|hendrycksTest-college_biology|5": {
+ "acc": 0.7638888888888888,
+ "acc_stderr": 0.03551446610810826,
+ "acc_norm": 0.7638888888888888,
+ "acc_norm_stderr": 0.03551446610810826
+ },
+ "harness|hendrycksTest-college_chemistry|5": {
+ "acc": 0.47,
+ "acc_stderr": 0.050161355804659205,
+ "acc_norm": 0.47,
+ "acc_norm_stderr": 0.050161355804659205
+ },
+ "harness|hendrycksTest-college_computer_science|5": {
+ "acc": 0.57,
+ "acc_stderr": 0.04975698519562428,
+ "acc_norm": 0.57,
+ "acc_norm_stderr": 0.04975698519562428
+ },
+ "harness|hendrycksTest-college_mathematics|5": {
+ "acc": 0.27,
+ "acc_stderr": 0.0446196043338474,
+ "acc_norm": 0.27,
+ "acc_norm_stderr": 0.0446196043338474
+ },
+ "harness|hendrycksTest-college_medicine|5": {
+ "acc": 0.6820809248554913,
+ "acc_stderr": 0.0355068398916558,
+ "acc_norm": 0.6820809248554913,
+ "acc_norm_stderr": 0.0355068398916558
+ },
+ "harness|hendrycksTest-college_physics|5": {
+ "acc": 0.4019607843137255,
+ "acc_stderr": 0.04878608714466996,
+ "acc_norm": 0.4019607843137255,
+ "acc_norm_stderr": 0.04878608714466996
+ },
+ "harness|hendrycksTest-computer_security|5": {
+ "acc": 0.77,
+ "acc_stderr": 0.04229525846816506,
+ "acc_norm": 0.77,
+ "acc_norm_stderr": 0.04229525846816506
+ },
+ "harness|hendrycksTest-conceptual_physics|5": {
+ "acc": 0.5829787234042553,
+ "acc_stderr": 0.03223276266711712,
+ "acc_norm": 0.5829787234042553,
+ "acc_norm_stderr": 0.03223276266711712
+ },
+ "harness|hendrycksTest-econometrics|5": {
+ "acc": 0.5175438596491229,
+ "acc_stderr": 0.04700708033551038,
+ "acc_norm": 0.5175438596491229,
+ "acc_norm_stderr": 0.04700708033551038
+ },
+ "harness|hendrycksTest-electrical_engineering|5": {
+ "acc": 0.5586206896551724,
+ "acc_stderr": 0.04137931034482757,
+ "acc_norm": 0.5586206896551724,
+ "acc_norm_stderr": 0.04137931034482757
+ },
+ "harness|hendrycksTest-elementary_mathematics|5": {
+ "acc": 0.41798941798941797,
+ "acc_stderr": 0.025402555503260912,
+ "acc_norm": 0.41798941798941797,
+ "acc_norm_stderr": 0.025402555503260912
+ },
+ "harness|hendrycksTest-formal_logic|5": {
+ "acc": 0.47619047619047616,
+ "acc_stderr": 0.04467062628403273,
+ "acc_norm": 0.47619047619047616,
+ "acc_norm_stderr": 0.04467062628403273
+ },
+ "harness|hendrycksTest-global_facts|5": {
+ "acc": 0.34,
+ "acc_stderr": 0.04760952285695235,
+ "acc_norm": 0.34,
+ "acc_norm_stderr": 0.04760952285695235
+ },
+ "harness|hendrycksTest-high_school_biology|5": {
+ "acc": 0.7903225806451613,
+ "acc_stderr": 0.023157879349083522,
+ "acc_norm": 0.7903225806451613,
+ "acc_norm_stderr": 0.023157879349083522
+ },
+ "harness|hendrycksTest-high_school_chemistry|5": {
+ "acc": 0.4975369458128079,
+ "acc_stderr": 0.03517945038691063,
+ "acc_norm": 0.4975369458128079,
+ "acc_norm_stderr": 0.03517945038691063
+ },
+ "harness|hendrycksTest-high_school_computer_science|5": {
+ "acc": 0.68,
+ "acc_stderr": 0.04688261722621505,
+ "acc_norm": 0.68,
+ "acc_norm_stderr": 0.04688261722621505
+ },
+ "harness|hendrycksTest-high_school_european_history|5": {
+ "acc": 0.7696969696969697,
+ "acc_stderr": 0.0328766675860349,
+ "acc_norm": 0.7696969696969697,
+ "acc_norm_stderr": 0.0328766675860349
+ },
+ "harness|hendrycksTest-high_school_geography|5": {
+ "acc": 0.7828282828282829,
+ "acc_stderr": 0.029376616484945633,
+ "acc_norm": 0.7828282828282829,
+ "acc_norm_stderr": 0.029376616484945633
+ },
+ "harness|hendrycksTest-high_school_government_and_politics|5": {
+ "acc": 0.9015544041450777,
+ "acc_stderr": 0.021500249576033456,
+ "acc_norm": 0.9015544041450777,
+ "acc_norm_stderr": 0.021500249576033456
+ },
+ "harness|hendrycksTest-high_school_macroeconomics|5": {
+ "acc": 0.6717948717948717,
+ "acc_stderr": 0.023807633198657266,
+ "acc_norm": 0.6717948717948717,
+ "acc_norm_stderr": 0.023807633198657266
+ },
+ "harness|hendrycksTest-high_school_mathematics|5": {
+ "acc": 0.34444444444444444,
+ "acc_stderr": 0.02897264888484427,
+ "acc_norm": 0.34444444444444444,
+ "acc_norm_stderr": 0.02897264888484427
+ },
+ "harness|hendrycksTest-high_school_microeconomics|5": {
+ "acc": 0.6638655462184874,
+ "acc_stderr": 0.030684737115135363,
+ "acc_norm": 0.6638655462184874,
+ "acc_norm_stderr": 0.030684737115135363
+ },
+ "harness|hendrycksTest-high_school_physics|5": {
+ "acc": 0.304635761589404,
+ "acc_stderr": 0.03757949922943343,
+ "acc_norm": 0.304635761589404,
+ "acc_norm_stderr": 0.03757949922943343
+ },
+ "harness|hendrycksTest-high_school_psychology|5": {
+ "acc": 0.8458715596330275,
+ "acc_stderr": 0.015480826865374303,
+ "acc_norm": 0.8458715596330275,
+ "acc_norm_stderr": 0.015480826865374303
+ },
+ "harness|hendrycksTest-high_school_statistics|5": {
+ "acc": 0.5185185185185185,
+ "acc_stderr": 0.03407632093854051,
+ "acc_norm": 0.5185185185185185,
+ "acc_norm_stderr": 0.03407632093854051
+ },
+ "harness|hendrycksTest-high_school_us_history|5": {
+ "acc": 0.8382352941176471,
+ "acc_stderr": 0.025845017986926917,
+ "acc_norm": 0.8382352941176471,
+ "acc_norm_stderr": 0.025845017986926917
+ },
+ "harness|hendrycksTest-high_school_world_history|5": {
+ "acc": 0.810126582278481,
+ "acc_stderr": 0.02553010046023349,
+ "acc_norm": 0.810126582278481,
+ "acc_norm_stderr": 0.02553010046023349
+ },
+ "harness|hendrycksTest-human_aging|5": {
+ "acc": 0.6905829596412556,
+ "acc_stderr": 0.03102441174057221,
+ "acc_norm": 0.6905829596412556,
+ "acc_norm_stderr": 0.03102441174057221
+ },
+ "harness|hendrycksTest-human_sexuality|5": {
+ "acc": 0.7786259541984732,
+ "acc_stderr": 0.036412970813137296,
+ "acc_norm": 0.7786259541984732,
+ "acc_norm_stderr": 0.036412970813137296
+ },
+ "harness|hendrycksTest-international_law|5": {
+ "acc": 0.8099173553719008,
+ "acc_stderr": 0.03581796951709282,
+ "acc_norm": 0.8099173553719008,
+ "acc_norm_stderr": 0.03581796951709282
+ },
+ "harness|hendrycksTest-jurisprudence|5": {
+ "acc": 0.7685185185185185,
+ "acc_stderr": 0.04077494709252627,
+ "acc_norm": 0.7685185185185185,
+ "acc_norm_stderr": 0.04077494709252627
+ },
+ "harness|hendrycksTest-logical_fallacies|5": {
+ "acc": 0.7607361963190185,
+ "acc_stderr": 0.0335195387952127,
+ "acc_norm": 0.7607361963190185,
+ "acc_norm_stderr": 0.0335195387952127
+ },
+ "harness|hendrycksTest-machine_learning|5": {
+ "acc": 0.45535714285714285,
+ "acc_stderr": 0.047268355537191,
+ "acc_norm": 0.45535714285714285,
+ "acc_norm_stderr": 0.047268355537191
+ },
+ "harness|hendrycksTest-management|5": {
+ "acc": 0.8058252427184466,
+ "acc_stderr": 0.03916667762822584,
+ "acc_norm": 0.8058252427184466,
+ "acc_norm_stderr": 0.03916667762822584
+ },
+ "harness|hendrycksTest-marketing|5": {
+ "acc": 0.8675213675213675,
+ "acc_stderr": 0.022209309073165612,
+ "acc_norm": 0.8675213675213675,
+ "acc_norm_stderr": 0.022209309073165612
+ },
+ "harness|hendrycksTest-medical_genetics|5": {
+ "acc": 0.71,
+ "acc_stderr": 0.045604802157206845,
+ "acc_norm": 0.71,
+ "acc_norm_stderr": 0.045604802157206845
+ },
+ "harness|hendrycksTest-miscellaneous|5": {
+ "acc": 0.8352490421455939,
+ "acc_stderr": 0.013265346261323788,
+ "acc_norm": 0.8352490421455939,
+ "acc_norm_stderr": 0.013265346261323788
+ },
+ "harness|hendrycksTest-moral_disputes|5": {
+ "acc": 0.7543352601156069,
+ "acc_stderr": 0.023176298203992005,
+ "acc_norm": 0.7543352601156069,
+ "acc_norm_stderr": 0.023176298203992005
+ },
+ "harness|hendrycksTest-moral_scenarios|5": {
+ "acc": 0.4547486033519553,
+ "acc_stderr": 0.016653875777524006,
+ "acc_norm": 0.4547486033519553,
+ "acc_norm_stderr": 0.016653875777524006
+ },
+ "harness|hendrycksTest-nutrition|5": {
+ "acc": 0.7483660130718954,
+ "acc_stderr": 0.0248480182638752,
+ "acc_norm": 0.7483660130718954,
+ "acc_norm_stderr": 0.0248480182638752
+ },
+ "harness|hendrycksTest-philosophy|5": {
+ "acc": 0.7202572347266881,
+ "acc_stderr": 0.02549425935069491,
+ "acc_norm": 0.7202572347266881,
+ "acc_norm_stderr": 0.02549425935069491
+ },
+ "harness|hendrycksTest-prehistory|5": {
+ "acc": 0.7592592592592593,
+ "acc_stderr": 0.02378858355165854,
+ "acc_norm": 0.7592592592592593,
+ "acc_norm_stderr": 0.02378858355165854
+ },
+ "harness|hendrycksTest-professional_accounting|5": {
+ "acc": 0.4787234042553192,
+ "acc_stderr": 0.029800481645628693,
+ "acc_norm": 0.4787234042553192,
+ "acc_norm_stderr": 0.029800481645628693
+ },
+ "harness|hendrycksTest-professional_law|5": {
+ "acc": 0.4745762711864407,
+ "acc_stderr": 0.012753716929101008,
+ "acc_norm": 0.4745762711864407,
+ "acc_norm_stderr": 0.012753716929101008
+ },
+ "harness|hendrycksTest-professional_medicine|5": {
+ "acc": 0.7095588235294118,
+ "acc_stderr": 0.027576468622740536,
+ "acc_norm": 0.7095588235294118,
+ "acc_norm_stderr": 0.027576468622740536
+ },
+ "harness|hendrycksTest-professional_psychology|5": {
+ "acc": 0.6928104575163399,
+ "acc_stderr": 0.01866335967146367,
+ "acc_norm": 0.6928104575163399,
+ "acc_norm_stderr": 0.01866335967146367
+ },
+ "harness|hendrycksTest-public_relations|5": {
+ "acc": 0.6727272727272727,
+ "acc_stderr": 0.0449429086625209,
+ "acc_norm": 0.6727272727272727,
+ "acc_norm_stderr": 0.0449429086625209
+ },
+ "harness|hendrycksTest-security_studies|5": {
+ "acc": 0.7387755102040816,
+ "acc_stderr": 0.02812342933514278,
+ "acc_norm": 0.7387755102040816,
+ "acc_norm_stderr": 0.02812342933514278
+ },
+ "harness|hendrycksTest-sociology|5": {
+ "acc": 0.845771144278607,
+ "acc_stderr": 0.025538433368578337,
+ "acc_norm": 0.845771144278607,
+ "acc_norm_stderr": 0.025538433368578337
+ },
+ "harness|hendrycksTest-us_foreign_policy|5": {
+ "acc": 0.86,
+ "acc_stderr": 0.0348735088019777,
+ "acc_norm": 0.86,
+ "acc_norm_stderr": 0.0348735088019777
+ },
+ "harness|hendrycksTest-virology|5": {
+ "acc": 0.5481927710843374,
+ "acc_stderr": 0.03874371556587953,
+ "acc_norm": 0.5481927710843374,
+ "acc_norm_stderr": 0.03874371556587953
+ },
+ "harness|hendrycksTest-world_religions|5": {
+ "acc": 0.8362573099415205,
+ "acc_stderr": 0.028380919596145866,
+ "acc_norm": 0.8362573099415205,
+ "acc_norm_stderr": 0.028380919596145866
+ },
+ "harness|truthfulqa:mc|0": {
+ "mc1": 0.45165238678090575,
+ "mc1_stderr": 0.017421480300277643,
+ "mc2": 0.6217500644350165,
+ "mc2_stderr": 0.015583825644663436
+ },
+ "harness|winogrande|5": {
+ "acc": 0.7963693764798737,
+ "acc_stderr": 0.011317798781626913
+ },
+ "harness|gsm8k|5": {
+ "acc": 0.7202426080363912,
+ "acc_stderr": 0.01236438401673532
+ }
+ }
+ ```
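
The README above names the SLERP method but does not show what the interpolation itself does. The sketch below is a minimal, illustrative Python example of spherical linear interpolation between two weight tensors; it is not mergekit's implementation, the tensors are random stand-ins for the (unnamed) donor 7B models, and the choice t=0.5 is only an example.

```python
# Illustrative sketch of SLERP (spherical linear interpolation) between two
# flattened weight tensors. NOT mergekit's code: mergekit additionally handles
# per-layer t schedules, dtypes, and tokenizer/config merging via a YAML config.
import numpy as np

def slerp(t: float, v0: np.ndarray, v1: np.ndarray, eps: float = 1e-8) -> np.ndarray:
    """Interpolate between v0 and v1 along the arc joining their directions."""
    v0_n = v0 / (np.linalg.norm(v0) + eps)      # unit copies used only to get the angle
    v1_n = v1 / (np.linalg.norm(v1) + eps)
    dot = float(np.clip(np.dot(v0_n, v1_n), -1.0, 1.0))
    if abs(dot) > 0.9995:                        # nearly colinear: fall back to plain LERP
        return (1.0 - t) * v0 + t * v1
    omega = np.arccos(dot)                       # angle between the two weight vectors
    sin_omega = np.sin(omega)
    return (np.sin((1.0 - t) * omega) / sin_omega) * v0 + (np.sin(t * omega) / sin_omega) * v1

# Hypothetical usage: blend the same parameter tensor from two donor models.
w_a = np.random.randn(4096).astype(np.float32)   # stand-in for model A's weights
w_b = np.random.randn(4096).astype(np.float32)   # stand-in for model B's weights
w_merged = slerp(0.5, w_a, w_b)                  # t=0.5 sits halfway between the models
print(w_merged.shape)
```

In mergekit itself this idea is driven by a YAML config (`merge_method: slerp` with a per-layer interpolation schedule) rather than hand-written code, which is how a merge like the one in this commit would typically be produced.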