MaziyarPanahi committed on
Commit 2d927c3
1 Parent(s): cce7c94

Update README.md (#1)


- Update README.md (dc37ca8f9f976a9ac07327f7d5089d68b9a851f0)

Files changed (1)
  1. README.md +386 -1
README.md CHANGED
@@ -8,4 +8,389 @@ tags:
 
 Merge of top 7B models and the SLERP of other 7B models
 
- > mergekit is a toolkit for merging pre-trained language models. mergekit uses an out-of-core approach to perform unreasonably elaborate merges in resource-constrained situations. Merges can be run entirely on CPU or accelerated with as little as 8 GB of VRAM. Many merging algorithms are supported, with more coming as they catch my attention.
+ > mergekit is a toolkit for merging pre-trained language models. mergekit uses an out-of-core approach to perform unreasonably elaborate merges in resource-constrained situations. Merges can be run entirely on CPU or accelerated with as little as 8 GB of VRAM. Many merging algorithms are supported, with more coming as they catch my attention.
+ >
+ > ## Eval
+ > ```python
+ > {
+ "all": {
+ "acc": 0.6571641282160704,
+ "acc_stderr": 0.031918970852064334,
+ "acc_norm": 0.6561506230894164,
+ "acc_norm_stderr": 0.03258982989656136,
+ "mc1": 0.4834761321909425,
+ "mc1_stderr": 0.017493940190057723,
+ "mc2": 0.6447306680251751,
+ "mc2_stderr": 0.015519245883344577
+ },
+ "harness|arc:challenge|25": {
+ "acc": 0.689419795221843,
+ "acc_stderr": 0.01352229209805306,
+ "acc_norm": 0.7090443686006825,
+ "acc_norm_stderr": 0.013273077865907595
+ },
+ "harness|hellaswag|10": {
+ "acc": 0.7168890659231228,
+ "acc_stderr": 0.004495891440519419,
+ "acc_norm": 0.8800039832702649,
+ "acc_norm_stderr": 0.0032429275808698544
+ },
+ "harness|hendrycksTest-abstract_algebra|5": {
+ "acc": 0.33,
+ "acc_stderr": 0.047258156262526045,
+ "acc_norm": 0.33,
+ "acc_norm_stderr": 0.047258156262526045
+ },
+ "harness|hendrycksTest-anatomy|5": {
+ "acc": 0.6370370370370371,
+ "acc_stderr": 0.04153948404742398,
+ "acc_norm": 0.6370370370370371,
+ "acc_norm_stderr": 0.04153948404742398
+ },
+ "harness|hendrycksTest-astronomy|5": {
+ "acc": 0.7105263157894737,
+ "acc_stderr": 0.03690677986137283,
+ "acc_norm": 0.7105263157894737,
+ "acc_norm_stderr": 0.03690677986137283
+ },
+ "harness|hendrycksTest-business_ethics|5": {
+ "acc": 0.65,
+ "acc_stderr": 0.0479372485441102,
+ "acc_norm": 0.65,
+ "acc_norm_stderr": 0.0479372485441102
+ },
+ "harness|hendrycksTest-clinical_knowledge|5": {
+ "acc": 0.6981132075471698,
+ "acc_stderr": 0.02825420034443866,
+ "acc_norm": 0.6981132075471698,
+ "acc_norm_stderr": 0.02825420034443866
+ },
+ "harness|hendrycksTest-college_biology|5": {
+ "acc": 0.7638888888888888,
+ "acc_stderr": 0.03551446610810826,
+ "acc_norm": 0.7638888888888888,
+ "acc_norm_stderr": 0.03551446610810826
+ },
+ "harness|hendrycksTest-college_chemistry|5": {
+ "acc": 0.48,
+ "acc_stderr": 0.050211673156867795,
+ "acc_norm": 0.48,
+ "acc_norm_stderr": 0.050211673156867795
+ },
+ "harness|hendrycksTest-college_computer_science|5": {
+ "acc": 0.56,
+ "acc_stderr": 0.049888765156985884,
+ "acc_norm": 0.56,
+ "acc_norm_stderr": 0.049888765156985884
+ },
+ "harness|hendrycksTest-college_mathematics|5": {
+ "acc": 0.27,
+ "acc_stderr": 0.0446196043338474,
+ "acc_norm": 0.27,
+ "acc_norm_stderr": 0.0446196043338474
+ },
+ "harness|hendrycksTest-college_medicine|5": {
+ "acc": 0.6589595375722543,
+ "acc_stderr": 0.03614665424180826,
+ "acc_norm": 0.6589595375722543,
+ "acc_norm_stderr": 0.03614665424180826
+ },
+ "harness|hendrycksTest-college_physics|5": {
+ "acc": 0.4117647058823529,
+ "acc_stderr": 0.048971049527263666,
+ "acc_norm": 0.4117647058823529,
+ "acc_norm_stderr": 0.048971049527263666
+ },
+ "harness|hendrycksTest-computer_security|5": {
+ "acc": 0.75,
+ "acc_stderr": 0.04351941398892446,
+ "acc_norm": 0.75,
+ "acc_norm_stderr": 0.04351941398892446
+ },
+ "harness|hendrycksTest-conceptual_physics|5": {
+ "acc": 0.5787234042553191,
+ "acc_stderr": 0.03227834510146268,
+ "acc_norm": 0.5787234042553191,
+ "acc_norm_stderr": 0.03227834510146268
+ },
+ "harness|hendrycksTest-econometrics|5": {
+ "acc": 0.5175438596491229,
+ "acc_stderr": 0.04700708033551038,
+ "acc_norm": 0.5175438596491229,
+ "acc_norm_stderr": 0.04700708033551038
+ },
+ "harness|hendrycksTest-electrical_engineering|5": {
+ "acc": 0.5655172413793104,
+ "acc_stderr": 0.04130740879555497,
+ "acc_norm": 0.5655172413793104,
+ "acc_norm_stderr": 0.04130740879555497
+ },
+ "harness|hendrycksTest-elementary_mathematics|5": {
+ "acc": 0.4312169312169312,
+ "acc_stderr": 0.02550648169813821,
+ "acc_norm": 0.4312169312169312,
+ "acc_norm_stderr": 0.02550648169813821
+ },
+ "harness|hendrycksTest-formal_logic|5": {
+ "acc": 0.48412698412698413,
+ "acc_stderr": 0.04469881854072606,
+ "acc_norm": 0.48412698412698413,
+ "acc_norm_stderr": 0.04469881854072606
+ },
+ "harness|hendrycksTest-global_facts|5": {
+ "acc": 0.33,
+ "acc_stderr": 0.04725815626252604,
+ "acc_norm": 0.33,
+ "acc_norm_stderr": 0.04725815626252604
+ },
+ "harness|hendrycksTest-high_school_biology|5": {
+ "acc": 0.7838709677419354,
+ "acc_stderr": 0.02341529343356853,
+ "acc_norm": 0.7838709677419354,
+ "acc_norm_stderr": 0.02341529343356853
+ },
+ "harness|hendrycksTest-high_school_chemistry|5": {
+ "acc": 0.4975369458128079,
+ "acc_stderr": 0.03517945038691063,
+ "acc_norm": 0.4975369458128079,
+ "acc_norm_stderr": 0.03517945038691063
+ },
+ "harness|hendrycksTest-high_school_computer_science|5": {
+ "acc": 0.67,
+ "acc_stderr": 0.04725815626252607,
+ "acc_norm": 0.67,
+ "acc_norm_stderr": 0.04725815626252607
+ },
+ "harness|hendrycksTest-high_school_european_history|5": {
+ "acc": 0.7878787878787878,
+ "acc_stderr": 0.031922715695483,
+ "acc_norm": 0.7878787878787878,
+ "acc_norm_stderr": 0.031922715695483
+ },
+ "harness|hendrycksTest-high_school_geography|5": {
+ "acc": 0.7929292929292929,
+ "acc_stderr": 0.028869778460267045,
+ "acc_norm": 0.7929292929292929,
+ "acc_norm_stderr": 0.028869778460267045
+ },
+ "harness|hendrycksTest-high_school_government_and_politics|5": {
+ "acc": 0.9015544041450777,
+ "acc_stderr": 0.021500249576033456,
+ "acc_norm": 0.9015544041450777,
+ "acc_norm_stderr": 0.021500249576033456
+ },
+ "harness|hendrycksTest-high_school_macroeconomics|5": {
+ "acc": 0.6666666666666666,
+ "acc_stderr": 0.023901157979402534,
+ "acc_norm": 0.6666666666666666,
+ "acc_norm_stderr": 0.023901157979402534
+ },
+ "harness|hendrycksTest-high_school_mathematics|5": {
+ "acc": 0.34814814814814815,
+ "acc_stderr": 0.029045600290616255,
+ "acc_norm": 0.34814814814814815,
+ "acc_norm_stderr": 0.029045600290616255
+ },
+ "harness|hendrycksTest-high_school_microeconomics|5": {
+ "acc": 0.680672268907563,
+ "acc_stderr": 0.030283995525884396,
+ "acc_norm": 0.680672268907563,
+ "acc_norm_stderr": 0.030283995525884396
+ },
+ "harness|hendrycksTest-high_school_physics|5": {
+ "acc": 0.33112582781456956,
+ "acc_stderr": 0.038425817186598696,
+ "acc_norm": 0.33112582781456956,
+ "acc_norm_stderr": 0.038425817186598696
+ },
+ "harness|hendrycksTest-high_school_psychology|5": {
+ "acc": 0.8385321100917431,
+ "acc_stderr": 0.015776239256163224,
+ "acc_norm": 0.8385321100917431,
+ "acc_norm_stderr": 0.015776239256163224
+ },
+ "harness|hendrycksTest-high_school_statistics|5": {
+ "acc": 0.5138888888888888,
+ "acc_stderr": 0.03408655867977749,
+ "acc_norm": 0.5138888888888888,
+ "acc_norm_stderr": 0.03408655867977749
+ },
+ "harness|hendrycksTest-high_school_us_history|5": {
+ "acc": 0.8578431372549019,
+ "acc_stderr": 0.024509803921568603,
+ "acc_norm": 0.8578431372549019,
+ "acc_norm_stderr": 0.024509803921568603
+ },
+ "harness|hendrycksTest-high_school_world_history|5": {
+ "acc": 0.8143459915611815,
+ "acc_stderr": 0.025310495376944856,
+ "acc_norm": 0.8143459915611815,
+ "acc_norm_stderr": 0.025310495376944856
+ },
+ "harness|hendrycksTest-human_aging|5": {
+ "acc": 0.6860986547085202,
+ "acc_stderr": 0.031146796482972465,
+ "acc_norm": 0.6860986547085202,
+ "acc_norm_stderr": 0.031146796482972465
+ },
+ "harness|hendrycksTest-human_sexuality|5": {
+ "acc": 0.7862595419847328,
+ "acc_stderr": 0.0359546161177469,
+ "acc_norm": 0.7862595419847328,
+ "acc_norm_stderr": 0.0359546161177469
+ },
+ "harness|hendrycksTest-international_law|5": {
+ "acc": 0.8099173553719008,
+ "acc_stderr": 0.03581796951709282,
+ "acc_norm": 0.8099173553719008,
+ "acc_norm_stderr": 0.03581796951709282
+ },
+ "harness|hendrycksTest-jurisprudence|5": {
+ "acc": 0.7962962962962963,
+ "acc_stderr": 0.03893542518824847,
+ "acc_norm": 0.7962962962962963,
+ "acc_norm_stderr": 0.03893542518824847
+ },
+ "harness|hendrycksTest-logical_fallacies|5": {
+ "acc": 0.7730061349693251,
+ "acc_stderr": 0.03291099578615769,
+ "acc_norm": 0.7730061349693251,
+ "acc_norm_stderr": 0.03291099578615769
+ },
+ "harness|hendrycksTest-machine_learning|5": {
+ "acc": 0.5,
+ "acc_stderr": 0.04745789978762494,
+ "acc_norm": 0.5,
+ "acc_norm_stderr": 0.04745789978762494
+ },
+ "harness|hendrycksTest-management|5": {
+ "acc": 0.7961165048543689,
+ "acc_stderr": 0.03989139859531771,
+ "acc_norm": 0.7961165048543689,
+ "acc_norm_stderr": 0.03989139859531771
+ },
+ "harness|hendrycksTest-marketing|5": {
+ "acc": 0.8760683760683761,
+ "acc_stderr": 0.02158649400128137,
+ "acc_norm": 0.8760683760683761,
+ "acc_norm_stderr": 0.02158649400128137
+ },
+ "harness|hendrycksTest-medical_genetics|5": {
+ "acc": 0.73,
+ "acc_stderr": 0.0446196043338474,
+ "acc_norm": 0.73,
+ "acc_norm_stderr": 0.0446196043338474
+ },
+ "harness|hendrycksTest-miscellaneous|5": {
+ "acc": 0.8288633461047255,
+ "acc_stderr": 0.013468201614066307,
+ "acc_norm": 0.8288633461047255,
+ "acc_norm_stderr": 0.013468201614066307
+ },
+ "harness|hendrycksTest-moral_disputes|5": {
+ "acc": 0.7514450867052023,
+ "acc_stderr": 0.023267528432100174,
+ "acc_norm": 0.7514450867052023,
+ "acc_norm_stderr": 0.023267528432100174
+ },
+ "harness|hendrycksTest-moral_scenarios|5": {
+ "acc": 0.4480446927374302,
+ "acc_stderr": 0.016631976628930595,
+ "acc_norm": 0.4480446927374302,
+ "acc_norm_stderr": 0.016631976628930595
+ },
+ "harness|hendrycksTest-nutrition|5": {
+ "acc": 0.7320261437908496,
+ "acc_stderr": 0.025360603796242553,
+ "acc_norm": 0.7320261437908496,
+ "acc_norm_stderr": 0.025360603796242553
+ },
+ "harness|hendrycksTest-philosophy|5": {
+ "acc": 0.707395498392283,
+ "acc_stderr": 0.02583989833487798,
+ "acc_norm": 0.707395498392283,
+ "acc_norm_stderr": 0.02583989833487798
+ },
+ "harness|hendrycksTest-prehistory|5": {
+ "acc": 0.7530864197530864,
+ "acc_stderr": 0.023993501709042107,
+ "acc_norm": 0.7530864197530864,
+ "acc_norm_stderr": 0.023993501709042107
+ },
+ "harness|hendrycksTest-professional_accounting|5": {
+ "acc": 0.4787234042553192,
+ "acc_stderr": 0.029800481645628693,
+ "acc_norm": 0.4787234042553192,
+ "acc_norm_stderr": 0.029800481645628693
+ },
+ "harness|hendrycksTest-professional_law|5": {
+ "acc": 0.4791395045632334,
+ "acc_stderr": 0.012759117066518015,
+ "acc_norm": 0.4791395045632334,
+ "acc_norm_stderr": 0.012759117066518015
+ },
+ "harness|hendrycksTest-professional_medicine|5": {
+ "acc": 0.7058823529411765,
+ "acc_stderr": 0.02767846864214472,
+ "acc_norm": 0.7058823529411765,
+ "acc_norm_stderr": 0.02767846864214472
+ },
+ "harness|hendrycksTest-professional_psychology|5": {
+ "acc": 0.6862745098039216,
+ "acc_stderr": 0.018771683893528176,
+ "acc_norm": 0.6862745098039216,
+ "acc_norm_stderr": 0.018771683893528176
+ },
+ "harness|hendrycksTest-public_relations|5": {
+ "acc": 0.6818181818181818,
+ "acc_stderr": 0.04461272175910509,
+ "acc_norm": 0.6818181818181818,
+ "acc_norm_stderr": 0.04461272175910509
+ },
+ "harness|hendrycksTest-security_studies|5": {
+ "acc": 0.7346938775510204,
+ "acc_stderr": 0.028263889943784603,
+ "acc_norm": 0.7346938775510204,
+ "acc_norm_stderr": 0.028263889943784603
+ },
+ "harness|hendrycksTest-sociology|5": {
+ "acc": 0.835820895522388,
+ "acc_stderr": 0.026193923544454115,
+ "acc_norm": 0.835820895522388,
+ "acc_norm_stderr": 0.026193923544454115
+ },
+ "harness|hendrycksTest-us_foreign_policy|5": {
+ "acc": 0.85,
+ "acc_stderr": 0.03588702812826371,
+ "acc_norm": 0.85,
+ "acc_norm_stderr": 0.03588702812826371
+ },
+ "harness|hendrycksTest-virology|5": {
+ "acc": 0.5481927710843374,
+ "acc_stderr": 0.03874371556587953,
+ "acc_norm": 0.5481927710843374,
+ "acc_norm_stderr": 0.03874371556587953
+ },
+ "harness|hendrycksTest-world_religions|5": {
+ "acc": 0.8362573099415205,
+ "acc_stderr": 0.028380919596145866,
+ "acc_norm": 0.8362573099415205,
+ "acc_norm_stderr": 0.028380919596145866
+ },
+ "harness|truthfulqa:mc|0": {
+ "mc1": 0.4834761321909425,
+ "mc1_stderr": 0.017493940190057723,
+ "mc2": 0.6447306680251751,
+ "mc2_stderr": 0.015519245883344577
+ },
+ "harness|winogrande|5": {
+ "acc": 0.8366219415943172,
+ "acc_stderr": 0.010390695970273764
+ },
+ "harness|gsm8k|5": {
+ "acc": 0.7202426080363912,
+ "acc_stderr": 0.012364384016735319
+ }
+ }
+
+ ```
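
The Eval block added above is the raw per-task output in lm-evaluation-harness / Open LLM Leaderboard format. If you want a single MMLU number rather than 57 per-subject entries, a minimal sketch like the one below will compute it; it assumes the JSON object has been saved to a local file named results.json, which is an illustrative filename, not something provided by this repository.

```python
# Sketch: average the per-subject MMLU (hendrycksTest) accuracies from the
# results above. Assumes the JSON object was saved to "results.json";
# that filename is an assumption made for this example.
import json

with open("results.json") as f:
    results = json.load(f)

mmlu_accs = [
    entry["acc"]
    for task, entry in results.items()
    if task.startswith("harness|hendrycksTest-")
]
print(f"MMLU subjects evaluated: {len(mmlu_accs)}")
print(f"Mean MMLU accuracy: {sum(mmlu_accs) / len(mmlu_accs):.4f}")
```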
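
The model description mentions a SLERP of other 7B models. For readers unfamiliar with the term, the sketch below shows what spherical linear interpolation between two weight tensors looks like in plain NumPy; it illustrates the general technique rather than mergekit's actual implementation, and the blend factor t = 0.5 and the toy tensors are assumptions made for the example.

```python
# Minimal sketch of SLERP (spherical linear interpolation) between two
# weight tensors, the operation a "slerp" model merge is built on.
# Not mergekit's code; an illustration under assumed inputs.
import numpy as np

def slerp(t: float, a: np.ndarray, b: np.ndarray, eps: float = 1e-8) -> np.ndarray:
    """Spherically interpolate between flattened tensors a and b at factor t."""
    a_flat, b_flat = a.ravel(), b.ravel()
    a_unit = a_flat / (np.linalg.norm(a_flat) + eps)
    b_unit = b_flat / (np.linalg.norm(b_flat) + eps)
    dot = np.clip(np.dot(a_unit, b_unit), -1.0, 1.0)
    # Nearly colinear tensors: fall back to plain linear interpolation.
    if abs(dot) > 1.0 - 1e-6:
        return ((1.0 - t) * a_flat + t * b_flat).reshape(a.shape)
    omega = np.arccos(dot)          # angle between the two tensors
    so = np.sin(omega)
    mixed = (np.sin((1.0 - t) * omega) / so) * a_flat + (np.sin(t * omega) / so) * b_flat
    return mixed.reshape(a.shape)

# Toy example: blend two random "weight matrices" halfway.
rng = np.random.default_rng(0)
w_model_a = rng.normal(size=(4, 4))
w_model_b = rng.normal(size=(4, 4))
w_merged = slerp(0.5, w_model_a, w_model_b)
print(w_merged.shape)  # (4, 4)
```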