MaziyarPanahi committed
Commit a667b51 (parent: dd841bf)

Update README.md (#1)


- Update README.md (83356040506eaee4f6cf8c598e6aae6dcd781232)

Files changed (1): README.md (+389, -1)

README.md CHANGED

Merge of top 7B models with TIES method

> mergekit is a toolkit for merging pre-trained language models. mergekit uses an out-of-core approach to perform unreasonably elaborate merges in resource-constrained situations. Merges can be run entirely on CPU or accelerated with as little as 8 GB of VRAM. Many merging algorithms are supported, with more coming as they catch my attention.
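For context, a TIES merge like this one is normally driven by a small mergekit YAML config passed to the `mergekit-yaml` command. The sketch below is illustrative only: the constituent model names, densities, and weights are placeholders, not the actual source models or settings behind this merge.

```python
# Minimal sketch of producing a TIES merge with mergekit.
# All model names and parameter values below are placeholders (assumptions),
# since the actual constituents of this merge are not listed in this README.
import subprocess
import textwrap

config = textwrap.dedent("""\
    merge_method: ties
    base_model: mistralai/Mistral-7B-v0.1   # placeholder base model
    models:
      - model: example-org/model-a-7b       # placeholder
        parameters:
          density: 0.5
          weight: 0.5
      - model: example-org/model-b-7b       # placeholder
        parameters:
          density: 0.5
          weight: 0.3
    parameters:
      normalize: true
    dtype: float16
    """)

with open("ties-merge.yml", "w") as f:
    f.write(config)

# mergekit's CLI entry point; runs on CPU by default, add --cuda to use a GPU.
subprocess.run(["mergekit-yaml", "ties-merge.yml", "./merged-model"], check=True)
```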

## Eval

![image/png](https://cdn-uploads.huggingface.co/production/uploads/5fd5e18a90b6dc4633f6d292/YdjDNbmytFAPv-VGiuhx7.png)

```python
{
    "all": {
        "acc": 0.6487801278765712,
        "acc_stderr": 0.03219011246717665,
        "acc_norm": 0.6479445077777353,
        "acc_norm_stderr": 0.032868022907407396,
        "mc1": 0.5862913096695227,
        "mc1_stderr": 0.0172408618120998,
        "mc2": 0.7078078883926877,
        "mc2_stderr": 0.015097515102384168
    },
    "harness|arc:challenge|25": {
        "acc": 0.7167235494880546,
        "acc_stderr": 0.013167478735134575,
        "acc_norm": 0.7363481228668942,
        "acc_norm_stderr": 0.012875929151297044
    },
    "harness|hellaswag|10": {
        "acc": 0.7321250746863175,
        "acc_stderr": 0.004419469983939178,
        "acc_norm": 0.8884684325831508,
        "acc_norm_stderr": 0.0031414591751392717
    },
    "harness|hendrycksTest-abstract_algebra|5": {
        "acc": 0.31,
        "acc_stderr": 0.04648231987117316,
        "acc_norm": 0.31,
        "acc_norm_stderr": 0.04648231987117316
    },
    "harness|hendrycksTest-anatomy|5": {
        "acc": 0.6518518518518519,
        "acc_stderr": 0.041153246103369526,
        "acc_norm": 0.6518518518518519,
        "acc_norm_stderr": 0.041153246103369526
    },
    "harness|hendrycksTest-astronomy|5": {
        "acc": 0.7039473684210527,
        "acc_stderr": 0.03715062154998904,
        "acc_norm": 0.7039473684210527,
        "acc_norm_stderr": 0.03715062154998904
    },
    "harness|hendrycksTest-business_ethics|5": {
        "acc": 0.61,
        "acc_stderr": 0.04902071300001975,
        "acc_norm": 0.61,
        "acc_norm_stderr": 0.04902071300001975
    },
    "harness|hendrycksTest-clinical_knowledge|5": {
        "acc": 0.7132075471698113,
        "acc_stderr": 0.02783491252754407,
        "acc_norm": 0.7132075471698113,
        "acc_norm_stderr": 0.02783491252754407
    },
    "harness|hendrycksTest-college_biology|5": {
        "acc": 0.75,
        "acc_stderr": 0.03621034121889507,
        "acc_norm": 0.75,
        "acc_norm_stderr": 0.03621034121889507
    },
    "harness|hendrycksTest-college_chemistry|5": {
        "acc": 0.46,
        "acc_stderr": 0.05009082659620333,
        "acc_norm": 0.46,
        "acc_norm_stderr": 0.05009082659620333
    },
    "harness|hendrycksTest-college_computer_science|5": {
        "acc": 0.55,
        "acc_stderr": 0.05,
        "acc_norm": 0.55,
        "acc_norm_stderr": 0.05
    },
    "harness|hendrycksTest-college_mathematics|5": {
        "acc": 0.3,
        "acc_stderr": 0.046056618647183814,
        "acc_norm": 0.3,
        "acc_norm_stderr": 0.046056618647183814
    },
    "harness|hendrycksTest-college_medicine|5": {
        "acc": 0.6589595375722543,
        "acc_stderr": 0.036146654241808254,
        "acc_norm": 0.6589595375722543,
        "acc_norm_stderr": 0.036146654241808254
    },
    "harness|hendrycksTest-college_physics|5": {
        "acc": 0.43137254901960786,
        "acc_stderr": 0.04928099597287534,
        "acc_norm": 0.43137254901960786,
        "acc_norm_stderr": 0.04928099597287534
    },
    "harness|hendrycksTest-computer_security|5": {
        "acc": 0.77,
        "acc_stderr": 0.04229525846816506,
        "acc_norm": 0.77,
        "acc_norm_stderr": 0.04229525846816506
    },
    "harness|hendrycksTest-conceptual_physics|5": {
        "acc": 0.548936170212766,
        "acc_stderr": 0.032529096196131965,
        "acc_norm": 0.548936170212766,
        "acc_norm_stderr": 0.032529096196131965
    },
    "harness|hendrycksTest-econometrics|5": {
        "acc": 0.49122807017543857,
        "acc_stderr": 0.04702880432049615,
        "acc_norm": 0.49122807017543857,
        "acc_norm_stderr": 0.04702880432049615
    },
    "harness|hendrycksTest-electrical_engineering|5": {
        "acc": 0.5517241379310345,
        "acc_stderr": 0.04144311810878152,
        "acc_norm": 0.5517241379310345,
        "acc_norm_stderr": 0.04144311810878152
    },
    "harness|hendrycksTest-elementary_mathematics|5": {
        "acc": 0.4126984126984127,
        "acc_stderr": 0.025355741263055277,
        "acc_norm": 0.4126984126984127,
        "acc_norm_stderr": 0.025355741263055277
    },
    "harness|hendrycksTest-formal_logic|5": {
        "acc": 0.49206349206349204,
        "acc_stderr": 0.044715725362943486,
        "acc_norm": 0.49206349206349204,
        "acc_norm_stderr": 0.044715725362943486
    },
    "harness|hendrycksTest-global_facts|5": {
        "acc": 0.35,
        "acc_stderr": 0.047937248544110196,
        "acc_norm": 0.35,
        "acc_norm_stderr": 0.047937248544110196
    },
    "harness|hendrycksTest-high_school_biology|5": {
        "acc": 0.7967741935483871,
        "acc_stderr": 0.02289168798455496,
        "acc_norm": 0.7967741935483871,
        "acc_norm_stderr": 0.02289168798455496
    },
    "harness|hendrycksTest-high_school_chemistry|5": {
        "acc": 0.5024630541871922,
        "acc_stderr": 0.035179450386910616,
        "acc_norm": 0.5024630541871922,
        "acc_norm_stderr": 0.035179450386910616
    },
    "harness|hendrycksTest-high_school_computer_science|5": {
        "acc": 0.7,
        "acc_stderr": 0.046056618647183814,
        "acc_norm": 0.7,
        "acc_norm_stderr": 0.046056618647183814
    },
    "harness|hendrycksTest-high_school_european_history|5": {
        "acc": 0.7575757575757576,
        "acc_stderr": 0.03346409881055953,
        "acc_norm": 0.7575757575757576,
        "acc_norm_stderr": 0.03346409881055953
    },
    "harness|hendrycksTest-high_school_geography|5": {
        "acc": 0.803030303030303,
        "acc_stderr": 0.028335609732463362,
        "acc_norm": 0.803030303030303,
        "acc_norm_stderr": 0.028335609732463362
    },
    "harness|hendrycksTest-high_school_government_and_politics|5": {
        "acc": 0.9067357512953368,
        "acc_stderr": 0.020986854593289733,
        "acc_norm": 0.9067357512953368,
        "acc_norm_stderr": 0.020986854593289733
    },
    "harness|hendrycksTest-high_school_macroeconomics|5": {
        "acc": 0.6487179487179487,
        "acc_stderr": 0.024203665177902803,
        "acc_norm": 0.6487179487179487,
        "acc_norm_stderr": 0.024203665177902803
    },
    "harness|hendrycksTest-high_school_mathematics|5": {
        "acc": 0.3333333333333333,
        "acc_stderr": 0.02874204090394848,
        "acc_norm": 0.3333333333333333,
        "acc_norm_stderr": 0.02874204090394848
    },
    "harness|hendrycksTest-high_school_microeconomics|5": {
        "acc": 0.6554621848739496,
        "acc_stderr": 0.03086868260412162,
        "acc_norm": 0.6554621848739496,
        "acc_norm_stderr": 0.03086868260412162
    },
    "harness|hendrycksTest-high_school_physics|5": {
        "acc": 0.32450331125827814,
        "acc_stderr": 0.038227469376587525,
        "acc_norm": 0.32450331125827814,
        "acc_norm_stderr": 0.038227469376587525
    },
    "harness|hendrycksTest-high_school_psychology|5": {
        "acc": 0.8403669724770643,
        "acc_stderr": 0.015703498348461763,
        "acc_norm": 0.8403669724770643,
        "acc_norm_stderr": 0.015703498348461763
    },
    "harness|hendrycksTest-high_school_statistics|5": {
        "acc": 0.5046296296296297,
        "acc_stderr": 0.03409825519163572,
        "acc_norm": 0.5046296296296297,
        "acc_norm_stderr": 0.03409825519163572
    },
    "harness|hendrycksTest-high_school_us_history|5": {
        "acc": 0.8235294117647058,
        "acc_stderr": 0.026756401538078962,
        "acc_norm": 0.8235294117647058,
        "acc_norm_stderr": 0.026756401538078962
    },
    "harness|hendrycksTest-high_school_world_history|5": {
        "acc": 0.7721518987341772,
        "acc_stderr": 0.02730348459906944,
        "acc_norm": 0.7721518987341772,
        "acc_norm_stderr": 0.02730348459906944
    },
    "harness|hendrycksTest-human_aging|5": {
        "acc": 0.6816143497757847,
        "acc_stderr": 0.03126580522513713,
        "acc_norm": 0.6816143497757847,
        "acc_norm_stderr": 0.03126580522513713
    },
    "harness|hendrycksTest-human_sexuality|5": {
        "acc": 0.7862595419847328,
        "acc_stderr": 0.0359546161177469,
        "acc_norm": 0.7862595419847328,
        "acc_norm_stderr": 0.0359546161177469
    },
    "harness|hendrycksTest-international_law|5": {
        "acc": 0.7851239669421488,
        "acc_stderr": 0.037494924487096966,
        "acc_norm": 0.7851239669421488,
        "acc_norm_stderr": 0.037494924487096966
    },
    "harness|hendrycksTest-jurisprudence|5": {
        "acc": 0.7777777777777778,
        "acc_stderr": 0.0401910747255735,
        "acc_norm": 0.7777777777777778,
        "acc_norm_stderr": 0.0401910747255735
    },
    "harness|hendrycksTest-logical_fallacies|5": {
        "acc": 0.7423312883435583,
        "acc_stderr": 0.03436150827846917,
        "acc_norm": 0.7423312883435583,
        "acc_norm_stderr": 0.03436150827846917
    },
    "harness|hendrycksTest-machine_learning|5": {
        "acc": 0.42857142857142855,
        "acc_stderr": 0.04697113923010212,
        "acc_norm": 0.42857142857142855,
        "acc_norm_stderr": 0.04697113923010212
    },
    "harness|hendrycksTest-management|5": {
        "acc": 0.7475728155339806,
        "acc_stderr": 0.04301250399690878,
        "acc_norm": 0.7475728155339806,
        "acc_norm_stderr": 0.04301250399690878
    },
    "harness|hendrycksTest-marketing|5": {
        "acc": 0.8846153846153846,
        "acc_stderr": 0.02093019318517933,
        "acc_norm": 0.8846153846153846,
        "acc_norm_stderr": 0.02093019318517933
    },
    "harness|hendrycksTest-medical_genetics|5": {
        "acc": 0.7,
        "acc_stderr": 0.046056618647183814,
        "acc_norm": 0.7,
        "acc_norm_stderr": 0.046056618647183814
    },
    "harness|hendrycksTest-miscellaneous|5": {
        "acc": 0.80970625798212,
        "acc_stderr": 0.014036945850381396,
        "acc_norm": 0.80970625798212,
        "acc_norm_stderr": 0.014036945850381396
    },
    "harness|hendrycksTest-moral_disputes|5": {
        "acc": 0.7369942196531792,
        "acc_stderr": 0.023703099525258172,
        "acc_norm": 0.7369942196531792,
        "acc_norm_stderr": 0.023703099525258172
    },
    "harness|hendrycksTest-moral_scenarios|5": {
        "acc": 0.47150837988826816,
        "acc_stderr": 0.016695329746015796,
        "acc_norm": 0.47150837988826816,
        "acc_norm_stderr": 0.016695329746015796
    },
    "harness|hendrycksTest-nutrition|5": {
        "acc": 0.7189542483660131,
        "acc_stderr": 0.025738854797818733,
        "acc_norm": 0.7189542483660131,
        "acc_norm_stderr": 0.025738854797818733
    },
    "harness|hendrycksTest-philosophy|5": {
        "acc": 0.7170418006430869,
        "acc_stderr": 0.025583062489984813,
        "acc_norm": 0.7170418006430869,
        "acc_norm_stderr": 0.025583062489984813
    },
    "harness|hendrycksTest-prehistory|5": {
        "acc": 0.7407407407407407,
        "acc_stderr": 0.024383665531035457,
        "acc_norm": 0.7407407407407407,
        "acc_norm_stderr": 0.024383665531035457
    },
    "harness|hendrycksTest-professional_accounting|5": {
        "acc": 0.475177304964539,
        "acc_stderr": 0.029790719243829727,
        "acc_norm": 0.475177304964539,
        "acc_norm_stderr": 0.029790719243829727
    },
    "harness|hendrycksTest-professional_law|5": {
        "acc": 0.470013037809648,
        "acc_stderr": 0.01274724896707906,
        "acc_norm": 0.470013037809648,
        "acc_norm_stderr": 0.01274724896707906
    },
    "harness|hendrycksTest-professional_medicine|5": {
        "acc": 0.6691176470588235,
        "acc_stderr": 0.028582709753898445,
        "acc_norm": 0.6691176470588235,
        "acc_norm_stderr": 0.028582709753898445
    },
    "harness|hendrycksTest-professional_psychology|5": {
        "acc": 0.6584967320261438,
        "acc_stderr": 0.019184639328092487,
        "acc_norm": 0.6584967320261438,
        "acc_norm_stderr": 0.019184639328092487
    },
    "harness|hendrycksTest-public_relations|5": {
        "acc": 0.6818181818181818,
        "acc_stderr": 0.044612721759105085,
        "acc_norm": 0.6818181818181818,
        "acc_norm_stderr": 0.044612721759105085
    },
    "harness|hendrycksTest-security_studies|5": {
        "acc": 0.7306122448979592,
        "acc_stderr": 0.02840125202902294,
        "acc_norm": 0.7306122448979592,
        "acc_norm_stderr": 0.02840125202902294
    },
    "harness|hendrycksTest-sociology|5": {
        "acc": 0.835820895522388,
        "acc_stderr": 0.026193923544454125,
        "acc_norm": 0.835820895522388,
        "acc_norm_stderr": 0.026193923544454125
    },
    "harness|hendrycksTest-us_foreign_policy|5": {
        "acc": 0.85,
        "acc_stderr": 0.03588702812826371,
        "acc_norm": 0.85,
        "acc_norm_stderr": 0.03588702812826371
    },
    "harness|hendrycksTest-virology|5": {
        "acc": 0.5542168674698795,
        "acc_stderr": 0.03869543323472101,
        "acc_norm": 0.5542168674698795,
        "acc_norm_stderr": 0.03869543323472101
    },
    "harness|hendrycksTest-world_religions|5": {
        "acc": 0.8245614035087719,
        "acc_stderr": 0.029170885500727665,
        "acc_norm": 0.8245614035087719,
        "acc_norm_stderr": 0.029170885500727665
    },
    "harness|truthfulqa:mc|0": {
        "mc1": 0.5862913096695227,
        "mc1_stderr": 0.0172408618120998,
        "mc2": 0.7078078883926877,
        "mc2_stderr": 0.015097515102384168
    },
    "harness|winogrande|5": {
        "acc": 0.8579321231254933,
        "acc_stderr": 0.009812000391679367
    },
    "harness|gsm8k|5": {
        "acc": 0.6648976497346475,
        "acc_stderr": 0.013001948176422954
    }
}
```
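To turn the raw harness output above into headline numbers, the per-task entries can be grouped and averaged. A minimal sketch, assuming the block above has been saved to a file named `results.json` (a hypothetical filename):

```python
# Summarize the evaluation results shown above. The `hendrycksTest-*` entries
# are the individual MMLU subtasks; the other benchmarks report a single
# headline metric each.
import json
import statistics

with open("results.json") as f:  # hypothetical path to the block above
    results = json.load(f)

# Mean accuracy across all MMLU (hendrycksTest) subtasks.
mmlu_acc = [
    scores["acc"]
    for task, scores in results.items()
    if task.startswith("harness|hendrycksTest-")
]
print(f"MMLU subtasks: {len(mmlu_acc)}, mean acc: {statistics.mean(mmlu_acc):.4f}")

# Headline metrics for the remaining benchmarks.
print("ARC acc_norm:      ", results["harness|arc:challenge|25"]["acc_norm"])
print("HellaSwag acc_norm:", results["harness|hellaswag|10"]["acc_norm"])
print("TruthfulQA mc2:    ", results["harness|truthfulqa:mc|0"]["mc2"])
print("Winogrande acc:    ", results["harness|winogrande|5"]["acc"])
print("GSM8K acc:         ", results["harness|gsm8k|5"]["acc"])
```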