burtenshaw committed on
Commit
297e09a
1 Parent(s): 9c0b9ea

notebook: add groupby examples

Browse files
Files changed (1) hide show
  1. groupby_based_on_type.ipynb +776 -0
groupby_based_on_type.ipynb ADDED
@@ -0,0 +1,776 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": 2,
6
+ "metadata": {},
7
+ "outputs": [],
8
+ "source": [
9
+ "import pandas as pd"
10
+ ]
11
+ },
12
+ {
13
+ "cell_type": "code",
14
+ "execution_count": 3,
15
+ "metadata": {},
16
+ "outputs": [],
17
+ "source": [
18
+ "df = pd.read_csv(\n",
19
+ " \"model_repos_100_most_used_bases_250_most_downloaded - sorted-by-author.csv\"\n",
20
+ ")"
21
+ ]
22
+ },
23
+ {
24
+ "cell_type": "code",
25
+ "execution_count": 11,
26
+ "metadata": {},
27
+ "outputs": [
28
+ {
29
+ "data": {
30
+ "text/html": [
31
+ "<div>\n",
32
+ "<style scoped>\n",
33
+ " .dataframe tbody tr th:only-of-type {\n",
34
+ " vertical-align: middle;\n",
35
+ " }\n",
36
+ "\n",
37
+ " .dataframe tbody tr th {\n",
38
+ " vertical-align: top;\n",
39
+ " }\n",
40
+ "\n",
41
+ " .dataframe thead th {\n",
42
+ " text-align: right;\n",
43
+ " }\n",
44
+ "</style>\n",
45
+ "<table border=\"1\" class=\"dataframe\">\n",
46
+ " <thead>\n",
47
+ " <tr style=\"text-align: right;\">\n",
48
+ " <th></th>\n",
49
+ " <th>Unnamed: 0</th>\n",
50
+ " <th>_id</th>\n",
51
+ " <th>link</th>\n",
52
+ " <th>id</th>\n",
53
+ " <th>author</th>\n",
54
+ " <th>model-producer-category</th>\n",
55
+ " <th>data-url</th>\n",
56
+ " <th>release-collaborators [y/n]</th>\n",
57
+ " <th>notes (e.g., mention collaborators and tasks)</th>\n",
58
+ " <th>verified by [y]</th>\n",
59
+ " <th>...</th>\n",
60
+ " <th>parent-model-release-collaborators [y/n]</th>\n",
61
+ " <th>notes (e.g., mention collaborators and tasks).1</th>\n",
62
+ " <th>verified by</th>\n",
63
+ " <th>base_model_downloads</th>\n",
64
+ " <th>base_model_pipeline</th>\n",
65
+ " <th>base_model_children_count</th>\n",
66
+ " <th>adapter</th>\n",
67
+ " <th>merge</th>\n",
68
+ " <th>finetune</th>\n",
69
+ " <th>quantized</th>\n",
70
+ " </tr>\n",
71
+ " </thead>\n",
72
+ " <tbody>\n",
73
+ " <tr>\n",
74
+ " <th>0</th>\n",
75
+ " <td>365296</td>\n",
76
+ " <td>65441824276a59f143ebfe1e</td>\n",
77
+ " <td>https://huggingface.co/TheBloke/OpenHermes-2.5...</td>\n",
78
+ " <td>TheBloke/OpenHermes-2.5-Mistral-7B-AWQ</td>\n",
79
+ " <td>TheBloke</td>\n",
80
+ " <td>Individual</td>\n",
81
+ " <td>NaN</td>\n",
82
+ " <td>NaN</td>\n",
83
+ " <td>NaN</td>\n",
84
+ " <td>NaN</td>\n",
85
+ " <td>...</td>\n",
86
+ " <td>NaN</td>\n",
87
+ " <td>Cofounder of Nous Research, https://x.com/tekn...</td>\n",
88
+ " <td>NaN</td>\n",
89
+ " <td>202</td>\n",
90
+ " <td>text-generation</td>\n",
91
+ " <td>202</td>\n",
92
+ " <td>0</td>\n",
93
+ " <td>0</td>\n",
94
+ " <td>0</td>\n",
95
+ " <td>0</td>\n",
96
+ " </tr>\n",
97
+ " <tr>\n",
98
+ " <th>1</th>\n",
99
+ " <td>365297</td>\n",
100
+ " <td>654418242996405c238f5eea</td>\n",
101
+ " <td>https://huggingface.co/TheBloke/OpenHermes-2.5...</td>\n",
102
+ " <td>TheBloke/OpenHermes-2.5-Mistral-7B-GGUF</td>\n",
103
+ " <td>TheBloke</td>\n",
104
+ " <td>Individual</td>\n",
105
+ " <td>NaN</td>\n",
106
+ " <td>NaN</td>\n",
107
+ " <td>NaN</td>\n",
108
+ " <td>NaN</td>\n",
109
+ " <td>...</td>\n",
110
+ " <td>NaN</td>\n",
111
+ " <td>Cofounder of Nous Research, https://x.com/tekn...</td>\n",
112
+ " <td>NaN</td>\n",
113
+ " <td>202</td>\n",
114
+ " <td>text-generation</td>\n",
115
+ " <td>202</td>\n",
116
+ " <td>0</td>\n",
117
+ " <td>0</td>\n",
118
+ " <td>1</td>\n",
119
+ " <td>0</td>\n",
120
+ " </tr>\n",
121
+ " <tr>\n",
122
+ " <th>2</th>\n",
123
+ " <td>760607</td>\n",
124
+ " <td>6698d8a0653e4babe21e1e7d</td>\n",
125
+ " <td>https://huggingface.co/meta-llama/Llama-3.1-8B...</td>\n",
126
+ " <td>meta-llama/Llama-3.1-8B-Instruct</td>\n",
127
+ " <td>meta-llama</td>\n",
128
+ " <td>Large enterprise</td>\n",
129
+ " <td>https://huggingface.co/meta-llama</td>\n",
130
+ " <td>NaN</td>\n",
131
+ " <td>Meta's Llama team</td>\n",
132
+ " <td>NaN</td>\n",
133
+ " <td>...</td>\n",
134
+ " <td>No</td>\n",
135
+ " <td>NaN</td>\n",
136
+ " <td>NaN</td>\n",
137
+ " <td>254</td>\n",
138
+ " <td>text-generation</td>\n",
139
+ " <td>254</td>\n",
140
+ " <td>377</td>\n",
141
+ " <td>22</td>\n",
142
+ " <td>241</td>\n",
143
+ " <td>182</td>\n",
144
+ " </tr>\n",
145
+ " <tr>\n",
146
+ " <th>3</th>\n",
147
+ " <td>718125</td>\n",
148
+ " <td>667928d58601cd537f63c846</td>\n",
149
+ " <td>https://huggingface.co/google/gemma-2-9b-it</td>\n",
150
+ " <td>google/gemma-2-9b-it</td>\n",
151
+ " <td>google</td>\n",
152
+ " <td>Large enterprise</td>\n",
153
+ " <td>NaN</td>\n",
154
+ " <td>NaN</td>\n",
155
+ " <td>NaN</td>\n",
156
+ " <td>NaN</td>\n",
157
+ " <td>...</td>\n",
158
+ " <td>No</td>\n",
159
+ " <td>NaN</td>\n",
160
+ " <td>NaN</td>\n",
161
+ " <td>105</td>\n",
162
+ " <td>text-generation</td>\n",
163
+ " <td>105</td>\n",
164
+ " <td>25</td>\n",
165
+ " <td>6</td>\n",
166
+ " <td>33</td>\n",
167
+ " <td>69</td>\n",
168
+ " </tr>\n",
169
+ " <tr>\n",
170
+ " <th>4</th>\n",
171
+ " <td>680671</td>\n",
172
+ " <td>665ee74789b4fd787a568664</td>\n",
173
+ " <td>https://huggingface.co/Qwen/Qwen2-7B-Instruct</td>\n",
174
+ " <td>Qwen/Qwen2-7B-Instruct</td>\n",
175
+ " <td>Qwen</td>\n",
176
+ " <td>Large enterprise</td>\n",
177
+ " <td>NaN</td>\n",
178
+ " <td>NaN</td>\n",
179
+ " <td>NaN</td>\n",
180
+ " <td>NaN</td>\n",
181
+ " <td>...</td>\n",
182
+ " <td>No</td>\n",
183
+ " <td>NaN</td>\n",
184
+ " <td>NaN</td>\n",
185
+ " <td>266</td>\n",
186
+ " <td>text-generation</td>\n",
187
+ " <td>266</td>\n",
188
+ " <td>49</td>\n",
189
+ " <td>3</td>\n",
190
+ " <td>42</td>\n",
191
+ " <td>69</td>\n",
192
+ " </tr>\n",
193
+ " </tbody>\n",
194
+ "</table>\n",
195
+ "<p>5 rows × 39 columns</p>\n",
196
+ "</div>"
197
+ ],
198
+ "text/plain": [
199
+ " Unnamed: 0 _id \\\n",
200
+ "0 365296 65441824276a59f143ebfe1e \n",
201
+ "1 365297 654418242996405c238f5eea \n",
202
+ "2 760607 6698d8a0653e4babe21e1e7d \n",
203
+ "3 718125 667928d58601cd537f63c846 \n",
204
+ "4 680671 665ee74789b4fd787a568664 \n",
205
+ "\n",
206
+ " link \\\n",
207
+ "0 https://huggingface.co/TheBloke/OpenHermes-2.5... \n",
208
+ "1 https://huggingface.co/TheBloke/OpenHermes-2.5... \n",
209
+ "2 https://huggingface.co/meta-llama/Llama-3.1-8B... \n",
210
+ "3 https://huggingface.co/google/gemma-2-9b-it \n",
211
+ "4 https://huggingface.co/Qwen/Qwen2-7B-Instruct \n",
212
+ "\n",
213
+ " id author \\\n",
214
+ "0 TheBloke/OpenHermes-2.5-Mistral-7B-AWQ TheBloke \n",
215
+ "1 TheBloke/OpenHermes-2.5-Mistral-7B-GGUF TheBloke \n",
216
+ "2 meta-llama/Llama-3.1-8B-Instruct meta-llama \n",
217
+ "3 google/gemma-2-9b-it google \n",
218
+ "4 Qwen/Qwen2-7B-Instruct Qwen \n",
219
+ "\n",
220
+ " model-producer-category data-url \\\n",
221
+ "0 Individual NaN \n",
222
+ "1 Individual NaN \n",
223
+ "2 Large enterprise https://huggingface.co/meta-llama \n",
224
+ "3 Large enterprise NaN \n",
225
+ "4 Large enterprise NaN \n",
226
+ "\n",
227
+ " release-collaborators [y/n] notes (e.g., mention collaborators and tasks) \\\n",
228
+ "0 NaN NaN \n",
229
+ "1 NaN NaN \n",
230
+ "2 NaN Meta's Llama team \n",
231
+ "3 NaN NaN \n",
232
+ "4 NaN NaN \n",
233
+ "\n",
234
+ " verified by [y] ... parent-model-release-collaborators [y/n] \\\n",
235
+ "0 NaN ... NaN \n",
236
+ "1 NaN ... NaN \n",
237
+ "2 NaN ... No \n",
238
+ "3 NaN ... No \n",
239
+ "4 NaN ... No \n",
240
+ "\n",
241
+ " notes (e.g., mention collaborators and tasks).1 verified by \\\n",
242
+ "0 Cofounder of Nous Research, https://x.com/tekn... NaN \n",
243
+ "1 Cofounder of Nous Research, https://x.com/tekn... NaN \n",
244
+ "2 NaN NaN \n",
245
+ "3 NaN NaN \n",
246
+ "4 NaN NaN \n",
247
+ "\n",
248
+ " base_model_downloads base_model_pipeline base_model_children_count \\\n",
249
+ "0 202 text-generation 202 \n",
250
+ "1 202 text-generation 202 \n",
251
+ "2 254 text-generation 254 \n",
252
+ "3 105 text-generation 105 \n",
253
+ "4 266 text-generation 266 \n",
254
+ "\n",
255
+ " adapter merge finetune quantized \n",
256
+ "0 0 0 0 0 \n",
257
+ "1 0 0 1 0 \n",
258
+ "2 377 22 241 182 \n",
259
+ "3 25 6 33 69 \n",
260
+ "4 49 3 42 69 \n",
261
+ "\n",
262
+ "[5 rows x 39 columns]"
263
+ ]
264
+ },
265
+ "execution_count": 11,
266
+ "metadata": {},
267
+ "output_type": "execute_result"
268
+ }
269
+ ],
270
+ "source": [
271
+ "df.head()"
272
+ ]
273
+ },
274
+ {
275
+ "cell_type": "code",
276
+ "execution_count": 16,
277
+ "metadata": {},
278
+ "outputs": [
279
+ {
280
+ "data": {
281
+ "text/plain": [
282
+ "base-model-producer-category model-producer-category \n",
283
+ "Individual Individual 2\n",
284
+ "Large enterprise Individual 44\n",
285
+ " Small- & medium-sized enterprise 40\n",
286
+ " Non-profit / grassroots entity 16\n",
287
+ " Not enough info 14\n",
288
+ " University / public research institute 14\n",
289
+ " Large enterprise 13\n",
290
+ "Public research institute Small- & medium-sized enterprise 1\n",
291
+ "Small- & medium-sized enterprise Individual 41\n",
292
+ " Small- & medium-sized enterprise 40\n",
293
+ " Not enough info 7\n",
294
+ " Large enterprise 6\n",
295
+ " Non-profit / grassroots entity 5\n",
296
+ " University / public research institute 1\n",
297
+ "University / public research institute Individual 3\n",
298
+ " Not enough info 2\n",
299
+ " Small- & medium-sized enterprise 1\n",
300
+ "Name: count, dtype: int64"
301
+ ]
302
+ },
303
+ "execution_count": 16,
304
+ "metadata": {},
305
+ "output_type": "execute_result"
306
+ }
307
+ ],
308
+ "source": [
309
+ "df.groupby([\"base-model-producer-category\"])[\"model-producer-category\"].value_counts()"
310
+ ]
311
+ },
312
+ {
313
+ "cell_type": "code",
314
+ "execution_count": 17,
315
+ "metadata": {},
316
+ "outputs": [
317
+ {
318
+ "data": {
319
+ "text/plain": [
320
+ "base-model-producer-category hereditary_type model-producer-category \n",
321
+ "Individual quantize Individual 2\n",
322
+ "Large enterprise finetune Small- & medium-sized enterprise 28\n",
323
+ " Individual 22\n",
324
+ " University / public research institute 13\n",
325
+ " Not enough info 11\n",
326
+ " Non-profit / grassroots entity 7\n",
327
+ " Large enterprise 5\n",
328
+ " quantize Individual 22\n",
329
+ " Small- & medium-sized enterprise 12\n",
330
+ " Non-profit / grassroots entity 9\n",
331
+ " Large enterprise 8\n",
332
+ " Not enough info 3\n",
333
+ " University / public research institute 1\n",
334
+ "Public research institute finetune Small- & medium-sized enterprise 1\n",
335
+ "Small- & medium-sized enterprise finetune Small- & medium-sized enterprise 28\n",
336
+ " Individual 18\n",
337
+ " Not enough info 7\n",
338
+ " Large enterprise 6\n",
339
+ " Non-profit / grassroots entity 4\n",
340
+ " University / public research institute 1\n",
341
+ " merge Small- & medium-sized enterprise 2\n",
342
+ " quantize Individual 23\n",
343
+ " Small- & medium-sized enterprise 10\n",
344
+ " Non-profit / grassroots entity 1\n",
345
+ "University / public research institute finetune Not enough info 2\n",
346
+ " Small- & medium-sized enterprise 1\n",
347
+ " merge Individual 1\n",
348
+ " quantize Individual 2\n",
349
+ "Name: count, dtype: int64"
350
+ ]
351
+ },
352
+ "execution_count": 17,
353
+ "metadata": {},
354
+ "output_type": "execute_result"
355
+ }
356
+ ],
357
+ "source": [
358
+ "df.groupby([\"base-model-producer-category\", \"hereditary_type\"])[\n",
359
+ " \"model-producer-category\"\n",
360
+ "].value_counts()"
361
+ ]
362
+ },
363
+ {
364
+ "cell_type": "code",
365
+ "execution_count": 51,
366
+ "metadata": {},
367
+ "outputs": [
368
+ {
369
+ "name": "stderr",
370
+ "output_type": "stream",
371
+ "text": [
372
+ "/var/folders/8z/jnnncfnj7_lfxym0262z4p180000gn/T/ipykernel_19294/1733077006.py:6: DeprecationWarning: DataFrameGroupBy.apply operated on the grouping columns. This behavior is deprecated, and in a future version of pandas the grouping columns will be excluded from the operation. Either pass `include_groups=False` to exclude the groupings or explicitly select the grouping columns after groupby to silence this warning.\n",
373
+ " ).apply(\n"
374
+ ]
375
+ },
376
+ {
377
+ "data": {
378
+ "text/html": [
379
+ "<div>\n",
380
+ "<style scoped>\n",
381
+ " .dataframe tbody tr th:only-of-type {\n",
382
+ " vertical-align: middle;\n",
383
+ " }\n",
384
+ "\n",
385
+ " .dataframe tbody tr th {\n",
386
+ " vertical-align: top;\n",
387
+ " }\n",
388
+ "\n",
389
+ " .dataframe thead th {\n",
390
+ " text-align: right;\n",
391
+ " }\n",
392
+ "</style>\n",
393
+ "<table border=\"1\" class=\"dataframe\">\n",
394
+ " <thead>\n",
395
+ " <tr style=\"text-align: right;\">\n",
396
+ " <th></th>\n",
397
+ " <th>base-model-producer-category</th>\n",
398
+ " <th>hereditary_type</th>\n",
399
+ " <th>model-producer-category</th>\n",
400
+ " <th>id</th>\n",
401
+ " <th>downloads</th>\n",
402
+ " </tr>\n",
403
+ " </thead>\n",
404
+ " <tbody>\n",
405
+ " <tr>\n",
406
+ " <th>0</th>\n",
407
+ " <td>Individual</td>\n",
408
+ " <td>quantize</td>\n",
409
+ " <td>Individual</td>\n",
410
+ " <td>TheBloke/OpenHermes-2.5-Mistral-7B-AWQ</td>\n",
411
+ " <td>11221</td>\n",
412
+ " </tr>\n",
413
+ " <tr>\n",
414
+ " <th>1</th>\n",
415
+ " <td>Large enterprise</td>\n",
416
+ " <td>finetune</td>\n",
417
+ " <td>Individual</td>\n",
418
+ " <td>aaditya/Llama3-OpenBioLLM-8B</td>\n",
419
+ " <td>162094</td>\n",
420
+ " </tr>\n",
421
+ " <tr>\n",
422
+ " <th>2</th>\n",
423
+ " <td>Large enterprise</td>\n",
424
+ " <td>finetune</td>\n",
425
+ " <td>Large enterprise</td>\n",
426
+ " <td>meta-llama/Llama-3.1-8B-Instruct</td>\n",
427
+ " <td>3134749</td>\n",
428
+ " </tr>\n",
429
+ " <tr>\n",
430
+ " <th>3</th>\n",
431
+ " <td>Large enterprise</td>\n",
432
+ " <td>finetune</td>\n",
433
+ " <td>Non-profit / grassroots entity</td>\n",
434
+ " <td>maum-ai/Llama-3-MAAL-8B-Instruct-v0.1</td>\n",
435
+ " <td>18172</td>\n",
436
+ " </tr>\n",
437
+ " <tr>\n",
438
+ " <th>4</th>\n",
439
+ " <td>Large enterprise</td>\n",
440
+ " <td>finetune</td>\n",
441
+ " <td>Not enough info</td>\n",
442
+ " <td>smeby/Qwen-Qwen1.5-7B-1724632776</td>\n",
443
+ " <td>49744</td>\n",
444
+ " </tr>\n",
445
+ " <tr>\n",
446
+ " <th>5</th>\n",
447
+ " <td>Large enterprise</td>\n",
448
+ " <td>finetune</td>\n",
449
+ " <td>Small- &amp; medium-sized enterprise</td>\n",
450
+ " <td>NousResearch/Hermes-3-Llama-3.1-8B</td>\n",
451
+ " <td>42774</td>\n",
452
+ " </tr>\n",
453
+ " <tr>\n",
454
+ " <th>6</th>\n",
455
+ " <td>Large enterprise</td>\n",
456
+ " <td>finetune</td>\n",
457
+ " <td>University / public research institute</td>\n",
458
+ " <td>princeton-nlp/gemma-2-9b-it-SimPO</td>\n",
459
+ " <td>19520</td>\n",
460
+ " </tr>\n",
461
+ " <tr>\n",
462
+ " <th>7</th>\n",
463
+ " <td>Large enterprise</td>\n",
464
+ " <td>quantize</td>\n",
465
+ " <td>Individual</td>\n",
466
+ " <td>TheBloke/Llama-2-7B-Chat-GPTQ</td>\n",
467
+ " <td>110199</td>\n",
468
+ " </tr>\n",
469
+ " <tr>\n",
470
+ " <th>8</th>\n",
471
+ " <td>Large enterprise</td>\n",
472
+ " <td>quantize</td>\n",
473
+ " <td>Large enterprise</td>\n",
474
+ " <td>Qwen/Qwen2-0.5B-Instruct</td>\n",
475
+ " <td>270072</td>\n",
476
+ " </tr>\n",
477
+ " <tr>\n",
478
+ " <th>9</th>\n",
479
+ " <td>Large enterprise</td>\n",
480
+ " <td>quantize</td>\n",
481
+ " <td>Non-profit / grassroots entity</td>\n",
482
+ " <td>mlc-ai/Llama-2-7b-chat-hf-q4f32_1-MLC</td>\n",
483
+ " <td>28221</td>\n",
484
+ " </tr>\n",
485
+ " <tr>\n",
486
+ " <th>10</th>\n",
487
+ " <td>Large enterprise</td>\n",
488
+ " <td>quantize</td>\n",
489
+ " <td>Not enough info</td>\n",
490
+ " <td>QuantFactory/Qwen2-0.5B-GGUF</td>\n",
491
+ " <td>15865</td>\n",
492
+ " </tr>\n",
493
+ " <tr>\n",
494
+ " <th>11</th>\n",
495
+ " <td>Large enterprise</td>\n",
496
+ " <td>quantize</td>\n",
497
+ " <td>Small- &amp; medium-sized enterprise</td>\n",
498
+ " <td>SanctumAI/Meta-Llama-3.1-8B-Instruct-GGUF</td>\n",
499
+ " <td>269192</td>\n",
500
+ " </tr>\n",
501
+ " <tr>\n",
502
+ " <th>12</th>\n",
503
+ " <td>Large enterprise</td>\n",
504
+ " <td>quantize</td>\n",
505
+ " <td>University / public research institute</td>\n",
506
+ " <td>MLP-KTLim/llama-3-Korean-Bllossom-8B</td>\n",
507
+ " <td>85510</td>\n",
508
+ " </tr>\n",
509
+ " <tr>\n",
510
+ " <th>13</th>\n",
511
+ " <td>Public research institute</td>\n",
512
+ " <td>finetune</td>\n",
513
+ " <td>Small- &amp; medium-sized enterprise</td>\n",
514
+ " <td>neuralmagic/TinyLlama-1.1B-Chat-v1.0-marlin</td>\n",
515
+ " <td>5152</td>\n",
516
+ " </tr>\n",
517
+ " <tr>\n",
518
+ " <th>14</th>\n",
519
+ " <td>Small- &amp; medium-sized enterprise</td>\n",
520
+ " <td>finetune</td>\n",
521
+ " <td>Individual</td>\n",
522
+ " <td>silent666/01-ai-Yi-1.5-9B-1725825607</td>\n",
523
+ " <td>20833</td>\n",
524
+ " </tr>\n",
525
+ " <tr>\n",
526
+ " <th>15</th>\n",
527
+ " <td>Small- &amp; medium-sized enterprise</td>\n",
528
+ " <td>finetune</td>\n",
529
+ " <td>Large enterprise</td>\n",
530
+ " <td>mistralai/Mixtral-8x7B-Instruct-v0.1</td>\n",
531
+ " <td>578188</td>\n",
532
+ " </tr>\n",
533
+ " <tr>\n",
534
+ " <th>16</th>\n",
535
+ " <td>Small- &amp; medium-sized enterprise</td>\n",
536
+ " <td>finetune</td>\n",
537
+ " <td>Non-profit / grassroots entity</td>\n",
538
+ " <td>openchat/openchat-3.5-0106</td>\n",
539
+ " <td>45756</td>\n",
540
+ " </tr>\n",
541
+ " <tr>\n",
542
+ " <th>17</th>\n",
543
+ " <td>Small- &amp; medium-sized enterprise</td>\n",
544
+ " <td>finetune</td>\n",
545
+ " <td>Not enough info</td>\n",
546
+ " <td>nekokiku/01-ai-Yi-1.5-9B-Chat-1725250843</td>\n",
547
+ " <td>4073</td>\n",
548
+ " </tr>\n",
549
+ " <tr>\n",
550
+ " <th>18</th>\n",
551
+ " <td>Small- &amp; medium-sized enterprise</td>\n",
552
+ " <td>finetune</td>\n",
553
+ " <td>Small- &amp; medium-sized enterprise</td>\n",
554
+ " <td>HuggingFaceH4/zephyr-7b-beta</td>\n",
555
+ " <td>749121</td>\n",
556
+ " </tr>\n",
557
+ " <tr>\n",
558
+ " <th>19</th>\n",
559
+ " <td>Small- &amp; medium-sized enterprise</td>\n",
560
+ " <td>finetune</td>\n",
561
+ " <td>University / public research institute</td>\n",
562
+ " <td>INSAIT-Institute/BgGPT-7B-Instruct-v0.2</td>\n",
563
+ " <td>2629</td>\n",
564
+ " </tr>\n",
565
+ " <tr>\n",
566
+ " <th>20</th>\n",
567
+ " <td>Small- &amp; medium-sized enterprise</td>\n",
568
+ " <td>merge</td>\n",
569
+ " <td>Small- &amp; medium-sized enterprise</td>\n",
570
+ " <td>third-intellect/Phi-3-mini-4k-instruct-orca-ma...</td>\n",
571
+ " <td>5242</td>\n",
572
+ " </tr>\n",
573
+ " <tr>\n",
574
+ " <th>21</th>\n",
575
+ " <td>Small- &amp; medium-sized enterprise</td>\n",
576
+ " <td>quantize</td>\n",
577
+ " <td>Individual</td>\n",
578
+ " <td>thesven/Mistral-7B-Instruct-v0.3-GPTQ</td>\n",
579
+ " <td>345189</td>\n",
580
+ " </tr>\n",
581
+ " <tr>\n",
582
+ " <th>22</th>\n",
583
+ " <td>Small- &amp; medium-sized enterprise</td>\n",
584
+ " <td>quantize</td>\n",
585
+ " <td>Non-profit / grassroots entity</td>\n",
586
+ " <td>solidrust/Mistral-7B-Instruct-v0.3-AWQ</td>\n",
587
+ " <td>1841</td>\n",
588
+ " </tr>\n",
589
+ " <tr>\n",
590
+ " <th>23</th>\n",
591
+ " <td>Small- &amp; medium-sized enterprise</td>\n",
592
+ " <td>quantize</td>\n",
593
+ " <td>Small- &amp; medium-sized enterprise</td>\n",
594
+ " <td>neuralmagic/Mistral-7B-Instruct-v0.3-GPTQ-4bit</td>\n",
595
+ " <td>186770</td>\n",
596
+ " </tr>\n",
597
+ " <tr>\n",
598
+ " <th>24</th>\n",
599
+ " <td>University / public research institute</td>\n",
600
+ " <td>finetune</td>\n",
601
+ " <td>Not enough info</td>\n",
602
+ " <td>Kabster/BioMistral-Zephyr-Beta-SLERP</td>\n",
603
+ " <td>2503</td>\n",
604
+ " </tr>\n",
605
+ " <tr>\n",
606
+ " <th>25</th>\n",
607
+ " <td>University / public research institute</td>\n",
608
+ " <td>finetune</td>\n",
609
+ " <td>Small- &amp; medium-sized enterprise</td>\n",
610
+ " <td>johnsnowlabs/BioLing-7B-Dare</td>\n",
611
+ " <td>2249</td>\n",
612
+ " </tr>\n",
613
+ " <tr>\n",
614
+ " <th>26</th>\n",
615
+ " <td>University / public research institute</td>\n",
616
+ " <td>merge</td>\n",
617
+ " <td>Individual</td>\n",
618
+ " <td>skfrost19/BioMistralMerged</td>\n",
619
+ " <td>3359</td>\n",
620
+ " </tr>\n",
621
+ " <tr>\n",
622
+ " <th>27</th>\n",
623
+ " <td>University / public research institute</td>\n",
624
+ " <td>quantize</td>\n",
625
+ " <td>Individual</td>\n",
626
+ " <td>TheBloke/TinyLlama-1.1B-Chat-v1.0-GPTQ</td>\n",
627
+ " <td>67033</td>\n",
628
+ " </tr>\n",
629
+ " </tbody>\n",
630
+ "</table>\n",
631
+ "</div>"
632
+ ],
633
+ "text/plain": [
634
+ " base-model-producer-category hereditary_type \\\n",
635
+ "0 Individual quantize \n",
636
+ "1 Large enterprise finetune \n",
637
+ "2 Large enterprise finetune \n",
638
+ "3 Large enterprise finetune \n",
639
+ "4 Large enterprise finetune \n",
640
+ "5 Large enterprise finetune \n",
641
+ "6 Large enterprise finetune \n",
642
+ "7 Large enterprise quantize \n",
643
+ "8 Large enterprise quantize \n",
644
+ "9 Large enterprise quantize \n",
645
+ "10 Large enterprise quantize \n",
646
+ "11 Large enterprise quantize \n",
647
+ "12 Large enterprise quantize \n",
648
+ "13 Public research institute finetune \n",
649
+ "14 Small- & medium-sized enterprise finetune \n",
650
+ "15 Small- & medium-sized enterprise finetune \n",
651
+ "16 Small- & medium-sized enterprise finetune \n",
652
+ "17 Small- & medium-sized enterprise finetune \n",
653
+ "18 Small- & medium-sized enterprise finetune \n",
654
+ "19 Small- & medium-sized enterprise finetune \n",
655
+ "20 Small- & medium-sized enterprise merge \n",
656
+ "21 Small- & medium-sized enterprise quantize \n",
657
+ "22 Small- & medium-sized enterprise quantize \n",
658
+ "23 Small- & medium-sized enterprise quantize \n",
659
+ "24 University / public research institute finetune \n",
660
+ "25 University / public research institute finetune \n",
661
+ "26 University / public research institute merge \n",
662
+ "27 University / public research institute quantize \n",
663
+ "\n",
664
+ " model-producer-category \\\n",
665
+ "0 Individual \n",
666
+ "1 Individual \n",
667
+ "2 Large enterprise \n",
668
+ "3 Non-profit / grassroots entity \n",
669
+ "4 Not enough info \n",
670
+ "5 Small- & medium-sized enterprise \n",
671
+ "6 University / public research institute \n",
672
+ "7 Individual \n",
673
+ "8 Large enterprise \n",
674
+ "9 Non-profit / grassroots entity \n",
675
+ "10 Not enough info \n",
676
+ "11 Small- & medium-sized enterprise \n",
677
+ "12 University / public research institute \n",
678
+ "13 Small- & medium-sized enterprise \n",
679
+ "14 Individual \n",
680
+ "15 Large enterprise \n",
681
+ "16 Non-profit / grassroots entity \n",
682
+ "17 Not enough info \n",
683
+ "18 Small- & medium-sized enterprise \n",
684
+ "19 University / public research institute \n",
685
+ "20 Small- & medium-sized enterprise \n",
686
+ "21 Individual \n",
687
+ "22 Non-profit / grassroots entity \n",
688
+ "23 Small- & medium-sized enterprise \n",
689
+ "24 Not enough info \n",
690
+ "25 Small- & medium-sized enterprise \n",
691
+ "26 Individual \n",
692
+ "27 Individual \n",
693
+ "\n",
694
+ " id downloads \n",
695
+ "0 TheBloke/OpenHermes-2.5-Mistral-7B-AWQ 11221 \n",
696
+ "1 aaditya/Llama3-OpenBioLLM-8B 162094 \n",
697
+ "2 meta-llama/Llama-3.1-8B-Instruct 3134749 \n",
698
+ "3 maum-ai/Llama-3-MAAL-8B-Instruct-v0.1 18172 \n",
699
+ "4 smeby/Qwen-Qwen1.5-7B-1724632776 49744 \n",
700
+ "5 NousResearch/Hermes-3-Llama-3.1-8B 42774 \n",
701
+ "6 princeton-nlp/gemma-2-9b-it-SimPO 19520 \n",
702
+ "7 TheBloke/Llama-2-7B-Chat-GPTQ 110199 \n",
703
+ "8 Qwen/Qwen2-0.5B-Instruct 270072 \n",
704
+ "9 mlc-ai/Llama-2-7b-chat-hf-q4f32_1-MLC 28221 \n",
705
+ "10 QuantFactory/Qwen2-0.5B-GGUF 15865 \n",
706
+ "11 SanctumAI/Meta-Llama-3.1-8B-Instruct-GGUF 269192 \n",
707
+ "12 MLP-KTLim/llama-3-Korean-Bllossom-8B 85510 \n",
708
+ "13 neuralmagic/TinyLlama-1.1B-Chat-v1.0-marlin 5152 \n",
709
+ "14 silent666/01-ai-Yi-1.5-9B-1725825607 20833 \n",
710
+ "15 mistralai/Mixtral-8x7B-Instruct-v0.1 578188 \n",
711
+ "16 openchat/openchat-3.5-0106 45756 \n",
712
+ "17 nekokiku/01-ai-Yi-1.5-9B-Chat-1725250843 4073 \n",
713
+ "18 HuggingFaceH4/zephyr-7b-beta 749121 \n",
714
+ "19 INSAIT-Institute/BgGPT-7B-Instruct-v0.2 2629 \n",
715
+ "20 third-intellect/Phi-3-mini-4k-instruct-orca-ma... 5242 \n",
716
+ "21 thesven/Mistral-7B-Instruct-v0.3-GPTQ 345189 \n",
717
+ "22 solidrust/Mistral-7B-Instruct-v0.3-AWQ 1841 \n",
718
+ "23 neuralmagic/Mistral-7B-Instruct-v0.3-GPTQ-4bit 186770 \n",
719
+ "24 Kabster/BioMistral-Zephyr-Beta-SLERP 2503 \n",
720
+ "25 johnsnowlabs/BioLing-7B-Dare 2249 \n",
721
+ "26 skfrost19/BioMistralMerged 3359 \n",
722
+ "27 TheBloke/TinyLlama-1.1B-Chat-v1.0-GPTQ 67033 "
723
+ ]
724
+ },
725
+ "execution_count": 51,
726
+ "metadata": {},
727
+ "output_type": "execute_result"
728
+ }
729
+ ],
730
+ "source": [
+ "# For each (base-producer, hereditary_type, producer) group, report the most-downloaded model.\n",
+ "\n",
+ "df.groupby(\n",
+ "    [\"base-model-producer-category\", \"hereditary_type\", \"model-producer-category\"]\n",
+ ").apply(\n",
+ "    # return the id column of the max downloads and the download count\n",
+ "    lambda x: {\n",
+ "        \"id\": x.loc[x[\"downloads\"].idxmax(), \"id\"],\n",
+ "        \"downloads\": x[\"downloads\"].max(),\n",
+ "    },\n",
+ "    # lambda only reads non-grouping columns, so excluding groups is safe\n",
+ "    include_groups=False,\n",
+ ").apply(\n",
+ "    pd.Series\n",
+ ").reset_index()"
745
+ ]
746
+ },
747
+ {
748
+ "cell_type": "code",
749
+ "execution_count": null,
750
+ "metadata": {},
751
+ "outputs": [],
752
+ "source": []
753
+ }
754
+ ],
755
+ "metadata": {
756
+ "kernelspec": {
757
+ "display_name": "venv",
758
+ "language": "python",
759
+ "name": "python3"
760
+ },
761
+ "language_info": {
762
+ "codemirror_mode": {
763
+ "name": "ipython",
764
+ "version": 3
765
+ },
766
+ "file_extension": ".py",
767
+ "mimetype": "text/x-python",
768
+ "name": "python",
769
+ "nbconvert_exporter": "python",
770
+ "pygments_lexer": "ipython3",
771
+ "version": "3.11.9"
772
+ }
773
+ },
774
+ "nbformat": 4,
775
+ "nbformat_minor": 2
776
+ }