Matteo Rinaldi committed
Commit 5ad75a6
1 parent: 3d678fa

Delete testing_conversion.ipynb

Files changed (1):
  testing_conversion.ipynb +0 -555
testing_conversion.ipynb DELETED
@@ -1,555 +0,0 @@
The deleted notebook contained the following cells (all removed by this commit).

Cell 1 — code (execution_count: 1):
```python
from datasets import DownloadManager
import glob
import os
import pandas as pd
import pyarrow as pa
import pyarrow.json
import pyarrow.parquet as pq

# Collect the compressed JSONL archives, excluding any .txt files.
pattern = "*.bz2"
paths = glob.glob(pattern)
paths = [file for file in paths if ".txt." not in file]
n_files = len(paths)

# Labels are the file names without the .jsonl.bz2 extension.
labels = [file.replace(".jsonl.bz2", "") for file in paths]

# Handle the Parquet conversion: extract the archives with a DownloadManager
# and write the converted output into the parquet directory.
dl_manager = DownloadManager()
parquet_dir = "parquet"
```
Cell 2 — code (execution_count: 2):
```python
# Extract the bz2 archives, n_concurrent files per batch.
extracted_files = []
n_concurrent = 25

for i in range(0, len(paths), n_concurrent):
    batch_paths = paths[i:i + n_concurrent]
    for file in batch_paths:
        extracted_files.append(dl_manager.extract(file, num_proc=n_concurrent))
```
Output (stderr):
```
/home/rlazzaroni/anaconda3/lib/python3.11/site-packages/datasets/download/download_manager.py:536: FutureWarning: 'num_proc' was deprecated in version 2.6.2 and will be removed in 3.0.0. Pass `DownloadConfig(num_proc=<num_proc>)` to the initializer instead.
  warnings.warn(
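The FutureWarning above names the replacement directly: configure parallelism once on the manager instead of passing `num_proc` to every `extract()` call. A minimal sketch of that variant (assuming the same `paths` list as in Cell 1):

```python
from datasets import DownloadConfig, DownloadManager

# Configure extraction parallelism at construction time, as the warning suggests.
dl_manager = DownloadManager(download_config=DownloadConfig(num_proc=25))

# extract() accepts a list of paths and handles the fan-out itself.
extracted_files = dl_manager.extract(paths)
```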
Cell 3 — code (execution_count: 3):
```python
len(extracted_files)
```
Output: 565

Cell 4 — code (execution_count: 4):
```python
n_files
```
Output: 565
Cell 5 — code (execution_count: 6):
```python
def convert_jsonl_to_parquet(file_list, parquet_dir, chunk_size=5_000_000):
    """Converts JSONL files to Parquet with memory efficiency.

    Args:
        file_list (list): List of JSONL file paths.
        parquet_dir (str): Path to store output Parquet files.
        chunk_size (int): Number of records to write to each Parquet file.
    """
    os.makedirs(parquet_dir, exist_ok=True)  # create the output directory

    parquet_file_index = 0
    current_batches = []
    current_batches_size = 0

    for file_index, file in enumerate(file_list, start=1):
        try:
            table_in = pa.json.read_json(file)  # PyArrow JSON reader
            # Buffer record batches until chunk_size rows have accumulated,
            # then flush them to one Parquet file.
            for batch in table_in.to_batches():
                current_batches.append(batch)
                current_batches_size += batch.num_rows
                if current_batches_size >= chunk_size:
                    table = pa.Table.from_batches(current_batches)
                    parquet_filename = f"usenet_converted_{parquet_file_index}.parquet"
                    parquet_path = os.path.join(parquet_dir, parquet_filename)
                    pq.write_table(table, parquet_path)
                    print(f"Wrote {parquet_filename} with {current_batches_size} records")
                    parquet_file_index += 1
                    current_batches = []
                    current_batches_size = 0
        except Exception as e:
            print(f"Error in file {file} with error {e}")
        # Every 50 files, print the progress.
        if file_index % 50 == 0:
            print(f"Finished processing file {file_index} of {len(file_list)}")

    # Write any batches still in the buffer as the last chunk.
    if current_batches:
        print(f"Writing last chunk to parquet file {parquet_file_index}")
        table = pa.Table.from_batches(current_batches)
        parquet_filename = f"usenet_converted_{parquet_file_index}.parquet"
        parquet_path = os.path.join(parquet_dir, parquet_filename)
        pq.write_table(table, parquet_path)
        parquet_file_index += 1

    print(f"Conversion complete, wrote {parquet_file_index} Parquet files.")


# Convert only if the output directory does not already exist.
if not os.path.exists(parquet_dir):
    convert_jsonl_to_parquet(extracted_files, parquet_dir)
```
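Buffering roughly five million rows before each write keeps a lot of data in memory at once. As an alternative, here is a hedged sketch (not the notebook's approach) that streams each file straight into a single `pq.ParquetWriter`; it assumes all JSONL files share the schema of the first file read:

```python
import pyarrow as pa
import pyarrow.json
import pyarrow.parquet as pq

def convert_streaming(file_list, out_path):
    """Stream JSONL files into one Parquet file without buffering large chunks."""
    writer = None
    for file in file_list:
        table = pa.json.read_json(file)  # parse one file at a time
        if writer is None:
            # Take the schema from the first file (assumption: uniform schema).
            writer = pq.ParquetWriter(out_path, table.schema)
        writer.write_table(table)
    if writer is not None:
        writer.close()
```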
Cell 6 — code (execution_count: 7):
```python
# Test the Parquet conversion: list the files that were written.
parquet_files = glob.glob(parquet_dir + "/*.parquet")
parquet_files
```
Output:
```
['parquet/usenet_converted_6.parquet',
 'parquet/usenet_converted_9.parquet',
 'parquet/usenet_converted_15.parquet',
 'parquet/usenet_converted_10.parquet',
 'parquet/usenet_converted_11.parquet',
 'parquet/usenet_converted_12.parquet',
 'parquet/usenet_converted_16.parquet',
 'parquet/usenet_converted_13.parquet',
 'parquet/usenet_converted_14.parquet',
 'parquet/usenet_converted_4.parquet',
 'parquet/usenet_converted_5.parquet',
 'parquet/usenet_converted_0.parquet',
 'parquet/usenet_converted_8.parquet',
 'parquet/usenet_converted_7.parquet',
 'parquet/usenet_converted_1.parquet',
 'parquet/usenet_converted_3.parquet',
 'parquet/usenet_converted_2.parquet']
```
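As a quick sanity check on the conversion (not in the original notebook), row counts can be summed from the Parquet footers without reading any column data:

```python
# read_metadata() touches only the file footer, so this is cheap.
total_rows = sum(pq.read_metadata(path).num_rows for path in parquet_files)
print(f"{total_rows} rows across {len(parquet_files)} Parquet files")
```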
Cell 7 — code:
```python
# Load one converted file into a DataFrame.
df = pq.read_table(parquet_files[0]).to_pandas()
```
Cell 8 — code (output execution_count: 22):
```python
df
```
Output, abridged (the notebook rendered the full frame):
```
         title                                          author                     id     progressive_number  timestamp            newsgroup               original_url                                        text
0        Q.p.g.a. dopo i primissimi ascolti...          Alessia                    1132   4                   2009-11-29 15:16:26  it.fan.musica.baglioni  https://groups.google.com/g/it.fan.musica.bagl...  > mia moglie esce pazza per 'sto baglioni.\nMe...
1        Q.p.g.a. dopo i primissimi ascolti...          Paolo                      1132   5                   2009-11-29 18:11:59  it.fan.musica.baglioni  https://groups.google.com/g/it.fan.musica.bagl...  "Alessia" <\[email protected]\n> ha scritto...
2        Avrai...?                                      Lock                       1133   1                   2003-06-18 15:50:02  it.fan.musica.baglioni  https://groups.google.com/g/it.fan.musica.bagl...  Ragazzi, chi di voi ce l'ha non live?\n-- \nLø...
3        Avrai...?                                      Deep                       1133   2                   2003-06-18 16:08:54  it.fan.musica.baglioni  https://groups.google.com/g/it.fan.musica.bagl...  Lock ha scritto:\n> Ragazzi, chi di voi ce l'h...
4        Avrai...?                                      Clab                       1133   3                   2003-06-18 16:18:12  it.fan.musica.baglioni  https://groups.google.com/g/it.fan.musica.bagl...  "Lock" <\[email protected]\n> ha scritto nel...
...      ...                                            ...                        ...    ...                 ...                  ...                     ...                                                 ...
5000013  zidane lascia il calcio                        motion musso aka: sathia   54066  1                   2006-04-25 20:43:13  it.sport.calcio         https://groups.google.com/g/it.sport.calcio/c/...  e questa è una notizia triste.\nse ne andrà do...
5000014  UCS                                            Andrea Logiudice           54067  1                   1997-12-31 09:00:00  it.sport.calcio         https://groups.google.com/g/it.sport.calcio/c/...  Lo dico e lo ribadisco, FORZA SAMP!
5000015  INTER MAFIA ANCHE PER GLI STRANIERI!!          alessandro                 54068  1                   2002-11-13 12:55:42  it.sport.calcio         https://groups.google.com/g/it.sport.calcio/c/...  "Simon Schiffeleers" <\[email protected]\n> ...
5000016  Silenzio...                                    Vide                       54069  1                   2004-04-28 00:16:53  it.sport.calcio         https://groups.google.com/g/it.sport.calcio/c/...  Capisco che il NG sia per la maggior parte pop...
5000017  [OT] Abbonamento a +calcio gold precisazioni   renatuzzo                  54070  1                   2002-09-28 11:53:02  it.sport.calcio         https://groups.google.com/g/it.sport.calcio/c/...  Scusate se magari la domanda è già stata posta...

[5000018 rows x 8 columns]
```
Cell 9 — code (execution_count: 9):
```python
# Read every Parquet file into a single pandas DataFrame, then yield rows.
dataset = pq.ParquetDataset(parquet_files)
table_df = dataset.read(use_threads=True).to_pandas()

def generate_examples():
    for index, row in table_df.iterrows():
        yield index, row.to_dict()
```
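Materializing all 17 Parquet files as one pandas DataFrame before yielding rows is likely what exhausts memory here (see the kernel crash below). A memory-safer sketch of the same generator, streaming record batches instead (an assumption on my part, not the notebook's code; `RecordBatch.to_pylist` needs a reasonably recent pyarrow):

```python
import pyarrow.parquet as pq

def generate_examples_streaming(parquet_files):
    """Yield (index, row_dict) pairs without loading the full dataset."""
    index = 0
    for path in parquet_files:
        parquet_file = pq.ParquetFile(path)
        for batch in parquet_file.iter_batches():
            for row in batch.to_pylist():  # list of dicts for this batch
                yield index, row
                index += 1
```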
Cell 10 — code:
```python
for example in generate_examples():
    print(example)
```
Cell 11 — code:
```python
table_df.head()
```
Output (error): The kernel crashed while executing code in the current cell or a previous cell; see the Jupyter log for further details.
Notebook metadata: kernelspec "base" (Python 3), language_info Python 3.11.5, nbformat 4.2.