KaraKaraWitch committed
Commit
0d2a55f
0 Parent(s):

Super-squash branch 'main' using huggingface_hub

This view is limited to 50 files because it contains too many changes.

Files changed (50)
  1. .gitattributes +60 -0
  2. README.md +433 -0
  3. data/chunk_00/0xcert.jsonl +3 -0
  4. data/chunk_00/11mike10.jsonl +3 -0
  5. data/chunk_00/13ReasonsWhy.jsonl +3 -0
  6. data/chunk_00/196_butOnlyPorn.jsonl +3 -0
  7. data/chunk_00/1inch.jsonl +3 -0
  8. data/chunk_00/1stPersonAnimations.jsonl +3 -0
  9. data/chunk_00/209fuck.jsonl +3 -0
  10. data/chunk_00/20k.jsonl +3 -0
  11. data/chunk_00/20questions.jsonl +3 -0
  12. data/chunk_00/21savage.jsonl +3 -0
  13. data/chunk_00/21stCenturyQuotes.jsonl +3 -0
  14. data/chunk_00/2d20games.jsonl +3 -0
  15. data/chunk_00/2healthbars.jsonl +3 -0
  16. data/chunk_00/360Waves.jsonl +3 -0
  17. data/chunk_00/3dprintingdms.jsonl +3 -0
  18. data/chunk_00/3dshomebrew.jsonl +3 -0
  19. data/chunk_00/410freaks.jsonl +3 -0
  20. data/chunk_00/5050.jsonl +3 -0
  21. data/chunk_00/5thgen4runners.jsonl +3 -0
  22. data/chunk_00/806MensRoom.jsonl +3 -0
  23. data/chunk_00/8rack.jsonl +3 -0
  24. data/chunk_00/90DayFianceUK.jsonl +3 -0
  25. data/chunk_00/911Calls.jsonl +3 -0
  26. data/chunk_00/AAPL.jsonl +3 -0
  27. data/chunk_00/ABBA.jsonl +3 -0
  28. data/chunk_00/ACNHGardening.jsonl +3 -0
  29. data/chunk_00/AIK.jsonl +3 -0
  30. data/chunk_00/AI_Music.jsonl +3 -0
  31. data/chunk_00/ALevelChemistry.jsonl +3 -0
  32. data/chunk_00/AMCPlus.jsonl +3 -0
  33. data/chunk_00/AMERICANLEAN.jsonl +3 -0
  34. data/chunk_00/AMLCompliance.jsonl +3 -0
  35. data/chunk_00/ANVILVaultBreaker.jsonl +3 -0
  36. data/chunk_00/AO3.jsonl +3 -0
  37. data/chunk_00/API3.jsonl +3 -0
  38. data/chunk_00/AQuietPlace.jsonl +3 -0
  39. data/chunk_00/ASRT_stock.jsonl +3 -0
  40. data/chunk_00/AVINOC.jsonl +3 -0
  41. data/chunk_00/AZguns.jsonl +3 -0
  42. data/chunk_00/A_Cups.jsonl +3 -0
  43. data/chunk_00/AbandonedPorn.jsonl +3 -0
  44. data/chunk_00/AbbyBerner_fr.jsonl +3 -0
  45. data/chunk_00/Acadiana.jsonl +3 -0
  46. data/chunk_00/AcalaNetwork.jsonl +3 -0
  47. data/chunk_00/Actingclass.jsonl +3 -0
  48. data/chunk_00/ActualWomen.jsonl +3 -0
  49. data/chunk_00/AdamCarolla.jsonl +3 -0
  50. data/chunk_00/Adjuncts.jsonl +3 -0
.gitattributes ADDED
@@ -0,0 +1,60 @@
+ *.7z filter=lfs diff=lfs merge=lfs -text
+ *.arrow filter=lfs diff=lfs merge=lfs -text
+ *.bin filter=lfs diff=lfs merge=lfs -text
+ *.bz2 filter=lfs diff=lfs merge=lfs -text
+ *.ckpt filter=lfs diff=lfs merge=lfs -text
+ *.ftz filter=lfs diff=lfs merge=lfs -text
+ *.gz filter=lfs diff=lfs merge=lfs -text
+ *.h5 filter=lfs diff=lfs merge=lfs -text
+ *.joblib filter=lfs diff=lfs merge=lfs -text
+ *.lfs.* filter=lfs diff=lfs merge=lfs -text
+ *.lz4 filter=lfs diff=lfs merge=lfs -text
+ *.mlmodel filter=lfs diff=lfs merge=lfs -text
+ *.model filter=lfs diff=lfs merge=lfs -text
+ *.msgpack filter=lfs diff=lfs merge=lfs -text
+ *.npy filter=lfs diff=lfs merge=lfs -text
+ *.npz filter=lfs diff=lfs merge=lfs -text
+ *.onnx filter=lfs diff=lfs merge=lfs -text
+ *.ot filter=lfs diff=lfs merge=lfs -text
+ *.parquet filter=lfs diff=lfs merge=lfs -text
+ *.pb filter=lfs diff=lfs merge=lfs -text
+ *.pickle filter=lfs diff=lfs merge=lfs -text
+ *.pkl filter=lfs diff=lfs merge=lfs -text
+ *.pt filter=lfs diff=lfs merge=lfs -text
+ *.pth filter=lfs diff=lfs merge=lfs -text
+ *.rar filter=lfs diff=lfs merge=lfs -text
+ *.safetensors filter=lfs diff=lfs merge=lfs -text
+ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
+ *.tar.* filter=lfs diff=lfs merge=lfs -text
+ *.tar filter=lfs diff=lfs merge=lfs -text
+ *.tflite filter=lfs diff=lfs merge=lfs -text
+ *.tgz filter=lfs diff=lfs merge=lfs -text
+ *.wasm filter=lfs diff=lfs merge=lfs -text
+ *.xz filter=lfs diff=lfs merge=lfs -text
+ *.zip filter=lfs diff=lfs merge=lfs -text
+ *.zst filter=lfs diff=lfs merge=lfs -text
+ *tfevents* filter=lfs diff=lfs merge=lfs -text
+ # Audio files - uncompressed
+ *.pcm filter=lfs diff=lfs merge=lfs -text
+ *.sam filter=lfs diff=lfs merge=lfs -text
+ *.raw filter=lfs diff=lfs merge=lfs -text
+ # Audio files - compressed
+ *.aac filter=lfs diff=lfs merge=lfs -text
+ *.flac filter=lfs diff=lfs merge=lfs -text
+ *.mp3 filter=lfs diff=lfs merge=lfs -text
+ *.ogg filter=lfs diff=lfs merge=lfs -text
+ *.wav filter=lfs diff=lfs merge=lfs -text
+ # Image files - uncompressed
+ *.bmp filter=lfs diff=lfs merge=lfs -text
+ *.gif filter=lfs diff=lfs merge=lfs -text
+ *.png filter=lfs diff=lfs merge=lfs -text
+ *.tiff filter=lfs diff=lfs merge=lfs -text
+ # Image files - compressed
+ *.jpg filter=lfs diff=lfs merge=lfs -text
+ *.jpeg filter=lfs diff=lfs merge=lfs -text
+ *.webp filter=lfs diff=lfs merge=lfs -text
+ # Video files - compressed
+ *.mp4 filter=lfs diff=lfs merge=lfs -text
+ *.webm filter=lfs diff=lfs merge=lfs -text
+ # Jsonl
+ *.jsonl filter=lfs diff=lfs merge=lfs -text
README.md ADDED
@@ -0,0 +1,433 @@
+ ---
+ size_categories:
+ - 100M<n<1B
+
+ configs:
+ - config_name: default
+   data_files:
+   - split: text-focus
+     path: "data/chunk_*/*.jsonl"
+ ---
+
+ # OKReddit
+
+ ![image/png](https://cdn-uploads.huggingface.co/production/uploads/633e85093a17ab61de8d9073/iIT11kzCFgbKSc0E-p5S4.png)
+
+ # Dataset Summary
+
+ OKReddit is a filtered collection of **5TiB** of Reddit submissions and comments from 2005 to 2023. This dataset has been prepared for research or archival purposes.
+
+ This dataset includes (obviously) a filtered list of subreddits.
+
+ - **Curated by:** KaraKaraWitch
+ - **Funded by:** Recursal.ai
+ - **Shared by:** KaraKaraWitch
+ - **Language(s) (NLP):** Mainly English. Other languages are available in smaller quantities.
+ - **License:** The `Scripts` folder is Apache 2.0. Refer to [Licensing Information](#licensing-information) for the data license.
+
+ ### Dataset Sources
+
+ - **Source Data:** [Academic Torrents](https://academictorrents.com/details/9c263fc85366c1ef8f5bb9da0203f4c8c8db75f4) by stuck_in_the_matrix, Watchful1, RaiderBDev and the pushshift folks.
+
+ ## Supported Tasks and Leaderboards
+
+ The dataset may be used for a variety of natural language processing (NLP) tasks, including:
+
+ - Text Classification: Classifying comments and posts into categories based on sentiment, topic, or subreddit.
+
+ - Language Modeling: Training language models to understand and generate conversational text.
+
+ - Sentiment Analysis: Analyzing the sentiment of comments and posts across different subreddits and topics.
+
+ - Topic Modeling: Identifying and modeling topics discussed in the posts and comments.
+
+ ## Languages
+
+ The primary language of the dataset is English, as the majority of Redditors write in English. However, posts in other languages are also present in smaller quantities.
+
+ ## Dataset Structure
+
+ ### Data Instances
+
+ Each data instance represents a submission thread within a subreddit.
+
+ - `thread_id`: The submission thread ID, inclusive of the `t3_` prefix that Reddit uses to mark an ID as a thread. `https://reddit.com/r/<SUBREDDIT>/comments/<THREAD_ID>/`
+ - `subreddit`: The name of the subreddit. Case-insensitive; Reddit just redirects you to the correctly-cased subreddit.
+ - `namedconversation`: An OpenAI-"compatible" conversation:
+   - `from`: The author username that posted the content. **It is not `user`, `system` or `model`!**
+   - `content`: The Reddit markdown posted.
+   - The first value of `namedconversation` is the submission. The rest are replies.
+   - If a submission is marked as NSFW / Mature, `[R-18]` is prepended to the title.
+ - `submission` / `comments`: The raw submission and comments respectively.
+
+ Unsure or confused? We have provided a real sample below.
+
+ ### Data Sample
+
+ <details>
+ <summary>Sample Thread</summary>
+ <pre>
+ <code class="language-json">
+ {
+   "thread_id": "t3_of7h2",
+   "subreddit": "Gaben",
+   "namedconversation": [
+     {
+       "from": "[deleted]",
+       "content": "[13 Jan 2012, 07:01:07] TIL Half-Life 2's source code was hacked because the hacker guessed Gabe's password, which was \"gaben\"\n\nLink: half-life.wikia.com"
+     },
+     {
+       "from": "clydethefrog",
+       "content": "[15 Jan 2012, 18:01:06] That's my password too"
+     },
+     {
+       "from": "Dunge",
+       "content": "[29 Feb 2012, 02:02:34] \"Gembe was led into believing that Valve wanted to employ him as an in-house security auditor. He was to be offered a flight to the USA and was to be arrested on arrival by the FBI.\"\n\nWow that's sad"
+     },
+     {
+       "from": "captainregularr",
+       "content": "[13 Jan 2012, 14:01:14] Did you know gaben makes me gaben my gaben?"
+     },
+     {
+       "from": "Turellio",
+       "content": "[13 Jan 2012, 17:01:53] that's what gaben gaben"
+     },
+     {
+       "from": "captainregularr",
+       "content": "[13 Jan 2012, 17:01:05] I gaben to gaben's demands."
+     },
+     {
+       "from": "RagingRetard",
+       "content": "[13 Jan 2012, 17:01:49] Oh, quit your incessant gaben."
+     }
+   ],
+   "submission": {
+     "sub": {
+       "name": "Gaben",
+       "id": "2scx1",
+       "subs": null,
+       "type": null
+     },
+     "author": null,
+     "title": "TIL Half-Life 2's source code was hacked because the hacker guessed Gabe's password, which was \"gaben\"",
+     "score": 23,
+     "created": 1326440407.0,
+     "id": "of7h2",
+     "flags": "",
+     "link_flair": null,
+     "url": "http://half-life.wikia.com/wiki/Half-Life_2_Beta#Source_code_leak",
+     "text": "",
+     "removed": [],
+     "cross": []
+   },
+   "comments": [
+     {
+       "sub": {
+         "name": "Gaben",
+         "id": "2scx1",
+         "subs": -1,
+         "type": ""
+       },
+       "author": {
+         "name": "clydethefrog",
+         "uid": "",
+         "create": -1,
+         "flair": null,
+         "patreon": false,
+         "premium": false
+       },
+       "text": "That's my password too",
+       "score": 1,
+       "created": "1326652326",
+       "id": "c3hge04",
+       "parent_id": "t3_of7h2",
+       "thread_id": "t3_of7h2",
+       "flags": "A",
+       "children": []
+     },
+     {
+       "sub": {
+         "name": "Gaben",
+         "id": "2scx1",
+         "subs": -1,
+         "type": ""
+       },
+       "author": {
+         "name": "Dunge",
+         "uid": "",
+         "create": -1,
+         "flair": null,
+         "patreon": false,
+         "premium": false
+       },
+       "text": "\"Gembe was led into believing that Valve wanted to employ him as an in-house security auditor. He was to be offered a flight to the USA and was to be arrested on arrival by the FBI.\"\n\nWow that's sad",
+       "score": 3,
+       "created": "1330483894",
+       "id": "c3w2ulz",
+       "parent_id": "t3_of7h2",
+       "thread_id": "t3_of7h2",
+       "flags": "A",
+       "children": []
+     },
+     {
+       "sub": {
+         "name": "Gaben",
+         "id": "2scx1",
+         "subs": -1,
+         "type": ""
+       },
+       "author": {
+         "name": "captainregularr",
+         "uid": "",
+         "create": -1,
+         "flair": null,
+         "patreon": false,
+         "premium": false
+       },
+       "text": "Did you know gaben makes me gaben my gaben?",
+       "score": 5,
+       "created": "1326463514",
+       "id": "c3gsfkx",
+       "parent_id": "t3_of7h2",
+       "thread_id": "t3_of7h2",
+       "flags": "A",
+       "children": [
+         {
+           "sub": {
+             "name": "Gaben",
+             "id": "2scx1",
+             "subs": -1,
+             "type": ""
+           },
+           "author": {
+             "name": "Turellio",
+             "uid": "",
+             "create": -1,
+             "flair": null,
+             "patreon": false,
+             "premium": false
+           },
+           "text": "that's what gaben gaben",
+           "score": 3,
+           "created": "1326476873",
+           "id": "c3guihp",
+           "parent_id": "t1_c3gsfkx",
+           "thread_id": "t3_of7h2",
+           "flags": "A",
+           "children": [
+             {
+               "sub": {
+                 "name": "Gaben",
+                 "id": "2scx1",
+                 "subs": -1,
+                 "type": ""
+               },
+               "author": {
+                 "name": "captainregularr",
+                 "uid": "",
+                 "create": -1,
+                 "flair": null,
+                 "patreon": false,
+                 "premium": false
+               },
+               "text": "I gaben to gaben's demands.",
+               "score": 5,
+               "created": "1326477005",
+               "id": "c3guje0",
+               "parent_id": "t1_c3guihp",
+               "thread_id": "t3_of7h2",
+               "flags": "AE",
+               "children": [
+                 {
+                   "sub": {
+                     "name": "Gaben",
+                     "id": "2scx1",
+                     "subs": -1,
+                     "type": ""
+                   },
+                   "author": {
+                     "name": "RagingRetard",
+                     "uid": "",
+                     "create": -1,
+                     "flair": null,
+                     "patreon": false,
+                     "premium": false
+                   },
+                   "text": "Oh, quit your incessant gaben.",
+                   "score": 2,
+                   "created": "1326477409",
+                   "id": "c3gulzh",
+                   "parent_id": "t1_c3guje0",
+                   "thread_id": "t3_of7h2",
+                   "flags": "A",
+                   "children": []
+                 }
+               ]
+             }
+           ]
+         }
+       ]
+     }
+   ]
+ }
+ </code>
+ </pre>
+ </details>
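+
+ If you prefer code over prose, the following is a minimal sketch (not part of the bundled scripts) of how a thread can be read and its `namedconversation` flattened into plain text. It assumes the LFS-backed `.jsonl` files have been pulled locally; each line of a chunk file is one thread object as described above.
+
+ ```py
+ import json
+
+ # Each line of a chunk file is one submission thread.
+ with open("data/chunk_00/0xcert.jsonl", "r", encoding="utf-8") as f:
+     for line in f:
+         thread = json.loads(line)
+         print(f"## r/{thread['subreddit']} :: {thread['thread_id']}")
+         for turn in thread["namedconversation"]:
+             # The first entry is the submission; the rest are replies.
+             print(f"<{turn['from']}>: {turn['content']}")
+         break  # Only preview the first thread.
+ ```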
+
+ # Dataset Creation
+
+ ### Curation Rationale
+
+ Reddit has graced the world with its unique design and way of comments (extremely nested comment chains).
+ However, we have noted that it's possible to flatten comment chains into one long conversation without the conversation looking too strange or out of place.
+
+ Additionally, since Reddit goes back to 2005, it has a lot of data that is waiting to be explored and used.
+ (Plus, recent Large Language Models have been using Reddit for quite some time!)
+
+ After reviewing UpVoteWeb's curation practices, we have taken it upon ourselves to develop a more open dataset.
+ Recognising that variety is the spice of life, we only pruned subreddits that do not contain useful data, based on 3 metrics:
+
+ 1. Engagement (how active submissions are, measured by the number of comments received: total comments / total submissions)
+ 2. Richness (the ratio of media submissions to all submissions, squared)
+ 3. Diversity (the sum of unique comment authors and unique submission authors, divided by the total number of submissions)
+
+ In practice, it looks something like this:
+
+ ```py
+ # ...
+
+ engagement = comment_data["comments"] / submission_data["submissions"]
+ richness = (submission_data["media"] / submission_data["submissions"]) ** 2
+ diversity = (
+     comment_data["authors"] + submission_data["authors"]
+ ) / submission_data["submissions"]
+ ```
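+
+ For example, a hypothetical subreddit with 1,000 submissions (300 of them media), 8,000 comments, 900 unique comment authors, and 400 unique submission authors would score engagement = 8000 / 1000 = 8.0, richness = (300 / 1000)² = 0.09, and diversity = (900 + 400) / 1000 = 1.3.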
+
+ We additionally employ some baseline metrics: minimum counts for submissions, submission authors, comments, and comment authors.
+
+ In practice:
+
+ ```py
+ # Inside the per-subreddit loop:
+ if (
+     stats_data["submission"]["authors"] < 70  # Total unique authors
+     or stats_data["comment"]["authors"] < 20  # Total unique commenters
+     or stats_data["submission"]["submissions"] < 450  # Total submission count
+     or stats_data["comment"]["comments"] < 585  # Total comment count
+ ):
+     continue  # Skip the subreddit
+ ```
+
+ With the baseline and these 3 metrics, we filter out a host of low-quality subreddits. By this stage, we have successfully selected ~62K subreddits of good to high quality.
+
+ After filtering subreddits, we then filter submissions and comments by the following rules (a sketch follows the list):
+
+ 1. We skip submission threads with fewer than 5 comments.
+ 2. We prune comments with a score lower than -4. (The score threshold from Reddit's defaults.)
+ 3. For submissions with more than 50 comments, we drop all comments at a nesting depth of 6. (Inspired by a RES filter.)
+ 4. If a comment chain's score drops below 0, we prune the rest of the chain.
+ 5. Child comments whose parent was pruned by rules 2-4 are pruned as well.
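+
+ As a rough, illustrative sketch of how rules 1, 2, 3 and 5 compose over the nested `children` structure shown in the sample above (function names here are hypothetical, rule 4 is omitted for brevity, and `RedditThreader.py` remains the authoritative implementation):
+
+ ```py
+ def count_comments(comments):
+     # Count a comment tree, including nested children.
+     return sum(1 + count_comments(c["children"]) for c in comments)
+
+
+ def prune(comments, big_thread, depth=0):
+     kept = []
+     for comment in comments:
+         # Rule 2: prune comments scored below -4.
+         # Rule 3: in big threads (> 50 comments), drop depth-6 nesting.
+         if comment["score"] < -4 or (big_thread and depth >= 6):
+             # Rule 5: a pruned comment takes its whole subtree with it.
+             continue
+         comment["children"] = prune(comment["children"], big_thread, depth + 1)
+         kept.append(comment)
+     return kept
+
+
+ def filter_thread(thread):
+     # Rule 1: skip threads with fewer than 5 comments.
+     if count_comments(thread["comments"]) < 5:
+         return None
+     big_thread = count_comments(thread["comments"]) > 50
+     thread["comments"] = prune(thread["comments"], big_thread)
+     return thread
+ ```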
+
+ For more information, refer to the scripts provided alongside this repo. Specifically, `RedditScoring.py` for subreddit filtering and `RedditThreader.py` for per-thread filtering.
+
+ ### Source Data
+
+ This dataset is a filtered collection of posts and comments from the beginning of Reddit up to the end of 2023.
+
+ # Considerations for Using the Data
+
+ ### Social Impact of Dataset
+
+ With the release of this dataset, we aim to make this development resource available to the community at large.
+
+ ### Discussion of Biases
+
+ We've decided **not to censor out NSFW or toxic content.** This allows for better toxicity analysis and a more varied dataset.
+
+ # Additional Information
+
+ ## Recursal's Vision
+
+ > To make AI accessible to everyone, regardless of language or economic status
+
+ This is the collective goal of the `RWKV Open Source foundation` and `Recursal AI`, the commercial entity that backs it.
+
+ We believe that AI should not be controlled by a select few organizations, and that it should be accessible to everyone, whether rich or poor, a native speaker of English or not.
+
+ ### About RWKV
+
+ RWKV is an open-source, non-profit group under the Linux Foundation, focused on developing the RWKV AI architecture in accordance with our vision.
+
+ The RWKV architecture scales efficiently and economically. As an RNN & Transformer hybrid, it provides performance similar to leading transformer models while retaining the compute and energy efficiency of an RNN-based architecture.
+
+ You can find out more about the project and the latest models at the following links:
+
+ - [https://blog.rwkv.com](https://blog.rwkv.com)
+ - [https://wiki.rwkv.com](https://wiki.rwkv.com)
+
+
+ ### About Recursal AI
+
+ Recursal AI is the commercial entity built to support RWKV model development and users, while providing commercial services via its public cloud or private-cloud / on-premise offerings.
+
+ As part of our vision, our commitment is to ensure open-source development of, and access to, the best foundational AI models and datasets.
+
+ The dataset provided here is part of that commitment.
+
+ You can find out more about Recursal AI here:
+
+ - [https://recursal.ai](https://recursal.ai)
+ - [https://blog.recursal.ai](https://blog.recursal.ai)
+
+ ### Licensing Information
+
+ Since this dataset is derived from a public crawl of Reddit, the original content may be subject to copyright and other licensing terms set by the original site owner and/or the content creators.
+ Additionally, this dataset is for research and archival purposes only.
+
+ ### Citation Information
+
+ If you use this dataset in your research or project, please cite it as follows:
+ ```TeX
+ @dataset{OKReddit,
+   title = {OKReddit},
+   year = {2024},
+   publisher = {KaraKaraWitch},
+   url = {https://huggingface.co/datasets/KaraKaraWitch/OKReddit}
+ }
+ ```
+
+ Additionally, please cite the following source BibTeX as well.
+ ```TeX
+ @article{,
+   title = {Reddit comments/submissions 2005-06 to 2023-12},
+   journal = {},
+   author = {stuck_in_the_matrix, Watchful1, RaiderBDev},
+   year = {},
+   url = {},
+   abstract = {Reddit comments and submissions from 2005-06 to 2023-09 collected by pushshift and u/RaiderBDev.
+
+ These are zstandard compressed ndjson files. Example python scripts for parsing the data can be found here https://github.com/Watchful1/PushshiftDumps
+
+ The more recent dumps are collected by u/RaiderBDev and questions can be submitted here https://github.com/ArthurHeitmann/arctic_shift},
+   keywords = {reddit},
+   terms = {},
+   license = {},
+   superseded = {}
+ }
+ ```
+
+ ## ...
+
+ ```
+ Qngnfrg Mra
+ - XnenXnenJvgpu @ erphefny.nv FRCG 24
+
+ - Nalguvat, naq rirelguvat pna or pbyyngrq vagb qngnfrg.
+ - Gb orpbzr bar jvgu gur qngn, bar zhfg or jvyyvat gb bcra gurve zvaqf.
+ - Ab znggre ubj phefrq vg znl frra, gurer'f nyjnlf zber jbefr guvatf bhg gurer.
+ - NCV Yvzvgf, Cnljnyyf, Fhofpevcgvbaf naq bgure yvzvgngvbaf ner n "fhttrfgvba".
+ - Vs nyy ryfr snvyf, cebkvrf naq nppbhagf.
+ - Bar funyy arire cehar pbagrag jvgubhg eulzr be ernfba.
+ - Hayrff vg'f pyrneyl NV-Fybc. Lbh'er serr gb tb unz.
+ - Qngnfrgf ner Rireterra, arire qrpvqhbhf.
+ - Ohvyq gb fpnyr, arire fvatyr-guernqrq.
+ ```
data/chunk_00/0xcert.jsonl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:62d27b05ca84cb70ac80111838cbf0bc8904833d5c5840e6e28d35ad2672dc06
+ size 590479
data/chunk_00/11mike10.jsonl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0b50b4948a5ecf0873e9adf15120a637b9c5c70c1f7d9369019e0b0b2024ce0d
+ size 448073
data/chunk_00/13ReasonsWhy.jsonl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:05bde9cd7681acc29f0fba1e117c021babb370c4a8459faca06c94b1551a9078
+ size 253039958
data/chunk_00/196_butOnlyPorn.jsonl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6f2af9a0e89740d85aa65f218fe8db20bda60320d82c5f0f1de562f76af34461
+ size 419593
data/chunk_00/1inch.jsonl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1ae46fe3cfffe49b6fb6a2520e3a81beebf8a27c583929738295702eeb3f159b
+ size 5092450
data/chunk_00/1stPersonAnimations.jsonl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:893bac6ff1e0adf2db4be0c81310ac733e71a0d75a85d5a394202947cb9ed8de
+ size 1863872
data/chunk_00/209fuck.jsonl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b07ce664035f27f97db90d6d17d3b955c012a409ce26f82b62b06ba52d384414
+ size 5240092
data/chunk_00/20k.jsonl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d0aa73360b7e187366255d22ec4939d803091c4a9c937f231d0c48b59c4e43a0
+ size 1336988
data/chunk_00/20questions.jsonl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:33dc8803b5a4ab2e3c068f8148819d46aa3c0ba65d5e9d96bc4063c1c4d89b86
+ size 12171275
data/chunk_00/21savage.jsonl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:60d7e08ae58b786a85ef3aed2fd863e7004c94fecb5b851b2743cab3f7350dad
+ size 11651661
data/chunk_00/21stCenturyQuotes.jsonl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:186982548b8a6425f2a19fdc2ca98844ad6d3f99dca11d6520c404a7d4900aef
+ size 461148
data/chunk_00/2d20games.jsonl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:25c83f9cd2ac5abc6ca89fc12b21732bd818eedd375d3a52bdbca2958a1cbbbd
+ size 1351134
data/chunk_00/2healthbars.jsonl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6c5c60f0162e68b795ced9df61e7864d8259a129fbb1f9629dd8d90f3a348c5b
+ size 105151342
data/chunk_00/360Waves.jsonl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:58c1aea15321087af2172eb99d2b7672c9cdd26de3e1e55bc57680202e3d27b2
+ size 49224739
data/chunk_00/3dprintingdms.jsonl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3dd5c2abf75b5ecddd90c9e753dc9edc43c70b156257db31b70981ca0b8e840a
+ size 5622669
data/chunk_00/3dshomebrew.jsonl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7e6603f8cd536be21310e62297763fac0649f5c9342cac96dac6fb9da18e2b0e
+ size 1924483
data/chunk_00/410freaks.jsonl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:de06671934da2cc7a3d4f568efe1577aa38db7c1f53364a68d9f9d3ebfad209c
+ size 742923
data/chunk_00/5050.jsonl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ab1b656ed09b43c962fdd52257f5be4b402ad2a09b2b3dcb8b7db34f5a0aa771
+ size 11118962
data/chunk_00/5thgen4runners.jsonl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:44567236c2763a11a94689e72119b262ea111c13b33ed35c9cad6f620ac72fbb
+ size 2027395
data/chunk_00/806MensRoom.jsonl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:58ab061125b1e2a38134b44be7b4c96664cb4240f281aca5e9def350758a096b
+ size 2221018
data/chunk_00/8rack.jsonl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:92765b32aa06ba5d6a8ab6e65913e4b566a7c52e1a31145d616720f315478e8e
+ size 7742858
data/chunk_00/90DayFianceUK.jsonl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:28e1cae6c5d2376445f5c3a29f395567abcfa073761ec2b8d415afd79abf9afd
+ size 29751473
data/chunk_00/911Calls.jsonl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2036db1ee926a7ad9d4472af757ff8056170bcbb5cc9b0354baf497cef3a9521
+ size 880439
data/chunk_00/AAPL.jsonl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:45a9095f3b94137a177a46d733027e036d58a6f3f76a79386f0d6a54b13c8bd9
+ size 5353460
data/chunk_00/ABBA.jsonl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fde937de2cf16fbcc32ca45fc819b1909a8b99bf7e208d9b3f216a4d93994798
+ size 14468777
data/chunk_00/ACNHGardening.jsonl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a758aab483d533ba70c3b304d41582556063983ecfa68fadf6a14a0da10c0ace
+ size 26511393
data/chunk_00/AIK.jsonl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fd85a14f116e5dede72f5538788318f7e6531d98e07776d8f8ee241e02dc4866
+ size 4866196
data/chunk_00/AI_Music.jsonl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8dcb4bf1affb4071d976720ccd9c2286e397ccada0af5efaca49b28f02b596a7
+ size 778109
data/chunk_00/ALevelChemistry.jsonl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1320695a97b8de67aea18ee5274f6b77b4cbab2ff1194449b7c1626fc5474319
+ size 1828571
data/chunk_00/AMCPlus.jsonl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:961412d9242fffd7c9fd0d43eae3395bdc7f0d6880f6631c5b7efe00b22d2cc2
+ size 1437274
data/chunk_00/AMERICANLEAN.jsonl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5b7a53265b13a5805cd62e69b4839dc6e91c4e04c215edf2a2ea967f16789f84
+ size 792823
data/chunk_00/AMLCompliance.jsonl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9b02ecd87a5b6dcadcef09a7e30d34aefbb64db0291a4598f40759a1deb6e54f
+ size 4313321
data/chunk_00/ANVILVaultBreaker.jsonl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f40c554cfd5b490614a5d84827402b03eb5fb77a3c7d844a347538e8fa15affc
+ size 6469087
data/chunk_00/AO3.jsonl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9754a2a2106e8011b0f24a3d12eb247f96e4465a80dfde16aa90423923841555
+ size 471412057
data/chunk_00/API3.jsonl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ea51b9742c354d1864e5cf777a742507189d5f79f54e722ae5989482770fddb2
+ size 832835
data/chunk_00/AQuietPlace.jsonl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9b3756fe7f1acc650abf9215cc4abf5d1c4c5420aa6b4db246959d02ddae5f5b
+ size 1530628
data/chunk_00/ASRT_stock.jsonl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:17207b7bca8ef7bd23a17e1ed7408aaa8b330b99f367460eb9ca9184b77a116b
+ size 5658675
data/chunk_00/AVINOC.jsonl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c6082d94be3567140d2c59d2099150f156743e8426c4962590445eaba75f6425
+ size 818892
data/chunk_00/AZguns.jsonl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:647d6504bab173a8b110e2a2db1a44a9bad7fe62cab526644361d2526f745324
+ size 30876581
data/chunk_00/A_Cups.jsonl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5d83e9d899e69521a4106365acde351e5ec44c7cb44f8e07bec52e38de6ef5e1
+ size 16670650
data/chunk_00/AbandonedPorn.jsonl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bc10619aae56b986c01fc09640c7b9391b304fdd5ad7fd2e37feefd372e49dcd
+ size 53176430
data/chunk_00/AbbyBerner_fr.jsonl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2c5608e4b1c8ae1cb289547c7543e324d4554c08c815566ce5ce2d1e02b96003
+ size 4602313
data/chunk_00/Acadiana.jsonl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cc96fcc93c6bd7b5073bf82807e76f23648e5520cd8256e265a98f07e4beb1c1
+ size 182639422
data/chunk_00/AcalaNetwork.jsonl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0a74c0314059df00d4b1f66622329cbdecf3837d49f17574b33cfce5628607a7
+ size 8960970
data/chunk_00/Actingclass.jsonl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bef4c92f5e54c48be75c72680fe26a61faaa1b258c5ed9557c5e10f995049ada
+ size 40849293
data/chunk_00/ActualWomen.jsonl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cd80730a137e7204b38402748b034d6711c48dc09e2b8daf150122bc83bd3077
+ size 10940416
data/chunk_00/AdamCarolla.jsonl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:849bc8a082a20fbcae2e34baf6fa7d03fd87546bf395d977208975f5dd1547d3
+ size 336712349
data/chunk_00/Adjuncts.jsonl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:86477bc04cc92097db85c4e6acc0bac3af7878c2251894e0a0be0ad925a335d4
+ size 10784470