update documentation
Browse files- README.md +12 -1
- github_preprocessing.py +5 -2
- query.sql +3 -0
README.md
CHANGED
@@ -178,4 +178,15 @@ The dataset was created in two steps:
|
|
178 |
|
179 |
## Considerations for Using the Data
|
180 |
|
181 |
-
The dataset consists of source code from a wide range of repositories. As such, it can potentially include harmful or biased code, as well as sensitive information like passwords or usernames.
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
178 |
|
179 |
## Considerations for Using the Data
|
180 |
|
181 |
+
The dataset consists of source code from a wide range of repositories. As such, it can potentially include harmful or biased code, as well as sensitive information like passwords or usernames.
|
182 |
+
|
183 |
+
## Releases
|
184 |
+
|
185 |
+
### v1.0
|
186 |
+
- Initial release of dataset
|
187 |
+
- The query was executed on _Feb 14, 2022, 12:03:16 PM UTC+1_
|
188 |
+
|
189 |
+
### v1.1
|
190 |
+
- Fix missing Scala/TypeScript
|
191 |
+
- Fix deduplication issue with inconsistent Python `hash`
|
192 |
+
- The query was executed on _Mar 16, 2022, 6:23:39 PM UTC+1_
|
github_preprocessing.py
CHANGED
@@ -27,9 +27,12 @@ args = Namespace(**config)
|
|
27 |
PATTERN = re.compile(r'\s+')
|
28 |
|
29 |
|
|
|
|
|
|
|
30 |
def get_hash(example):
|
31 |
"""Get hash of content field."""
|
32 |
-
return {"hash":
|
33 |
|
34 |
|
35 |
def line_stats(example):
|
@@ -140,4 +143,4 @@ with Pool(16) as p:
|
|
140 |
list(tqdm(p.imap_unordered(save_shard, zip(filenames, shards), chunksize=4), total=num_shards))
|
141 |
print(f"Time to save dataset: {time.time()-t_start:.2f}")
|
142 |
|
143 |
-
# To push to hub run `git add
|
|
|
27 |
PATTERN = re.compile(r'\s+')
|
28 |
|
29 |
|
30 |
+
def hash_func(text):
    """Return the MD5 hex digest of *text* with all whitespace removed.

    Whitespace is stripped first (via the module-level PATTERN regex) so that
    two contents differing only in whitespace hash identically.
    """
    normalized = PATTERN.sub("", text)
    return hashlib.md5(normalized.encode("utf-8")).hexdigest()
|
32 |
+
|
33 |
def get_hash(example):
    """Get hash of content field."""
    content_digest = hash_func(example["content"])
    return {"hash": content_digest}
|
36 |
|
37 |
|
38 |
def line_stats(example):
|
|
|
143 |
list(tqdm(p.imap_unordered(save_shard, zip(filenames, shards), chunksize=4), total=num_shards))
|
144 |
print(f"Time to save dataset: {time.time()-t_start:.2f}")
|
145 |
|
146 |
+
# To push to hub run `git add/commit/push` inside dataset repo folder
|
query.sql
CHANGED
@@ -69,10 +69,13 @@ WHERE
|
|
69 |
OR f.path LIKE '%.rb'
|
70 |
OR f.path LIKE '%.rs'
|
71 |
OR f.path LIKE '%.sql'
|
|
|
72 |
OR f.path LIKE '%.sh'
|
73 |
OR f.path LIKE '%.bash'
|
74 |
OR f.path LIKE '%.command'
|
75 |
OR f.path LIKE '%.zsh'
|
|
|
|
|
76 |
OR f.path LIKE '%.tex'
|
77 |
OR f.path LIKE '%.vb'
|
78 |
OR f.path LIKE '%Dockerfile'
|
|
|
69 |
OR f.path LIKE '%.rb'
|
70 |
OR f.path LIKE '%.rs'
|
71 |
OR f.path LIKE '%.sql'
|
72 |
+
OR f.path LIKE '%.scala'
|
73 |
OR f.path LIKE '%.sh'
|
74 |
OR f.path LIKE '%.bash'
|
75 |
OR f.path LIKE '%.command'
|
76 |
OR f.path LIKE '%.zsh'
|
77 |
+
OR f.path LIKE '%.ts'
|
78 |
+
OR f.path LIKE '%.tsx'
|
79 |
OR f.path LIKE '%.tex'
|
80 |
OR f.path LIKE '%.vb'
|
81 |
OR f.path LIKE '%Dockerfile'
|