SWEb-Norwegian / download.py
versae's picture
Add files using upload-large-folder tool
fa2ba3b verified
raw
history blame
3.45 kB
import json
from pathlib import Path
from multiprocessing import Pool, cpu_count
from datasets import load_dataset
from tqdm import tqdm
import argparse
def write_shards(entries, out_dir, shard_size=1000, on_shard=None):
    """Write an iterable of dict entries to JSON-lines shard files.

    Each shard file ``out_dir/NNNN.json`` holds up to ``shard_size``
    documents, one ``json.dumps`` line per document.

    Parameters
    ----------
    entries : iterable of dict
        Documents to serialize.
    out_dir : Path
        Directory the shard files are written into (must exist).
    shard_size : int
        Maximum number of documents per shard file.
    on_shard : callable or None
        Optional zero-argument callback invoked once per shard written
        (used for progress reporting).

    Returns
    -------
    int
        Number of shard files written.
    """
    docs = []
    shard_idx = 0
    for entry in entries:
        # BUGFIX: the original dropped the entry that triggered a flush
        # (append lived in the `else` of the flush check) — always append
        # first, then flush once the shard is full.
        docs.append(json.dumps(entry))
        if len(docs) == shard_size:
            (out_dir / f"{shard_idx:04}.json").write_text("\n".join(docs), encoding="utf-8")
            docs = []
            shard_idx += 1
            if on_shard is not None:
                on_shard()
    # Flush any trailing partial shard.
    if docs:
        (out_dir / f"{shard_idx:04}.json").write_text("\n".join(docs), encoding="utf-8")
        shard_idx += 1
        if on_shard is not None:
            on_shard()
    return shard_idx


def process_config(config):
    """Stream one SWEb configuration and save its Norwegian documents.

    Streams the ``train`` split of ``AI-Sweden-Models/SWEb`` for the given
    config, keeps entries whose ``language`` is ``"no"`` or ``"nn"``, and
    writes them as JSON-lines shards of up to 1000 documents under
    ``data/<config>/``.

    Parameters
    ----------
    config : str
        SWEb dataset configuration name (a crawl snapshot id, e.g. "2013-20").
    """
    ds = load_dataset("AI-Sweden-Models/SWEb", name=config, streaming=True, split="train")
    config_path = Path("data") / config
    config_path.mkdir(parents=True, exist_ok=True)  # creates data/ too (parents=True)
    # Streaming datasets expose no total length, so the bar just counts shards.
    inner_progress = tqdm(desc=f"Processing {config}", position=1, leave=False)
    norwegian = (entry for entry in ds if entry["language"] in ("no", "nn"))
    write_shards(norwegian, config_path, on_shard=lambda: inner_progress.update(1))
    inner_progress.close()
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Process multiple dataset configurations in parallel.")
parser.add_argument(
"--workers",
type=int,
default=cpu_count(),
help="Number of worker processes to use (default: number of CPU cores)."
)
args = parser.parse_args()
configs = [
"2013-20",
"2013-48",
"2014-10",
"2014-15",
"2014-23",
"2014-35",
"2014-41",
"2014-42",
"2014-49",
"2014-52",
"2015-06",
"2015-11",
"2015-14",
"2015-18",
"2015-22",
"2015-27",
"2015-32",
"2015-35",
"2015-40",
"2015-48",
"2016-07",
"2016-18",
"2016-22",
"2016-26",
"2016-30",
"2016-36",
"2016-40",
"2016-44",
"2016-50",
"2017-04",
"2017-09",
"2017-13",
"2017-17",
"2017-22",
"2017-26",
"2017-30",
"2017-34",
"2017-39",
"2017-43",
"2017-47",
"2017-51",
"2018-05",
"2018-09",
"2018-13",
"2018-17",
"2018-22",
"2018-26",
"2018-30",
"2018-34",
"2018-39",
"2018-43",
"2018-47",
"2018-51",
"2019-04",
"2019-09",
"2019-13",
"2019-18",
"2019-22",
"2019-26",
"2019-30",
"2019-35",
"2019-39",
"2019-43",
"2019-47",
"2019-51",
"2020-05",
"2020-10",
"2020-16",
"2020-24",
"2020-29",
"2020-34",
"2020-40",
"2020-45",
"2020-50",
"2021-04",
"2021-10",
"2021-17",
"2021-21",
"2021-25",
"2021-31",
"2021-39",
"2021-43",
"2021-49",
"2022-05",
"2022-21",
"2022-27",
"2022-33",
"2022-40",
"2022-49",
"2023-06",
"2023-14",
"2023-23",
"2023-40",
"2023-50",
"2024-10",
"2024-18",
"2024-22",
"2024-26"
]
# configs = ["config1", "config2", "config3"] # Replace with actual config names
num_workers = min(args.workers, len(configs)) # Use at most one process per config
# Wrap the Pool in tqdm for a progress bar
with Pool(num_workers) as pool:
for _ in tqdm(pool.imap(process_config, configs), total=len(configs), desc="Processing configs", position=0):
pass