Jannchie committed
Commit 2461fa9
1 Parent(s): c97e27b

fix(db): ensure data saved to disk

Files changed (3)
  1. README.md +1 -1
  2. danbooru_metadata.db +2 -2
  3. update_danbooru.py +4 -8
README.md CHANGED
@@ -10,4 +10,4 @@ Compared to the metadata available at <https://huggingface.co/datasets/nyanko7/d
 
 Here, we have attached both the script for updating metadata and the script for downloading images based on the metadata database.
 
-On my PC, We can download all metadata in about 1 hours.
+Typically, the database becomes the bottleneck. On my PC, We can download all metadata in about 3 hours. A better SSD may speed up the process.
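
The bottleneck note lines up with the commit title, `fix(db): ensure data saved to disk`: posts arrive from the API faster than they can be persisted, so how often the writer commits determines throughput. Below is a minimal sketch of a batched writer loop under that reading, assuming the `get_engine` and `t_posts` helpers from `db.py` and SQLAlchemy Core; the function name, sentinel convention, and batch size are illustrative, not taken from the repository.

```python
import queue

from sqlalchemy import insert

from db import get_engine, t_posts  # same helpers update_danbooru.py imports


def writer_loop(data_queue: queue.Queue, batch_size: int = 200) -> None:
    """Hypothetical consumer: drain parsed posts, commit them in batches."""
    engine = get_engine()  # assumed to need no arguments here
    batch = []
    while True:
        item = data_queue.get()
        if item is None:  # sentinel from the fetchers: flush what's left, stop
            break
        batch.append(item)
        if len(batch) >= batch_size:
            # engine.begin() commits on exit, so every batch is actually
            # written to disk instead of sitting in an open transaction.
            with engine.begin() as conn:
                conn.execute(insert(t_posts), batch)
            batch.clear()
    if batch:
        with engine.begin() as conn:
            conn.execute(insert(t_posts), batch)
```

Committing per batch rather than per row keeps SQLite from fsyncing on every insert, which is exactly where a faster SSD would show up.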
danbooru_metadata.db CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:57f0d40383d45b546f8546c2fad51c6e398c472e92089d2137f4519e76533295
-size 11417427968
+oid sha256:627338de1fe712a22bc10ec1d892ac12dc29de9cafc6bc5c9a4c556421ed3f81
+size 30303068160
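
The `.db` file itself is tracked with Git LFS, so the diff above only touches the pointer file: the object hash changes and the recorded size grows from about 11.4 GB to about 30.3 GB. A pointer in this format can be reproduced for any local file with the standard library alone (a sketch, streaming so a 30 GB database never has to fit in memory):

```python
import hashlib
import os


def lfs_pointer(path: str) -> str:
    # Hash the file in 1 MiB chunks to keep memory use flat.
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return (
        "version https://git-lfs.github.com/spec/v1\n"
        f"oid sha256:{h.hexdigest()}\n"
        f"size {os.path.getsize(path)}\n"
    )


print(lfs_pointer("danbooru_metadata.db"), end="")
```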
update_danbooru.py CHANGED
@@ -1,3 +1,4 @@
+import argparse
 import queue
 import threading
 import time
@@ -12,15 +13,14 @@ from rich.progress import (
     TimeRemainingColumn,
 )
 from sqlalchemy import text
-import argparse
 
 from db import get_engine, t_posts
 
 url_queue = queue.Queue(maxsize=64)
-data_queue = queue.Queue()
+data_queue = queue.Queue(maxsize=64)
 
 parser = argparse.ArgumentParser()
-parser.add_argument("--start_id", type=int, default=1)
+parser.add_argument("--start_id", type=int, default=0)
 parser.add_argument("--api_key", type=str)
 parser.add_argument("--login", type=str)
 parser.add_argument("--workers", type=int, default=16)
@@ -29,9 +29,6 @@ args = parser.parse_args()
 API_KEY = args.api_key
 LOGIN = args.login
 
-
-start_id = args.start_id
-
 base_api_url = (
     "https://danbooru.donmai.us/posts.json?page=a{}&limit=200&api_key={}&login={}"
 )
@@ -123,8 +120,7 @@ t_posts.metadata.create_all(danbooru_engine)
 with danbooru_engine.connect() as conn:
     res = conn.execute(text("SELECT MAX(id) FROM posts"))
     latest_id = res.fetchone()[0] or 1
-
-start_id = args.start_id
+start_id = args.start_id if args.start_id != 0 else latest_id
 print("start_id:", start_id)
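
Read together, the script changes do three things: `argparse` moves to the top of the imports, `data_queue` gains the same `maxsize=64` bound as `url_queue`, and `--start_id` defaults to `0`, which now means "resume from `MAX(id)` in the database". A sketch of how these pieces plausibly fit together, reusing names from the diff; the page-enqueueing loop is not part of the commit and is an assumption:

```python
import queue

API_KEY, LOGIN = "...", "..."  # filled in from argparse in the real script
base_api_url = (
    "https://danbooru.donmai.us/posts.json?page=a{}&limit=200&api_key={}&login={}"
)

# Both queues are now bounded, so fetcher threads block on put() whenever the
# database writer falls behind, instead of buffering results without limit.
url_queue = queue.Queue(maxsize=64)
data_queue = queue.Queue(maxsize=64)


def resolve_start_id(arg_start_id: int, latest_id: int) -> int:
    # New default of 0 means "continue from the newest post already saved";
    # latest_id comes from SELECT MAX(id) FROM posts (falling back to 1).
    return arg_start_id if arg_start_id != 0 else latest_id


def enqueue_pages(start_id: int, stop_id: int) -> None:
    # Assumed producer loop: Danbooru's page=a{id} cursor returns up to
    # limit=200 posts with ids above {id}, so stepping the cursor by 200
    # walks forward from the resume point without skipping any id range.
    next_id = start_id
    while next_id < stop_id:
        url_queue.put(base_api_url.format(next_id, API_KEY, LOGIN))
        next_id += 200
```

The bound on `data_queue` is the piece that matches the commit title: once the database is the slowest stage, backpressure caps how many unsaved rows can pile up in memory between fetching and committing.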