# danbooru_metadata/update_danbooru.py
# Author: Jannchie — commit 2461fa9: "fix(db): ensure data saved to disk"
import argparse
import queue
import threading
import time
from time import sleep
import requests
from rich.progress import (
BarColumn,
Progress,
TextColumn,
TimeElapsedColumn,
TimeRemainingColumn,
)
from sqlalchemy import text
from db import get_engine, t_posts
# Bounded queues decouple the three stages (URL producer -> fetch workers ->
# DB writer); maxsize=64 applies backpressure so fetching cannot outrun saving.
url_queue = queue.Queue(maxsize=64)
data_queue = queue.Queue(maxsize=64)

# CLI: --start_id resumes from a given post id (0 = resume from the DB max id),
# --api_key / --login authenticate against the Danbooru API,
# --workers sets the number of fetch threads.
parser = argparse.ArgumentParser()
parser.add_argument("--start_id", type=int, default=0)
parser.add_argument("--api_key", type=str)
parser.add_argument("--login", type=str)
parser.add_argument("--workers", type=int, default=16)
args = parser.parse_args()
API_KEY = args.api_key
LOGIN = args.login

# NOTE(review): "page=a{}" presumably means "posts with id above {}" in
# Danbooru's pagination syntax, and limit=200 looks like the API page-size
# cap (the producer advances by 200 per page) — confirm against the API docs.
base_api_url = (
    "https://danbooru.donmai.us/posts.json?page=a{}&limit=200&api_key={}&login={}"
)
latest_id_url = "https://danbooru.donmai.us/posts.json?api_key={}&login={}"
latest_id = 1  # newest known post id; refreshed periodically by the updater thread
# get latest ID
def fetch_latest_id(max_retries=5, backoff_factor=1):
    """Fetch the newest post id from the Danbooru API into the global ``latest_id``.

    Retries up to ``max_retries`` times with exponential backoff
    (``backoff_factor * 2**attempt`` seconds) on HTTP errors, malformed
    payloads, or network exceptions. Leaves ``latest_id`` untouched on failure.
    """
    global latest_id
    attempt = 0
    while attempt < max_retries:
        try:
            # timeout= prevents a hung connection from stalling the updater
            # thread forever (the original call could block indefinitely)
            response = requests.get(
                latest_id_url.format(API_KEY, LOGIN), timeout=30
            )
            if response.status_code == 200:
                data = response.json()
                if data and "id" in data[0]:
                    latest_id = data[0]["id"]
                    return  # got the id; exit the function
                # 200 but unexpected payload shape: log instead of retrying silently
                print("Latest-ID response had an unexpected shape; retrying")
            else:
                print(f"Failed to retrieve latest ID: {response.status_code}")
        except Exception as e:
            print(f"An error occurred while fetching the latest ID: {e}")
        attempt += 1
        sleep_time = backoff_factor * (2**attempt)
        print(f"Retrying in {sleep_time} seconds...")
        time.sleep(sleep_time)
    print(f"Failed to retrieve latest ID after {max_retries} attempts")
def update_latest_id():
    """Daemon loop: refresh the global ``latest_id`` from the API once a minute."""
    while True:
        fetch_latest_id()
        time.sleep(60)
def fetch_data(max_retries=3, retry_delay=2):
    """Worker loop: pull page URLs from ``url_queue``, GET them, push parsed JSON to ``data_queue``.

    A ``None`` sentinel on ``url_queue`` terminates the worker. Each URL is
    retried up to ``max_retries`` times with ``retry_delay`` seconds between
    attempts; a URL that never succeeds is dropped after logging.
    """
    while True:
        url = url_queue.get()  # get url from url_queue
        if url is None:
            # mark the sentinel as processed too, so url_queue.join() can
            # complete (the original broke out without task_done, which
            # would deadlock any joiner)
            url_queue.task_done()
            break
        retry_count = 0
        while retry_count < max_retries:
            try:
                # timeout= keeps a dead connection from blocking this worker forever
                response = requests.get(url, timeout=30)
                if response.status_code == 200:
                    data_queue.put(response.json())
                    break  # successful request; no retry needed
                print(f"Failed to fetch data from {url}: {response.status_code}")
                retry_count += 1
                time.sleep(retry_delay)
            except Exception as e:
                retry_count += 1
                print(
                    f"An error occurred while fetching data from {url}: {e}. Retrying in {retry_delay} seconds..."
                )
                time.sleep(retry_delay)
        url_queue.task_done()
# Start the pool of fetch workers (daemon threads so they die with the process).
threads = []
for _ in range(args.workers):
    thread = threading.Thread(target=fetch_data)
    thread.daemon = True
    thread.start()
    threads.append(thread)

# Start a thread that periodically refreshes the latest post id.
id_update_thread = threading.Thread(target=update_latest_id)
id_update_thread.daemon = True
id_update_thread.start()

# Non-primary-key columns every inserted row must provide (missing keys are
# filled with None by the writer thread).
required_columns = [col.name for col in t_posts.columns if not col.primary_key]
danbooru_engine = get_engine("danbooru_metadata.db")
t_posts.metadata.create_all(danbooru_engine)

# Resume point: the largest post id already stored (falls back to 1 for an
# empty table, since MAX(id) is NULL then).
with danbooru_engine.connect() as conn:
    res = conn.execute(text("SELECT MAX(id) FROM posts"))
    latest_id = res.fetchone()[0] or 1
start_id = args.start_id if args.start_id != 0 else latest_id
print("start_id:", start_id)
def save_data():
    """Consumer loop: take post batches from ``data_queue`` and upsert them into ``posts``.

    Missing columns are filled with ``None`` so every row matches the table
    schema; ``INSERT OR REPLACE`` makes re-fetching the same posts idempotent.
    Commits each batch so the data actually reaches disk.
    """
    while True:
        data = data_queue.get()
        if not data:
            # an empty batch would make the executemany-style insert raise;
            # still acknowledge it so data_queue.join() can complete
            data_queue.task_done()
            continue
        # fill in any columns absent from the API payload (iterate rows in
        # the outer loop so each dict is touched once)
        for d in data:
            for column in required_columns:
                if column not in d:
                    d[column] = None
        with danbooru_engine.connect() as conn:
            conn.execute(t_posts.insert().prefix_with("OR REPLACE"), data)
            conn.commit()  # flush this batch to disk immediately
        data_queue.task_done()
# Single DB-writer thread (SQLite prefers one writer).
save_thread = threading.Thread(target=save_data)
save_thread.daemon = True
save_thread.start()
try:
    with Progress(
        TextColumn("[progress.description]{task.description}"),
        BarColumn(),
        TextColumn("[progress.percentage]{task.percentage:>3.0f}%"),
        TimeElapsedColumn(),
        TimeRemainingColumn(),
        TextColumn("[{task.completed} / {task.total}]"),
    ) as progress:
        # One task spanning start_id..latest_id; the total is revised below
        # as the updater thread advances latest_id.
        task = progress.add_task(
            "Processing", start=True, total=latest_id - start_id + 1
        )
        current_id = start_id
        while True:
            if current_id >= latest_id:
                # Caught up with the newest post: fill the bar and idle until
                # the updater thread moves latest_id forward.
                progress.update(task, completed=latest_id)
                sleep(2)
                continue
            url = base_api_url.format(current_id, API_KEY, LOGIN)
            url_queue.put(url)  # hand the page URL to the fetch workers
            current_id += 200  # each page covers up to 200 post ids
            progress.update(task, completed=current_id, total=latest_id)
            sleep(0.1)
except KeyboardInterrupt:
    print("Stopped by user")
    exit(0)