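"""Scrape Danbooru post metadata into a local SQLite database.

A pool of worker threads fetches pages of up to 200 posts from the Danbooru
API, a background thread tracks the newest post ID so the crawl can keep up
with new uploads, and a single writer thread upserts rows via SQLAlchemy.
"""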
import argparse
import queue
import threading
import time

import requests
from rich.progress import (
    BarColumn,
    Progress,
    TextColumn,
    TimeElapsedColumn,
    TimeRemainingColumn,
)
from sqlalchemy import text

from db import get_engine, t_posts

# bounded queues connecting the URL producer (main thread), the fetch
# workers, and the database writer; maxsize=64 applies back-pressure
url_queue = queue.Queue(maxsize=64)
data_queue = queue.Queue(maxsize=64)

parser = argparse.ArgumentParser()
parser.add_argument("--start_id", type=int, default=0)
parser.add_argument("--api_key", type=str)
parser.add_argument("--login", type=str)
parser.add_argument("--workers", type=int, default=16)
args = parser.parse_args()

API_KEY = args.api_key
LOGIN = args.login

# page=a{id} asks Danbooru for up to 200 posts with IDs strictly above {id}
base_api_url = (
    "https://danbooru.donmai.us/posts.json?page=a{}&limit=200&api_key={}&login={}"
)
# with no page parameter, posts.json returns the newest posts first
latest_id_url = "https://danbooru.donmai.us/posts.json?api_key={}&login={}"

latest_id = 1  # newest post ID on Danbooru; refreshed by a background thread


# fetch the newest post ID from the API, retrying with exponential backoff
def fetch_latest_id(max_retries=5, backoff_factor=1):
    global latest_id
    attempt = 0

    while attempt < max_retries:
        try:
            response = requests.get(latest_id_url.format(API_KEY, LOGIN), timeout=30)
            if response.status_code == 200:
                data = response.json()
                if data and "id" in data[0]:
                    latest_id = data[0]["id"]
                    return  # exit once the ID has been fetched successfully
            else:
                print(f"Failed to retrieve latest ID: {response.status_code}")
        except Exception as e:
            print(f"An error occurred while fetching the latest ID: {e}")

        attempt += 1
        sleep_time = backoff_factor * (2**attempt)
        print(f"Retrying in {sleep_time} seconds...")
        time.sleep(sleep_time)

    print(f"Failed to retrieve latest ID after {max_retries} attempts")


# keep the global latest_id fresh by re-fetching it once a minute
def update_latest_id():
    while True:
        fetch_latest_id()
        time.sleep(60)


# worker: pull page URLs from url_queue, fetch them, and push the parsed
# JSON onto data_queue; a page is skipped after max_retries failed attempts
def fetch_data(max_retries=3, retry_delay=2):
    while True:
        url = url_queue.get()  # get url from url_queue
        if url is None:
            break

        retry_count = 0
        while retry_count < max_retries:
            try:
                response = requests.get(url, timeout=30)
                if response.status_code == 200:
                    data = response.json()
                    data_queue.put(data)
                    break  # request succeeded; no need to retry
                else:
                    print(f"Failed to fetch data from {url}: {response.status_code}")
                    retry_count += 1
                    time.sleep(retry_delay)

            except Exception as e:
                retry_count += 1
                print(
                    f"An error occurred while fetching data from {url}: {e}. Retrying in {retry_delay} seconds..."
                )
                time.sleep(retry_delay)

        url_queue.task_done()


# spawn fetch worker threads (--workers, default 16)
threads = []
for _ in range(args.workers):
    thread = threading.Thread(target=fetch_data)
    thread.daemon = True
    thread.start()
    threads.append(thread)

# start a background thread that refreshes the latest post ID every minute
id_update_thread = threading.Thread(target=update_latest_id)
id_update_thread.daemon = True
id_update_thread.start()

# every non-primary-key column in the posts table; rows from the API that
# lack one of these are filled with None before insertion
required_columns = [col.name for col in t_posts.columns if not col.primary_key]

danbooru_engine = get_engine("danbooru_metadata.db")
t_posts.metadata.create_all(danbooru_engine)

# resume from the largest post ID already stored in the database; use a
# separate variable so the updater thread's latest_id is not clobbered
with danbooru_engine.connect() as conn:
    res = conn.execute(text("SELECT MAX(id) FROM posts"))
    db_max_id = res.fetchone()[0] or 1
start_id = args.start_id if args.start_id != 0 else db_max_id
print("start_id:", start_id)


def save_data():
    # writer thread: drain parsed pages from data_queue and upsert them
    while True:
        data = data_queue.get()
        # fill in columns missing from the API response so the bulk
        # insert has a value for every mapped column
        for d in data:
            for column in required_columns:
                if column not in d:
                    d[column] = None
        with danbooru_engine.connect() as conn:
            # SQLite "INSERT OR REPLACE" overwrites rows sharing a primary key
            conn.execute(t_posts.insert().prefix_with("OR REPLACE"), data)
            conn.commit()


save_thread = threading.Thread(target=save_data)
save_thread.daemon = True
save_thread.start()


# main producer loop: walk the ID space in pages of 200, enqueueing one page
# URL per step while a progress bar tracks absolute post IDs
try:
    with Progress(
        TextColumn("[progress.description]{task.description}"),
        BarColumn(),
        TextColumn("[progress.percentage]{task.percentage:>3.0f}%"),
        TimeElapsedColumn(),
        TimeRemainingColumn(),
        TextColumn("[{task.completed} / {task.total}]"),
    ) as progress:
        # the bar tracks absolute post IDs: total is the newest known ID,
        # and completion starts at start_id when resuming
        task = progress.add_task(
            "Processing", start=True, total=latest_id, completed=start_id
        )
        current_id = start_id

        while True:
            if current_id >= latest_id:
                progress.update(task, completed=latest_id)  # keep the bar full
                time.sleep(2)  # wait for the updater thread to see new posts
                continue

            url = base_api_url.format(current_id, API_KEY, LOGIN)
            url_queue.put(url)  # hand the page URL to the fetch workers
            current_id += 200
            progress.update(task, completed=current_id, total=latest_id)
            time.sleep(0.1)
except KeyboardInterrupt:
    print("Stopped by user")
    raise SystemExit(0)
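
# Example invocation (the script name and credentials are placeholders, not
# values taken from this file):
#   python fetch_danbooru_posts.py --login YOUR_LOGIN --api_key YOUR_API_KEY \
#       --workers 16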