mattismegevand committed
Commit • a906372
1 Parent(s): e3c76d2
init commit
Files changed:
- .gitignore (+1, -0)
- get_pages.py (+37, -0)
- minecraft_structures.parquet (+3, -0)
- pages.txt (+0, -0)
- parse_pages.py (+65, -0)
- sql_to_parquet.py (+33, -0)
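Read together, the scripts form a three-step pipeline: get_pages.py scrapes structure URLs from grabcraft.com into pages.txt, parse_pages.py downloads each structure's blueprint data into a SQLite database (minecraft_structures.db), and sql_to_parquet.py flattens that database into minecraft_structures.parquet.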
.gitignore ADDED
@@ -0,0 +1 @@
error*
get_pages.py ADDED
@@ -0,0 +1,37 @@
#!/usr/bin/env python3

import bs4
import requests
import time
from tqdm import tqdm
from concurrent.futures import ThreadPoolExecutor, as_completed

NUM_PAGES = 422
BASE_URL = 'https://www.grabcraft.com'
MAX_RETRIES = 5
RETRY_DELAY = 2  # seconds

def fetch_page(i):
    # Fetch one listing page and return the structure URLs it links to.
    for attempt in range(MAX_RETRIES):
        try:
            r = requests.get(f'{BASE_URL}/minecraft/pg/{i}')
            if r.status_code == 200:
                soup = bs4.BeautifulSoup(r.text, 'html.parser')
                # Each listing entry has a details link; drop the trailing
                # path segment to get the structure's base URL.
                return [BASE_URL + '/'.join(a['href'].split('/')[:-1])
                        for a in soup.find_all('a', attrs={'class': 'button more-info details'})]
            else:
                print(f'Error: {r.status_code} on page {i}')
        except requests.RequestException as e:
            print(f'Exception: {e} on page {i}')
        time.sleep(RETRY_DELAY)
    with open('errors.txt', 'a') as error_file:
        error_file.write(f'Failed to fetch page {i} after {MAX_RETRIES} attempts.\n')
    return []

if __name__ == '__main__':
    pages = []
    with ThreadPoolExecutor() as executor:
        futures = {executor.submit(fetch_page, i): i for i in range(NUM_PAGES + 1)}
        for future in tqdm(as_completed(futures), total=len(futures)):
            pages += future.result()
    with open('pages.txt', 'w') as f:
        f.write('\n'.join(set(pages)))
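A quick way to sanity-check the scrape is to read pages.txt back and spot-check a few entries. A minimal sketch, assuming the script above has already run in the current directory:

import itertools

# Count and sample the structure URLs collected by get_pages.py.
with open('pages.txt') as f:
    urls = [line.strip() for line in f if line.strip()]

print(f'{len(urls)} structure URLs collected')  # already deduplicated via set()
print('\n'.join(itertools.islice(urls, 5)))     # eyeball a few entries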
minecraft_structures.parquet ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:91a2f09180db716b37d4a9bdcaa1a17b8ce5fd18bd49e693078ea1fb564a29d4
size 16516703
pages.txt ADDED
The diff for this file is too large to render.
parse_pages.py ADDED
@@ -0,0 +1,65 @@
#!/usr/bin/env python3

import urllib3
from bs4 import BeautifulSoup
import json
import sqlite3
import concurrent.futures
from tqdm import tqdm
import traceback

def process_url(url):
    try:
        structure_name = url.split('/minecraft/')[-1].replace('#blueprints', '')
        http = urllib3.PoolManager()
        r = http.request('GET', url)
        soup = BeautifulSoup(r.data.decode('utf-8'), 'html.parser')

        # The blueprint lives in an external script that assigns to
        # myRenderObject; fetch it and parse everything after the '=' as JSON.
        obj = next(s['src'] for s in soup.find_all('script')
                   if 'src' in s.attrs and 'myRenderObject' in s['src'])
        r = http.request('GET', obj)
        data = r.data.decode('utf-8')
        eq = data.find('=')
        d = json.loads(data[eq+1:])

        # One connection per thread: sqlite3 connections cannot be shared
        # across threads.
        conn = sqlite3.connect('minecraft_structures.db')
        cur = conn.cursor()

        cur.execute(f'''
            CREATE TABLE IF NOT EXISTS "{structure_name}" (
                x INTEGER,
                y INTEGER,
                z INTEGER,
                block_name TEXT
            )
        ''')

        # The JSON is nested as level (y) -> x -> z -> block record.
        for level, blocks in d.items():
            for x, z_block in blocks.items():
                for z, block in z_block.items():
                    cur.execute(f'INSERT INTO "{structure_name}" (x, y, z, block_name) VALUES (?, ?, ?, ?)',
                                (int(x), int(level), int(z), block['name']))

        conn.commit()
        conn.close()
        return None
    except Exception as e:
        return f"Error processing {url}: {str(e)}\n{traceback.format_exc()}"

with open('pages.txt', 'r') as f:
    urls = [line.strip() for line in f if line.strip()]

errors = []
with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor:
    futures = [executor.submit(process_url, url) for url in urls]
    for future in tqdm(concurrent.futures.as_completed(futures), total=len(urls), desc="Processing URLs"):
        result = future.result()
        if result:
            errors.append(result)

if errors:
    with open('errors.log', 'w') as f:
        for error in errors:
            f.write(f"{error}\n")
    print(f"Encountered {len(errors)} errors. Check errors.log for details.")
else:
    print("All URLs processed successfully.")
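Because each structure lands in its own table, a quick way to inspect the result is to list the tables and count rows (one row per block) for a few of them. A minimal sketch, assuming minecraft_structures.db exists in the working directory:

import sqlite3

conn = sqlite3.connect('minecraft_structures.db')
cur = conn.cursor()

cur.execute("SELECT name FROM sqlite_master WHERE type='table'")
tables = [row[0] for row in cur.fetchall()]
print(f'{len(tables)} structures scraped')

# Block count for the first few structures.
for name in tables[:5]:
    cur.execute(f'SELECT COUNT(*) FROM "{name}"')
    print(name, cur.fetchone()[0])

conn.close()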
sql_to_parquet.py ADDED
@@ -0,0 +1,33 @@
#!/usr/bin/env python3

import sqlite3
import pandas as pd
import pyarrow as pa
import pyarrow.parquet as pq

def sqlite_to_parquet(sqlite_file, parquet_file):
    conn = sqlite3.connect(sqlite_file)
    cur = conn.cursor()

    cur.execute("SELECT name FROM sqlite_master WHERE type='table';")
    tables = cur.fetchall()

    df_list = []

    # Flatten the one-table-per-structure layout into a single dataframe,
    # renaming block_name to mat and tagging each row with its structure.
    for table in tables:
        table_name = table[0]
        query = f'SELECT x, y, z, block_name as mat FROM "{table_name}"'
        df = pd.read_sql_query(query, conn)
        df['structure'] = table_name
        df_list.append(df)

    final_df = pd.concat(df_list, ignore_index=True)

    table = pa.Table.from_pandas(final_df)
    pq.write_table(table, parquet_file)

    conn.close()

if __name__ == '__main__':
    sqlite_to_parquet('minecraft_structures.db', 'minecraft_structures.parquet')
    print("Conversion to Parquet completed successfully.")
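The resulting file can be read back with pandas alone (pyarrow, already a dependency here, serves as the engine). A minimal sketch of pulling one structure's blocks out of the combined table; the structure picked is just whichever appears first:

import pandas as pd

df = pd.read_parquet('minecraft_structures.parquet')
print(df.columns.tolist())                 # ['x', 'y', 'z', 'mat', 'structure']
print(df['structure'].nunique(), 'structures')

# Select a single structure by its name tag.
name = df['structure'].iloc[0]
blocks = df[df['structure'] == name]
print(name, len(blocks), 'blocks')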