#!/usr/bin/env python3
import urllib3
from bs4 import BeautifulSoup
import json
import sqlite3
import concurrent.futures
from tqdm import tqdm
import traceback


def process_url(url):
    try:
        # Derive a table name from the URL path, dropping the '#blueprints' fragment.
        structure_name = url.split('/minecraft/')[-1].replace('#blueprints', '')

        # Fetch the page and locate the <script> tag whose src carries the render object.
        http = urllib3.PoolManager()
        r = http.request('GET', url)
        soup = BeautifulSoup(r.data.decode('utf-8'), 'html.parser')
        obj = next(
            s['src'] for s in soup.find_all('script')
            if 'src' in s.attrs and 'myRenderObject' in s['src']
        )

        # The script source is a JS assignment; everything after the first '=' is JSON.
        r = http.request('GET', obj)
        data = r.data.decode('utf-8')
        eq = data.find('=')
        d = json.loads(data[eq + 1:])

        # One table per structure. A generous timeout reduces "database is locked"
        # errors when several worker threads write to the same SQLite file.
        conn = sqlite3.connect('minecraft_structures.db', timeout=30)
        cur = conn.cursor()
        cur.execute(f'''
            CREATE TABLE IF NOT EXISTS "{structure_name}" (
                x INTEGER,
                y INTEGER,
                z INTEGER,
                block_name TEXT
            )
        ''')

        # The JSON is nested as level (y) -> x -> z -> block.
        for level, blocks in d.items():
            for x, z_block in blocks.items():
                for z, block in z_block.items():
                    cur.execute(
                        f'INSERT INTO "{structure_name}" (x, y, z, block_name) VALUES (?, ?, ?, ?)',
                        (int(x), int(level), int(z), block['name'])
                    )
        conn.commit()
        conn.close()
        return None
    except Exception as e:
        return f"Error processing {url}: {str(e)}\n{traceback.format_exc()}"


with open('pages.txt', 'r') as f:
    urls = [line.strip() for line in f if line.strip()]

errors = []
with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor:
    futures = [executor.submit(process_url, url) for url in urls]
    for future in tqdm(concurrent.futures.as_completed(futures), total=len(urls), desc="Processing URLs"):
        result = future.result()
        if result:
            errors.append(result)

if errors:
    with open('errors.log', 'w') as f:
        for error in errors:
            f.write(f"{error}\n")
    print(f"Encountered {len(errors)} errors. Check errors.log for details.")
else:
    print("All URLs processed successfully.")
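
# --- Optional sanity check (a minimal sketch, not part of the scraping logic above). ---
# Summarizes what ended up in the database: one line per structure table with its block
# count. Table names come from whatever URLs were listed in pages.txt, so none are
# assumed here; this only reads sqlite_master and the tables the run just created.
summary = sqlite3.connect('minecraft_structures.db')
for (table,) in summary.execute("SELECT name FROM sqlite_master WHERE type='table'"):
    count = summary.execute(f'SELECT COUNT(*) FROM "{table}"').fetchone()[0]
    print(f"{table}: {count} blocks")
summary.close()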