#!/usr/bin/env python3
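"""Collect the URL of every build listed on GrabCraft.

Fetches the site's paginated listing pages concurrently, extracts each
build's "more info" link, and writes the deduplicated URLs to pages.txt.
Pages that still fail after MAX_RETRIES attempts are logged to errors.txt.
"""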

import time
from concurrent.futures import ThreadPoolExecutor, as_completed

import bs4
import requests
from tqdm import tqdm

NUM_PAGES = 422  # number of paginated listing pages to scrape
BASE_URL = 'https://www.grabcraft.com'
MAX_RETRIES = 5
RETRY_DELAY = 2  # seconds

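# Fetch listing page i with retries, returning the build-page URLs found on
# it (an empty list if every attempt fails).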
def fetch_page(i):
  for attempt in range(MAX_RETRIES):
    try:
      # A timeout keeps a hung request from stalling the worker thread.
      r = requests.get(f'{BASE_URL}/minecraft/pg/{i}', timeout=30)
      if r.status_code == 200:
        soup = bs4.BeautifulSoup(r.text, 'html.parser')
        # Strip the last path segment of each "more info" href to get the
        # build's base URL.
        links = soup.find_all('a', attrs={'class': 'button more-info details'})
        return [BASE_URL + '/'.join(a['href'].split('/')[:-1]) for a in links]
      else:
        print(f'Error: {r.status_code} on page {i}')
    except requests.RequestException as e:
      print(f'Exception: {e} on page {i}')
    time.sleep(RETRY_DELAY)
  with open('errors.txt', 'a') as error_file:
    error_file.write(f'Failed to fetch page {i} after {MAX_RETRIES} attempts.\n')
  return []

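# Scrape all listing pages in parallel and write the unique build URLs out.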
if __name__ == '__main__':
  pages = []
  with ThreadPoolExecutor() as executor:
    # Listing pages are numbered 1..NUM_PAGES.
    futures = {executor.submit(fetch_page, i): i for i in range(1, NUM_PAGES + 1)}
    for future in tqdm(as_completed(futures), total=len(futures)):
      pages += future.result()
  with open('pages.txt', 'w') as f:
    f.write('\n'.join(sorted(set(pages))))  # deduplicate; sort for stable output