"""Scraping utilities: browser-like headers with user-agent rotation and throttled HTTP GETs."""

import requests
from bs4 import BeautifulSoup
import os
import json
import gradio as gr
from datasets import Dataset
from PIL import Image
import io
import uuid
import time
import random

DATA_DIR = "/data"
IMAGES_DIR = os.path.join(DATA_DIR, "images")
DATASET_FILE = os.path.join(DATA_DIR, "dataset.json")

# Pool of realistic browser user-agent strings; one is chosen at random
# per request so successive requests do not present an identical fingerprint.
USER_AGENTS = [
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.0.3 Safari/605.1.15",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:89.0) Gecko/20100101 Firefox/89.0",
]


def get_headers(cookies=None):
    """Build a browser-like header dict with a randomly rotated User-Agent.

    Args:
        cookies: Optional raw ``Cookie`` header string; when truthy it is
            attached verbatim under the ``Cookie`` key.

    Returns:
        dict: HTTP request headers.
    """
    browser_headers = {
        "User-Agent": random.choice(USER_AGENTS),
        "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
        "Accept-Language": "en-US,en;q=0.5",
        "Referer": "https://www.google.com/",
        "DNT": "1",
        "Connection": "keep-alive",
        "Upgrade-Insecure-Requests": "1",
    }
    if cookies:
        browser_headers["Cookie"] = cookies
    return browser_headers


def make_request(url, cookies=None):
    """GET *url* with rotated headers, after a short random politeness delay.

    Args:
        url: Target URL.
        cookies: Optional raw ``Cookie`` header string forwarded to
            :func:`get_headers`.

    Returns:
        requests.Response: The response object (10-second timeout).
    """
    time.sleep(random.uniform(1, 3))  # Add a random delay between requests
    return requests.get(url, headers=get_headers(cookies), timeout=10)


def extract_image_url(html_content):
    soup = BeautifulSoup(html_content, 'html.parser')
    # First, try to extract the image URL from the
    # NOTE(review): this function is truncated in the visible chunk — the
    # remainder of its body is not shown here and has been left as-is.