# yandere2023/scripts/danbooru/danbooru-check.py
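"""Audit a local Danbooru mirror against posts.json: index the files on disk
for the selected id buckets, re-download anything missing or size-mismatched,
then tar each completed bucket and upload it to the nyanko7/danbooru2023
dataset on the Hugging Face Hub."""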
import time, os, json
from tqdm import tqdm
from curl_cffi import requests
import concurrent.futures
from pathlib import Path
import tarfile

# HF_HUB_ENABLE_HF_TRANSFER is read by huggingface_hub at import time,
# so it must be set before the import to take effect.
os.environ["HF_HUB_ENABLE_HF_TRANSFER"] = "1"
from huggingface_hub import HfApi


def main():
    danbooru_path = Path("danbooru")
    # Only audit buckets whose post ids end in these three digits (here: 400).
    selected_range = set('%03d' % i for i in [400])
    notexists, total = 0, 0
    notexistsid = {}
    with open("posts.json", "r") as f:
        # Index the files already on disk for each selected bucket,
        # keyed by post id (the file stem), remembering each file's size.
        bar = tqdm(desc="Indexing files", ascii=True, leave=False)
        cache = {}
        data_size = {}
        for d in selected_range:
            data = []
            fp = '0' + d
            for file_path in (danbooru_path / fp).iterdir():
                if file_path.is_file():
                    data.append(file_path.stem)
                    data_size[file_path.stem] = file_path.stat().st_size
                    bar.update(1)
            cache[fp] = set(data)
bar = tqdm(desc="Checking files", total=6_857_737, ascii=True, leave=False)
for line in f:
post = json.loads(line)
file_id = post['id']
cutoff = str(file_id)[-3:]
if cutoff not in selected_range:
bar.update(1)
continue
cutoff = '0' + cutoff
assert (danbooru_path / cutoff).exists(), f"{(danbooru_path / cutoff)} not exixts"
exists = str(file_id) in cache[cutoff] and (data_size[str(file_id)] == post["file_size"] or int(file_id) < 5_020_995)
total += 1
if not exists and "file_url" in post:
notexists += 1
if cutoff not in notexistsid:
notexistsid[cutoff] = []
notexistsid[cutoff].append((
file_id, cutoff, post["file_url"],
))
# print(post["file_url"])
bar.update(1)
bar.set_postfix_str(f"not exists: {notexists}")
bar.close()
print(f"not exists: {notexists}, total: {total}")
    # Buckets with no missing files can be archived immediately; buckets with
    # missing files are re-downloaded first, then archived once complete.
    with concurrent.futures.ThreadPoolExecutor(max_workers=4) as tar_executor:
        for d in selected_range:
            cut = '0' + d
            if cut not in notexistsid:
                tar_executor.submit(archive_and_upload, Path("danbooru") / cut, cut)
        for key, group in notexistsid.items():
            keybar = tqdm(desc=f"Downloading files in key={key}", total=len(group), position=1, ascii=True, leave=False)
            ok = False
            while not ok:
                # Download the whole group, then verify; repeat until every
                # file with a non-empty URL actually exists on disk.
                with concurrent.futures.ThreadPoolExecutor(max_workers=3) as executor:
                    for file_id, cutoff, file_url in group:
                        executor.submit(download, file_id, cutoff, file_url, keybar)
                ok = True
                for file_id, cutoff, file_url in group:
                    suffix = Path(file_url).suffix
                    if file_url != "" and not Path(f"danbooru/{cutoff}/{file_id}{suffix}").is_file():
                        ok = False
            tar_executor.submit(archive_and_upload, Path(f"danbooru/{key}"), key)
            print(f"Finished download group {key}")
            keybar.close()
def rm_tree(pth: Path):
    # Recursively delete a directory tree (pathlib equivalent of shutil.rmtree).
    for child in pth.iterdir():
        if child.is_file():
            child.unlink()
        else:
            rm_tree(child)
    pth.rmdir()
def archive_and_upload(dirname, name):
    tar_name = Path("danbooru-tars") / f"data-{name}.tar"
    # Check that the directory exists
    if not os.path.isdir(dirname):
        print("The specified directory does not exist.")
        return
    # Create the tar file
    print(f"Creating {tar_name}")
    with tarfile.open(tar_name, "w") as tar:
        for root, dirs, files in os.walk(dirname):
            # Sort files so the archive layout is reproducible across runs
            for file in tqdm(sorted(files), desc=f"Creating {tar_name}", ascii=True):
                full_path = os.path.join(root, file)
                # Add the file at the archive root (flat layout, no directories)
                tar.add(full_path, arcname=file)
    # Remove the original directory after archiving
    rm_tree(dirname)
    print(f"The directory {dirname} has been removed.")
    api = HfApi()
    print(api.upload_file(
        path_or_fileobj=tar_name,
        path_in_repo=f"original/data-{name}.tar",
        repo_id="nyanko7/danbooru2023",
        repo_type="dataset",
    ))
    # Delete the local tar once the upload has gone through
    Path(tar_name).unlink()
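# A minimal sketch (not part of the original pipeline) of fetching one
# uploaded shard back from the Hub; the filename follows path_in_repo above,
# and "data-0400.tar" corresponds to the bucket selected in main():
#
#   from huggingface_hub import hf_hub_download
#   tar_path = hf_hub_download(
#       repo_id="nyanko7/danbooru2023",
#       filename="original/data-0400.tar",
#       repo_type="dataset",
#   )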
def download(idx, cutoff, file_url, bar):
    suffix = Path(file_url).suffix
    max_attempts = 3  # maximum number of download attempts
    for attempt in range(max_attempts):
        try:
            # Impersonate a Chrome TLS fingerprint to avoid bot blocking
            r = requests.get(file_url, impersonate="chrome110", timeout=120)
            if r.status_code == 200:
                with open(f"danbooru/{cutoff}/{idx}{suffix}", "wb") as f:
                    f.write(r.content)
                break  # download succeeded, stop retrying
            else:
                print(f"Attempt {attempt+1} failed to download {file_url}: error {r.status_code}")
        except Exception as e:
            print(f"Attempt {attempt+1} failed to download {file_url}: error {e}")
        time.sleep(1)  # wait one second before the next attempt
        if attempt + 1 == max_attempts:
            print(f"Failed to download {file_url} after {max_attempts} attempts.")
    bar.update(1)
if __name__ == "__main__":
    main()
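# Expected working-directory layout before running, inferred from the paths
# used above (an assumption about the surrounding dataset, not enforced here):
#   posts.json        one JSON object per line, with "id", "file_size", "file_url"
#   danbooru/0400/    local files named <post_id><suffix> for the selected bucket
#   danbooru-tars/    must already exist; tarfile.open() will not create it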