|
|
|
# Scrape image URLs from the discussions of a Hugging Face Space and merge
# them, together with their prompts, into a JSON file in a dataset repo.

import json

from bs4 import BeautifulSoup
from huggingface_hub import (
    HfApi,
    get_discussion_details,
    get_repo_discussions,
    hf_hub_download,
)
|
repo_id = "stabilityai/stable-diffusion" |
|
repo_id = "huggingface-projects/diffuse-the-rest" |
|
|
|
# List all discussions of the Space; only the first 10 are scraped.
discussions_list = list(get_repo_discussions(repo_id=repo_id, repo_type="space"))

all_data = []
for i, disc in enumerate(discussions_list[:10]):
    # Re-fetch each discussion to get its full event history.
    details = get_discussion_details(repo_id=repo_id, repo_type="space", discussion_num=disc.num)
    # The first event is the opening comment; its raw markdown (reached via the
    # private `_event` payload) embeds the generated <img> tags.
    page = BeautifulSoup(details.events[0]._event["data"]["latest"]["raw"], "html.parser")
    image_urls = [link.get("src") for link in page.find_all("img")]
|
    data = {
        "discussion_number": i,
        "data": {
            "prompt": details.title,
            "images": image_urls,
        },
    }

    # Keep only discussions whose opening comment actually contains images.
    if image_urls:
        all_data.append(data)
|
# Dataset repo that stores the scraped prompts and image URLs as JSON.
dataset_repo_id = "triple-t/dummy"
|
# One JSON file per scraped Space, e.g. "huggingface-projects_diffuse-the-rest.json".
file_name = "_".join(repo_id.split("/")) + ".json"
api = HfApi()

# Download the current version of the file from the dataset repo.
path = hf_hub_download(
    repo_id=dataset_repo_id,
    filename=file_name,
    cache_dir="/home/patrick_huggingface_co/image_cache",
    repo_type="dataset",
)
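
# Merge the newly scraped entries into the downloaded JSON. This merge step
# is a sketch of the otherwise-missing use of `all_data`: it assumes the
# file holds a JSON list to which new entries are simply appended.
with open(path) as f:
    existing_data = json.load(f)

existing_data.extend(all_data)

# Write the merged result to a fresh local file rather than mutating the
# hf_hub_download cache in place.
with open(file_name, "w") as f:
    json.dump(existing_data, f)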
|
# Push the merged JSON back to the dataset repo.
api.upload_file(
    path_or_fileobj=file_name,
    path_in_repo=file_name,
    repo_id=dataset_repo_id,
    repo_type="dataset",
)
|
|