civitai-to-hf / app.py
import requests
import os
import gradio as gr
from huggingface_hub import HfApi, update_repo_visibility
from slugify import slugify
import uuid
from typing import Optional
import json
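# Fetch the model's metadata from the CivitAI REST API, parsing the numeric
# model id out of the page URL (e.g. https://civitai.com/models/<id>/<slug>).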
def get_json_data(url):
api_url = f"https://civitai.com/api/v1/models/{url.split('/')[4]}"
try:
response = requests.get(api_url)
response.raise_for_status()
return response.json()
except requests.exceptions.RequestException as e:
print(f"Error fetching JSON data: {e}")
return None
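# Reject models flagged as NSFW, either at the model level or on any preview
# image whose nsfw level is not "None".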
def check_nsfw(json_data):
if json_data["nsfw"]:
return False
for model_version in json_data["modelVersions"]:
for image in model_version["images"]:
if image["nsfw"] != "None":
return False
return True
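# For SDXL LoRAs, pick the first model version based on "SDXL 1.0" or "SDXL 0.9"
# that has a primary file, and collect the download URLs, trigger words and
# creator info needed for the upload.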
def extract_info(json_data):
if json_data["type"] == "LORA":
for model_version in json_data["modelVersions"]:
if model_version["baseModel"] in ["SDXL 1.0", "SDXL 0.9"]:
for file in model_version["files"]:
if file["primary"]:
info = {
"urls_to_download": [
{"url": file["downloadUrl"], "filename": file["name"], "type": "weightName"},
{"url": model_version["images"][0]["url"], "filename": os.path.basename(model_version["images"][0]["url"]), "type": "imageName"}
],
"id": model_version["id"],
"modelId": model_version["modelId"],
"name": json_data["name"],
"description": json_data["description"],
"trainedWords": model_version["trainedWords"],
"creator": json_data["creator"]["username"]
}
return info
return None
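# Download everything listed in `info` and group the resulting filenames by
# type (LoRA weights vs. preview image).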
def download_files(info, folder="."):
downloaded_files = {
"imageName": [],
"weightName": []
}
for item in info["urls_to_download"]:
download_file(item["url"], item["filename"], folder)
downloaded_files[item["type"]].append(item["filename"])
return downloaded_files
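# Download a single file into `folder`, surfacing HTTP errors as Gradio errors.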
def download_file(url, filename, folder="."):
try:
response = requests.get(url)
response.raise_for_status()
with open(f"{folder}/{filename}", 'wb') as f:
f.write(response.content)
except requests.exceptions.RequestException as e:
raise gr.Error(f"Error downloading file: {e}")
def process_url(url, do_download=True, folder="."):
json_data = get_json_data(url)
if json_data:
if check_nsfw(json_data):
info = extract_info(json_data)
if info:
if(do_download):
downloaded_files = download_files(info, folder)
else:
downloaded_files = []
return info, downloaded_files
else:
raise gr.Error("Only SDXL LoRAs are supported for now")
else:
raise gr.Error("This model has content tagged as unsafe by CivitAI")
else:
raise gr.Error("Something went wrong in fetching CivitAI API")
def create_readme(info, downloaded_files, is_author=True, folder="."):
readme_content = ""
    original_url = f"https://civitai.com/models/{info['modelId']}"
non_author_disclaimer = f'This model was originally uploaded on [CivitAI]({original_url}), by [{info["creator"]}](https://civitai.com/user/{info["creator"]}/models). The information below was provided by the author on CivitAI:'
content = f"""---
license: other
tags:
- text-to-image
- stable-diffusion
- lora
- diffusers
base_model: stabilityai/stable-diffusion-xl-base-1.0
instance_prompt: {info["trainedWords"][0]}
widget:
- text: {info["trainedWords"][0]}
---
# {info["name"]}
{non_author_disclaimer if not is_author else ''}
![Image]({downloaded_files["imageName"][0]})
{info["description"]}
"""
readme_content += content + "\n"
with open(f"{folder}/README.md", "w") as file:
file.write(readme_content)
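# Main upload flow: download the model into a per-request folder, create the
# model card, push it to a private staging repo owned by the account behind
# HUGGING_FACE_HUB_TOKEN, make it public and transfer it to the logged-in user.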
def upload_civit_to_hf(profile: Optional[gr.OAuthProfile], url, progress=gr.Progress(track_tqdm=True)):
    if profile is None or not profile.name:
        raise gr.Error("Are you sure you are logged in?")
folder = str(uuid.uuid4())
os.makedirs(folder, exist_ok=False)
info, downloaded_files = process_url(url, folder=folder)
create_readme(info, downloaded_files, folder=folder)
try:
api = HfApi(token=os.environ["HUGGING_FACE_HUB_TOKEN"])
username = api.whoami()["name"]
slug_name = slugify(info["name"])
repo_id = f"{username}/{profile.preferred_username}-{slug_name}"
api.create_repo(repo_id=repo_id, private=True, exist_ok=True)
api.upload_folder(
folder_path=folder,
repo_id=repo_id,
repo_type="model",
)
api.update_repo_visibility(repo_id=repo_id, private=False)
    except Exception as e:
        raise gr.Error(f"Something went wrong while creating or uploading the repo: {e}")
transfer_repos = gr.load("multimodalart/transfer_repos", hf_token=os.environ["HUGGING_FACE_HUB_TOKEN"], src="spaces")
user_repo_id = f"{profile.preferred_username}/{slug_name}"
response_code = transfer_repos(repo_id, user_repo_id)
i = 0
while response_code != "200":
message = None
if response_code == "409":
if i < 3:
user_repo_id = f"{profile.preferred_username}/{slug_name}-{i}"
response_code = transfer_repos(repo_id, user_repo_id)
i += 1
else:
message = "It seems this model has been uploaded already in your account."
elif response_code == "404":
message = "Something went wrong with the model upload. Try again."
else:
message = f"Unexpected response code: {response_code}."
if message:
api.delete_repo(repo_id=repo_id, repo_type="model")
raise gr.Error(message)
return f'''# Model uploaded to 🤗!
## Access it here [{user_repo_id}](https://huggingface.co/{user_repo_id}) '''
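# Query CivitAI's tRPC user.getCreator endpoint for a creator's public profile,
# using browser-like headers and a session cookie taken from the COOKIE_INFO
# environment variable.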
def get_creator(username):
url = f"https://civitai.com/api/trpc/user.getCreator?input=%7B%22json%22%3A%7B%22username%22%3A%22{username}%22%2C%22authed%22%3Atrue%7D%7D"
headers = {
"authority": "civitai.com",
"accept": "*/*",
"accept-language": "en-BR,en;q=0.9,pt-BR;q=0.8,pt;q=0.7,es-ES;q=0.6,es;q=0.5,de-LI;q=0.4,de;q=0.3,en-GB;q=0.2,en-US;q=0.1,sk;q=0.1",
"content-type": "application/json",
"cookie": f'{os.environ["COOKIE_INFO"]}',
"if-modified-since": "Tue, 22 Aug 2023 07:18:52 GMT",
"referer": f"https://civitai.com/user/{username}/models",
"sec-ch-ua": "\"Not.A/Brand\";v=\"8\", \"Chromium\";v=\"114\", \"Google Chrome\";v=\"114\"",
"sec-ch-ua-mobile": "?0",
"sec-ch-ua-platform": "macOS",
"sec-fetch-dest": "empty",
"sec-fetch-mode": "cors",
"sec-fetch-site": "same-origin",
"user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36"
}
response = requests.get(url, headers=headers)
return json.loads(response.text)
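# Scan the creator's profile links for a Hugging Face URL and return the
# username at the end of it, or None if no such link is set.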
def extract_huggingface_username(username):
data = get_creator(username)
links = data.get('result', {}).get('data', {}).get('json', {}).get('links', [])
for link in links:
url = link.get('url', '')
if url.startswith('https://huggingface.co/') or url.startswith('https://www.huggingface.co/'):
username = url.split('/')[-1]
return username
return None
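# Check that the logged-in Hugging Face user matches the Hugging Face account
# listed on the CivitAI creator's profile, and update the UI accordingly.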
def check_civit_link(profile: Optional[gr.OAuthProfile], url):
info, _ = process_url(url, do_download=False)
hf_username = extract_huggingface_username(info['creator'])
    if not hf_username:
        no_username_text = 'Oops, your CivitAI profile does not seem to include your Hugging Face account. Please visit <a href="https://civitai.com/user/account">https://civitai.com/user/account</a> and add it there<br><img width="60%" src="https://i.imgur.com/hCbo9uL.png" />'
return no_username_text, gr.update(), gr.update(visible=True)
    if profile.preferred_username != hf_username:
        unmatched_username_text = 'Oops, the Hugging Face account in your CivitAI profile seems to be different from the one you are using here. This Space only works for model authors submitting their own models to Hugging Face. If you do own the model, please visit <a href="https://civitai.com/user/account">https://civitai.com/user/account</a> and update it there<br><img src="https://i.imgur.com/hCbo9uL.png" />'
return unmatched_username_text, gr.update(), gr.update(visible=True)
else:
return '', gr.update(interactive=True), gr.update(visible=False)
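# UI helpers: show the upload column only when the user is logged in, and
# reveal the output panel once a submission starts.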
def swap_fill(profile: Optional[gr.OAuthProfile]):
if profile is None:
return gr.update(visible=True), gr.update(visible=False)
else:
return gr.update(visible=False), gr.update(visible=True)
def show_output():
return gr.update(visible=True)
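# Custom CSS: the #login button's label is replaced via an :after pseudo-element
# so it reads as an instruction, and the #disabled_upload column is dimmed and
# made non-interactive until the user logs in.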
css = '''
#login {
font-size: 0px;
width: 100% !important;
margin: 0 auto;
}
#logout {
width: 100% !important;
margin-bottom: 2em;
}
#login:after {
content: 'Authorize this app before uploading your model';
visibility: visible;
display: block;
font-size: var(--button-large-text-size);
}
#login:disabled{
font-size: var(--button-large-text-size);
}
#login:disabled:after{
content:''
}
#disabled_upload{
opacity: 0.5;
pointer-events:none;
}
'''
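# Gradio UI: a disabled placeholder column and the real form are swapped on page
# load depending on login state; the CivitAI URL is checked for authorship on
# every change, and the upload runs only after that check enables the button.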
with gr.Blocks(css=css) as demo:
    gr.Markdown('''# Bring your CivitAI SDXL LoRA to Hugging Face
Get diffusers compatibility, a free GPU-powered Inference Widget, and the possibility to submit to the [LoRA the Explorer](https://huggingface.co/spaces/multimodalart/LoraTheExplorer) Space
    ''')
gr.LoginButton(elem_id="login")
with gr.Column(elem_id="disabled_upload") as disabled_area:
with gr.Row():
submit_source_civit = gr.Textbox(
label="CivitAI model URL",
info="URL of the CivitAI model, for now only SDXL LoRAs are supported",
)
submit_button_civit = gr.Button("Upload model to Hugging Face and submit", interactive=False)
with gr.Column(visible=False) as enabled_area:
with gr.Row():
submit_source_civit = gr.Textbox(
label="CivitAI model URL",
info="URL of the CivitAI model, for now only SDXL LoRAs are supported",
)
instructions = gr.HTML("")
try_again_button = gr.Button("I have added my HF profile to my account", visible=False)
submit_button_civit = gr.Button("Upload model to Hugging Face", interactive=False)
output = gr.Markdown(label="Output progress", visible=False)
demo.load(fn=swap_fill, outputs=[disabled_area, enabled_area])
submit_source_civit.change(fn=check_civit_link, inputs=[submit_source_civit], outputs=[instructions, submit_button_civit, try_again_button])
try_again_button.click(fn=check_civit_link, inputs=[submit_source_civit], outputs=[instructions, submit_button_civit, try_again_button])
submit_button_civit.click(fn=show_output, inputs=[], outputs=[output]).then(fn=upload_civit_to_hf, inputs=[submit_source_civit], outputs=[output])
gr.LogoutButton(elem_id="logout")
demo.queue()
demo.launch()