Support ControlNet
Tested with lllyasviel/ControlNet-v1-1/control_v11f1p_sd15_depth.pth
The configuration file is derived from the checkpoint name.
- app.py +1 -1
- convert.py +18 -8
app.py

```diff
@@ -24,7 +24,7 @@ demo = gr.Interface(
         gr.Text(max_lines=1, label="your_hf_token"),
         gr.Text(max_lines=1, label="model_id"),
         gr.Text(max_lines=1, label="filename"),
-        gr.Radio(label="Model type", choices=["v1", "v2"]),
+        gr.Radio(label="Model type", choices=["v1", "v2", "ControlNet"]),
         gr.Radio(label="Sample size (px)", choices=[512, 768]),
         gr.Radio(label="Scheduler type", choices=["pndm", "heun", "euler", "dpm", "ddim"], value="dpm"),
         gr.Radio(label="Extract EMA or non-EMA?", choices=["ema", "non-ema"], value="ema"),
```
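The app-side change is just the extra Radio choice. Since gr.Interface passes its input components to the wrapped function positionally, the selected string reaches the backend as the fourth argument. A minimal runnable sketch of that mapping, with an illustrative stub standing in for the Space's real convert:

```python
import gradio as gr

# Sketch of gr.Interface's positional mapping: the fourth input component
# feeds the fourth parameter of fn. This convert() is an illustrative stub,
# not the Space's actual backend.
def convert(token, model_id, filename, model_type):
    return f"model_type received by the backend: {model_type!r}"

demo = gr.Interface(
    fn=convert,
    inputs=[
        gr.Text(max_lines=1, label="your_hf_token"),
        gr.Text(max_lines=1, label="model_id"),
        gr.Text(max_lines=1, label="filename"),
        gr.Radio(label="Model type", choices=["v1", "v2", "ControlNet"]),
    ],
    outputs="text",
)

if __name__ == "__main__":
    demo.launch()
```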
convert.py

```diff
@@ -1,19 +1,19 @@
-import ...
+import gradio as gr
 import requests
-import json
 import os
 import shutil
-from ...
+from pathlib import Path
-from inspect import signature
 from tempfile import TemporaryDirectory
-from typing import ...
+from typing import Optional
 
 import torch
 from io import BytesIO
 
 from huggingface_hub import CommitInfo, Discussion, HfApi, hf_hub_download
 from huggingface_hub.file_download import repo_folder_name
-from diffusers.pipelines.stable_diffusion.convert_from_ckpt import download_from_original_stable_diffusion_ckpt
+from diffusers.pipelines.stable_diffusion.convert_from_ckpt import (
+    download_from_original_stable_diffusion_ckpt, download_controlnet_from_original_ckpt
+)
 from transformers import CONFIG_MAPPING
 
 
@@ -33,15 +33,23 @@ def convert_single(model_id: str, filename: str, model_type: str, sample_size: i
             config_url = "https://raw.githubusercontent.com/Stability-AI/stablediffusion/main/configs/stable-diffusion/v2-inference.yaml"
         else:
             config_url = "https://raw.githubusercontent.com/Stability-AI/stablediffusion/main/configs/stable-diffusion/v2-inference-v.yaml"
+    elif model_type == "ControlNet":
+        config_url = (Path(model_id)/"resolve/main"/filename).with_suffix(".yaml")
+        config_url = "https://huggingface.co/" + str(config_url)
 
     config_file = BytesIO(requests.get(config_url).content)
 
-    pipeline = download_from_original_stable_diffusion_ckpt(ckpt_file, config_file, image_size=sample_size, scheduler_type=scheduler_type, from_safetensors=from_safetensors, extract_ema=extract_ema)
+    if model_type == "ControlNet":
+        pipeline = download_controlnet_from_original_ckpt(ckpt_file, config_file, image_size=sample_size, from_safetensors=from_safetensors, extract_ema=extract_ema)
+        to_args = {"dtype": torch.float16}
+    else:
+        pipeline = download_from_original_stable_diffusion_ckpt(ckpt_file, config_file, image_size=sample_size, scheduler_type=scheduler_type, from_safetensors=from_safetensors, extract_ema=extract_ema)
+        to_args = {"torch_dtype": torch.float16}
 
     pipeline.save_pretrained(folder)
     pipeline.save_pretrained(folder, safe_serialization=True)
 
-    pipeline = pipeline.to(torch_dtype=torch.float16)
+    pipeline = pipeline.to(**to_args)
     pipeline.save_pretrained(folder, variant="fp16")
     pipeline.save_pretrained(folder, safe_serialization=True, variant="fp16")
 
@@ -74,6 +82,8 @@ def convert(token: str, model_id: str, filename: str, model_type: str, sample_si
         new_pr = api.upload_folder(folder_path=folder, path_in_repo="./", repo_id=model_id, repo_type="model", token=token, commit_description=COMMIT_MESSAGE.format(model_id), create_pr=True)
         pr_number = new_pr.split("%2F")[-1].split("/")[0]
         link = f"Pr created at: {'https://huggingface.co/' + os.path.join(model_id, 'discussions', pr_number)}"
+    except Exception as e:
+        raise gr.exceptions.Error(str(e))
     finally:
         shutil.rmtree(folder)
 
```
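Two details worth noting. First, the to_args split reflects the two return types: download_controlnet_from_original_ckpt returns a ControlNetModel (a plain torch module, so .to(dtype=...)), while a DiffusionPipeline's .to() expects torch_dtype. Second, the commit description's "derived from the checkpoint name" means the new elif branch swaps the checkpoint extension for .yaml and resolves it against the same repo. A standalone sketch of that derivation, using the values from the commit message (assumes a POSIX path separator, as on Spaces):

```python
from pathlib import Path

# Values taken from the commit message; the derivation mirrors the new
# elif branch in convert_single.
model_id = "lllyasviel/ControlNet-v1-1"
filename = "control_v11f1p_sd15_depth.pth"

# Swap the checkpoint extension for .yaml and prepend the Hub resolve URL.
config_url = (Path(model_id) / "resolve/main" / filename).with_suffix(".yaml")
config_url = "https://huggingface.co/" + str(config_url)

print(config_url)
# https://huggingface.co/lllyasviel/ControlNet-v1-1/resolve/main/control_v11f1p_sd15_depth.yaml
```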