Upload 1452 files
This view is limited to 50 files because it contains too many changes.
- .gitattributes +2 -0
- ComfyUI/custom_nodes/ComfyUI_InstantID/.github/FUNDING.yml +1 -0
- ComfyUI/custom_nodes/ComfyUI_InstantID/.github/workflows/publish.yml +22 -0
- ComfyUI/custom_nodes/ComfyUI_InstantID/.gitignore +160 -0
- ComfyUI/custom_nodes/ComfyUI_InstantID/CrossAttentionPatch.py +190 -0
- ComfyUI/custom_nodes/ComfyUI_InstantID/InstantID.py +607 -0
- ComfyUI/custom_nodes/ComfyUI_InstantID/LICENSE +201 -0
- ComfyUI/custom_nodes/ComfyUI_InstantID/README.md +138 -0
- ComfyUI/custom_nodes/ComfyUI_InstantID/__init__.py +3 -0
- ComfyUI/custom_nodes/ComfyUI_InstantID/examples/InstantID_IPAdapter.json +861 -0
- ComfyUI/custom_nodes/ComfyUI_InstantID/examples/InstantID_basic.json +657 -0
- ComfyUI/custom_nodes/ComfyUI_InstantID/examples/InstantID_depth.json +881 -0
- ComfyUI/custom_nodes/ComfyUI_InstantID/examples/InstantID_multi_id.json +1364 -0
- ComfyUI/custom_nodes/ComfyUI_InstantID/examples/InstantID_posed.json +704 -0
- ComfyUI/custom_nodes/ComfyUI_InstantID/examples/daydreaming.jpg +0 -0
- ComfyUI/custom_nodes/ComfyUI_InstantID/examples/instant_id_ipadapter.jpg +0 -0
- ComfyUI/custom_nodes/ComfyUI_InstantID/examples/instantid_basic_workflow.jpg +0 -0
- ComfyUI/custom_nodes/ComfyUI_InstantID/examples/instantid_multi_id.jpg +0 -0
- ComfyUI/custom_nodes/ComfyUI_InstantID/pyproject.toml +15 -0
- ComfyUI/custom_nodes/ComfyUI_InstantID/requirements.txt +3 -0
- ComfyUI/custom_nodes/ComfyUI_InstantID/resampler.py +121 -0
- ComfyUI/custom_nodes/ComfyUI_InstantID/utils.py +24 -0
- ComfyUI/custom_nodes/comfyui_controlnet_aux/.github/workflows/publish.yml +21 -0
- ComfyUI/custom_nodes/comfyui_controlnet_aux/.gitignore +183 -0
- ComfyUI/custom_nodes/comfyui_controlnet_aux/LICENSE.txt +201 -0
- ComfyUI/custom_nodes/comfyui_controlnet_aux/README.md +315 -0
- ComfyUI/custom_nodes/comfyui_controlnet_aux/UPDATES.md +40 -0
- ComfyUI/custom_nodes/comfyui_controlnet_aux/__init__.py +151 -0
- ComfyUI/custom_nodes/comfyui_controlnet_aux/ckpts/LiheYoung/Depth-Anything/checkpoints/depth_anything_vitl14.pth +3 -0
- ComfyUI/custom_nodes/comfyui_controlnet_aux/ckpts/lllyasviel/Annotators/.huggingface/.gitignore +1 -0
- ComfyUI/custom_nodes/comfyui_controlnet_aux/ckpts/lllyasviel/Annotators/.huggingface/download/body_pose_model.pth.metadata +3 -0
- ComfyUI/custom_nodes/comfyui_controlnet_aux/ckpts/lllyasviel/Annotators/.huggingface/download/facenet.pth.metadata +3 -0
- ComfyUI/custom_nodes/comfyui_controlnet_aux/ckpts/lllyasviel/Annotators/.huggingface/download/hand_pose_model.pth.metadata +3 -0
- ComfyUI/custom_nodes/comfyui_controlnet_aux/ckpts/lllyasviel/Annotators/body_pose_model.pth +3 -0
- ComfyUI/custom_nodes/comfyui_controlnet_aux/ckpts/lllyasviel/Annotators/facenet.pth +3 -0
- ComfyUI/custom_nodes/comfyui_controlnet_aux/ckpts/lllyasviel/Annotators/hand_pose_model.pth +3 -0
- ComfyUI/custom_nodes/comfyui_controlnet_aux/config.example.yaml +20 -0
- ComfyUI/custom_nodes/comfyui_controlnet_aux/dev_interface.py +6 -0
- ComfyUI/custom_nodes/comfyui_controlnet_aux/examples/example_animal_pose.png +0 -0
- ComfyUI/custom_nodes/comfyui_controlnet_aux/examples/example_anime_face_segmentor.png +0 -0
- ComfyUI/custom_nodes/comfyui_controlnet_aux/examples/example_anyline.png +0 -0
- ComfyUI/custom_nodes/comfyui_controlnet_aux/examples/example_densepose.png +0 -0
- ComfyUI/custom_nodes/comfyui_controlnet_aux/examples/example_depth_anything.png +0 -0
- ComfyUI/custom_nodes/comfyui_controlnet_aux/examples/example_depth_anything_v2.png +0 -0
- ComfyUI/custom_nodes/comfyui_controlnet_aux/examples/example_dsine.png +0 -0
- ComfyUI/custom_nodes/comfyui_controlnet_aux/examples/example_marigold.png +0 -0
- ComfyUI/custom_nodes/comfyui_controlnet_aux/examples/example_marigold_flat.jpg +0 -0
- ComfyUI/custom_nodes/comfyui_controlnet_aux/examples/example_mesh_graphormer.png +3 -0
- ComfyUI/custom_nodes/comfyui_controlnet_aux/examples/example_metric3d.png +0 -0
- ComfyUI/custom_nodes/comfyui_controlnet_aux/examples/example_onnx.png +0 -0
.gitattributes
CHANGED
@@ -38,3 +38,5 @@ ComfyUI/output/ComfyUI_00002_.png filter=lfs diff=lfs merge=lfs -text
 ComfyUI/temp/ComfyUI_temp_zprxs_00001_.png filter=lfs diff=lfs merge=lfs -text
 ComfyUI/temp/ComfyUI_temp_zprxs_00002_.png filter=lfs diff=lfs merge=lfs -text
 ComfyUI/custom_nodes/merge_cross_images/result3.png filter=lfs diff=lfs merge=lfs -text
+ComfyUI/custom_nodes/comfyui_controlnet_aux/examples/example_mesh_graphormer.png filter=lfs diff=lfs merge=lfs -text
+ComfyUI/custom_nodes/comfyui_controlnet_aux/src/controlnet_aux/mesh_graphormer/hand_landmarker.task filter=lfs diff=lfs merge=lfs -text
ComfyUI/custom_nodes/ComfyUI_InstantID/.github/FUNDING.yml
ADDED
@@ -0,0 +1 @@
github: cubiq
ComfyUI/custom_nodes/ComfyUI_InstantID/.github/workflows/publish.yml
ADDED
@@ -0,0 +1,22 @@
name: Publish to Comfy registry
on:
  workflow_dispatch:
  push:
    branches:
      - main
      - master
    paths:
      - "pyproject.toml"

jobs:
  publish-node:
    name: Publish Custom Node to registry
    runs-on: ubuntu-latest
    steps:
      - name: Check out code
        uses: actions/checkout@v4
      - name: Publish Custom Node
        uses: Comfy-Org/publish-node-action@main
        with:
          ## Add your own personal access token to your Github Repository secrets and reference it here.
          personal_access_token: ${{ secrets.REGISTRY_ACCESS_TOKEN }}
ComfyUI/custom_nodes/ComfyUI_InstantID/.gitignore
ADDED
@@ -0,0 +1,160 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
.pybuilder/
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock

# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/#use-with-ide
.pdm.toml

# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# pytype static type analyzer
.pytype/

# Cython debug symbols
cython_debug/

# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
ComfyUI/custom_nodes/ComfyUI_InstantID/CrossAttentionPatch.py
ADDED
@@ -0,0 +1,190 @@
import torch
import math
import torch.nn.functional as F
from comfy.ldm.modules.attention import optimized_attention
from .utils import tensor_to_size

class Attn2Replace:
    def __init__(self, callback=None, **kwargs):
        self.callback = [callback]
        self.kwargs = [kwargs]

    def add(self, callback, **kwargs):
        self.callback.append(callback)
        self.kwargs.append(kwargs)

        for key, value in kwargs.items():
            setattr(self, key, value)

    def __call__(self, q, k, v, extra_options):
        dtype = q.dtype
        out = optimized_attention(q, k, v, extra_options["n_heads"])
        sigma = extra_options["sigmas"].detach().cpu()[0].item() if 'sigmas' in extra_options else 999999999.9

        for i, callback in enumerate(self.callback):
            if sigma <= self.kwargs[i]["sigma_start"] and sigma >= self.kwargs[i]["sigma_end"]:
                out = out + callback(out, q, k, v, extra_options, **self.kwargs[i])

        return out.to(dtype=dtype)

def instantid_attention(out, q, k, v, extra_options, module_key='', ipadapter=None, weight=1.0, cond=None, cond_alt=None, uncond=None, weight_type="linear", mask=None, sigma_start=0.0, sigma_end=1.0, unfold_batch=False, embeds_scaling='V only', **kwargs):
    dtype = q.dtype
    cond_or_uncond = extra_options["cond_or_uncond"]
    block_type = extra_options["block"][0]
    #block_id = extra_options["block"][1]
    t_idx = extra_options["transformer_index"]
    layers = 11 if '101_to_k_ip' in ipadapter.ip_layers.to_kvs else 16
    k_key = module_key + "_to_k_ip"
    v_key = module_key + "_to_v_ip"

    # extra options for AnimateDiff
    ad_params = extra_options['ad_params'] if "ad_params" in extra_options else None

    b = q.shape[0]
    seq_len = q.shape[1]
    batch_prompt = b // len(cond_or_uncond)
    _, _, oh, ow = extra_options["original_shape"]

    if weight_type == 'ease in':
        weight = weight * (0.05 + 0.95 * (1 - t_idx / layers))
    elif weight_type == 'ease out':
        weight = weight * (0.05 + 0.95 * (t_idx / layers))
    elif weight_type == 'ease in-out':
        weight = weight * (0.05 + 0.95 * (1 - abs(t_idx - (layers/2)) / (layers/2)))
    elif weight_type == 'reverse in-out':
        weight = weight * (0.05 + 0.95 * (abs(t_idx - (layers/2)) / (layers/2)))
    elif weight_type == 'weak input' and block_type == 'input':
        weight = weight * 0.2
    elif weight_type == 'weak middle' and block_type == 'middle':
        weight = weight * 0.2
    elif weight_type == 'weak output' and block_type == 'output':
        weight = weight * 0.2
    elif weight_type == 'strong middle' and (block_type == 'input' or block_type == 'output'):
        weight = weight * 0.2
    elif isinstance(weight, dict):
        if t_idx not in weight:
            return 0

        weight = weight[t_idx]

    if cond_alt is not None and t_idx in cond_alt:
        cond = cond_alt[t_idx]
        del cond_alt

    if unfold_batch:
        # Check AnimateDiff context window
        if ad_params is not None and ad_params["sub_idxs"] is not None:
            if isinstance(weight, torch.Tensor):
                weight = tensor_to_size(weight, ad_params["full_length"])
                weight = torch.Tensor(weight[ad_params["sub_idxs"]])
                if torch.all(weight == 0):
                    return 0
                weight = weight.repeat(len(cond_or_uncond), 1, 1) # repeat for cond and uncond
            elif weight == 0:
                return 0

            # if image length matches or exceeds full_length get sub_idx images
            if cond.shape[0] >= ad_params["full_length"]:
                cond = torch.Tensor(cond[ad_params["sub_idxs"]])
                uncond = torch.Tensor(uncond[ad_params["sub_idxs"]])
            # otherwise get sub_idxs images
            else:
                cond = tensor_to_size(cond, ad_params["full_length"])
                uncond = tensor_to_size(uncond, ad_params["full_length"])
                cond = cond[ad_params["sub_idxs"]]
                uncond = uncond[ad_params["sub_idxs"]]
        else:
            if isinstance(weight, torch.Tensor):
                weight = tensor_to_size(weight, batch_prompt)
                if torch.all(weight == 0):
                    return 0
                weight = weight.repeat(len(cond_or_uncond), 1, 1) # repeat for cond and uncond
            elif weight == 0:
                return 0

            cond = tensor_to_size(cond, batch_prompt)
            uncond = tensor_to_size(uncond, batch_prompt)

        k_cond = ipadapter.ip_layers.to_kvs[k_key](cond)
        k_uncond = ipadapter.ip_layers.to_kvs[k_key](uncond)
        v_cond = ipadapter.ip_layers.to_kvs[v_key](cond)
        v_uncond = ipadapter.ip_layers.to_kvs[v_key](uncond)
    else:
        # TODO: should we always convert the weights to a tensor?
        if isinstance(weight, torch.Tensor):
            weight = tensor_to_size(weight, batch_prompt)
            if torch.all(weight == 0):
                return 0
            weight = weight.repeat(len(cond_or_uncond), 1, 1) # repeat for cond and uncond
        elif weight == 0:
            return 0

        k_cond = ipadapter.ip_layers.to_kvs[k_key](cond).repeat(batch_prompt, 1, 1)
        k_uncond = ipadapter.ip_layers.to_kvs[k_key](uncond).repeat(batch_prompt, 1, 1)
        v_cond = ipadapter.ip_layers.to_kvs[v_key](cond).repeat(batch_prompt, 1, 1)
        v_uncond = ipadapter.ip_layers.to_kvs[v_key](uncond).repeat(batch_prompt, 1, 1)

    ip_k = torch.cat([(k_cond, k_uncond)[i] for i in cond_or_uncond], dim=0)
    ip_v = torch.cat([(v_cond, v_uncond)[i] for i in cond_or_uncond], dim=0)

    if embeds_scaling == 'K+mean(V) w/ C penalty':
        scaling = float(ip_k.shape[2]) / 1280.0
        weight = weight * scaling
        ip_k = ip_k * weight
        ip_v_mean = torch.mean(ip_v, dim=1, keepdim=True)
        ip_v = (ip_v - ip_v_mean) + ip_v_mean * weight
        out_ip = optimized_attention(q, ip_k, ip_v, extra_options["n_heads"])
        del ip_v_mean
    elif embeds_scaling == 'K+V w/ C penalty':
        scaling = float(ip_k.shape[2]) / 1280.0
        weight = weight * scaling
        ip_k = ip_k * weight
        ip_v = ip_v * weight
        out_ip = optimized_attention(q, ip_k, ip_v, extra_options["n_heads"])
    elif embeds_scaling == 'K+V':
        ip_k = ip_k * weight
        ip_v = ip_v * weight
        out_ip = optimized_attention(q, ip_k, ip_v, extra_options["n_heads"])
    else:
        #ip_v = ip_v * weight
        out_ip = optimized_attention(q, ip_k, ip_v, extra_options["n_heads"])
        out_ip = out_ip * weight # I'm doing this to get the same results as before

    if mask is not None:
        mask_h = oh / math.sqrt(oh * ow / seq_len)
        mask_h = int(mask_h) + int((seq_len % int(mask_h)) != 0)
        mask_w = seq_len // mask_h

        # check if using AnimateDiff and sliding context window
        if (mask.shape[0] > 1 and ad_params is not None and ad_params["sub_idxs"] is not None):
            # if mask length matches or exceeds full_length, get sub_idx masks
            if mask.shape[0] >= ad_params["full_length"]:
                mask = torch.Tensor(mask[ad_params["sub_idxs"]])
                mask = F.interpolate(mask.unsqueeze(1), size=(mask_h, mask_w), mode="bilinear").squeeze(1)
            else:
                mask = F.interpolate(mask.unsqueeze(1), size=(mask_h, mask_w), mode="bilinear").squeeze(1)
                mask = tensor_to_size(mask, ad_params["full_length"])
                mask = mask[ad_params["sub_idxs"]]
        else:
            mask = F.interpolate(mask.unsqueeze(1), size=(mask_h, mask_w), mode="bilinear").squeeze(1)
            mask = tensor_to_size(mask, batch_prompt)

        mask = mask.repeat(len(cond_or_uncond), 1, 1)
        mask = mask.view(mask.shape[0], -1, 1).repeat(1, 1, out.shape[2])

        # covers cases where extreme aspect ratios can cause the mask to have a wrong size
        mask_len = mask_h * mask_w
        if mask_len < seq_len:
            pad_len = seq_len - mask_len
            pad1 = pad_len // 2
            pad2 = pad_len - pad1
            mask = F.pad(mask, (0, 0, pad1, pad2), value=0.0)
        elif mask_len > seq_len:
            crop_start = (mask_len - seq_len) // 2
            mask = mask[:, crop_start:crop_start+seq_len, :]

        out_ip = out_ip * mask

    #out = out + out_ip

    return out_ip.to(dtype=dtype)
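Attn2Replace only adds a callback's contribution while the sampler's current sigma sits inside that callback's [sigma_end, sigma_start] window; outside the window the plain attention output passes through untouched. A minimal standalone sketch of that gating idea, using a dummy callback and plain tensors in place of ComfyUI's optimized_attention and extra_options:

```python
import torch

def gated_sum(base, callbacks, kwargs_list, sigma):
    # mirrors Attn2Replace.__call__: start from the regular attention output,
    # then add each callback's result only while sigma is inside its window
    out = base
    for cb, kw in zip(callbacks, kwargs_list):
        if kw["sigma_end"] <= sigma <= kw["sigma_start"]:
            out = out + cb(out, **kw)
    return out

def dummy_cb(out, weight=1.0, **kw):
    # stand-in for instantid_attention: just add a scaled constant
    return weight * torch.ones_like(out)

base = torch.zeros(1, 4)
kwargs_list = [{"weight": 0.5, "sigma_start": 14.6, "sigma_end": 2.0}]

print(gated_sum(base, [dummy_cb], kwargs_list, sigma=10.0))  # inside window: callback applied
print(gated_sum(base, [dummy_cb], kwargs_list, sigma=1.0))   # outside window: base passes through
```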
ComfyUI/custom_nodes/ComfyUI_InstantID/InstantID.py
ADDED
@@ -0,0 +1,607 @@
import torch
import os
import comfy.utils
import folder_paths
import numpy as np
import math
import cv2
import PIL.Image
from .resampler import Resampler
from .CrossAttentionPatch import Attn2Replace, instantid_attention
from .utils import tensor_to_image

from insightface.app import FaceAnalysis

try:
    import torchvision.transforms.v2 as T
except ImportError:
    import torchvision.transforms as T

import torch.nn.functional as F

MODELS_DIR = os.path.join(folder_paths.models_dir, "instantid")
if "instantid" not in folder_paths.folder_names_and_paths:
    current_paths = [MODELS_DIR]
else:
    current_paths, _ = folder_paths.folder_names_and_paths["instantid"]
folder_paths.folder_names_and_paths["instantid"] = (current_paths, folder_paths.supported_pt_extensions)

INSIGHTFACE_DIR = os.path.join(folder_paths.models_dir, "insightface")

def draw_kps(image_pil, kps, color_list=[(255,0,0), (0,255,0), (0,0,255), (255,255,0), (255,0,255)]):
    stickwidth = 4
    limbSeq = np.array([[0, 2], [1, 2], [3, 2], [4, 2]])
    kps = np.array(kps)

    h, w, _ = image_pil.shape
    out_img = np.zeros([h, w, 3])

    for i in range(len(limbSeq)):
        index = limbSeq[i]
        color = color_list[index[0]]

        x = kps[index][:, 0]
        y = kps[index][:, 1]
        length = ((x[0] - x[1]) ** 2 + (y[0] - y[1]) ** 2) ** 0.5
        angle = math.degrees(math.atan2(y[0] - y[1], x[0] - x[1]))
        polygon = cv2.ellipse2Poly((int(np.mean(x)), int(np.mean(y))), (int(length / 2), stickwidth), int(angle), 0, 360, 1)
        out_img = cv2.fillConvexPoly(out_img.copy(), polygon, color)
    out_img = (out_img * 0.6).astype(np.uint8)

    for idx_kp, kp in enumerate(kps):
        color = color_list[idx_kp]
        x, y = kp
        out_img = cv2.circle(out_img.copy(), (int(x), int(y)), 10, color, -1)

    out_img_pil = PIL.Image.fromarray(out_img.astype(np.uint8))
    return out_img_pil
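A quick way to preview what draw_kps produces, assuming the function above is in scope (despite the image_pil name it expects an HxWx3 numpy array and returns a PIL image; the five keypoints here are made up, roughly eyes, nose, and mouth corners, and the output filename is only illustrative):

```python
import numpy as np

canvas = np.zeros((512, 512, 3), dtype=np.uint8)                      # blank 512x512 canvas
kps = [(190, 200), (320, 200), (256, 280), (205, 350), (305, 350)]    # synthetic keypoints

kps_image = draw_kps(canvas, kps)   # PIL.Image with the colored limb/keypoint overlay
kps_image.save("kps_preview.png")   # hypothetical output path, for visual inspection only
```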
class InstantID(torch.nn.Module):
    def __init__(self, instantid_model, cross_attention_dim=1280, output_cross_attention_dim=1024, clip_embeddings_dim=512, clip_extra_context_tokens=16):
        super().__init__()

        self.clip_embeddings_dim = clip_embeddings_dim
        self.cross_attention_dim = cross_attention_dim
        self.output_cross_attention_dim = output_cross_attention_dim
        self.clip_extra_context_tokens = clip_extra_context_tokens

        self.image_proj_model = self.init_proj()

        self.image_proj_model.load_state_dict(instantid_model["image_proj"])
        self.ip_layers = To_KV(instantid_model["ip_adapter"])

    def init_proj(self):
        image_proj_model = Resampler(
            dim=self.cross_attention_dim,
            depth=4,
            dim_head=64,
            heads=20,
            num_queries=self.clip_extra_context_tokens,
            embedding_dim=self.clip_embeddings_dim,
            output_dim=self.output_cross_attention_dim,
            ff_mult=4
        )
        return image_proj_model

    @torch.inference_mode()
    def get_image_embeds(self, clip_embed, clip_embed_zeroed):
        #image_prompt_embeds = clip_embed.clone().detach()
        image_prompt_embeds = self.image_proj_model(clip_embed)
        #uncond_image_prompt_embeds = clip_embed_zeroed.clone().detach()
        uncond_image_prompt_embeds = self.image_proj_model(clip_embed_zeroed)

        return image_prompt_embeds, uncond_image_prompt_embeds

class ImageProjModel(torch.nn.Module):
    def __init__(self, cross_attention_dim=1024, clip_embeddings_dim=1024, clip_extra_context_tokens=4):
        super().__init__()

        self.cross_attention_dim = cross_attention_dim
        self.clip_extra_context_tokens = clip_extra_context_tokens
        self.proj = torch.nn.Linear(clip_embeddings_dim, self.clip_extra_context_tokens * cross_attention_dim)
        self.norm = torch.nn.LayerNorm(cross_attention_dim)

    def forward(self, image_embeds):
        embeds = image_embeds
        clip_extra_context_tokens = self.proj(embeds).reshape(-1, self.clip_extra_context_tokens, self.cross_attention_dim)
        clip_extra_context_tokens = self.norm(clip_extra_context_tokens)
        return clip_extra_context_tokens

class To_KV(torch.nn.Module):
    def __init__(self, state_dict):
        super().__init__()

        self.to_kvs = torch.nn.ModuleDict()
        for key, value in state_dict.items():
            k = key.replace(".weight", "").replace(".", "_")
            self.to_kvs[k] = torch.nn.Linear(value.shape[1], value.shape[0], bias=False)
            self.to_kvs[k].weight.data = value
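To_KV turns each "<n>.to_k_ip.weight" / "<n>.to_v_ip.weight" entry of the ip_adapter state dict into a bias-free Linear layer keyed by the flattened name (dots replaced by underscores), which is what instantid_attention later looks up via module_key + "_to_k_ip" / "_to_v_ip". A standalone sketch of that mapping with synthetic shapes (the real checkpoint dimensions may differ):

```python
import torch

# synthetic ip_adapter weights: 2048-dim face tokens projected to 640-dim keys/values
fake_state_dict = {
    "1.to_k_ip.weight": torch.randn(640, 2048),
    "1.to_v_ip.weight": torch.randn(640, 2048),
}

to_kvs = torch.nn.ModuleDict()
for key, value in fake_state_dict.items():
    k = key.replace(".weight", "").replace(".", "_")   # "1.to_k_ip.weight" -> "1_to_k_ip"
    to_kvs[k] = torch.nn.Linear(value.shape[1], value.shape[0], bias=False)
    to_kvs[k].weight.data = value

tokens = torch.randn(1, 16, 2048)            # 16 projected face tokens
print(sorted(to_kvs.keys()))                 # ['1_to_k_ip', '1_to_v_ip']
print(to_kvs["1_to_k_ip"](tokens).shape)     # torch.Size([1, 16, 640])
```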
def _set_model_patch_replace(model, patch_kwargs, key):
    to = model.model_options["transformer_options"].copy()
    if "patches_replace" not in to:
        to["patches_replace"] = {}
    else:
        to["patches_replace"] = to["patches_replace"].copy()

    if "attn2" not in to["patches_replace"]:
        to["patches_replace"]["attn2"] = {}
    else:
        to["patches_replace"]["attn2"] = to["patches_replace"]["attn2"].copy()

    if key not in to["patches_replace"]["attn2"]:
        to["patches_replace"]["attn2"][key] = Attn2Replace(instantid_attention, **patch_kwargs)
        model.model_options["transformer_options"] = to
    else:
        to["patches_replace"]["attn2"][key].add(instantid_attention, **patch_kwargs)

class InstantIDModelLoader:
    @classmethod
    def INPUT_TYPES(s):
        return {"required": { "instantid_file": (folder_paths.get_filename_list("instantid"), )}}

    RETURN_TYPES = ("INSTANTID",)
    FUNCTION = "load_model"
    CATEGORY = "InstantID"

    def load_model(self, instantid_file):
        ckpt_path = folder_paths.get_full_path("instantid", instantid_file)

        model = comfy.utils.load_torch_file(ckpt_path, safe_load=True)

        if ckpt_path.lower().endswith(".safetensors"):
            st_model = {"image_proj": {}, "ip_adapter": {}}
            for key in model.keys():
                if key.startswith("image_proj."):
                    st_model["image_proj"][key.replace("image_proj.", "")] = model[key]
                elif key.startswith("ip_adapter."):
                    st_model["ip_adapter"][key.replace("ip_adapter.", "")] = model[key]
            model = st_model

        model = InstantID(
            model,
            cross_attention_dim=1280,
            output_cross_attention_dim=model["ip_adapter"]["1.to_k_ip.weight"].shape[1],
            clip_embeddings_dim=512,
            clip_extra_context_tokens=16,
        )

        return (model,)

def extractFeatures(insightface, image, extract_kps=False):
    face_img = tensor_to_image(image)
    out = []

    insightface.det_model.input_size = (640,640) # reset the detection size

    for i in range(face_img.shape[0]):
        for size in [(size, size) for size in range(640, 128, -64)]:
            insightface.det_model.input_size = size # TODO: hacky but seems to be working
            face = insightface.get(face_img[i])
            if face:
                face = sorted(face, key=lambda x:(x['bbox'][2]-x['bbox'][0])*(x['bbox'][3]-x['bbox'][1]))[-1]

                if extract_kps:
                    out.append(draw_kps(face_img[i], face['kps']))
                else:
                    out.append(torch.from_numpy(face['embedding']).unsqueeze(0))

                if 640 not in size:
                    print(f"\033[33mINFO: InsightFace detection resolution lowered to {size}.\033[0m")
                break

    if out:
        if extract_kps:
            out = torch.stack(T.ToTensor()(out), dim=0).permute([0,2,3,1])
        else:
            out = torch.stack(out, dim=0)
    else:
        out = None

    return out
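extractFeatures retries detection at progressively smaller input sizes until InsightFace finds a face; the ladder of sizes it walks comes straight from range(640, 128, -64):

```python
# detection sizes tried by extractFeatures, largest first
print([(s, s) for s in range(640, 128, -64)])
# [(640, 640), (576, 576), (512, 512), (448, 448), (384, 384), (320, 320), (256, 256), (192, 192)]
```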
class InstantIDFaceAnalysis:
    @classmethod
    def INPUT_TYPES(s):
        return {
            "required": {
                "provider": (["CPU", "CUDA", "ROCM"], ),
            },
        }

    RETURN_TYPES = ("FACEANALYSIS",)
    FUNCTION = "load_insight_face"
    CATEGORY = "InstantID"

    def load_insight_face(self, provider):
        model = FaceAnalysis(name="antelopev2", root=INSIGHTFACE_DIR, providers=[provider + 'ExecutionProvider',]) # alternative to buffalo_l
        model.prepare(ctx_id=0, det_size=(640, 640))

        return (model,)

class FaceKeypointsPreprocessor:
    @classmethod
    def INPUT_TYPES(s):
        return {
            "required": {
                "faceanalysis": ("FACEANALYSIS", ),
                "image": ("IMAGE", ),
            },
        }
    RETURN_TYPES = ("IMAGE",)
    FUNCTION = "preprocess_image"
    CATEGORY = "InstantID"

    def preprocess_image(self, faceanalysis, image):
        face_kps = extractFeatures(faceanalysis, image, extract_kps=True)

        if face_kps is None:
            face_kps = torch.zeros_like(image)
            print(f"\033[33mWARNING: no face detected, unable to extract the keypoints!\033[0m")
            #raise Exception('Face Keypoints Image: No face detected.')

        return (face_kps,)

def add_noise(image, factor):
    seed = int(torch.sum(image).item()) % 1000000007
    torch.manual_seed(seed)
    mask = (torch.rand_like(image) < factor).float()
    noise = torch.rand_like(image)
    noise = torch.zeros_like(image) * (1-mask) + noise * mask

    return factor*noise

class ApplyInstantID:
    @classmethod
    def INPUT_TYPES(s):
        return {
            "required": {
                "instantid": ("INSTANTID", ),
                "insightface": ("FACEANALYSIS", ),
                "control_net": ("CONTROL_NET", ),
                "image": ("IMAGE", ),
                "model": ("MODEL", ),
                "positive": ("CONDITIONING", ),
                "negative": ("CONDITIONING", ),
                "weight": ("FLOAT", {"default": .8, "min": 0.0, "max": 5.0, "step": 0.01, }),
                "start_at": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001, }),
                "end_at": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001, }),
            },
            "optional": {
                "image_kps": ("IMAGE",),
                "mask": ("MASK",),
            }
        }

    RETURN_TYPES = ("MODEL", "CONDITIONING", "CONDITIONING",)
    RETURN_NAMES = ("MODEL", "positive", "negative", )
    FUNCTION = "apply_instantid"
    CATEGORY = "InstantID"

    def apply_instantid(self, instantid, insightface, control_net, image, model, positive, negative, start_at, end_at, weight=.8, ip_weight=None, cn_strength=None, noise=0.35, image_kps=None, mask=None, combine_embeds='average'):
        self.dtype = torch.float16 if comfy.model_management.should_use_fp16() else torch.float32
        self.device = comfy.model_management.get_torch_device()

        ip_weight = weight if ip_weight is None else ip_weight
        cn_strength = weight if cn_strength is None else cn_strength

        face_embed = extractFeatures(insightface, image)
        if face_embed is None:
            raise Exception('Reference Image: No face detected.')

        # if no keypoints image is provided, use the image itself (only the first one in the batch)
        face_kps = extractFeatures(insightface, image_kps if image_kps is not None else image[0].unsqueeze(0), extract_kps=True)

        if face_kps is None:
            face_kps = torch.zeros_like(image) if image_kps is None else image_kps
            print(f"\033[33mWARNING: No face detected in the keypoints image!\033[0m")

        clip_embed = face_embed
        # InstantID works better with averaged embeds (TODO: needs testing)
        if clip_embed.shape[0] > 1:
            if combine_embeds == 'average':
                clip_embed = torch.mean(clip_embed, dim=0).unsqueeze(0)
            elif combine_embeds == 'norm average':
                clip_embed = torch.mean(clip_embed / torch.norm(clip_embed, dim=0, keepdim=True), dim=0).unsqueeze(0)

        if noise > 0:
            seed = int(torch.sum(clip_embed).item()) % 1000000007
            torch.manual_seed(seed)
            clip_embed_zeroed = noise * torch.rand_like(clip_embed)
            #clip_embed_zeroed = add_noise(clip_embed, noise)
        else:
            clip_embed_zeroed = torch.zeros_like(clip_embed)

        # 1: patch the attention
        self.instantid = instantid
        self.instantid.to(self.device, dtype=self.dtype)

        image_prompt_embeds, uncond_image_prompt_embeds = self.instantid.get_image_embeds(clip_embed.to(self.device, dtype=self.dtype), clip_embed_zeroed.to(self.device, dtype=self.dtype))

        image_prompt_embeds = image_prompt_embeds.to(self.device, dtype=self.dtype)
        uncond_image_prompt_embeds = uncond_image_prompt_embeds.to(self.device, dtype=self.dtype)

        work_model = model.clone()

        sigma_start = model.get_model_object("model_sampling").percent_to_sigma(start_at)
        sigma_end = model.get_model_object("model_sampling").percent_to_sigma(end_at)

        if mask is not None:
            mask = mask.to(self.device)

        patch_kwargs = {
            "ipadapter": self.instantid,
            "weight": ip_weight,
            "cond": image_prompt_embeds,
            "uncond": uncond_image_prompt_embeds,
            "mask": mask,
            "sigma_start": sigma_start,
            "sigma_end": sigma_end,
        }

        number = 0
        for id in [4,5,7,8]: # id of input_blocks that have cross attention
            block_indices = range(2) if id in [4, 5] else range(10) # transformer_depth
            for index in block_indices:
                patch_kwargs["module_key"] = str(number*2+1)
                _set_model_patch_replace(work_model, patch_kwargs, ("input", id, index))
                number += 1
        for id in range(6): # id of output_blocks that have cross attention
            block_indices = range(2) if id in [3, 4, 5] else range(10) # transformer_depth
            for index in block_indices:
                patch_kwargs["module_key"] = str(number*2+1)
                _set_model_patch_replace(work_model, patch_kwargs, ("output", id, index))
                number += 1
        for index in range(10):
            patch_kwargs["module_key"] = str(number*2+1)
            _set_model_patch_replace(work_model, patch_kwargs, ("middle", 0, index))
            number += 1

        # 2: do the ControlNet
        if mask is not None and len(mask.shape) < 3:
            mask = mask.unsqueeze(0)

        cnets = {}
        cond_uncond = []

        is_cond = True
        for conditioning in [positive, negative]:
            c = []
            for t in conditioning:
                d = t[1].copy()

                prev_cnet = d.get('control', None)
                if prev_cnet in cnets:
                    c_net = cnets[prev_cnet]
                else:
                    c_net = control_net.copy().set_cond_hint(face_kps.movedim(-1,1), cn_strength, (start_at, end_at))
                    c_net.set_previous_controlnet(prev_cnet)
                    cnets[prev_cnet] = c_net

                d['control'] = c_net
                d['control_apply_to_uncond'] = False
                d['cross_attn_controlnet'] = image_prompt_embeds.to(comfy.model_management.intermediate_device()) if is_cond else uncond_image_prompt_embeds.to(comfy.model_management.intermediate_device())

                if mask is not None and is_cond:
                    d['mask'] = mask
                    d['set_area_to_bounds'] = False

                n = [t[0], d]
                c.append(n)
            cond_uncond.append(c)
            is_cond = False

        return(work_model, cond_uncond[0], cond_uncond[1], )
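When the reference batch contains several faces, apply_instantid collapses the per-image embeddings into a single conditioning vector before projection. A standalone sketch of the two averaging modes used above, with random stand-in embeddings of the 512-dim InsightFace size:

```python
import torch

clip_embed = torch.randn(3, 1, 512)   # three stacked face embeddings, as extractFeatures returns them

# 'average': plain mean over the batch dimension
avg = torch.mean(clip_embed, dim=0).unsqueeze(0)

# 'norm average': normalize across the batch dimension first, then average
norm_avg = torch.mean(clip_embed / torch.norm(clip_embed, dim=0, keepdim=True), dim=0).unsqueeze(0)

print(avg.shape, norm_avg.shape)      # both torch.Size([1, 1, 512])
```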
class ApplyInstantIDAdvanced(ApplyInstantID):
    @classmethod
    def INPUT_TYPES(s):
        return {
            "required": {
                "instantid": ("INSTANTID", ),
                "insightface": ("FACEANALYSIS", ),
                "control_net": ("CONTROL_NET", ),
                "image": ("IMAGE", ),
                "model": ("MODEL", ),
                "positive": ("CONDITIONING", ),
                "negative": ("CONDITIONING", ),
                "ip_weight": ("FLOAT", {"default": .8, "min": 0.0, "max": 3.0, "step": 0.01, }),
                "cn_strength": ("FLOAT", {"default": .8, "min": 0.0, "max": 10.0, "step": 0.01, }),
                "start_at": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001, }),
                "end_at": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001, }),
                "noise": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.1, }),
                "combine_embeds": (['average', 'norm average', 'concat'], {"default": 'average'}),
            },
            "optional": {
                "image_kps": ("IMAGE",),
                "mask": ("MASK",),
            }
        }

class InstantIDAttentionPatch:
    @classmethod
    def INPUT_TYPES(s):
        return {
            "required": {
                "instantid": ("INSTANTID", ),
                "insightface": ("FACEANALYSIS", ),
                "image": ("IMAGE", ),
                "model": ("MODEL", ),
                "weight": ("FLOAT", {"default": 1.0, "min": -1.0, "max": 3.0, "step": 0.01, }),
                "start_at": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001, }),
                "end_at": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001, }),
                "noise": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.1, }),
            },
            "optional": {
                "mask": ("MASK",),
            }
        }

    RETURN_TYPES = ("MODEL", "FACE_EMBEDS")
    FUNCTION = "patch_attention"
    CATEGORY = "InstantID"

    def patch_attention(self, instantid, insightface, image, model, weight, start_at, end_at, noise=0.0, mask=None):
        self.dtype = torch.float16 if comfy.model_management.should_use_fp16() else torch.float32
        self.device = comfy.model_management.get_torch_device()

        face_embed = extractFeatures(insightface, image)
        if face_embed is None:
            raise Exception('Reference Image: No face detected.')

        clip_embed = face_embed
        # InstantID works better with averaged embeds (TODO: needs testing)
        if clip_embed.shape[0] > 1:
            clip_embed = torch.mean(clip_embed, dim=0).unsqueeze(0)

        if noise > 0:
            seed = int(torch.sum(clip_embed).item()) % 1000000007
            torch.manual_seed(seed)
            clip_embed_zeroed = noise * torch.rand_like(clip_embed)
        else:
            clip_embed_zeroed = torch.zeros_like(clip_embed)

        # 1: patch the attention
        self.instantid = instantid
        self.instantid.to(self.device, dtype=self.dtype)

        image_prompt_embeds, uncond_image_prompt_embeds = self.instantid.get_image_embeds(clip_embed.to(self.device, dtype=self.dtype), clip_embed_zeroed.to(self.device, dtype=self.dtype))

        image_prompt_embeds = image_prompt_embeds.to(self.device, dtype=self.dtype)
        uncond_image_prompt_embeds = uncond_image_prompt_embeds.to(self.device, dtype=self.dtype)

        if weight == 0:
            return (model, { "cond": image_prompt_embeds, "uncond": uncond_image_prompt_embeds } )

        work_model = model.clone()

        sigma_start = model.get_model_object("model_sampling").percent_to_sigma(start_at)
        sigma_end = model.get_model_object("model_sampling").percent_to_sigma(end_at)

        if mask is not None:
            mask = mask.to(self.device)

        patch_kwargs = {
            "weight": weight,
            "ipadapter": self.instantid,
            "cond": image_prompt_embeds,
            "uncond": uncond_image_prompt_embeds,
            "mask": mask,
            "sigma_start": sigma_start,
            "sigma_end": sigma_end,
        }

        number = 0
        for id in [4,5,7,8]: # id of input_blocks that have cross attention
            block_indices = range(2) if id in [4, 5] else range(10) # transformer_depth
            for index in block_indices:
                patch_kwargs["module_key"] = str(number*2+1)
                _set_model_patch_replace(work_model, patch_kwargs, ("input", id, index))
                number += 1
        for id in range(6): # id of output_blocks that have cross attention
            block_indices = range(2) if id in [3, 4, 5] else range(10) # transformer_depth
            for index in block_indices:
                patch_kwargs["module_key"] = str(number*2+1)
                _set_model_patch_replace(work_model, patch_kwargs, ("output", id, index))
                number += 1
        for index in range(10):
            patch_kwargs["module_key"] = str(number*2+1)
            _set_model_patch_replace(work_model, patch_kwargs, ("middle", 0, index))
            number += 1

        return(work_model, { "cond": image_prompt_embeds, "uncond": uncond_image_prompt_embeds }, )

class ApplyInstantIDControlNet:
    @classmethod
    def INPUT_TYPES(s):
        return {
            "required": {
                "face_embeds": ("FACE_EMBEDS", ),
                "control_net": ("CONTROL_NET", ),
                "image_kps": ("IMAGE", ),
                "positive": ("CONDITIONING", ),
                "negative": ("CONDITIONING", ),
                "strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01, }),
                "start_at": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001, }),
                "end_at": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001, }),
            },
            "optional": {
                "mask": ("MASK",),
            }
        }

    RETURN_TYPES = ("CONDITIONING", "CONDITIONING",)
    RETURN_NAMES = ("positive", "negative", )
    FUNCTION = "apply_controlnet"
    CATEGORY = "InstantID"

    def apply_controlnet(self, face_embeds, control_net, image_kps, positive, negative, strength, start_at, end_at, mask=None):
        self.device = comfy.model_management.get_torch_device()

        if strength == 0:
            return (positive, negative)

        if mask is not None:
            mask = mask.to(self.device)

        if mask is not None and len(mask.shape) < 3:
            mask = mask.unsqueeze(0)

        image_prompt_embeds = face_embeds['cond']
        uncond_image_prompt_embeds = face_embeds['uncond']

        cnets = {}
        cond_uncond = []
        control_hint = image_kps.movedim(-1,1)

        is_cond = True
        for conditioning in [positive, negative]:
            c = []
            for t in conditioning:
                d = t[1].copy()

                prev_cnet = d.get('control', None)
                if prev_cnet in cnets:
                    c_net = cnets[prev_cnet]
                else:
                    c_net = control_net.copy().set_cond_hint(control_hint, strength, (start_at, end_at))
                    c_net.set_previous_controlnet(prev_cnet)
                    cnets[prev_cnet] = c_net

                d['control'] = c_net
                d['control_apply_to_uncond'] = False
                d['cross_attn_controlnet'] = image_prompt_embeds.to(comfy.model_management.intermediate_device()) if is_cond else uncond_image_prompt_embeds.to(comfy.model_management.intermediate_device())

                if mask is not None and is_cond:
                    d['mask'] = mask
                    d['set_area_to_bounds'] = False

                n = [t[0], d]
                c.append(n)
            cond_uncond.append(c)
            is_cond = False

        return(cond_uncond[0], cond_uncond[1])


NODE_CLASS_MAPPINGS = {
    "InstantIDModelLoader": InstantIDModelLoader,
    "InstantIDFaceAnalysis": InstantIDFaceAnalysis,
    "ApplyInstantID": ApplyInstantID,
    "ApplyInstantIDAdvanced": ApplyInstantIDAdvanced,
    "FaceKeypointsPreprocessor": FaceKeypointsPreprocessor,

    "InstantIDAttentionPatch": InstantIDAttentionPatch,
    "ApplyInstantIDControlNet": ApplyInstantIDControlNet,
}

NODE_DISPLAY_NAME_MAPPINGS = {
    "InstantIDModelLoader": "Load InstantID Model",
    "InstantIDFaceAnalysis": "InstantID Face Analysis",
    "ApplyInstantID": "Apply InstantID",
    "ApplyInstantIDAdvanced": "Apply InstantID Advanced",
    "FaceKeypointsPreprocessor": "Face Keypoints Preprocessor",

    "InstantIDAttentionPatch": "InstantID Patch Attention",
    "ApplyInstantIDControlNet": "InstantID Apply ControlNet",
}
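Both ApplyInstantID and InstantIDAttentionPatch patch the same fixed set of SDXL cross-attention layers and derive module_key as str(number*2+1). A standalone sketch that simply replays that enumeration, handy for seeing which ("input"/"output"/"middle", id, index) keys get patched and which to_k_ip/to_v_ip entries they map to:

```python
# replay of the patching loops in InstantID.py (no model required)
patches = []
number = 0
for id in [4, 5, 7, 8]:                                    # input blocks with cross attention
    for index in (range(2) if id in (4, 5) else range(10)):
        patches.append((("input", id, index), str(number * 2 + 1)))
        number += 1
for id in range(6):                                        # output blocks with cross attention
    for index in (range(2) if id in (3, 4, 5) else range(10)):
        patches.append((("output", id, index), str(number * 2 + 1)))
        number += 1
for index in range(10):                                    # middle block
    patches.append((("middle", 0, index), str(number * 2 + 1)))
    number += 1

print(len(patches))              # 70 patched attention layers
print(patches[0], patches[-1])   # (('input', 4, 0), '1') ... (('middle', 0, 9), '139')
```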
ComfyUI/custom_nodes/ComfyUI_InstantID/LICENSE
ADDED
@@ -0,0 +1,201 @@
                                 Apache License
                           Version 2.0, January 2004
                        http://www.apache.org/licenses/

   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

   1. Definitions.

      "License" shall mean the terms and conditions for use, reproduction,
      and distribution as defined by Sections 1 through 9 of this document.

      "Licensor" shall mean the copyright owner or entity authorized by
      the copyright owner that is granting the License.

      "Legal Entity" shall mean the union of the acting entity and all
      other entities that control, are controlled by, or are under common
      control with that entity. For the purposes of this definition,
      "control" means (i) the power, direct or indirect, to cause the
      direction or management of such entity, whether by contract or
      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      outstanding shares, or (iii) beneficial ownership of such entity.

      "You" (or "Your") shall mean an individual or Legal Entity
      exercising permissions granted by this License.

      "Source" form shall mean the preferred form for making modifications,
      including but not limited to software source code, documentation
      source, and configuration files.

      "Object" form shall mean any form resulting from mechanical
      transformation or translation of a Source form, including but
      not limited to compiled object code, generated documentation,
      and conversions to other media types.

      "Work" shall mean the work of authorship, whether in Source or
      Object form, made available under the License, as indicated by a
      copyright notice that is included in or attached to the work
      (an example is provided in the Appendix below).

      "Derivative Works" shall mean any work, whether in Source or Object
      form, that is based on (or derived from) the Work and for which the
      editorial revisions, annotations, elaborations, or other modifications
      represent, as a whole, an original work of authorship. For the purposes
      of this License, Derivative Works shall not include works that remain
      separable from, or merely link (or bind by name) to the interfaces of,
      the Work and Derivative Works thereof.

      "Contribution" shall mean any work of authorship, including
      the original version of the Work and any modifications or additions
      to that Work or Derivative Works thereof, that is intentionally
      submitted to Licensor for inclusion in the Work by the copyright owner
      or by an individual or Legal Entity authorized to submit on behalf of
      the copyright owner. For the purposes of this definition, "submitted"
      means any form of electronic, verbal, or written communication sent
      to the Licensor or its representatives, including but not limited to
      communication on electronic mailing lists, source code control systems,
      and issue tracking systems that are managed by, or on behalf of, the
      Licensor for the purpose of discussing and improving the Work, but
      excluding communication that is conspicuously marked or otherwise
      designated in writing by the copyright owner as "Not a Contribution."

      "Contributor" shall mean Licensor and any individual or Legal Entity
      on behalf of whom a Contribution has been received by Licensor and
      subsequently incorporated within the Work.

   2. Grant of Copyright License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      copyright license to reproduce, prepare Derivative Works of,
      publicly display, publicly perform, sublicense, and distribute the
      Work and such Derivative Works in Source or Object form.

   3. Grant of Patent License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      (except as stated in this section) patent license to make, have made,
      use, offer to sell, sell, import, and otherwise transfer the Work,
      where such license applies only to those patent claims licensable
      by such Contributor that are necessarily infringed by their
      Contribution(s) alone or by combination of their Contribution(s)
      with the Work to which such Contribution(s) was submitted. If You
      institute patent litigation against any entity (including a
      cross-claim or counterclaim in a lawsuit) alleging that the Work
      or a Contribution incorporated within the Work constitutes direct
      or contributory patent infringement, then any patent licenses
      granted to You under this License for that Work shall terminate
      as of the date such litigation is filed.

   4. Redistribution. You may reproduce and distribute copies of the
      Work or Derivative Works thereof in any medium, with or without
      modifications, and in Source or Object form, provided that You
      meet the following conditions:

      (a) You must give any other recipients of the Work or
          Derivative Works a copy of this License; and

      (b) You must cause any modified files to carry prominent notices
          stating that You changed the files; and

      (c) You must retain, in the Source form of any Derivative Works
          that You distribute, all copyright, patent, trademark, and
          attribution notices from the Source form of the Work,
          excluding those notices that do not pertain to any part of
          the Derivative Works; and

      (d) If the Work includes a "NOTICE" text file as part of its
          distribution, then any Derivative Works that You distribute must
          include a readable copy of the attribution notices contained
          within such NOTICE file, excluding those notices that do not
          pertain to any part of the Derivative Works, in at least one
          of the following places: within a NOTICE text file distributed
          as part of the Derivative Works; within the Source form or
          documentation, if provided along with the Derivative Works; or,
          within a display generated by the Derivative Works, if and
          wherever such third-party notices normally appear. The contents
          of the NOTICE file are for informational purposes only and
          do not modify the License. You may add Your own attribution
          notices within Derivative Works that You distribute, alongside
          or as an addendum to the NOTICE text from the Work, provided
          that such additional attribution notices cannot be construed
          as modifying the License.

      You may add Your own copyright statement to Your modifications and
      may provide additional or different license terms and conditions
      for use, reproduction, or distribution of Your modifications, or
      for any such Derivative Works as a whole, provided Your use,
      reproduction, and distribution of the Work otherwise complies with
      the conditions stated in this License.

   5. Submission of Contributions. Unless You explicitly state otherwise,
      any Contribution intentionally submitted for inclusion in the Work
      by You to the Licensor shall be under the terms and conditions of
      this License, without any additional terms or conditions.
      Notwithstanding the above, nothing herein shall supersede or modify
      the terms of any separate license agreement you may have executed
      with Licensor regarding such Contributions.

   6. Trademarks. This License does not grant permission to use the trade
      names, trademarks, service marks, or product names of the Licensor,
      except as required for reasonable and customary use in describing the
      origin of the Work and reproducing the content of the NOTICE file.

   7. Disclaimer of Warranty. Unless required by applicable law or
      agreed to in writing, Licensor provides the Work (and each
      Contributor provides its Contributions) on an "AS IS" BASIS,
      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      implied, including, without limitation, any warranties or conditions
      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      PARTICULAR PURPOSE. You are solely responsible for determining the
      appropriateness of using or redistributing the Work and assume any
      risks associated with Your exercise of permissions under this License.

   8. Limitation of Liability. In no event and under no legal theory,
      whether in tort (including negligence), contract, or otherwise,
      unless required by applicable law (such as deliberate and grossly
      negligent acts) or agreed to in writing, shall any Contributor be
      liable to You for damages, including any direct, indirect, special,
      incidental, or consequential damages of any character arising as a
      result of this License or out of the use or inability to use the
      Work (including but not limited to damages for loss of goodwill,
      work stoppage, computer failure or malfunction, or any and all
      other commercial damages or losses), even if such Contributor
      has been advised of the possibility of such damages.

   9. Accepting Warranty or Additional Liability. While redistributing
      the Work or Derivative Works thereof, You may choose to offer,
      and charge a fee for, acceptance of support, warranty, indemnity,
      or other liability obligations and/or rights consistent with this
      License. However, in accepting such obligations, You may act only
      on Your own behalf and on Your sole responsibility, not on behalf
      of any other Contributor, and only if You agree to indemnify,
      defend, and hold each Contributor harmless for any liability
      incurred by, or claims asserted against, such Contributor by reason
      of your accepting any such warranty or additional liability.
+
|
176 |
+
END OF TERMS AND CONDITIONS
|
177 |
+
|
178 |
+
APPENDIX: How to apply the Apache License to your work.
|
179 |
+
|
180 |
+
To apply the Apache License to your work, attach the following
|
181 |
+
boilerplate notice, with the fields enclosed by brackets "[]"
|
182 |
+
replaced with your own identifying information. (Don't include
|
183 |
+
the brackets!) The text should be enclosed in the appropriate
|
184 |
+
comment syntax for the file format. We also recommend that a
|
185 |
+
file or class name and description of purpose be included on the
|
186 |
+
same "printed page" as the copyright notice for easier
|
187 |
+
identification within third-party archives.
|
188 |
+
|
189 |
+
Copyright [yyyy] [name of copyright owner]
|
190 |
+
|
191 |
+
Licensed under the Apache License, Version 2.0 (the "License");
|
192 |
+
you may not use this file except in compliance with the License.
|
193 |
+
You may obtain a copy of the License at
|
194 |
+
|
195 |
+
http://www.apache.org/licenses/LICENSE-2.0
|
196 |
+
|
197 |
+
Unless required by applicable law or agreed to in writing, software
|
198 |
+
distributed under the License is distributed on an "AS IS" BASIS,
|
199 |
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
200 |
+
See the License for the specific language governing permissions and
|
201 |
+
limitations under the License.
|
ComfyUI/custom_nodes/ComfyUI_InstantID/README.md
ADDED
@@ -0,0 +1,138 @@
# ComfyUI InstantID (Native Support)

Native [InstantID](https://github.com/InstantID/InstantID) support for [ComfyUI](https://github.com/comfyanonymous/ComfyUI).

This extension differs from the many already available as it doesn't use *diffusers* but instead implements InstantID natively and fully integrates with ComfyUI.

# Sponsorship

<div align="center">

**[:heart: Github Sponsor](https://github.com/sponsors/cubiq) | [:coin: Paypal](https://paypal.me/matt3o)**

</div>

If you like my work and wish to see updates and new features, please consider sponsoring my projects.

- [ComfyUI IPAdapter Plus](https://github.com/cubiq/ComfyUI_IPAdapter_plus)
- [ComfyUI InstantID (Native)](https://github.com/cubiq/ComfyUI_InstantID)
- [ComfyUI Essentials](https://github.com/cubiq/ComfyUI_essentials)
- [ComfyUI FaceAnalysis](https://github.com/cubiq/ComfyUI_FaceAnalysis)
- [Comfy Dungeon](https://github.com/cubiq/Comfy_Dungeon)

Not to mention the documentation and video tutorials. Check my **ComfyUI Advanced Understanding** videos on YouTube, for example [part 1](https://www.youtube.com/watch?v=_C7kR2TFIX0) and [part 2](https://www.youtube.com/watch?v=ijqXnW_9gzc).

The only way to keep the code open and free is by sponsoring its development. The more sponsorships, the more time I can dedicate to my open source projects.

Please consider a [Github Sponsorship](https://github.com/sponsors/cubiq) or [PayPal donation](https://paypal.me/matt3o) (Matteo "matt3o" Spinelli). For sponsorships of $50+, let me know if you'd like to be mentioned in this readme file; you can find me on [Discord](https://latent.vision/discord) or _matt3o :snail: gmail.com_.

## Important updates

- **2024/02/27:** Added [noise injection](#noise-injection) in the negative embeds.

- **2024/02/26:** Fixed a small but nasty bug. Results will be different and you may need to lower the CFG.

- **2024/02/20:** I refactored the nodes so they are hopefully easier to use. **This is a breaking update**; the previous workflows won't work anymore.

## Basic Workflow

In the `examples` directory you'll find some basic workflows.



## Video Tutorial

<a href="https://youtu.be/wMLiGhogOPE" target="_blank">
 <img src="https://img.youtube.com/vi/wMLiGhogOPE/hqdefault.jpg" alt="Watch the video" />
</a>

** :movie_camera: [Introduction to InstantID features](https://youtu.be/wMLiGhogOPE)**

## Installation

**Upgrade ComfyUI to the latest version!**

Download or `git clone` this repository into the `ComfyUI/custom_nodes/` directory or use the Manager.

InstantID requires `insightface`; you need to add it to your libraries together with `onnxruntime` and `onnxruntime-gpu`.

The InsightFace model is **antelopev2** (not the classic buffalo_l). Download the models (for example from [here](https://drive.google.com/file/d/18wEUfMNohBJ4K3Ly5wpTejPfDzp-8fI8/view?usp=sharing) or [here](https://huggingface.co/MonsterMMORPG/tools/tree/main)), unzip and place them in the `ComfyUI/models/insightface/models/antelopev2` directory.
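If you want to verify that the antelopev2 models ended up in the right place, you can load them with `insightface` directly. This is just a minimal sanity-check sketch (the node does the equivalent internally); it assumes you run it from the ComfyUI root, otherwise adjust the `root` path.

```python
# Sanity check: can insightface find the antelopev2 models?
import numpy as np
from insightface.app import FaceAnalysis

# FaceAnalysis looks under <root>/models/<name>,
# i.e. ComfyUI/models/insightface/models/antelopev2
app = FaceAnalysis(
    name="antelopev2",
    root="ComfyUI/models/insightface",
    providers=["CPUExecutionProvider"],
)
app.prepare(ctx_id=0, det_size=(640, 640))

# Run detection on a blank test image; we only care that the models load.
faces = app.get(np.zeros((640, 640, 3), dtype=np.uint8))
print(f"antelopev2 loaded, {len(faces)} faces detected")
```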
The **main model** can be downloaded from [HuggingFace](https://huggingface.co/InstantX/InstantID/resolve/main/ip-adapter.bin?download=true) and should be placed into the `ComfyUI/models/instantid` directory. (Note that the model is called *ip_adapter* as it is based on the [IPAdapter](https://github.com/tencent-ailab/IP-Adapter)).

You also need a [controlnet](https://huggingface.co/InstantX/InstantID/resolve/main/ControlNetModel/diffusion_pytorch_model.safetensors?download=true); place it in the ComfyUI controlnet directory.

**Remember at the moment this is only for SDXL.**
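If you prefer to script the downloads, both files above can be fetched with `huggingface_hub`. This is only a sketch assuming the default ComfyUI folder layout; the repo id and file names are taken from the links above.

```python
# Fetch the InstantID main model and its controlnet into the ComfyUI model folders.
from huggingface_hub import hf_hub_download

hf_hub_download(
    repo_id="InstantX/InstantID",
    filename="ip-adapter.bin",
    local_dir="ComfyUI/models/instantid",
)

# Note: the file keeps its "ControlNetModel/" subfolder under local_dir,
# so you may want to move or rename it to match your workflow's controlnet path.
hf_hub_download(
    repo_id="InstantX/InstantID",
    filename="ControlNetModel/diffusion_pytorch_model.safetensors",
    local_dir="ComfyUI/models/controlnet",
)
```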
## Watermarks!

The training data is full of watermarks. To prevent them from showing up in your generations, use a resolution slightly different from 1024×1024 (or the standard ones); for example **1016×1016** works pretty well.

## Lower the CFG!

It's important to lower the CFG to at least 4-5, or you can use the `RescaleCFG` node.

## Face keypoints

The person is posed based on the keypoints generated from the reference image. You can use a different pose by sending an image to the `image_kps` input.

<img src="examples/daydreaming.jpg" width="386" height="386" alt="Day Dreaming" />

## Noise Injection

The default InstantID implementation tends to "burn" the image. I find that by injecting noise into the negative embeds we can mitigate the effect and also increase the likeness to the reference. The default Apply InstantID node automatically injects 35% noise; if you want to fine-tune the effect you can use the Advanced InstantID node. A simplified sketch of the idea follows.

This is still experimental and may change in the future.
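The sketch below only illustrates the concept and is not the node's exact implementation; the `strength` parameter corresponds to the 35% default mentioned above.

```python
import torch

def inject_noise(negative_embeds: torch.Tensor, strength: float = 0.35, seed: int = 0) -> torch.Tensor:
    """Blend random noise into the negative (uncond) image embeds.

    Partially replacing the negative embeds with noise of a comparable scale
    weakens the negative guidance, which reduces the "burn" effect.
    """
    gen = torch.Generator(device="cpu").manual_seed(seed)
    noise = torch.randn(negative_embeds.shape, generator=gen)  # sample on CPU for reproducibility
    noise = noise.to(device=negative_embeds.device, dtype=negative_embeds.dtype)
    noise = noise * negative_embeds.std()                      # keep the noise at a comparable magnitude
    return negative_embeds * (1.0 - strength) + noise * strength

# usage: negative_embeds = inject_noise(negative_embeds, strength=0.35)
```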
## Additional Controlnets

You can add more controlnets to the generation. An example workflow for a depth controlnet is provided.

## Styling with IPAdapter

It's possible to style the composition with IPAdapter. An example is provided.

<img src="examples/instant_id_ipadapter.jpg" width="512" alt="IPAdapter" />

## Multi-ID

Multi-ID is supported, but the workflow is a bit complicated and generation is slower. I'll check if I can find a better way of doing it. The "hackish" workflow is provided in the examples directory.

<img src="examples/instantid_multi_id.jpg" width="768" alt="Multi-ID" />

## Advanced Node

There's an InstantID advanced node available; at the moment the only difference from the standard one is that you can set the weights for the InstantID model and the controlnet separately. It now also includes a noise injection option. It might be helpful for fine-tuning.

The InstantID model influences about 25% of the composition; the rest comes from the controlnet.

The noise helps reduce the "burn" effect.

## Other notes

It works very well with SDXL Turbo/Lightning. Best results are achieved with community checkpoints.

## Current sponsors

It's only thanks to generous sponsors that **the whole community** can enjoy open and free software. Please join me in thanking the following companies and individuals!

### Gold sponsors

[](https://kaiber.ai/) [](https://replicate.com/)

### Companies supporting my projects

- [RunComfy](https://www.runcomfy.com/) (ComfyUI Cloud)

### Esteemed individuals

- [Jack Gane](https://github.com/ganeJackS)
- [Nathan Shipley](https://www.nathanshipley.com/)

### One-time Extraordinaire

- [Eric Rollei](https://github.com/EricRollei)
- [francaleu](https://github.com/francaleu)
- [Neta.art](https://github.com/talesofai)
- [Samwise Wang](https://github.com/tzwm)
- _And all private sponsors, you know who you are!_
ComfyUI/custom_nodes/ComfyUI_InstantID/__init__.py
ADDED
@@ -0,0 +1,3 @@
from .InstantID import NODE_CLASS_MAPPINGS, NODE_DISPLAY_NAME_MAPPINGS

__all__ = ['NODE_CLASS_MAPPINGS', 'NODE_DISPLAY_NAME_MAPPINGS']
ComfyUI/custom_nodes/ComfyUI_InstantID/examples/InstantID_IPAdapter.json
ADDED
@@ -0,0 +1,861 @@
1 |
+
{
|
2 |
+
"last_node_id": 72,
|
3 |
+
"last_link_id": 231,
|
4 |
+
"nodes": [
|
5 |
+
{
|
6 |
+
"id": 11,
|
7 |
+
"type": "InstantIDModelLoader",
|
8 |
+
"pos": [
|
9 |
+
560,
|
10 |
+
70
|
11 |
+
],
|
12 |
+
"size": {
|
13 |
+
"0": 238.72393798828125,
|
14 |
+
"1": 58
|
15 |
+
},
|
16 |
+
"flags": {},
|
17 |
+
"order": 0,
|
18 |
+
"mode": 0,
|
19 |
+
"outputs": [
|
20 |
+
{
|
21 |
+
"name": "INSTANTID",
|
22 |
+
"type": "INSTANTID",
|
23 |
+
"links": [
|
24 |
+
197
|
25 |
+
],
|
26 |
+
"shape": 3,
|
27 |
+
"slot_index": 0
|
28 |
+
}
|
29 |
+
],
|
30 |
+
"properties": {
|
31 |
+
"Node name for S&R": "InstantIDModelLoader"
|
32 |
+
},
|
33 |
+
"widgets_values": [
|
34 |
+
"ip-adapter.bin"
|
35 |
+
]
|
36 |
+
},
|
37 |
+
{
|
38 |
+
"id": 38,
|
39 |
+
"type": "InstantIDFaceAnalysis",
|
40 |
+
"pos": [
|
41 |
+
570,
|
42 |
+
180
|
43 |
+
],
|
44 |
+
"size": {
|
45 |
+
"0": 227.09793090820312,
|
46 |
+
"1": 58
|
47 |
+
},
|
48 |
+
"flags": {},
|
49 |
+
"order": 1,
|
50 |
+
"mode": 0,
|
51 |
+
"outputs": [
|
52 |
+
{
|
53 |
+
"name": "FACEANALYSIS",
|
54 |
+
"type": "FACEANALYSIS",
|
55 |
+
"links": [
|
56 |
+
198
|
57 |
+
],
|
58 |
+
"shape": 3,
|
59 |
+
"slot_index": 0
|
60 |
+
}
|
61 |
+
],
|
62 |
+
"properties": {
|
63 |
+
"Node name for S&R": "InstantIDFaceAnalysis"
|
64 |
+
},
|
65 |
+
"widgets_values": [
|
66 |
+
"CPU"
|
67 |
+
]
|
68 |
+
},
|
69 |
+
{
|
70 |
+
"id": 16,
|
71 |
+
"type": "ControlNetLoader",
|
72 |
+
"pos": [
|
73 |
+
560,
|
74 |
+
290
|
75 |
+
],
|
76 |
+
"size": {
|
77 |
+
"0": 250.07241821289062,
|
78 |
+
"1": 58
|
79 |
+
},
|
80 |
+
"flags": {},
|
81 |
+
"order": 2,
|
82 |
+
"mode": 0,
|
83 |
+
"outputs": [
|
84 |
+
{
|
85 |
+
"name": "CONTROL_NET",
|
86 |
+
"type": "CONTROL_NET",
|
87 |
+
"links": [
|
88 |
+
199
|
89 |
+
],
|
90 |
+
"shape": 3,
|
91 |
+
"slot_index": 0
|
92 |
+
}
|
93 |
+
],
|
94 |
+
"properties": {
|
95 |
+
"Node name for S&R": "ControlNetLoader"
|
96 |
+
},
|
97 |
+
"widgets_values": [
|
98 |
+
"instantid/diffusion_pytorch_model.safetensors"
|
99 |
+
]
|
100 |
+
},
|
101 |
+
{
|
102 |
+
"id": 15,
|
103 |
+
"type": "PreviewImage",
|
104 |
+
"pos": [
|
105 |
+
1910,
|
106 |
+
290
|
107 |
+
],
|
108 |
+
"size": {
|
109 |
+
"0": 584.0855712890625,
|
110 |
+
"1": 610.4592895507812
|
111 |
+
},
|
112 |
+
"flags": {},
|
113 |
+
"order": 15,
|
114 |
+
"mode": 0,
|
115 |
+
"inputs": [
|
116 |
+
{
|
117 |
+
"name": "images",
|
118 |
+
"type": "IMAGE",
|
119 |
+
"link": 19
|
120 |
+
}
|
121 |
+
],
|
122 |
+
"properties": {
|
123 |
+
"Node name for S&R": "PreviewImage"
|
124 |
+
}
|
125 |
+
},
|
126 |
+
{
|
127 |
+
"id": 5,
|
128 |
+
"type": "EmptyLatentImage",
|
129 |
+
"pos": [
|
130 |
+
910,
|
131 |
+
540
|
132 |
+
],
|
133 |
+
"size": {
|
134 |
+
"0": 315,
|
135 |
+
"1": 106
|
136 |
+
},
|
137 |
+
"flags": {},
|
138 |
+
"order": 3,
|
139 |
+
"mode": 0,
|
140 |
+
"outputs": [
|
141 |
+
{
|
142 |
+
"name": "LATENT",
|
143 |
+
"type": "LATENT",
|
144 |
+
"links": [
|
145 |
+
2
|
146 |
+
],
|
147 |
+
"slot_index": 0
|
148 |
+
}
|
149 |
+
],
|
150 |
+
"properties": {
|
151 |
+
"Node name for S&R": "EmptyLatentImage"
|
152 |
+
},
|
153 |
+
"widgets_values": [
|
154 |
+
1016,
|
155 |
+
1016,
|
156 |
+
1
|
157 |
+
]
|
158 |
+
},
|
159 |
+
{
|
160 |
+
"id": 8,
|
161 |
+
"type": "VAEDecode",
|
162 |
+
"pos": [
|
163 |
+
1910,
|
164 |
+
200
|
165 |
+
],
|
166 |
+
"size": {
|
167 |
+
"0": 210,
|
168 |
+
"1": 46
|
169 |
+
},
|
170 |
+
"flags": {},
|
171 |
+
"order": 14,
|
172 |
+
"mode": 0,
|
173 |
+
"inputs": [
|
174 |
+
{
|
175 |
+
"name": "samples",
|
176 |
+
"type": "LATENT",
|
177 |
+
"link": 7
|
178 |
+
},
|
179 |
+
{
|
180 |
+
"name": "vae",
|
181 |
+
"type": "VAE",
|
182 |
+
"link": 8
|
183 |
+
}
|
184 |
+
],
|
185 |
+
"outputs": [
|
186 |
+
{
|
187 |
+
"name": "IMAGE",
|
188 |
+
"type": "IMAGE",
|
189 |
+
"links": [
|
190 |
+
19
|
191 |
+
],
|
192 |
+
"slot_index": 0
|
193 |
+
}
|
194 |
+
],
|
195 |
+
"properties": {
|
196 |
+
"Node name for S&R": "VAEDecode"
|
197 |
+
}
|
198 |
+
},
|
199 |
+
{
|
200 |
+
"id": 39,
|
201 |
+
"type": "CLIPTextEncode",
|
202 |
+
"pos": [
|
203 |
+
520,
|
204 |
+
430
|
205 |
+
],
|
206 |
+
"size": {
|
207 |
+
"0": 291.9967346191406,
|
208 |
+
"1": 128.62518310546875
|
209 |
+
},
|
210 |
+
"flags": {},
|
211 |
+
"order": 9,
|
212 |
+
"mode": 0,
|
213 |
+
"inputs": [
|
214 |
+
{
|
215 |
+
"name": "clip",
|
216 |
+
"type": "CLIP",
|
217 |
+
"link": 122
|
218 |
+
}
|
219 |
+
],
|
220 |
+
"outputs": [
|
221 |
+
{
|
222 |
+
"name": "CONDITIONING",
|
223 |
+
"type": "CONDITIONING",
|
224 |
+
"links": [
|
225 |
+
203
|
226 |
+
],
|
227 |
+
"shape": 3,
|
228 |
+
"slot_index": 0
|
229 |
+
}
|
230 |
+
],
|
231 |
+
"properties": {
|
232 |
+
"Node name for S&R": "CLIPTextEncode"
|
233 |
+
},
|
234 |
+
"widgets_values": [
|
235 |
+
"comic character. graphic illustration, comic art, graphic novel art, vibrant, highly detailed"
|
236 |
+
]
|
237 |
+
},
|
238 |
+
{
|
239 |
+
"id": 40,
|
240 |
+
"type": "CLIPTextEncode",
|
241 |
+
"pos": [
|
242 |
+
520,
|
243 |
+
620
|
244 |
+
],
|
245 |
+
"size": {
|
246 |
+
"0": 286.3603515625,
|
247 |
+
"1": 112.35245513916016
|
248 |
+
},
|
249 |
+
"flags": {},
|
250 |
+
"order": 10,
|
251 |
+
"mode": 0,
|
252 |
+
"inputs": [
|
253 |
+
{
|
254 |
+
"name": "clip",
|
255 |
+
"type": "CLIP",
|
256 |
+
"link": 123
|
257 |
+
}
|
258 |
+
],
|
259 |
+
"outputs": [
|
260 |
+
{
|
261 |
+
"name": "CONDITIONING",
|
262 |
+
"type": "CONDITIONING",
|
263 |
+
"links": [
|
264 |
+
204
|
265 |
+
],
|
266 |
+
"shape": 3,
|
267 |
+
"slot_index": 0
|
268 |
+
}
|
269 |
+
],
|
270 |
+
"properties": {
|
271 |
+
"Node name for S&R": "CLIPTextEncode"
|
272 |
+
},
|
273 |
+
"widgets_values": [
|
274 |
+
"photograph, deformed, glitch, noisy, realistic, stock photo"
|
275 |
+
]
|
276 |
+
},
|
277 |
+
{
|
278 |
+
"id": 4,
|
279 |
+
"type": "CheckpointLoaderSimple",
|
280 |
+
"pos": [
|
281 |
+
70,
|
282 |
+
520
|
283 |
+
],
|
284 |
+
"size": {
|
285 |
+
"0": 315,
|
286 |
+
"1": 98
|
287 |
+
},
|
288 |
+
"flags": {},
|
289 |
+
"order": 4,
|
290 |
+
"mode": 0,
|
291 |
+
"outputs": [
|
292 |
+
{
|
293 |
+
"name": "MODEL",
|
294 |
+
"type": "MODEL",
|
295 |
+
"links": [
|
296 |
+
206
|
297 |
+
],
|
298 |
+
"slot_index": 0
|
299 |
+
},
|
300 |
+
{
|
301 |
+
"name": "CLIP",
|
302 |
+
"type": "CLIP",
|
303 |
+
"links": [
|
304 |
+
122,
|
305 |
+
123
|
306 |
+
],
|
307 |
+
"slot_index": 1
|
308 |
+
},
|
309 |
+
{
|
310 |
+
"name": "VAE",
|
311 |
+
"type": "VAE",
|
312 |
+
"links": [
|
313 |
+
8
|
314 |
+
],
|
315 |
+
"slot_index": 2
|
316 |
+
}
|
317 |
+
],
|
318 |
+
"properties": {
|
319 |
+
"Node name for S&R": "CheckpointLoaderSimple"
|
320 |
+
},
|
321 |
+
"widgets_values": [
|
322 |
+
"sdxl/AlbedoBaseXL.safetensors"
|
323 |
+
]
|
324 |
+
},
|
325 |
+
{
|
326 |
+
"id": 13,
|
327 |
+
"type": "LoadImage",
|
328 |
+
"pos": [
|
329 |
+
290,
|
330 |
+
70
|
331 |
+
],
|
332 |
+
"size": {
|
333 |
+
"0": 210,
|
334 |
+
"1": 314
|
335 |
+
},
|
336 |
+
"flags": {},
|
337 |
+
"order": 5,
|
338 |
+
"mode": 0,
|
339 |
+
"outputs": [
|
340 |
+
{
|
341 |
+
"name": "IMAGE",
|
342 |
+
"type": "IMAGE",
|
343 |
+
"links": [
|
344 |
+
214
|
345 |
+
],
|
346 |
+
"shape": 3,
|
347 |
+
"slot_index": 0
|
348 |
+
},
|
349 |
+
{
|
350 |
+
"name": "MASK",
|
351 |
+
"type": "MASK",
|
352 |
+
"links": null,
|
353 |
+
"shape": 3
|
354 |
+
}
|
355 |
+
],
|
356 |
+
"properties": {
|
357 |
+
"Node name for S&R": "LoadImage"
|
358 |
+
},
|
359 |
+
"widgets_values": [
|
360 |
+
"joseph-gonzalez-iFgRcqHznqg-unsplash.jpg",
|
361 |
+
"image"
|
362 |
+
]
|
363 |
+
},
|
364 |
+
{
|
365 |
+
"id": 3,
|
366 |
+
"type": "KSampler",
|
367 |
+
"pos": [
|
368 |
+
1540,
|
369 |
+
200
|
370 |
+
],
|
371 |
+
"size": {
|
372 |
+
"0": 315,
|
373 |
+
"1": 262
|
374 |
+
},
|
375 |
+
"flags": {},
|
376 |
+
"order": 13,
|
377 |
+
"mode": 0,
|
378 |
+
"inputs": [
|
379 |
+
{
|
380 |
+
"name": "model",
|
381 |
+
"type": "MODEL",
|
382 |
+
"link": 231
|
383 |
+
},
|
384 |
+
{
|
385 |
+
"name": "positive",
|
386 |
+
"type": "CONDITIONING",
|
387 |
+
"link": 200
|
388 |
+
},
|
389 |
+
{
|
390 |
+
"name": "negative",
|
391 |
+
"type": "CONDITIONING",
|
392 |
+
"link": 201
|
393 |
+
},
|
394 |
+
{
|
395 |
+
"name": "latent_image",
|
396 |
+
"type": "LATENT",
|
397 |
+
"link": 2
|
398 |
+
}
|
399 |
+
],
|
400 |
+
"outputs": [
|
401 |
+
{
|
402 |
+
"name": "LATENT",
|
403 |
+
"type": "LATENT",
|
404 |
+
"links": [
|
405 |
+
7
|
406 |
+
],
|
407 |
+
"slot_index": 0
|
408 |
+
}
|
409 |
+
],
|
410 |
+
"properties": {
|
411 |
+
"Node name for S&R": "KSampler"
|
412 |
+
},
|
413 |
+
"widgets_values": [
|
414 |
+
1631591432,
|
415 |
+
"fixed",
|
416 |
+
30,
|
417 |
+
4.5,
|
418 |
+
"ddpm",
|
419 |
+
"karras",
|
420 |
+
1
|
421 |
+
]
|
422 |
+
},
|
423 |
+
{
|
424 |
+
"id": 68,
|
425 |
+
"type": "IPAdapterModelLoader",
|
426 |
+
"pos": [
|
427 |
+
830,
|
428 |
+
-500
|
429 |
+
],
|
430 |
+
"size": {
|
431 |
+
"0": 315,
|
432 |
+
"1": 58
|
433 |
+
},
|
434 |
+
"flags": {},
|
435 |
+
"order": 6,
|
436 |
+
"mode": 0,
|
437 |
+
"outputs": [
|
438 |
+
{
|
439 |
+
"name": "IPADAPTER",
|
440 |
+
"type": "IPADAPTER",
|
441 |
+
"links": [
|
442 |
+
227
|
443 |
+
],
|
444 |
+
"shape": 3,
|
445 |
+
"slot_index": 0
|
446 |
+
}
|
447 |
+
],
|
448 |
+
"properties": {
|
449 |
+
"Node name for S&R": "IPAdapterModelLoader"
|
450 |
+
},
|
451 |
+
"widgets_values": [
|
452 |
+
"ip-adapter-plus_sdxl_vit-h.safetensors"
|
453 |
+
]
|
454 |
+
},
|
455 |
+
{
|
456 |
+
"id": 60,
|
457 |
+
"type": "ApplyInstantID",
|
458 |
+
"pos": [
|
459 |
+
910,
|
460 |
+
210
|
461 |
+
],
|
462 |
+
"size": {
|
463 |
+
"0": 315,
|
464 |
+
"1": 266
|
465 |
+
},
|
466 |
+
"flags": {},
|
467 |
+
"order": 11,
|
468 |
+
"mode": 0,
|
469 |
+
"inputs": [
|
470 |
+
{
|
471 |
+
"name": "instantid",
|
472 |
+
"type": "INSTANTID",
|
473 |
+
"link": 197
|
474 |
+
},
|
475 |
+
{
|
476 |
+
"name": "insightface",
|
477 |
+
"type": "FACEANALYSIS",
|
478 |
+
"link": 198
|
479 |
+
},
|
480 |
+
{
|
481 |
+
"name": "control_net",
|
482 |
+
"type": "CONTROL_NET",
|
483 |
+
"link": 199
|
484 |
+
},
|
485 |
+
{
|
486 |
+
"name": "image",
|
487 |
+
"type": "IMAGE",
|
488 |
+
"link": 214
|
489 |
+
},
|
490 |
+
{
|
491 |
+
"name": "model",
|
492 |
+
"type": "MODEL",
|
493 |
+
"link": 206
|
494 |
+
},
|
495 |
+
{
|
496 |
+
"name": "positive",
|
497 |
+
"type": "CONDITIONING",
|
498 |
+
"link": 203
|
499 |
+
},
|
500 |
+
{
|
501 |
+
"name": "negative",
|
502 |
+
"type": "CONDITIONING",
|
503 |
+
"link": 204
|
504 |
+
},
|
505 |
+
{
|
506 |
+
"name": "image_kps",
|
507 |
+
"type": "IMAGE",
|
508 |
+
"link": null
|
509 |
+
},
|
510 |
+
{
|
511 |
+
"name": "mask",
|
512 |
+
"type": "MASK",
|
513 |
+
"link": null
|
514 |
+
}
|
515 |
+
],
|
516 |
+
"outputs": [
|
517 |
+
{
|
518 |
+
"name": "MODEL",
|
519 |
+
"type": "MODEL",
|
520 |
+
"links": [
|
521 |
+
230
|
522 |
+
],
|
523 |
+
"shape": 3,
|
524 |
+
"slot_index": 0
|
525 |
+
},
|
526 |
+
{
|
527 |
+
"name": "POSITIVE",
|
528 |
+
"type": "CONDITIONING",
|
529 |
+
"links": [
|
530 |
+
200
|
531 |
+
],
|
532 |
+
"shape": 3,
|
533 |
+
"slot_index": 1
|
534 |
+
},
|
535 |
+
{
|
536 |
+
"name": "NEGATIVE",
|
537 |
+
"type": "CONDITIONING",
|
538 |
+
"links": [
|
539 |
+
201
|
540 |
+
],
|
541 |
+
"shape": 3,
|
542 |
+
"slot_index": 2
|
543 |
+
}
|
544 |
+
],
|
545 |
+
"properties": {
|
546 |
+
"Node name for S&R": "ApplyInstantID"
|
547 |
+
},
|
548 |
+
"widgets_values": [
|
549 |
+
0.8,
|
550 |
+
0,
|
551 |
+
1
|
552 |
+
]
|
553 |
+
},
|
554 |
+
{
|
555 |
+
"id": 70,
|
556 |
+
"type": "CLIPVisionLoader",
|
557 |
+
"pos": [
|
558 |
+
830,
|
559 |
+
-390
|
560 |
+
],
|
561 |
+
"size": {
|
562 |
+
"0": 315,
|
563 |
+
"1": 58
|
564 |
+
},
|
565 |
+
"flags": {},
|
566 |
+
"order": 7,
|
567 |
+
"mode": 0,
|
568 |
+
"outputs": [
|
569 |
+
{
|
570 |
+
"name": "CLIP_VISION",
|
571 |
+
"type": "CLIP_VISION",
|
572 |
+
"links": [
|
573 |
+
228
|
574 |
+
],
|
575 |
+
"shape": 3,
|
576 |
+
"slot_index": 0
|
577 |
+
}
|
578 |
+
],
|
579 |
+
"properties": {
|
580 |
+
"Node name for S&R": "CLIPVisionLoader"
|
581 |
+
},
|
582 |
+
"widgets_values": [
|
583 |
+
"CLIP-ViT-H-14-laion2B-s32B-b79K.safetensors"
|
584 |
+
]
|
585 |
+
},
|
586 |
+
{
|
587 |
+
"id": 71,
|
588 |
+
"type": "LoadImage",
|
589 |
+
"pos": [
|
590 |
+
830,
|
591 |
+
-280
|
592 |
+
],
|
593 |
+
"size": {
|
594 |
+
"0": 315,
|
595 |
+
"1": 314
|
596 |
+
},
|
597 |
+
"flags": {},
|
598 |
+
"order": 8,
|
599 |
+
"mode": 0,
|
600 |
+
"outputs": [
|
601 |
+
{
|
602 |
+
"name": "IMAGE",
|
603 |
+
"type": "IMAGE",
|
604 |
+
"links": [
|
605 |
+
229
|
606 |
+
],
|
607 |
+
"shape": 3,
|
608 |
+
"slot_index": 0
|
609 |
+
},
|
610 |
+
{
|
611 |
+
"name": "MASK",
|
612 |
+
"type": "MASK",
|
613 |
+
"links": null,
|
614 |
+
"shape": 3
|
615 |
+
}
|
616 |
+
],
|
617 |
+
"properties": {
|
618 |
+
"Node name for S&R": "LoadImage"
|
619 |
+
},
|
620 |
+
"widgets_values": [
|
621 |
+
"anime_colorful.png",
|
622 |
+
"image"
|
623 |
+
]
|
624 |
+
},
|
625 |
+
{
|
626 |
+
"id": 72,
|
627 |
+
"type": "IPAdapterAdvanced",
|
628 |
+
"pos": [
|
629 |
+
1226,
|
630 |
+
-337
|
631 |
+
],
|
632 |
+
"size": {
|
633 |
+
"0": 315,
|
634 |
+
"1": 278
|
635 |
+
},
|
636 |
+
"flags": {},
|
637 |
+
"order": 12,
|
638 |
+
"mode": 0,
|
639 |
+
"inputs": [
|
640 |
+
{
|
641 |
+
"name": "model",
|
642 |
+
"type": "MODEL",
|
643 |
+
"link": 230
|
644 |
+
},
|
645 |
+
{
|
646 |
+
"name": "ipadapter",
|
647 |
+
"type": "IPADAPTER",
|
648 |
+
"link": 227
|
649 |
+
},
|
650 |
+
{
|
651 |
+
"name": "image",
|
652 |
+
"type": "IMAGE",
|
653 |
+
"link": 229
|
654 |
+
},
|
655 |
+
{
|
656 |
+
"name": "image_negative",
|
657 |
+
"type": "IMAGE",
|
658 |
+
"link": null
|
659 |
+
},
|
660 |
+
{
|
661 |
+
"name": "attn_mask",
|
662 |
+
"type": "MASK",
|
663 |
+
"link": null
|
664 |
+
},
|
665 |
+
{
|
666 |
+
"name": "clip_vision",
|
667 |
+
"type": "CLIP_VISION",
|
668 |
+
"link": 228
|
669 |
+
}
|
670 |
+
],
|
671 |
+
"outputs": [
|
672 |
+
{
|
673 |
+
"name": "MODEL",
|
674 |
+
"type": "MODEL",
|
675 |
+
"links": [
|
676 |
+
231
|
677 |
+
],
|
678 |
+
"shape": 3,
|
679 |
+
"slot_index": 0
|
680 |
+
}
|
681 |
+
],
|
682 |
+
"properties": {
|
683 |
+
"Node name for S&R": "IPAdapterAdvanced"
|
684 |
+
},
|
685 |
+
"widgets_values": [
|
686 |
+
0.5,
|
687 |
+
"linear",
|
688 |
+
"concat",
|
689 |
+
0,
|
690 |
+
1,
|
691 |
+
"V only"
|
692 |
+
]
|
693 |
+
}
|
694 |
+
],
|
695 |
+
"links": [
|
696 |
+
[
|
697 |
+
2,
|
698 |
+
5,
|
699 |
+
0,
|
700 |
+
3,
|
701 |
+
3,
|
702 |
+
"LATENT"
|
703 |
+
],
|
704 |
+
[
|
705 |
+
7,
|
706 |
+
3,
|
707 |
+
0,
|
708 |
+
8,
|
709 |
+
0,
|
710 |
+
"LATENT"
|
711 |
+
],
|
712 |
+
[
|
713 |
+
8,
|
714 |
+
4,
|
715 |
+
2,
|
716 |
+
8,
|
717 |
+
1,
|
718 |
+
"VAE"
|
719 |
+
],
|
720 |
+
[
|
721 |
+
19,
|
722 |
+
8,
|
723 |
+
0,
|
724 |
+
15,
|
725 |
+
0,
|
726 |
+
"IMAGE"
|
727 |
+
],
|
728 |
+
[
|
729 |
+
122,
|
730 |
+
4,
|
731 |
+
1,
|
732 |
+
39,
|
733 |
+
0,
|
734 |
+
"CLIP"
|
735 |
+
],
|
736 |
+
[
|
737 |
+
123,
|
738 |
+
4,
|
739 |
+
1,
|
740 |
+
40,
|
741 |
+
0,
|
742 |
+
"CLIP"
|
743 |
+
],
|
744 |
+
[
|
745 |
+
197,
|
746 |
+
11,
|
747 |
+
0,
|
748 |
+
60,
|
749 |
+
0,
|
750 |
+
"INSTANTID"
|
751 |
+
],
|
752 |
+
[
|
753 |
+
198,
|
754 |
+
38,
|
755 |
+
0,
|
756 |
+
60,
|
757 |
+
1,
|
758 |
+
"FACEANALYSIS"
|
759 |
+
],
|
760 |
+
[
|
761 |
+
199,
|
762 |
+
16,
|
763 |
+
0,
|
764 |
+
60,
|
765 |
+
2,
|
766 |
+
"CONTROL_NET"
|
767 |
+
],
|
768 |
+
[
|
769 |
+
200,
|
770 |
+
60,
|
771 |
+
1,
|
772 |
+
3,
|
773 |
+
1,
|
774 |
+
"CONDITIONING"
|
775 |
+
],
|
776 |
+
[
|
777 |
+
201,
|
778 |
+
60,
|
779 |
+
2,
|
780 |
+
3,
|
781 |
+
2,
|
782 |
+
"CONDITIONING"
|
783 |
+
],
|
784 |
+
[
|
785 |
+
203,
|
786 |
+
39,
|
787 |
+
0,
|
788 |
+
60,
|
789 |
+
5,
|
790 |
+
"CONDITIONING"
|
791 |
+
],
|
792 |
+
[
|
793 |
+
204,
|
794 |
+
40,
|
795 |
+
0,
|
796 |
+
60,
|
797 |
+
6,
|
798 |
+
"CONDITIONING"
|
799 |
+
],
|
800 |
+
[
|
801 |
+
206,
|
802 |
+
4,
|
803 |
+
0,
|
804 |
+
60,
|
805 |
+
4,
|
806 |
+
"MODEL"
|
807 |
+
],
|
808 |
+
[
|
809 |
+
214,
|
810 |
+
13,
|
811 |
+
0,
|
812 |
+
60,
|
813 |
+
3,
|
814 |
+
"IMAGE"
|
815 |
+
],
|
816 |
+
[
|
817 |
+
227,
|
818 |
+
68,
|
819 |
+
0,
|
820 |
+
72,
|
821 |
+
1,
|
822 |
+
"IPADAPTER"
|
823 |
+
],
|
824 |
+
[
|
825 |
+
228,
|
826 |
+
70,
|
827 |
+
0,
|
828 |
+
72,
|
829 |
+
5,
|
830 |
+
"CLIP_VISION"
|
831 |
+
],
|
832 |
+
[
|
833 |
+
229,
|
834 |
+
71,
|
835 |
+
0,
|
836 |
+
72,
|
837 |
+
2,
|
838 |
+
"IMAGE"
|
839 |
+
],
|
840 |
+
[
|
841 |
+
230,
|
842 |
+
60,
|
843 |
+
0,
|
844 |
+
72,
|
845 |
+
0,
|
846 |
+
"MODEL"
|
847 |
+
],
|
848 |
+
[
|
849 |
+
231,
|
850 |
+
72,
|
851 |
+
0,
|
852 |
+
3,
|
853 |
+
0,
|
854 |
+
"MODEL"
|
855 |
+
]
|
856 |
+
],
|
857 |
+
"groups": [],
|
858 |
+
"config": {},
|
859 |
+
"extra": {},
|
860 |
+
"version": 0.4
|
861 |
+
}
|
ComfyUI/custom_nodes/ComfyUI_InstantID/examples/InstantID_basic.json
ADDED
@@ -0,0 +1,657 @@
1 |
+
{
|
2 |
+
"last_node_id": 66,
|
3 |
+
"last_link_id": 220,
|
4 |
+
"nodes": [
|
5 |
+
{
|
6 |
+
"id": 11,
|
7 |
+
"type": "InstantIDModelLoader",
|
8 |
+
"pos": [
|
9 |
+
560,
|
10 |
+
70
|
11 |
+
],
|
12 |
+
"size": {
|
13 |
+
"0": 238.72393798828125,
|
14 |
+
"1": 58
|
15 |
+
},
|
16 |
+
"flags": {},
|
17 |
+
"order": 0,
|
18 |
+
"mode": 0,
|
19 |
+
"outputs": [
|
20 |
+
{
|
21 |
+
"name": "INSTANTID",
|
22 |
+
"type": "INSTANTID",
|
23 |
+
"links": [
|
24 |
+
197
|
25 |
+
],
|
26 |
+
"shape": 3,
|
27 |
+
"slot_index": 0
|
28 |
+
}
|
29 |
+
],
|
30 |
+
"properties": {
|
31 |
+
"Node name for S&R": "InstantIDModelLoader"
|
32 |
+
},
|
33 |
+
"widgets_values": [
|
34 |
+
"ip-adapter.bin"
|
35 |
+
]
|
36 |
+
},
|
37 |
+
{
|
38 |
+
"id": 38,
|
39 |
+
"type": "InstantIDFaceAnalysis",
|
40 |
+
"pos": [
|
41 |
+
570,
|
42 |
+
180
|
43 |
+
],
|
44 |
+
"size": {
|
45 |
+
"0": 227.09793090820312,
|
46 |
+
"1": 58
|
47 |
+
},
|
48 |
+
"flags": {},
|
49 |
+
"order": 1,
|
50 |
+
"mode": 0,
|
51 |
+
"outputs": [
|
52 |
+
{
|
53 |
+
"name": "FACEANALYSIS",
|
54 |
+
"type": "FACEANALYSIS",
|
55 |
+
"links": [
|
56 |
+
198
|
57 |
+
],
|
58 |
+
"shape": 3,
|
59 |
+
"slot_index": 0
|
60 |
+
}
|
61 |
+
],
|
62 |
+
"properties": {
|
63 |
+
"Node name for S&R": "InstantIDFaceAnalysis"
|
64 |
+
},
|
65 |
+
"widgets_values": [
|
66 |
+
"CPU"
|
67 |
+
]
|
68 |
+
},
|
69 |
+
{
|
70 |
+
"id": 16,
|
71 |
+
"type": "ControlNetLoader",
|
72 |
+
"pos": [
|
73 |
+
560,
|
74 |
+
290
|
75 |
+
],
|
76 |
+
"size": {
|
77 |
+
"0": 250.07241821289062,
|
78 |
+
"1": 58
|
79 |
+
},
|
80 |
+
"flags": {},
|
81 |
+
"order": 2,
|
82 |
+
"mode": 0,
|
83 |
+
"outputs": [
|
84 |
+
{
|
85 |
+
"name": "CONTROL_NET",
|
86 |
+
"type": "CONTROL_NET",
|
87 |
+
"links": [
|
88 |
+
199
|
89 |
+
],
|
90 |
+
"shape": 3,
|
91 |
+
"slot_index": 0
|
92 |
+
}
|
93 |
+
],
|
94 |
+
"properties": {
|
95 |
+
"Node name for S&R": "ControlNetLoader"
|
96 |
+
},
|
97 |
+
"widgets_values": [
|
98 |
+
"instantid/diffusion_pytorch_model.safetensors"
|
99 |
+
]
|
100 |
+
},
|
101 |
+
{
|
102 |
+
"id": 15,
|
103 |
+
"type": "PreviewImage",
|
104 |
+
"pos": [
|
105 |
+
1670,
|
106 |
+
300
|
107 |
+
],
|
108 |
+
"size": {
|
109 |
+
"0": 584.0855712890625,
|
110 |
+
"1": 610.4592895507812
|
111 |
+
},
|
112 |
+
"flags": {},
|
113 |
+
"order": 11,
|
114 |
+
"mode": 0,
|
115 |
+
"inputs": [
|
116 |
+
{
|
117 |
+
"name": "images",
|
118 |
+
"type": "IMAGE",
|
119 |
+
"link": 19
|
120 |
+
}
|
121 |
+
],
|
122 |
+
"properties": {
|
123 |
+
"Node name for S&R": "PreviewImage"
|
124 |
+
}
|
125 |
+
},
|
126 |
+
{
|
127 |
+
"id": 5,
|
128 |
+
"type": "EmptyLatentImage",
|
129 |
+
"pos": [
|
130 |
+
910,
|
131 |
+
540
|
132 |
+
],
|
133 |
+
"size": {
|
134 |
+
"0": 315,
|
135 |
+
"1": 106
|
136 |
+
},
|
137 |
+
"flags": {},
|
138 |
+
"order": 3,
|
139 |
+
"mode": 0,
|
140 |
+
"outputs": [
|
141 |
+
{
|
142 |
+
"name": "LATENT",
|
143 |
+
"type": "LATENT",
|
144 |
+
"links": [
|
145 |
+
2
|
146 |
+
],
|
147 |
+
"slot_index": 0
|
148 |
+
}
|
149 |
+
],
|
150 |
+
"properties": {
|
151 |
+
"Node name for S&R": "EmptyLatentImage"
|
152 |
+
},
|
153 |
+
"widgets_values": [
|
154 |
+
1016,
|
155 |
+
1016,
|
156 |
+
1
|
157 |
+
]
|
158 |
+
},
|
159 |
+
{
|
160 |
+
"id": 8,
|
161 |
+
"type": "VAEDecode",
|
162 |
+
"pos": [
|
163 |
+
1670,
|
164 |
+
210
|
165 |
+
],
|
166 |
+
"size": {
|
167 |
+
"0": 210,
|
168 |
+
"1": 46
|
169 |
+
},
|
170 |
+
"flags": {},
|
171 |
+
"order": 10,
|
172 |
+
"mode": 0,
|
173 |
+
"inputs": [
|
174 |
+
{
|
175 |
+
"name": "samples",
|
176 |
+
"type": "LATENT",
|
177 |
+
"link": 7
|
178 |
+
},
|
179 |
+
{
|
180 |
+
"name": "vae",
|
181 |
+
"type": "VAE",
|
182 |
+
"link": 8
|
183 |
+
}
|
184 |
+
],
|
185 |
+
"outputs": [
|
186 |
+
{
|
187 |
+
"name": "IMAGE",
|
188 |
+
"type": "IMAGE",
|
189 |
+
"links": [
|
190 |
+
19
|
191 |
+
],
|
192 |
+
"slot_index": 0
|
193 |
+
}
|
194 |
+
],
|
195 |
+
"properties": {
|
196 |
+
"Node name for S&R": "VAEDecode"
|
197 |
+
}
|
198 |
+
},
|
199 |
+
{
|
200 |
+
"id": 60,
|
201 |
+
"type": "ApplyInstantID",
|
202 |
+
"pos": [
|
203 |
+
910,
|
204 |
+
210
|
205 |
+
],
|
206 |
+
"size": {
|
207 |
+
"0": 315,
|
208 |
+
"1": 266
|
209 |
+
},
|
210 |
+
"flags": {},
|
211 |
+
"order": 8,
|
212 |
+
"mode": 0,
|
213 |
+
"inputs": [
|
214 |
+
{
|
215 |
+
"name": "instantid",
|
216 |
+
"type": "INSTANTID",
|
217 |
+
"link": 197
|
218 |
+
},
|
219 |
+
{
|
220 |
+
"name": "insightface",
|
221 |
+
"type": "FACEANALYSIS",
|
222 |
+
"link": 198
|
223 |
+
},
|
224 |
+
{
|
225 |
+
"name": "control_net",
|
226 |
+
"type": "CONTROL_NET",
|
227 |
+
"link": 199
|
228 |
+
},
|
229 |
+
{
|
230 |
+
"name": "image",
|
231 |
+
"type": "IMAGE",
|
232 |
+
"link": 214
|
233 |
+
},
|
234 |
+
{
|
235 |
+
"name": "model",
|
236 |
+
"type": "MODEL",
|
237 |
+
"link": 206
|
238 |
+
},
|
239 |
+
{
|
240 |
+
"name": "positive",
|
241 |
+
"type": "CONDITIONING",
|
242 |
+
"link": 203
|
243 |
+
},
|
244 |
+
{
|
245 |
+
"name": "negative",
|
246 |
+
"type": "CONDITIONING",
|
247 |
+
"link": 204
|
248 |
+
},
|
249 |
+
{
|
250 |
+
"name": "image_kps",
|
251 |
+
"type": "IMAGE",
|
252 |
+
"link": null
|
253 |
+
},
|
254 |
+
{
|
255 |
+
"name": "mask",
|
256 |
+
"type": "MASK",
|
257 |
+
"link": null
|
258 |
+
}
|
259 |
+
],
|
260 |
+
"outputs": [
|
261 |
+
{
|
262 |
+
"name": "MODEL",
|
263 |
+
"type": "MODEL",
|
264 |
+
"links": [
|
265 |
+
220
|
266 |
+
],
|
267 |
+
"shape": 3,
|
268 |
+
"slot_index": 0
|
269 |
+
},
|
270 |
+
{
|
271 |
+
"name": "POSITIVE",
|
272 |
+
"type": "CONDITIONING",
|
273 |
+
"links": [
|
274 |
+
200
|
275 |
+
],
|
276 |
+
"shape": 3,
|
277 |
+
"slot_index": 1
|
278 |
+
},
|
279 |
+
{
|
280 |
+
"name": "NEGATIVE",
|
281 |
+
"type": "CONDITIONING",
|
282 |
+
"links": [
|
283 |
+
201
|
284 |
+
],
|
285 |
+
"shape": 3,
|
286 |
+
"slot_index": 2
|
287 |
+
}
|
288 |
+
],
|
289 |
+
"properties": {
|
290 |
+
"Node name for S&R": "ApplyInstantID"
|
291 |
+
},
|
292 |
+
"widgets_values": [
|
293 |
+
0.8,
|
294 |
+
0,
|
295 |
+
1
|
296 |
+
]
|
297 |
+
},
|
298 |
+
{
|
299 |
+
"id": 39,
|
300 |
+
"type": "CLIPTextEncode",
|
301 |
+
"pos": [
|
302 |
+
520,
|
303 |
+
430
|
304 |
+
],
|
305 |
+
"size": {
|
306 |
+
"0": 291.9967346191406,
|
307 |
+
"1": 128.62518310546875
|
308 |
+
},
|
309 |
+
"flags": {},
|
310 |
+
"order": 6,
|
311 |
+
"mode": 0,
|
312 |
+
"inputs": [
|
313 |
+
{
|
314 |
+
"name": "clip",
|
315 |
+
"type": "CLIP",
|
316 |
+
"link": 122
|
317 |
+
}
|
318 |
+
],
|
319 |
+
"outputs": [
|
320 |
+
{
|
321 |
+
"name": "CONDITIONING",
|
322 |
+
"type": "CONDITIONING",
|
323 |
+
"links": [
|
324 |
+
203
|
325 |
+
],
|
326 |
+
"shape": 3,
|
327 |
+
"slot_index": 0
|
328 |
+
}
|
329 |
+
],
|
330 |
+
"properties": {
|
331 |
+
"Node name for S&R": "CLIPTextEncode"
|
332 |
+
},
|
333 |
+
"widgets_values": [
|
334 |
+
"comic character. graphic illustration, comic art, graphic novel art, vibrant, highly detailed"
|
335 |
+
]
|
336 |
+
},
|
337 |
+
{
|
338 |
+
"id": 40,
|
339 |
+
"type": "CLIPTextEncode",
|
340 |
+
"pos": [
|
341 |
+
520,
|
342 |
+
620
|
343 |
+
],
|
344 |
+
"size": {
|
345 |
+
"0": 286.3603515625,
|
346 |
+
"1": 112.35245513916016
|
347 |
+
},
|
348 |
+
"flags": {},
|
349 |
+
"order": 7,
|
350 |
+
"mode": 0,
|
351 |
+
"inputs": [
|
352 |
+
{
|
353 |
+
"name": "clip",
|
354 |
+
"type": "CLIP",
|
355 |
+
"link": 123
|
356 |
+
}
|
357 |
+
],
|
358 |
+
"outputs": [
|
359 |
+
{
|
360 |
+
"name": "CONDITIONING",
|
361 |
+
"type": "CONDITIONING",
|
362 |
+
"links": [
|
363 |
+
204
|
364 |
+
],
|
365 |
+
"shape": 3,
|
366 |
+
"slot_index": 0
|
367 |
+
}
|
368 |
+
],
|
369 |
+
"properties": {
|
370 |
+
"Node name for S&R": "CLIPTextEncode"
|
371 |
+
},
|
372 |
+
"widgets_values": [
|
373 |
+
"photograph, deformed, glitch, noisy, realistic, stock photo"
|
374 |
+
]
|
375 |
+
},
|
376 |
+
{
|
377 |
+
"id": 4,
|
378 |
+
"type": "CheckpointLoaderSimple",
|
379 |
+
"pos": [
|
380 |
+
70,
|
381 |
+
520
|
382 |
+
],
|
383 |
+
"size": {
|
384 |
+
"0": 315,
|
385 |
+
"1": 98
|
386 |
+
},
|
387 |
+
"flags": {},
|
388 |
+
"order": 4,
|
389 |
+
"mode": 0,
|
390 |
+
"outputs": [
|
391 |
+
{
|
392 |
+
"name": "MODEL",
|
393 |
+
"type": "MODEL",
|
394 |
+
"links": [
|
395 |
+
206
|
396 |
+
],
|
397 |
+
"slot_index": 0
|
398 |
+
},
|
399 |
+
{
|
400 |
+
"name": "CLIP",
|
401 |
+
"type": "CLIP",
|
402 |
+
"links": [
|
403 |
+
122,
|
404 |
+
123
|
405 |
+
],
|
406 |
+
"slot_index": 1
|
407 |
+
},
|
408 |
+
{
|
409 |
+
"name": "VAE",
|
410 |
+
"type": "VAE",
|
411 |
+
"links": [
|
412 |
+
8
|
413 |
+
],
|
414 |
+
"slot_index": 2
|
415 |
+
}
|
416 |
+
],
|
417 |
+
"properties": {
|
418 |
+
"Node name for S&R": "CheckpointLoaderSimple"
|
419 |
+
},
|
420 |
+
"widgets_values": [
|
421 |
+
"sdxl/AlbedoBaseXL.safetensors"
|
422 |
+
]
|
423 |
+
},
|
424 |
+
{
|
425 |
+
"id": 3,
|
426 |
+
"type": "KSampler",
|
427 |
+
"pos": [
|
428 |
+
1300,
|
429 |
+
210
|
430 |
+
],
|
431 |
+
"size": {
|
432 |
+
"0": 315,
|
433 |
+
"1": 262
|
434 |
+
},
|
435 |
+
"flags": {},
|
436 |
+
"order": 9,
|
437 |
+
"mode": 0,
|
438 |
+
"inputs": [
|
439 |
+
{
|
440 |
+
"name": "model",
|
441 |
+
"type": "MODEL",
|
442 |
+
"link": 220
|
443 |
+
},
|
444 |
+
{
|
445 |
+
"name": "positive",
|
446 |
+
"type": "CONDITIONING",
|
447 |
+
"link": 200
|
448 |
+
},
|
449 |
+
{
|
450 |
+
"name": "negative",
|
451 |
+
"type": "CONDITIONING",
|
452 |
+
"link": 201
|
453 |
+
},
|
454 |
+
{
|
455 |
+
"name": "latent_image",
|
456 |
+
"type": "LATENT",
|
457 |
+
"link": 2
|
458 |
+
}
|
459 |
+
],
|
460 |
+
"outputs": [
|
461 |
+
{
|
462 |
+
"name": "LATENT",
|
463 |
+
"type": "LATENT",
|
464 |
+
"links": [
|
465 |
+
7
|
466 |
+
],
|
467 |
+
"slot_index": 0
|
468 |
+
}
|
469 |
+
],
|
470 |
+
"properties": {
|
471 |
+
"Node name for S&R": "KSampler"
|
472 |
+
},
|
473 |
+
"widgets_values": [
|
474 |
+
1631591050,
|
475 |
+
"fixed",
|
476 |
+
30,
|
477 |
+
4.5,
|
478 |
+
"ddpm",
|
479 |
+
"karras",
|
480 |
+
1
|
481 |
+
]
|
482 |
+
},
|
483 |
+
{
|
484 |
+
"id": 13,
|
485 |
+
"type": "LoadImage",
|
486 |
+
"pos": [
|
487 |
+
290,
|
488 |
+
70
|
489 |
+
],
|
490 |
+
"size": {
|
491 |
+
"0": 210,
|
492 |
+
"1": 314
|
493 |
+
},
|
494 |
+
"flags": {},
|
495 |
+
"order": 5,
|
496 |
+
"mode": 0,
|
497 |
+
"outputs": [
|
498 |
+
{
|
499 |
+
"name": "IMAGE",
|
500 |
+
"type": "IMAGE",
|
501 |
+
"links": [
|
502 |
+
214
|
503 |
+
],
|
504 |
+
"shape": 3,
|
505 |
+
"slot_index": 0
|
506 |
+
},
|
507 |
+
{
|
508 |
+
"name": "MASK",
|
509 |
+
"type": "MASK",
|
510 |
+
"links": null,
|
511 |
+
"shape": 3
|
512 |
+
}
|
513 |
+
],
|
514 |
+
"properties": {
|
515 |
+
"Node name for S&R": "LoadImage"
|
516 |
+
},
|
517 |
+
"widgets_values": [
|
518 |
+
"joseph-gonzalez-iFgRcqHznqg-unsplash.jpg",
|
519 |
+
"image"
|
520 |
+
]
|
521 |
+
}
|
522 |
+
],
|
523 |
+
"links": [
|
524 |
+
[
|
525 |
+
2,
|
526 |
+
5,
|
527 |
+
0,
|
528 |
+
3,
|
529 |
+
3,
|
530 |
+
"LATENT"
|
531 |
+
],
|
532 |
+
[
|
533 |
+
7,
|
534 |
+
3,
|
535 |
+
0,
|
536 |
+
8,
|
537 |
+
0,
|
538 |
+
"LATENT"
|
539 |
+
],
|
540 |
+
[
|
541 |
+
8,
|
542 |
+
4,
|
543 |
+
2,
|
544 |
+
8,
|
545 |
+
1,
|
546 |
+
"VAE"
|
547 |
+
],
|
548 |
+
[
|
549 |
+
19,
|
550 |
+
8,
|
551 |
+
0,
|
552 |
+
15,
|
553 |
+
0,
|
554 |
+
"IMAGE"
|
555 |
+
],
|
556 |
+
[
|
557 |
+
122,
|
558 |
+
4,
|
559 |
+
1,
|
560 |
+
39,
|
561 |
+
0,
|
562 |
+
"CLIP"
|
563 |
+
],
|
564 |
+
[
|
565 |
+
123,
|
566 |
+
4,
|
567 |
+
1,
|
568 |
+
40,
|
569 |
+
0,
|
570 |
+
"CLIP"
|
571 |
+
],
|
572 |
+
[
|
573 |
+
197,
|
574 |
+
11,
|
575 |
+
0,
|
576 |
+
60,
|
577 |
+
0,
|
578 |
+
"INSTANTID"
|
579 |
+
],
|
580 |
+
[
|
581 |
+
198,
|
582 |
+
38,
|
583 |
+
0,
|
584 |
+
60,
|
585 |
+
1,
|
586 |
+
"FACEANALYSIS"
|
587 |
+
],
|
588 |
+
[
|
589 |
+
199,
|
590 |
+
16,
|
591 |
+
0,
|
592 |
+
60,
|
593 |
+
2,
|
594 |
+
"CONTROL_NET"
|
595 |
+
],
|
596 |
+
[
|
597 |
+
200,
|
598 |
+
60,
|
599 |
+
1,
|
600 |
+
3,
|
601 |
+
1,
|
602 |
+
"CONDITIONING"
|
603 |
+
],
|
604 |
+
[
|
605 |
+
201,
|
606 |
+
60,
|
607 |
+
2,
|
608 |
+
3,
|
609 |
+
2,
|
610 |
+
"CONDITIONING"
|
611 |
+
],
|
612 |
+
[
|
613 |
+
203,
|
614 |
+
39,
|
615 |
+
0,
|
616 |
+
60,
|
617 |
+
5,
|
618 |
+
"CONDITIONING"
|
619 |
+
],
|
620 |
+
[
|
621 |
+
204,
|
622 |
+
40,
|
623 |
+
0,
|
624 |
+
60,
|
625 |
+
6,
|
626 |
+
"CONDITIONING"
|
627 |
+
],
|
628 |
+
[
|
629 |
+
206,
|
630 |
+
4,
|
631 |
+
0,
|
632 |
+
60,
|
633 |
+
4,
|
634 |
+
"MODEL"
|
635 |
+
],
|
636 |
+
[
|
637 |
+
214,
|
638 |
+
13,
|
639 |
+
0,
|
640 |
+
60,
|
641 |
+
3,
|
642 |
+
"IMAGE"
|
643 |
+
],
|
644 |
+
[
|
645 |
+
220,
|
646 |
+
60,
|
647 |
+
0,
|
648 |
+
3,
|
649 |
+
0,
|
650 |
+
"MODEL"
|
651 |
+
]
|
652 |
+
],
|
653 |
+
"groups": [],
|
654 |
+
"config": {},
|
655 |
+
"extra": {},
|
656 |
+
"version": 0.4
|
657 |
+
}
|
ComfyUI/custom_nodes/ComfyUI_InstantID/examples/InstantID_depth.json
ADDED
@@ -0,0 +1,881 @@
1 |
+
{
|
2 |
+
"last_node_id": 78,
|
3 |
+
"last_link_id": 239,
|
4 |
+
"nodes": [
|
5 |
+
{
|
6 |
+
"id": 11,
|
7 |
+
"type": "InstantIDModelLoader",
|
8 |
+
"pos": [
|
9 |
+
560,
|
10 |
+
70
|
11 |
+
],
|
12 |
+
"size": {
|
13 |
+
"0": 238.72393798828125,
|
14 |
+
"1": 58
|
15 |
+
},
|
16 |
+
"flags": {},
|
17 |
+
"order": 0,
|
18 |
+
"mode": 0,
|
19 |
+
"outputs": [
|
20 |
+
{
|
21 |
+
"name": "INSTANTID",
|
22 |
+
"type": "INSTANTID",
|
23 |
+
"links": [
|
24 |
+
197
|
25 |
+
],
|
26 |
+
"shape": 3,
|
27 |
+
"slot_index": 0
|
28 |
+
}
|
29 |
+
],
|
30 |
+
"properties": {
|
31 |
+
"Node name for S&R": "InstantIDModelLoader"
|
32 |
+
},
|
33 |
+
"widgets_values": [
|
34 |
+
"ip-adapter.bin"
|
35 |
+
]
|
36 |
+
},
|
37 |
+
{
|
38 |
+
"id": 38,
|
39 |
+
"type": "InstantIDFaceAnalysis",
|
40 |
+
"pos": [
|
41 |
+
570,
|
42 |
+
180
|
43 |
+
],
|
44 |
+
"size": {
|
45 |
+
"0": 227.09793090820312,
|
46 |
+
"1": 58
|
47 |
+
},
|
48 |
+
"flags": {},
|
49 |
+
"order": 1,
|
50 |
+
"mode": 0,
|
51 |
+
"outputs": [
|
52 |
+
{
|
53 |
+
"name": "FACEANALYSIS",
|
54 |
+
"type": "FACEANALYSIS",
|
55 |
+
"links": [
|
56 |
+
198
|
57 |
+
],
|
58 |
+
"shape": 3,
|
59 |
+
"slot_index": 0
|
60 |
+
}
|
61 |
+
],
|
62 |
+
"properties": {
|
63 |
+
"Node name for S&R": "InstantIDFaceAnalysis"
|
64 |
+
},
|
65 |
+
"widgets_values": [
|
66 |
+
"CPU"
|
67 |
+
]
|
68 |
+
},
|
69 |
+
{
|
70 |
+
"id": 16,
|
71 |
+
"type": "ControlNetLoader",
|
72 |
+
"pos": [
|
73 |
+
560,
|
74 |
+
290
|
75 |
+
],
|
76 |
+
"size": {
|
77 |
+
"0": 250.07241821289062,
|
78 |
+
"1": 58
|
79 |
+
},
|
80 |
+
"flags": {},
|
81 |
+
"order": 2,
|
82 |
+
"mode": 0,
|
83 |
+
"outputs": [
|
84 |
+
{
|
85 |
+
"name": "CONTROL_NET",
|
86 |
+
"type": "CONTROL_NET",
|
87 |
+
"links": [
|
88 |
+
199
|
89 |
+
],
|
90 |
+
"shape": 3,
|
91 |
+
"slot_index": 0
|
92 |
+
}
|
93 |
+
],
|
94 |
+
"properties": {
|
95 |
+
"Node name for S&R": "ControlNetLoader"
|
96 |
+
},
|
97 |
+
"widgets_values": [
|
98 |
+
"instantid/diffusion_pytorch_model.safetensors"
|
99 |
+
]
|
100 |
+
},
|
101 |
+
{
|
102 |
+
"id": 39,
|
103 |
+
"type": "CLIPTextEncode",
|
104 |
+
"pos": [
|
105 |
+
520,
|
106 |
+
430
|
107 |
+
],
|
108 |
+
"size": {
|
109 |
+
"0": 291.9967346191406,
|
110 |
+
"1": 128.62518310546875
|
111 |
+
},
|
112 |
+
"flags": {},
|
113 |
+
"order": 8,
|
114 |
+
"mode": 0,
|
115 |
+
"inputs": [
|
116 |
+
{
|
117 |
+
"name": "clip",
|
118 |
+
"type": "CLIP",
|
119 |
+
"link": 122
|
120 |
+
}
|
121 |
+
],
|
122 |
+
"outputs": [
|
123 |
+
{
|
124 |
+
"name": "CONDITIONING",
|
125 |
+
"type": "CONDITIONING",
|
126 |
+
"links": [
|
127 |
+
203
|
128 |
+
],
|
129 |
+
"shape": 3,
|
130 |
+
"slot_index": 0
|
131 |
+
}
|
132 |
+
],
|
133 |
+
"properties": {
|
134 |
+
"Node name for S&R": "CLIPTextEncode"
|
135 |
+
},
|
136 |
+
"widgets_values": [
|
137 |
+
"comic character. graphic illustration, comic art, graphic novel art, vibrant, highly detailed"
|
138 |
+
]
|
139 |
+
},
|
140 |
+
{
|
141 |
+
"id": 40,
|
142 |
+
"type": "CLIPTextEncode",
|
143 |
+
"pos": [
|
144 |
+
520,
|
145 |
+
620
|
146 |
+
],
|
147 |
+
"size": {
|
148 |
+
"0": 286.3603515625,
|
149 |
+
"1": 112.35245513916016
|
150 |
+
},
|
151 |
+
"flags": {},
|
152 |
+
"order": 9,
|
153 |
+
"mode": 0,
|
154 |
+
"inputs": [
|
155 |
+
{
|
156 |
+
"name": "clip",
|
157 |
+
"type": "CLIP",
|
158 |
+
"link": 123
|
159 |
+
}
|
160 |
+
],
|
161 |
+
"outputs": [
|
162 |
+
{
|
163 |
+
"name": "CONDITIONING",
|
164 |
+
"type": "CONDITIONING",
|
165 |
+
"links": [
|
166 |
+
204
|
167 |
+
],
|
168 |
+
"shape": 3,
|
169 |
+
"slot_index": 0
|
170 |
+
}
|
171 |
+
],
|
172 |
+
"properties": {
|
173 |
+
"Node name for S&R": "CLIPTextEncode"
|
174 |
+
},
|
175 |
+
"widgets_values": [
|
176 |
+
"photograph, deformed, glitch, noisy, realistic, stock photo"
|
177 |
+
]
|
178 |
+
},
|
179 |
+
{
|
180 |
+
"id": 4,
|
181 |
+
"type": "CheckpointLoaderSimple",
|
182 |
+
"pos": [
|
183 |
+
70,
|
184 |
+
520
|
185 |
+
],
|
186 |
+
"size": {
|
187 |
+
"0": 315,
|
188 |
+
"1": 98
|
189 |
+
},
|
190 |
+
"flags": {},
|
191 |
+
"order": 3,
|
192 |
+
"mode": 0,
|
193 |
+
"outputs": [
|
194 |
+
{
|
195 |
+
"name": "MODEL",
|
196 |
+
"type": "MODEL",
|
197 |
+
"links": [
|
198 |
+
206
|
199 |
+
],
|
200 |
+
"slot_index": 0
|
201 |
+
},
|
202 |
+
{
|
203 |
+
"name": "CLIP",
|
204 |
+
"type": "CLIP",
|
205 |
+
"links": [
|
206 |
+
122,
|
207 |
+
123
|
208 |
+
],
|
209 |
+
"slot_index": 1
|
210 |
+
},
|
211 |
+
{
|
212 |
+
"name": "VAE",
|
213 |
+
"type": "VAE",
|
214 |
+
"links": [
|
215 |
+
8
|
216 |
+
],
|
217 |
+
"slot_index": 2
|
218 |
+
}
|
219 |
+
],
|
220 |
+
"properties": {
|
221 |
+
"Node name for S&R": "CheckpointLoaderSimple"
|
222 |
+
},
|
223 |
+
"widgets_values": [
|
224 |
+
"sdxl/AlbedoBaseXL.safetensors"
|
225 |
+
]
|
226 |
+
},
|
227 |
+
{
|
228 |
+
"id": 60,
|
229 |
+
"type": "ApplyInstantID",
|
230 |
+
"pos": [
|
231 |
+
910,
|
232 |
+
210
|
233 |
+
],
|
234 |
+
"size": {
|
235 |
+
"0": 315,
|
236 |
+
"1": 266
|
237 |
+
},
|
238 |
+
"flags": {},
|
239 |
+
"order": 11,
|
240 |
+
"mode": 0,
|
241 |
+
"inputs": [
|
242 |
+
{
|
243 |
+
"name": "instantid",
|
244 |
+
"type": "INSTANTID",
|
245 |
+
"link": 197
|
246 |
+
},
|
247 |
+
{
|
248 |
+
"name": "insightface",
|
249 |
+
"type": "FACEANALYSIS",
|
250 |
+
"link": 198
|
251 |
+
},
|
252 |
+
{
|
253 |
+
"name": "control_net",
|
254 |
+
"type": "CONTROL_NET",
|
255 |
+
"link": 199
|
256 |
+
},
|
257 |
+
{
|
258 |
+
"name": "image",
|
259 |
+
"type": "IMAGE",
|
260 |
+
"link": 214
|
261 |
+
},
|
262 |
+
{
|
263 |
+
"name": "model",
|
264 |
+
"type": "MODEL",
|
265 |
+
"link": 206
|
266 |
+
},
|
267 |
+
{
|
268 |
+
"name": "positive",
|
269 |
+
"type": "CONDITIONING",
|
270 |
+
"link": 203
|
271 |
+
},
|
272 |
+
{
|
273 |
+
"name": "negative",
|
274 |
+
"type": "CONDITIONING",
|
275 |
+
"link": 204
|
276 |
+
},
|
277 |
+
{
|
278 |
+
"name": "image_kps",
|
279 |
+
"type": "IMAGE",
|
280 |
+
"link": 236
|
281 |
+
},
|
282 |
+
{
|
283 |
+
"name": "mask",
|
284 |
+
"type": "MASK",
|
285 |
+
"link": null
|
286 |
+
}
|
287 |
+
],
|
288 |
+
"outputs": [
|
289 |
+
{
|
290 |
+
"name": "MODEL",
|
291 |
+
"type": "MODEL",
|
292 |
+
"links": [
|
293 |
+
227
|
294 |
+
],
|
295 |
+
"shape": 3,
|
296 |
+
"slot_index": 0
|
297 |
+
},
|
298 |
+
{
|
299 |
+
"name": "POSITIVE",
|
300 |
+
"type": "CONDITIONING",
|
301 |
+
"links": [
|
302 |
+
229
|
303 |
+
],
|
304 |
+
"shape": 3,
|
305 |
+
"slot_index": 1
|
306 |
+
},
|
307 |
+
{
|
308 |
+
"name": "NEGATIVE",
|
309 |
+
"type": "CONDITIONING",
|
310 |
+
"links": [
|
311 |
+
228
|
312 |
+
],
|
313 |
+
"shape": 3,
|
314 |
+
"slot_index": 2
|
315 |
+
}
|
316 |
+
],
|
317 |
+
"properties": {
|
318 |
+
"Node name for S&R": "ApplyInstantID"
|
319 |
+
},
|
320 |
+
"widgets_values": [
|
321 |
+
0.8,
|
322 |
+
0,
|
323 |
+
1
|
324 |
+
]
|
325 |
+
},
|
326 |
+
{
|
327 |
+
"id": 15,
|
328 |
+
"type": "PreviewImage",
|
329 |
+
"pos": [
|
330 |
+
1937,
|
331 |
+
321
|
332 |
+
],
|
333 |
+
"size": {
|
334 |
+
"0": 584.0855712890625,
|
335 |
+
"1": 610.4592895507812
|
336 |
+
},
|
337 |
+
"flags": {},
|
338 |
+
"order": 15,
|
339 |
+
"mode": 0,
|
340 |
+
"inputs": [
|
341 |
+
{
|
342 |
+
"name": "images",
|
343 |
+
"type": "IMAGE",
|
344 |
+
"link": 19
|
345 |
+
}
|
346 |
+
],
|
347 |
+
"properties": {
|
348 |
+
"Node name for S&R": "PreviewImage"
|
349 |
+
}
|
350 |
+
},
|
351 |
+
{
|
352 |
+
"id": 8,
|
353 |
+
"type": "VAEDecode",
|
354 |
+
"pos": [
|
355 |
+
1940,
|
356 |
+
207
|
357 |
+
],
|
358 |
+
"size": {
|
359 |
+
"0": 210,
|
360 |
+
"1": 46
|
361 |
+
},
|
362 |
+
"flags": {},
|
363 |
+
"order": 14,
|
364 |
+
"mode": 0,
|
365 |
+
"inputs": [
|
366 |
+
{
|
367 |
+
"name": "samples",
|
368 |
+
"type": "LATENT",
|
369 |
+
"link": 7
|
370 |
+
},
|
371 |
+
{
|
372 |
+
"name": "vae",
|
373 |
+
"type": "VAE",
|
374 |
+
"link": 8
|
375 |
+
}
|
376 |
+
],
|
377 |
+
"outputs": [
|
378 |
+
{
|
379 |
+
"name": "IMAGE",
|
380 |
+
"type": "IMAGE",
|
381 |
+
"links": [
|
382 |
+
19
|
383 |
+
],
|
384 |
+
"slot_index": 0
|
385 |
+
}
|
386 |
+
],
|
387 |
+
"properties": {
|
388 |
+
"Node name for S&R": "VAEDecode"
|
389 |
+
}
|
390 |
+
},
|
391 |
+
{
|
392 |
+
"id": 5,
|
393 |
+
"type": "EmptyLatentImage",
|
394 |
+
"pos": [
|
395 |
+
910,
|
396 |
+
540
|
397 |
+
],
|
398 |
+
"size": {
|
399 |
+
"0": 315,
|
400 |
+
"1": 106
|
401 |
+
},
|
402 |
+
"flags": {},
|
403 |
+
"order": 4,
|
404 |
+
"mode": 0,
|
405 |
+
"outputs": [
|
406 |
+
{
|
407 |
+
"name": "LATENT",
|
408 |
+
"type": "LATENT",
|
409 |
+
"links": [
|
410 |
+
2
|
411 |
+
],
|
412 |
+
"slot_index": 0
|
413 |
+
}
|
414 |
+
],
|
415 |
+
"properties": {
|
416 |
+
"Node name for S&R": "EmptyLatentImage"
|
417 |
+
},
|
418 |
+
"widgets_values": [
|
419 |
+
1016,
|
420 |
+
1016,
|
421 |
+
1
|
422 |
+
]
|
423 |
+
},
|
424 |
+
{
|
425 |
+
"id": 13,
|
426 |
+
"type": "LoadImage",
|
427 |
+
"pos": [
|
428 |
+
290,
|
429 |
+
70
|
430 |
+
],
|
431 |
+
"size": {
|
432 |
+
"0": 210,
|
433 |
+
"1": 314
|
434 |
+
},
|
435 |
+
"flags": {},
|
436 |
+
"order": 5,
|
437 |
+
"mode": 0,
|
438 |
+
"outputs": [
|
439 |
+
{
|
440 |
+
"name": "IMAGE",
|
441 |
+
"type": "IMAGE",
|
442 |
+
"links": [
|
443 |
+
214
|
444 |
+
],
|
445 |
+
"shape": 3,
|
446 |
+
"slot_index": 0
|
447 |
+
},
|
448 |
+
{
|
449 |
+
"name": "MASK",
|
450 |
+
"type": "MASK",
|
451 |
+
"links": null,
|
452 |
+
"shape": 3
|
453 |
+
}
|
454 |
+
],
|
455 |
+
"properties": {
|
456 |
+
"Node name for S&R": "LoadImage"
|
457 |
+
},
|
458 |
+
"widgets_values": [
|
459 |
+
"face4.jpg",
|
460 |
+
"image"
|
461 |
+
]
|
462 |
+
},
|
463 |
+
{
|
464 |
+
"id": 73,
|
465 |
+
"type": "ControlNetLoader",
|
466 |
+
"pos": [
|
467 |
+
909,
|
468 |
+
706
|
469 |
+
],
|
470 |
+
"size": {
|
471 |
+
"0": 315,
|
472 |
+
"1": 58
|
473 |
+
},
|
474 |
+
"flags": {},
|
475 |
+
"order": 6,
|
476 |
+
"mode": 0,
|
477 |
+
"outputs": [
|
478 |
+
{
|
479 |
+
"name": "CONTROL_NET",
|
480 |
+
"type": "CONTROL_NET",
|
481 |
+
"links": [
|
482 |
+
232
|
483 |
+
],
|
484 |
+
"shape": 3
|
485 |
+
}
|
486 |
+
],
|
487 |
+
"properties": {
|
488 |
+
"Node name for S&R": "ControlNetLoader"
|
489 |
+
},
|
490 |
+
"widgets_values": [
|
491 |
+
"control-lora/control-lora-depth-rank256.safetensors"
|
492 |
+
]
|
493 |
+
},
|
494 |
+
{
|
495 |
+
"id": 74,
|
496 |
+
"type": "LoadImage",
|
497 |
+
"pos": [
|
498 |
+
508,
|
499 |
+
816
|
500 |
+
],
|
501 |
+
"size": {
|
502 |
+
"0": 315,
|
503 |
+
"1": 314.0000305175781
|
504 |
+
},
|
505 |
+
"flags": {},
|
506 |
+
"order": 7,
|
507 |
+
"mode": 0,
|
508 |
+
"outputs": [
|
509 |
+
{
|
510 |
+
"name": "IMAGE",
|
511 |
+
"type": "IMAGE",
|
512 |
+
"links": [
|
513 |
+
236,
|
514 |
+
238
|
515 |
+
],
|
516 |
+
"shape": 3,
|
517 |
+
"slot_index": 0
|
518 |
+
},
|
519 |
+
{
|
520 |
+
"name": "MASK",
|
521 |
+
"type": "MASK",
|
522 |
+
"links": null,
|
523 |
+
"shape": 3
|
524 |
+
}
|
525 |
+
],
|
526 |
+
"properties": {
|
527 |
+
"Node name for S&R": "LoadImage"
|
528 |
+
},
|
529 |
+
"widgets_values": [
|
530 |
+
"666561.jpg",
|
531 |
+
"image"
|
532 |
+
]
|
533 |
+
},
|
534 |
+
{
|
535 |
+
"id": 72,
|
536 |
+
"type": "ControlNetApplyAdvanced",
|
537 |
+
"pos": [
|
538 |
+
1284,
|
539 |
+
416
|
540 |
+
],
|
541 |
+
"size": {
|
542 |
+
"0": 226.8000030517578,
|
543 |
+
"1": 166
|
544 |
+
},
|
545 |
+
"flags": {},
|
546 |
+
"order": 12,
|
547 |
+
"mode": 0,
|
548 |
+
"inputs": [
|
549 |
+
{
|
550 |
+
"name": "positive",
|
551 |
+
"type": "CONDITIONING",
|
552 |
+
"link": 229
|
553 |
+
},
|
554 |
+
{
|
555 |
+
"name": "negative",
|
556 |
+
"type": "CONDITIONING",
|
557 |
+
"link": 228
|
558 |
+
},
|
559 |
+
{
|
560 |
+
"name": "control_net",
|
561 |
+
"type": "CONTROL_NET",
|
562 |
+
"link": 232,
|
563 |
+
"slot_index": 2
|
564 |
+
},
|
565 |
+
{
|
566 |
+
"name": "image",
|
567 |
+
"type": "IMAGE",
|
568 |
+
"link": 239
|
569 |
+
}
|
570 |
+
],
|
571 |
+
"outputs": [
|
572 |
+
{
|
573 |
+
"name": "positive",
|
574 |
+
"type": "CONDITIONING",
|
575 |
+
"links": [
|
576 |
+
230
|
577 |
+
],
|
578 |
+
"shape": 3,
|
579 |
+
"slot_index": 0
|
580 |
+
},
|
581 |
+
{
|
582 |
+
"name": "negative",
|
583 |
+
"type": "CONDITIONING",
|
584 |
+
"links": [
|
585 |
+
231
|
586 |
+
],
|
587 |
+
"shape": 3,
|
588 |
+
"slot_index": 1
|
589 |
+
}
|
590 |
+
],
|
591 |
+
"properties": {
|
592 |
+
"Node name for S&R": "ControlNetApplyAdvanced"
|
593 |
+
},
|
594 |
+
"widgets_values": [
|
595 |
+
0.65,
|
596 |
+
0,
|
597 |
+
0.35000000000000003
|
598 |
+
]
|
599 |
+
},
|
600 |
+
{
|
601 |
+
"id": 77,
|
602 |
+
"type": "Zoe-DepthMapPreprocessor",
|
603 |
+
"pos": [
|
604 |
+
1009,
|
605 |
+
839
|
606 |
+
],
|
607 |
+
"size": [
|
608 |
+
210,
|
609 |
+
58
|
610 |
+
],
|
611 |
+
"flags": {},
|
612 |
+
"order": 10,
|
613 |
+
"mode": 0,
|
614 |
+
"inputs": [
|
615 |
+
{
|
616 |
+
"name": "image",
|
617 |
+
"type": "IMAGE",
|
618 |
+
"link": 238
|
619 |
+
}
|
620 |
+
],
|
621 |
+
"outputs": [
|
622 |
+
{
|
623 |
+
"name": "IMAGE",
|
624 |
+
"type": "IMAGE",
|
625 |
+
"links": [
|
626 |
+
239
|
627 |
+
],
|
628 |
+
"shape": 3,
|
629 |
+
"slot_index": 0
|
630 |
+
}
|
631 |
+
],
|
632 |
+
"properties": {
|
633 |
+
"Node name for S&R": "Zoe-DepthMapPreprocessor"
|
634 |
+
},
|
635 |
+
"widgets_values": [
|
636 |
+
1024
|
637 |
+
]
|
638 |
+
},
|
639 |
+
{
|
640 |
+
"id": 3,
|
641 |
+
"type": "KSampler",
|
642 |
+
"pos": [
|
643 |
+
1570,
|
644 |
+
210
|
645 |
+
],
|
646 |
+
"size": {
|
647 |
+
"0": 315,
|
648 |
+
"1": 262
|
649 |
+
},
|
650 |
+
"flags": {},
|
651 |
+
"order": 13,
|
652 |
+
"mode": 0,
|
653 |
+
"inputs": [
|
654 |
+
{
|
655 |
+
"name": "model",
|
656 |
+
"type": "MODEL",
|
657 |
+
"link": 227
|
658 |
+
},
|
659 |
+
{
|
660 |
+
"name": "positive",
|
661 |
+
"type": "CONDITIONING",
|
662 |
+
"link": 230
|
663 |
+
},
|
664 |
+
{
|
665 |
+
"name": "negative",
|
666 |
+
"type": "CONDITIONING",
|
667 |
+
"link": 231
|
668 |
+
},
|
669 |
+
{
|
670 |
+
"name": "latent_image",
|
671 |
+
"type": "LATENT",
|
672 |
+
"link": 2
|
673 |
+
}
|
674 |
+
],
|
675 |
+
"outputs": [
|
676 |
+
{
|
677 |
+
"name": "LATENT",
|
678 |
+
"type": "LATENT",
|
679 |
+
"links": [
|
680 |
+
7
|
681 |
+
],
|
682 |
+
"slot_index": 0
|
683 |
+
}
|
684 |
+
],
|
685 |
+
"properties": {
|
686 |
+
"Node name for S&R": "KSampler"
|
687 |
+
},
|
688 |
+
"widgets_values": [
|
689 |
+
1631592172,
|
690 |
+
"fixed",
|
691 |
+
30,
|
692 |
+
4.5,
|
693 |
+
"ddpm",
|
694 |
+
"karras",
|
695 |
+
1
|
696 |
+
]
|
697 |
+
}
|
698 |
+
],
|
699 |
+
"links": [
|
700 |
+
[
|
701 |
+
2,
|
702 |
+
5,
|
703 |
+
0,
|
704 |
+
3,
|
705 |
+
3,
|
706 |
+
"LATENT"
|
707 |
+
],
|
708 |
+
[
|
709 |
+
7,
|
710 |
+
3,
|
711 |
+
0,
|
712 |
+
8,
|
713 |
+
0,
|
714 |
+
"LATENT"
|
715 |
+
],
|
716 |
+
[
|
717 |
+
8,
|
718 |
+
4,
|
719 |
+
2,
|
720 |
+
8,
|
721 |
+
1,
|
722 |
+
"VAE"
|
723 |
+
],
|
724 |
+
[
|
725 |
+
19,
|
726 |
+
8,
|
727 |
+
0,
|
728 |
+
15,
|
729 |
+
0,
|
730 |
+
"IMAGE"
|
731 |
+
],
|
732 |
+
[
|
733 |
+
122,
|
734 |
+
4,
|
735 |
+
1,
|
736 |
+
39,
|
737 |
+
0,
|
738 |
+
"CLIP"
|
739 |
+
],
|
740 |
+
[
|
741 |
+
123,
|
742 |
+
4,
|
743 |
+
1,
|
744 |
+
40,
|
745 |
+
0,
|
746 |
+
"CLIP"
|
747 |
+
],
|
748 |
+
[
|
749 |
+
197,
|
750 |
+
11,
|
751 |
+
0,
|
752 |
+
60,
|
753 |
+
0,
|
754 |
+
"INSTANTID"
|
755 |
+
],
|
756 |
+
[
|
757 |
+
198,
|
758 |
+
38,
|
759 |
+
0,
|
760 |
+
60,
|
761 |
+
1,
|
762 |
+
"FACEANALYSIS"
|
763 |
+
],
|
764 |
+
[
|
765 |
+
199,
|
766 |
+
16,
|
767 |
+
0,
|
768 |
+
60,
|
769 |
+
2,
|
770 |
+
"CONTROL_NET"
|
771 |
+
],
|
772 |
+
[
|
773 |
+
203,
|
774 |
+
39,
|
775 |
+
0,
|
776 |
+
60,
|
777 |
+
5,
|
778 |
+
"CONDITIONING"
|
779 |
+
],
|
780 |
+
[
|
781 |
+
204,
|
782 |
+
40,
|
783 |
+
0,
|
784 |
+
60,
|
785 |
+
6,
|
786 |
+
"CONDITIONING"
|
787 |
+
],
|
788 |
+
[
|
789 |
+
206,
|
790 |
+
4,
|
791 |
+
0,
|
792 |
+
60,
|
793 |
+
4,
|
794 |
+
"MODEL"
|
795 |
+
],
|
796 |
+
[
|
797 |
+
214,
|
798 |
+
13,
|
799 |
+
0,
|
800 |
+
60,
|
801 |
+
3,
|
802 |
+
"IMAGE"
|
803 |
+
],
|
804 |
+
[
|
805 |
+
227,
|
806 |
+
60,
|
807 |
+
0,
|
808 |
+
3,
|
809 |
+
0,
|
810 |
+
"MODEL"
|
811 |
+
],
|
812 |
+
[
|
813 |
+
228,
|
814 |
+
60,
|
815 |
+
2,
|
816 |
+
72,
|
817 |
+
1,
|
818 |
+
"CONDITIONING"
|
819 |
+
],
|
820 |
+
[
|
821 |
+
229,
|
822 |
+
60,
|
823 |
+
1,
|
824 |
+
72,
|
825 |
+
0,
|
826 |
+
"CONDITIONING"
|
827 |
+
],
|
828 |
+
[
|
829 |
+
230,
|
830 |
+
72,
|
831 |
+
0,
|
832 |
+
3,
|
833 |
+
1,
|
834 |
+
"CONDITIONING"
|
835 |
+
],
|
836 |
+
[
|
837 |
+
231,
|
838 |
+
72,
|
839 |
+
1,
|
840 |
+
3,
|
841 |
+
2,
|
842 |
+
"CONDITIONING"
|
843 |
+
],
|
844 |
+
[
|
845 |
+
232,
|
846 |
+
73,
|
847 |
+
0,
|
848 |
+
72,
|
849 |
+
2,
|
850 |
+
"CONTROL_NET"
|
851 |
+
],
|
852 |
+
[
|
853 |
+
236,
|
854 |
+
74,
|
855 |
+
0,
|
856 |
+
60,
|
857 |
+
7,
|
858 |
+
"IMAGE"
|
859 |
+
],
|
860 |
+
[
|
861 |
+
238,
|
862 |
+
74,
|
863 |
+
0,
|
864 |
+
77,
|
865 |
+
0,
|
866 |
+
"IMAGE"
|
867 |
+
],
|
868 |
+
[
|
869 |
+
239,
|
870 |
+
77,
|
871 |
+
0,
|
872 |
+
72,
|
873 |
+
3,
|
874 |
+
"IMAGE"
|
875 |
+
]
|
876 |
+
],
|
877 |
+
"groups": [],
|
878 |
+
"config": {},
|
879 |
+
"extra": {},
|
880 |
+
"version": 0.4
|
881 |
+
}
|
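Note: the workflow files in this changeset are plain ComfyUI graph exports. Each entry under "nodes" carries an "id", a "type", its "inputs"/"outputs", and "widgets_values", while each entry under "links" is a six-element array [link_id, source_node_id, source_output_slot, target_node_id, target_input_slot, data_type]. As a quick sanity check when editing one of these exports by hand, a minimal stand-alone sketch such as the one below can print which node feeds which; the file path is only an assumption and the script relies solely on the structure visible in the diff above, not on any ComfyUI API.

```python
import json
from pathlib import Path

# Assumed path; adjust to wherever the workflow export actually lives.
WORKFLOW = Path("ComfyUI/custom_nodes/ComfyUI_InstantID/examples/InstantID_depth.json")

def summarize(path: Path) -> None:
    graph = json.loads(path.read_text())
    # Map node id -> node type so the output is readable.
    node_types = {node["id"]: node["type"] for node in graph["nodes"]}
    print(f"{path.name}: {len(graph['nodes'])} nodes, {len(graph['links'])} links")
    # Each link is [link_id, src_node, src_slot, dst_node, dst_slot, type].
    for link_id, src, src_slot, dst, dst_slot, kind in graph["links"]:
        print(f"  link {link_id:>3} {kind:<13} "
              f"{node_types[src]}[{src_slot}] -> {node_types[dst]}[{dst_slot}]")

if __name__ == "__main__":
    summarize(WORKFLOW)
```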
ComfyUI/custom_nodes/ComfyUI_InstantID/examples/InstantID_multi_id.json
ADDED
@@ -0,0 +1,1364 @@
|
1 |
+
{
|
2 |
+
"last_node_id": 92,
|
3 |
+
"last_link_id": 290,
|
4 |
+
"nodes": [
|
5 |
+
{
|
6 |
+
"id": 15,
|
7 |
+
"type": "PreviewImage",
|
8 |
+
"pos": [
|
9 |
+
2160,
|
10 |
+
-150
|
11 |
+
],
|
12 |
+
"size": {
|
13 |
+
"0": 584.0855712890625,
|
14 |
+
"1": 610.4592895507812
|
15 |
+
},
|
16 |
+
"flags": {},
|
17 |
+
"order": 23,
|
18 |
+
"mode": 0,
|
19 |
+
"inputs": [
|
20 |
+
{
|
21 |
+
"name": "images",
|
22 |
+
"type": "IMAGE",
|
23 |
+
"link": 19
|
24 |
+
}
|
25 |
+
],
|
26 |
+
"properties": {
|
27 |
+
"Node name for S&R": "PreviewImage"
|
28 |
+
}
|
29 |
+
},
|
30 |
+
{
|
31 |
+
"id": 8,
|
32 |
+
"type": "VAEDecode",
|
33 |
+
"pos": [
|
34 |
+
2170,
|
35 |
+
-270
|
36 |
+
],
|
37 |
+
"size": {
|
38 |
+
"0": 210,
|
39 |
+
"1": 46
|
40 |
+
},
|
41 |
+
"flags": {},
|
42 |
+
"order": 22,
|
43 |
+
"mode": 0,
|
44 |
+
"inputs": [
|
45 |
+
{
|
46 |
+
"name": "samples",
|
47 |
+
"type": "LATENT",
|
48 |
+
"link": 7
|
49 |
+
},
|
50 |
+
{
|
51 |
+
"name": "vae",
|
52 |
+
"type": "VAE",
|
53 |
+
"link": 254
|
54 |
+
}
|
55 |
+
],
|
56 |
+
"outputs": [
|
57 |
+
{
|
58 |
+
"name": "IMAGE",
|
59 |
+
"type": "IMAGE",
|
60 |
+
"links": [
|
61 |
+
19
|
62 |
+
],
|
63 |
+
"slot_index": 0
|
64 |
+
}
|
65 |
+
],
|
66 |
+
"properties": {
|
67 |
+
"Node name for S&R": "VAEDecode"
|
68 |
+
}
|
69 |
+
},
|
70 |
+
{
|
71 |
+
"id": 81,
|
72 |
+
"type": "Reroute",
|
73 |
+
"pos": [
|
74 |
+
1980,
|
75 |
+
120
|
76 |
+
],
|
77 |
+
"size": [
|
78 |
+
75,
|
79 |
+
26
|
80 |
+
],
|
81 |
+
"flags": {},
|
82 |
+
"order": 13,
|
83 |
+
"mode": 0,
|
84 |
+
"inputs": [
|
85 |
+
{
|
86 |
+
"name": "",
|
87 |
+
"type": "*",
|
88 |
+
"link": 253
|
89 |
+
}
|
90 |
+
],
|
91 |
+
"outputs": [
|
92 |
+
{
|
93 |
+
"name": "VAE",
|
94 |
+
"type": "VAE",
|
95 |
+
"links": [
|
96 |
+
254
|
97 |
+
],
|
98 |
+
"slot_index": 0
|
99 |
+
}
|
100 |
+
],
|
101 |
+
"properties": {
|
102 |
+
"showOutputText": true,
|
103 |
+
"horizontal": false
|
104 |
+
}
|
105 |
+
},
|
106 |
+
{
|
107 |
+
"id": 38,
|
108 |
+
"type": "InstantIDFaceAnalysis",
|
109 |
+
"pos": [
|
110 |
+
-210,
|
111 |
+
-40
|
112 |
+
],
|
113 |
+
"size": [
|
114 |
+
210,
|
115 |
+
60
|
116 |
+
],
|
117 |
+
"flags": {},
|
118 |
+
"order": 0,
|
119 |
+
"mode": 0,
|
120 |
+
"outputs": [
|
121 |
+
{
|
122 |
+
"name": "FACEANALYSIS",
|
123 |
+
"type": "FACEANALYSIS",
|
124 |
+
"links": [
|
125 |
+
198,
|
126 |
+
239
|
127 |
+
],
|
128 |
+
"shape": 3,
|
129 |
+
"slot_index": 0
|
130 |
+
}
|
131 |
+
],
|
132 |
+
"properties": {
|
133 |
+
"Node name for S&R": "InstantIDFaceAnalysis"
|
134 |
+
},
|
135 |
+
"widgets_values": [
|
136 |
+
"CPU"
|
137 |
+
]
|
138 |
+
},
|
139 |
+
{
|
140 |
+
"id": 16,
|
141 |
+
"type": "ControlNetLoader",
|
142 |
+
"pos": [
|
143 |
+
-210,
|
144 |
+
70
|
145 |
+
],
|
146 |
+
"size": [
|
147 |
+
210,
|
148 |
+
60
|
149 |
+
],
|
150 |
+
"flags": {},
|
151 |
+
"order": 1,
|
152 |
+
"mode": 0,
|
153 |
+
"outputs": [
|
154 |
+
{
|
155 |
+
"name": "CONTROL_NET",
|
156 |
+
"type": "CONTROL_NET",
|
157 |
+
"links": [
|
158 |
+
199,
|
159 |
+
240
|
160 |
+
],
|
161 |
+
"shape": 3,
|
162 |
+
"slot_index": 0
|
163 |
+
}
|
164 |
+
],
|
165 |
+
"properties": {
|
166 |
+
"Node name for S&R": "ControlNetLoader"
|
167 |
+
},
|
168 |
+
"widgets_values": [
|
169 |
+
"instantid/diffusion_pytorch_model.safetensors"
|
170 |
+
]
|
171 |
+
},
|
172 |
+
{
|
173 |
+
"id": 79,
|
174 |
+
"type": "ConditioningCombine",
|
175 |
+
"pos": [
|
176 |
+
1410,
|
177 |
+
-190
|
178 |
+
],
|
179 |
+
"size": [
|
180 |
+
228.39999389648438,
|
181 |
+
46
|
182 |
+
],
|
183 |
+
"flags": {},
|
184 |
+
"order": 19,
|
185 |
+
"mode": 0,
|
186 |
+
"inputs": [
|
187 |
+
{
|
188 |
+
"name": "conditioning_1",
|
189 |
+
"type": "CONDITIONING",
|
190 |
+
"link": 247
|
191 |
+
},
|
192 |
+
{
|
193 |
+
"name": "conditioning_2",
|
194 |
+
"type": "CONDITIONING",
|
195 |
+
"link": 248
|
196 |
+
}
|
197 |
+
],
|
198 |
+
"outputs": [
|
199 |
+
{
|
200 |
+
"name": "CONDITIONING",
|
201 |
+
"type": "CONDITIONING",
|
202 |
+
"links": [
|
203 |
+
249
|
204 |
+
],
|
205 |
+
"shape": 3,
|
206 |
+
"slot_index": 0
|
207 |
+
}
|
208 |
+
],
|
209 |
+
"properties": {
|
210 |
+
"Node name for S&R": "ConditioningCombine"
|
211 |
+
}
|
212 |
+
},
|
213 |
+
{
|
214 |
+
"id": 84,
|
215 |
+
"type": "ImageFlip+",
|
216 |
+
"pos": [
|
217 |
+
990,
|
218 |
+
-210
|
219 |
+
],
|
220 |
+
"size": {
|
221 |
+
"0": 315,
|
222 |
+
"1": 58
|
223 |
+
},
|
224 |
+
"flags": {},
|
225 |
+
"order": 15,
|
226 |
+
"mode": 0,
|
227 |
+
"inputs": [
|
228 |
+
{
|
229 |
+
"name": "image",
|
230 |
+
"type": "IMAGE",
|
231 |
+
"link": 258
|
232 |
+
}
|
233 |
+
],
|
234 |
+
"outputs": [
|
235 |
+
{
|
236 |
+
"name": "IMAGE",
|
237 |
+
"type": "IMAGE",
|
238 |
+
"links": [
|
239 |
+
259
|
240 |
+
],
|
241 |
+
"shape": 3,
|
242 |
+
"slot_index": 0
|
243 |
+
}
|
244 |
+
],
|
245 |
+
"properties": {
|
246 |
+
"Node name for S&R": "ImageFlip+"
|
247 |
+
},
|
248 |
+
"widgets_values": [
|
249 |
+
"x"
|
250 |
+
]
|
251 |
+
},
|
252 |
+
{
|
253 |
+
"id": 13,
|
254 |
+
"type": "LoadImage",
|
255 |
+
"pos": [
|
256 |
+
715,
|
257 |
+
35
|
258 |
+
],
|
259 |
+
"size": [
|
260 |
+
213.36950471073226,
|
261 |
+
296.38119750842566
|
262 |
+
],
|
263 |
+
"flags": {},
|
264 |
+
"order": 2,
|
265 |
+
"mode": 0,
|
266 |
+
"outputs": [
|
267 |
+
{
|
268 |
+
"name": "IMAGE",
|
269 |
+
"type": "IMAGE",
|
270 |
+
"links": [
|
271 |
+
214
|
272 |
+
],
|
273 |
+
"shape": 3,
|
274 |
+
"slot_index": 0
|
275 |
+
},
|
276 |
+
{
|
277 |
+
"name": "MASK",
|
278 |
+
"type": "MASK",
|
279 |
+
"links": null,
|
280 |
+
"shape": 3
|
281 |
+
}
|
282 |
+
],
|
283 |
+
"properties": {
|
284 |
+
"Node name for S&R": "LoadImage"
|
285 |
+
},
|
286 |
+
"widgets_values": [
|
287 |
+
"face4.jpg",
|
288 |
+
"image"
|
289 |
+
]
|
290 |
+
},
|
291 |
+
{
|
292 |
+
"id": 88,
|
293 |
+
"type": "MaskFlip+",
|
294 |
+
"pos": [
|
295 |
+
990,
|
296 |
+
-110
|
297 |
+
],
|
298 |
+
"size": {
|
299 |
+
"0": 315,
|
300 |
+
"1": 58
|
301 |
+
},
|
302 |
+
"flags": {},
|
303 |
+
"order": 17,
|
304 |
+
"mode": 0,
|
305 |
+
"inputs": [
|
306 |
+
{
|
307 |
+
"name": "mask",
|
308 |
+
"type": "MASK",
|
309 |
+
"link": 263
|
310 |
+
}
|
311 |
+
],
|
312 |
+
"outputs": [
|
313 |
+
{
|
314 |
+
"name": "MASK",
|
315 |
+
"type": "MASK",
|
316 |
+
"links": [
|
317 |
+
264
|
318 |
+
],
|
319 |
+
"shape": 3,
|
320 |
+
"slot_index": 0
|
321 |
+
}
|
322 |
+
],
|
323 |
+
"properties": {
|
324 |
+
"Node name for S&R": "MaskFlip+"
|
325 |
+
},
|
326 |
+
"widgets_values": [
|
327 |
+
"x"
|
328 |
+
]
|
329 |
+
},
|
330 |
+
{
|
331 |
+
"id": 78,
|
332 |
+
"type": "LoadImage",
|
333 |
+
"pos": [
|
334 |
+
714,
|
335 |
+
-512
|
336 |
+
],
|
337 |
+
"size": [
|
338 |
+
210,
|
339 |
+
314
|
340 |
+
],
|
341 |
+
"flags": {},
|
342 |
+
"order": 3,
|
343 |
+
"mode": 0,
|
344 |
+
"outputs": [
|
345 |
+
{
|
346 |
+
"name": "IMAGE",
|
347 |
+
"type": "IMAGE",
|
348 |
+
"links": [
|
349 |
+
246
|
350 |
+
],
|
351 |
+
"shape": 3,
|
352 |
+
"slot_index": 0
|
353 |
+
},
|
354 |
+
{
|
355 |
+
"name": "MASK",
|
356 |
+
"type": "MASK",
|
357 |
+
"links": null,
|
358 |
+
"shape": 3
|
359 |
+
}
|
360 |
+
],
|
361 |
+
"properties": {
|
362 |
+
"Node name for S&R": "LoadImage"
|
363 |
+
},
|
364 |
+
"widgets_values": [
|
365 |
+
"joseph-gonzalez-iFgRcqHznqg-unsplash.jpg",
|
366 |
+
"image"
|
367 |
+
]
|
368 |
+
},
|
369 |
+
{
|
370 |
+
"id": 85,
|
371 |
+
"type": "SolidMask",
|
372 |
+
"pos": [
|
373 |
+
970,
|
374 |
+
510
|
375 |
+
],
|
376 |
+
"size": [
|
377 |
+
210,
|
378 |
+
106
|
379 |
+
],
|
380 |
+
"flags": {},
|
381 |
+
"order": 4,
|
382 |
+
"mode": 0,
|
383 |
+
"outputs": [
|
384 |
+
{
|
385 |
+
"name": "MASK",
|
386 |
+
"type": "MASK",
|
387 |
+
"links": [
|
388 |
+
260
|
389 |
+
],
|
390 |
+
"shape": 3,
|
391 |
+
"slot_index": 0
|
392 |
+
}
|
393 |
+
],
|
394 |
+
"properties": {
|
395 |
+
"Node name for S&R": "SolidMask"
|
396 |
+
},
|
397 |
+
"widgets_values": [
|
398 |
+
0,
|
399 |
+
1280,
|
400 |
+
960
|
401 |
+
]
|
402 |
+
},
|
403 |
+
{
|
404 |
+
"id": 11,
|
405 |
+
"type": "InstantIDModelLoader",
|
406 |
+
"pos": [
|
407 |
+
-210,
|
408 |
+
-150
|
409 |
+
],
|
410 |
+
"size": [
|
411 |
+
210,
|
412 |
+
60
|
413 |
+
],
|
414 |
+
"flags": {},
|
415 |
+
"order": 5,
|
416 |
+
"mode": 0,
|
417 |
+
"outputs": [
|
418 |
+
{
|
419 |
+
"name": "INSTANTID",
|
420 |
+
"type": "INSTANTID",
|
421 |
+
"links": [
|
422 |
+
197,
|
423 |
+
238
|
424 |
+
],
|
425 |
+
"shape": 3,
|
426 |
+
"slot_index": 0
|
427 |
+
}
|
428 |
+
],
|
429 |
+
"properties": {
|
430 |
+
"Node name for S&R": "InstantIDModelLoader"
|
431 |
+
},
|
432 |
+
"widgets_values": [
|
433 |
+
"ip-adapter.bin"
|
434 |
+
]
|
435 |
+
},
|
436 |
+
{
|
437 |
+
"id": 4,
|
438 |
+
"type": "CheckpointLoaderSimple",
|
439 |
+
"pos": [
|
440 |
+
-312,
|
441 |
+
198
|
442 |
+
],
|
443 |
+
"size": {
|
444 |
+
"0": 315,
|
445 |
+
"1": 98
|
446 |
+
},
|
447 |
+
"flags": {},
|
448 |
+
"order": 6,
|
449 |
+
"mode": 0,
|
450 |
+
"outputs": [
|
451 |
+
{
|
452 |
+
"name": "MODEL",
|
453 |
+
"type": "MODEL",
|
454 |
+
"links": [
|
455 |
+
206
|
456 |
+
],
|
457 |
+
"slot_index": 0
|
458 |
+
},
|
459 |
+
{
|
460 |
+
"name": "CLIP",
|
461 |
+
"type": "CLIP",
|
462 |
+
"links": [
|
463 |
+
122,
|
464 |
+
123,
|
465 |
+
266
|
466 |
+
],
|
467 |
+
"slot_index": 1
|
468 |
+
},
|
469 |
+
{
|
470 |
+
"name": "VAE",
|
471 |
+
"type": "VAE",
|
472 |
+
"links": [
|
473 |
+
253
|
474 |
+
],
|
475 |
+
"slot_index": 2
|
476 |
+
}
|
477 |
+
],
|
478 |
+
"properties": {
|
479 |
+
"Node name for S&R": "CheckpointLoaderSimple"
|
480 |
+
},
|
481 |
+
"widgets_values": [
|
482 |
+
"sdxl/AlbedoBaseXL.safetensors"
|
483 |
+
]
|
484 |
+
},
|
485 |
+
{
|
486 |
+
"id": 87,
|
487 |
+
"type": "MaskComposite",
|
488 |
+
"pos": [
|
489 |
+
1232,
|
490 |
+
583
|
491 |
+
],
|
492 |
+
"size": [
|
493 |
+
210,
|
494 |
+
126
|
495 |
+
],
|
496 |
+
"flags": {},
|
497 |
+
"order": 14,
|
498 |
+
"mode": 0,
|
499 |
+
"inputs": [
|
500 |
+
{
|
501 |
+
"name": "destination",
|
502 |
+
"type": "MASK",
|
503 |
+
"link": 260
|
504 |
+
},
|
505 |
+
{
|
506 |
+
"name": "source",
|
507 |
+
"type": "MASK",
|
508 |
+
"link": 261
|
509 |
+
}
|
510 |
+
],
|
511 |
+
"outputs": [
|
512 |
+
{
|
513 |
+
"name": "MASK",
|
514 |
+
"type": "MASK",
|
515 |
+
"links": [
|
516 |
+
262,
|
517 |
+
263
|
518 |
+
],
|
519 |
+
"shape": 3,
|
520 |
+
"slot_index": 0
|
521 |
+
}
|
522 |
+
],
|
523 |
+
"properties": {
|
524 |
+
"Node name for S&R": "MaskComposite"
|
525 |
+
},
|
526 |
+
"widgets_values": [
|
527 |
+
0,
|
528 |
+
0,
|
529 |
+
"add"
|
530 |
+
]
|
531 |
+
},
|
532 |
+
{
|
533 |
+
"id": 86,
|
534 |
+
"type": "SolidMask",
|
535 |
+
"pos": [
|
536 |
+
970,
|
537 |
+
660
|
538 |
+
],
|
539 |
+
"size": {
|
540 |
+
"0": 210,
|
541 |
+
"1": 106
|
542 |
+
},
|
543 |
+
"flags": {},
|
544 |
+
"order": 7,
|
545 |
+
"mode": 0,
|
546 |
+
"outputs": [
|
547 |
+
{
|
548 |
+
"name": "MASK",
|
549 |
+
"type": "MASK",
|
550 |
+
"links": [
|
551 |
+
261
|
552 |
+
],
|
553 |
+
"shape": 3,
|
554 |
+
"slot_index": 0
|
555 |
+
}
|
556 |
+
],
|
557 |
+
"properties": {
|
558 |
+
"Node name for S&R": "SolidMask"
|
559 |
+
},
|
560 |
+
"widgets_values": [
|
561 |
+
1,
|
562 |
+
640,
|
563 |
+
960
|
564 |
+
]
|
565 |
+
},
|
566 |
+
{
|
567 |
+
"id": 82,
|
568 |
+
"type": "LoadImage",
|
569 |
+
"pos": [
|
570 |
+
591,
|
571 |
+
511
|
572 |
+
],
|
573 |
+
"size": [
|
574 |
+
315,
|
575 |
+
314.0000190734863
|
576 |
+
],
|
577 |
+
"flags": {},
|
578 |
+
"order": 8,
|
579 |
+
"mode": 0,
|
580 |
+
"outputs": [
|
581 |
+
{
|
582 |
+
"name": "IMAGE",
|
583 |
+
"type": "IMAGE",
|
584 |
+
"links": [
|
585 |
+
257,
|
586 |
+
258
|
587 |
+
],
|
588 |
+
"shape": 3,
|
589 |
+
"slot_index": 0
|
590 |
+
},
|
591 |
+
{
|
592 |
+
"name": "MASK",
|
593 |
+
"type": "MASK",
|
594 |
+
"links": null,
|
595 |
+
"shape": 3
|
596 |
+
}
|
597 |
+
],
|
598 |
+
"properties": {
|
599 |
+
"Node name for S&R": "LoadImage"
|
600 |
+
},
|
601 |
+
"widgets_values": [
|
602 |
+
"pose (1).jpg",
|
603 |
+
"image"
|
604 |
+
]
|
605 |
+
},
|
606 |
+
{
|
607 |
+
"id": 40,
|
608 |
+
"type": "CLIPTextEncode",
|
609 |
+
"pos": [
|
610 |
+
146,
|
611 |
+
487
|
612 |
+
],
|
613 |
+
"size": {
|
614 |
+
"0": 286.3603515625,
|
615 |
+
"1": 112.35245513916016
|
616 |
+
},
|
617 |
+
"flags": {},
|
618 |
+
"order": 11,
|
619 |
+
"mode": 0,
|
620 |
+
"inputs": [
|
621 |
+
{
|
622 |
+
"name": "clip",
|
623 |
+
"type": "CLIP",
|
624 |
+
"link": 123
|
625 |
+
}
|
626 |
+
],
|
627 |
+
"outputs": [
|
628 |
+
{
|
629 |
+
"name": "CONDITIONING",
|
630 |
+
"type": "CONDITIONING",
|
631 |
+
"links": [
|
632 |
+
204,
|
633 |
+
278
|
634 |
+
],
|
635 |
+
"shape": 3,
|
636 |
+
"slot_index": 0
|
637 |
+
}
|
638 |
+
],
|
639 |
+
"properties": {
|
640 |
+
"Node name for S&R": "CLIPTextEncode"
|
641 |
+
},
|
642 |
+
"widgets_values": [
|
643 |
+
"photograph, deformed, glitch, noisy, realistic, stock photo, naked"
|
644 |
+
],
|
645 |
+
"color": "#322",
|
646 |
+
"bgcolor": "#533"
|
647 |
+
},
|
648 |
+
{
|
649 |
+
"id": 5,
|
650 |
+
"type": "EmptyLatentImage",
|
651 |
+
"pos": [
|
652 |
+
1431,
|
653 |
+
20
|
654 |
+
],
|
655 |
+
"size": [
|
656 |
+
210,
|
657 |
+
106
|
658 |
+
],
|
659 |
+
"flags": {},
|
660 |
+
"order": 9,
|
661 |
+
"mode": 0,
|
662 |
+
"outputs": [
|
663 |
+
{
|
664 |
+
"name": "LATENT",
|
665 |
+
"type": "LATENT",
|
666 |
+
"links": [
|
667 |
+
2
|
668 |
+
],
|
669 |
+
"slot_index": 0
|
670 |
+
}
|
671 |
+
],
|
672 |
+
"properties": {
|
673 |
+
"Node name for S&R": "EmptyLatentImage"
|
674 |
+
},
|
675 |
+
"widgets_values": [
|
676 |
+
1280,
|
677 |
+
960,
|
678 |
+
1
|
679 |
+
]
|
680 |
+
},
|
681 |
+
{
|
682 |
+
"id": 3,
|
683 |
+
"type": "KSampler",
|
684 |
+
"pos": [
|
685 |
+
1730,
|
686 |
+
-180
|
687 |
+
],
|
688 |
+
"size": {
|
689 |
+
"0": 315,
|
690 |
+
"1": 262
|
691 |
+
},
|
692 |
+
"flags": {},
|
693 |
+
"order": 21,
|
694 |
+
"mode": 0,
|
695 |
+
"inputs": [
|
696 |
+
{
|
697 |
+
"name": "model",
|
698 |
+
"type": "MODEL",
|
699 |
+
"link": 256
|
700 |
+
},
|
701 |
+
{
|
702 |
+
"name": "positive",
|
703 |
+
"type": "CONDITIONING",
|
704 |
+
"link": 249
|
705 |
+
},
|
706 |
+
{
|
707 |
+
"name": "negative",
|
708 |
+
"type": "CONDITIONING",
|
709 |
+
"link": 288
|
710 |
+
},
|
711 |
+
{
|
712 |
+
"name": "latent_image",
|
713 |
+
"type": "LATENT",
|
714 |
+
"link": 2
|
715 |
+
}
|
716 |
+
],
|
717 |
+
"outputs": [
|
718 |
+
{
|
719 |
+
"name": "LATENT",
|
720 |
+
"type": "LATENT",
|
721 |
+
"links": [
|
722 |
+
7
|
723 |
+
],
|
724 |
+
"slot_index": 0
|
725 |
+
}
|
726 |
+
],
|
727 |
+
"properties": {
|
728 |
+
"Node name for S&R": "KSampler"
|
729 |
+
},
|
730 |
+
"widgets_values": [
|
731 |
+
1631594039,
|
732 |
+
"fixed",
|
733 |
+
30,
|
734 |
+
4.5,
|
735 |
+
"ddpm",
|
736 |
+
"normal",
|
737 |
+
1
|
738 |
+
]
|
739 |
+
},
|
740 |
+
{
|
741 |
+
"id": 80,
|
742 |
+
"type": "ConditioningCombine",
|
743 |
+
"pos": [
|
744 |
+
1410,
|
745 |
+
-90
|
746 |
+
],
|
747 |
+
"size": {
|
748 |
+
"0": 228.39999389648438,
|
749 |
+
"1": 46
|
750 |
+
},
|
751 |
+
"flags": {},
|
752 |
+
"order": 20,
|
753 |
+
"mode": 0,
|
754 |
+
"inputs": [
|
755 |
+
{
|
756 |
+
"name": "conditioning_1",
|
757 |
+
"type": "CONDITIONING",
|
758 |
+
"link": 290
|
759 |
+
},
|
760 |
+
{
|
761 |
+
"name": "conditioning_2",
|
762 |
+
"type": "CONDITIONING",
|
763 |
+
"link": 287
|
764 |
+
}
|
765 |
+
],
|
766 |
+
"outputs": [
|
767 |
+
{
|
768 |
+
"name": "CONDITIONING",
|
769 |
+
"type": "CONDITIONING",
|
770 |
+
"links": [
|
771 |
+
288
|
772 |
+
],
|
773 |
+
"shape": 3,
|
774 |
+
"slot_index": 0
|
775 |
+
}
|
776 |
+
],
|
777 |
+
"properties": {
|
778 |
+
"Node name for S&R": "ConditioningCombine"
|
779 |
+
}
|
780 |
+
},
|
781 |
+
{
|
782 |
+
"id": 77,
|
783 |
+
"type": "ApplyInstantID",
|
784 |
+
"pos": [
|
785 |
+
990,
|
786 |
+
-528
|
787 |
+
],
|
788 |
+
"size": {
|
789 |
+
"0": 315,
|
790 |
+
"1": 266
|
791 |
+
},
|
792 |
+
"flags": {},
|
793 |
+
"order": 18,
|
794 |
+
"mode": 0,
|
795 |
+
"inputs": [
|
796 |
+
{
|
797 |
+
"name": "instantid",
|
798 |
+
"type": "INSTANTID",
|
799 |
+
"link": 238
|
800 |
+
},
|
801 |
+
{
|
802 |
+
"name": "insightface",
|
803 |
+
"type": "FACEANALYSIS",
|
804 |
+
"link": 239
|
805 |
+
},
|
806 |
+
{
|
807 |
+
"name": "control_net",
|
808 |
+
"type": "CONTROL_NET",
|
809 |
+
"link": 240
|
810 |
+
},
|
811 |
+
{
|
812 |
+
"name": "image",
|
813 |
+
"type": "IMAGE",
|
814 |
+
"link": 246
|
815 |
+
},
|
816 |
+
{
|
817 |
+
"name": "model",
|
818 |
+
"type": "MODEL",
|
819 |
+
"link": 255
|
820 |
+
},
|
821 |
+
{
|
822 |
+
"name": "positive",
|
823 |
+
"type": "CONDITIONING",
|
824 |
+
"link": 272
|
825 |
+
},
|
826 |
+
{
|
827 |
+
"name": "negative",
|
828 |
+
"type": "CONDITIONING",
|
829 |
+
"link": 278
|
830 |
+
},
|
831 |
+
{
|
832 |
+
"name": "image_kps",
|
833 |
+
"type": "IMAGE",
|
834 |
+
"link": 259
|
835 |
+
},
|
836 |
+
{
|
837 |
+
"name": "mask",
|
838 |
+
"type": "MASK",
|
839 |
+
"link": 264
|
840 |
+
}
|
841 |
+
],
|
842 |
+
"outputs": [
|
843 |
+
{
|
844 |
+
"name": "MODEL",
|
845 |
+
"type": "MODEL",
|
846 |
+
"links": [
|
847 |
+
256
|
848 |
+
],
|
849 |
+
"shape": 3,
|
850 |
+
"slot_index": 0
|
851 |
+
},
|
852 |
+
{
|
853 |
+
"name": "POSITIVE",
|
854 |
+
"type": "CONDITIONING",
|
855 |
+
"links": [
|
856 |
+
247
|
857 |
+
],
|
858 |
+
"shape": 3,
|
859 |
+
"slot_index": 1
|
860 |
+
},
|
861 |
+
{
|
862 |
+
"name": "NEGATIVE",
|
863 |
+
"type": "CONDITIONING",
|
864 |
+
"links": [
|
865 |
+
290
|
866 |
+
],
|
867 |
+
"shape": 3,
|
868 |
+
"slot_index": 2
|
869 |
+
}
|
870 |
+
],
|
871 |
+
"properties": {
|
872 |
+
"Node name for S&R": "ApplyInstantID"
|
873 |
+
},
|
874 |
+
"widgets_values": [
|
875 |
+
0.8,
|
876 |
+
0,
|
877 |
+
1
|
878 |
+
]
|
879 |
+
},
|
880 |
+
{
|
881 |
+
"id": 60,
|
882 |
+
"type": "ApplyInstantID",
|
883 |
+
"pos": [
|
884 |
+
991,
|
885 |
+
73
|
886 |
+
],
|
887 |
+
"size": {
|
888 |
+
"0": 315,
|
889 |
+
"1": 266
|
890 |
+
},
|
891 |
+
"flags": {},
|
892 |
+
"order": 16,
|
893 |
+
"mode": 0,
|
894 |
+
"inputs": [
|
895 |
+
{
|
896 |
+
"name": "instantid",
|
897 |
+
"type": "INSTANTID",
|
898 |
+
"link": 197
|
899 |
+
},
|
900 |
+
{
|
901 |
+
"name": "insightface",
|
902 |
+
"type": "FACEANALYSIS",
|
903 |
+
"link": 198
|
904 |
+
},
|
905 |
+
{
|
906 |
+
"name": "control_net",
|
907 |
+
"type": "CONTROL_NET",
|
908 |
+
"link": 199
|
909 |
+
},
|
910 |
+
{
|
911 |
+
"name": "image",
|
912 |
+
"type": "IMAGE",
|
913 |
+
"link": 214
|
914 |
+
},
|
915 |
+
{
|
916 |
+
"name": "model",
|
917 |
+
"type": "MODEL",
|
918 |
+
"link": 206
|
919 |
+
},
|
920 |
+
{
|
921 |
+
"name": "positive",
|
922 |
+
"type": "CONDITIONING",
|
923 |
+
"link": 203
|
924 |
+
},
|
925 |
+
{
|
926 |
+
"name": "negative",
|
927 |
+
"type": "CONDITIONING",
|
928 |
+
"link": 204
|
929 |
+
},
|
930 |
+
{
|
931 |
+
"name": "image_kps",
|
932 |
+
"type": "IMAGE",
|
933 |
+
"link": 257
|
934 |
+
},
|
935 |
+
{
|
936 |
+
"name": "mask",
|
937 |
+
"type": "MASK",
|
938 |
+
"link": 262
|
939 |
+
}
|
940 |
+
],
|
941 |
+
"outputs": [
|
942 |
+
{
|
943 |
+
"name": "MODEL",
|
944 |
+
"type": "MODEL",
|
945 |
+
"links": [
|
946 |
+
255
|
947 |
+
],
|
948 |
+
"shape": 3,
|
949 |
+
"slot_index": 0
|
950 |
+
},
|
951 |
+
{
|
952 |
+
"name": "POSITIVE",
|
953 |
+
"type": "CONDITIONING",
|
954 |
+
"links": [
|
955 |
+
248
|
956 |
+
],
|
957 |
+
"shape": 3,
|
958 |
+
"slot_index": 1
|
959 |
+
},
|
960 |
+
{
|
961 |
+
"name": "NEGATIVE",
|
962 |
+
"type": "CONDITIONING",
|
963 |
+
"links": [
|
964 |
+
287
|
965 |
+
],
|
966 |
+
"shape": 3,
|
967 |
+
"slot_index": 2
|
968 |
+
}
|
969 |
+
],
|
970 |
+
"properties": {
|
971 |
+
"Node name for S&R": "ApplyInstantID"
|
972 |
+
},
|
973 |
+
"widgets_values": [
|
974 |
+
0.9,
|
975 |
+
0,
|
976 |
+
1
|
977 |
+
]
|
978 |
+
},
|
979 |
+
{
|
980 |
+
"id": 89,
|
981 |
+
"type": "CLIPTextEncode",
|
982 |
+
"pos": [
|
983 |
+
314,
|
984 |
+
-421
|
985 |
+
],
|
986 |
+
"size": {
|
987 |
+
"0": 291.9967346191406,
|
988 |
+
"1": 128.62518310546875
|
989 |
+
},
|
990 |
+
"flags": {},
|
991 |
+
"order": 12,
|
992 |
+
"mode": 0,
|
993 |
+
"inputs": [
|
994 |
+
{
|
995 |
+
"name": "clip",
|
996 |
+
"type": "CLIP",
|
997 |
+
"link": 266
|
998 |
+
}
|
999 |
+
],
|
1000 |
+
"outputs": [
|
1001 |
+
{
|
1002 |
+
"name": "CONDITIONING",
|
1003 |
+
"type": "CONDITIONING",
|
1004 |
+
"links": [
|
1005 |
+
272
|
1006 |
+
],
|
1007 |
+
"shape": 3,
|
1008 |
+
"slot_index": 0
|
1009 |
+
}
|
1010 |
+
],
|
1011 |
+
"properties": {
|
1012 |
+
"Node name for S&R": "CLIPTextEncode"
|
1013 |
+
},
|
1014 |
+
"widgets_values": [
|
1015 |
+
"comic male character. graphic illustration, comic art, graphic novel art, vibrant, highly detailed. New York background"
|
1016 |
+
],
|
1017 |
+
"color": "#232",
|
1018 |
+
"bgcolor": "#353"
|
1019 |
+
},
|
1020 |
+
{
|
1021 |
+
"id": 39,
|
1022 |
+
"type": "CLIPTextEncode",
|
1023 |
+
"pos": [
|
1024 |
+
309,
|
1025 |
+
171
|
1026 |
+
],
|
1027 |
+
"size": {
|
1028 |
+
"0": 291.9967346191406,
|
1029 |
+
"1": 128.62518310546875
|
1030 |
+
},
|
1031 |
+
"flags": {},
|
1032 |
+
"order": 10,
|
1033 |
+
"mode": 0,
|
1034 |
+
"inputs": [
|
1035 |
+
{
|
1036 |
+
"name": "clip",
|
1037 |
+
"type": "CLIP",
|
1038 |
+
"link": 122
|
1039 |
+
}
|
1040 |
+
],
|
1041 |
+
"outputs": [
|
1042 |
+
{
|
1043 |
+
"name": "CONDITIONING",
|
1044 |
+
"type": "CONDITIONING",
|
1045 |
+
"links": [
|
1046 |
+
203
|
1047 |
+
],
|
1048 |
+
"shape": 3,
|
1049 |
+
"slot_index": 0
|
1050 |
+
}
|
1051 |
+
],
|
1052 |
+
"properties": {
|
1053 |
+
"Node name for S&R": "CLIPTextEncode"
|
1054 |
+
},
|
1055 |
+
"widgets_values": [
|
1056 |
+
"comic female character. graphic illustration, comic art, graphic novel art, vibrant, highly detailed. New York background"
|
1057 |
+
],
|
1058 |
+
"color": "#232",
|
1059 |
+
"bgcolor": "#353"
|
1060 |
+
}
|
1061 |
+
],
|
1062 |
+
"links": [
|
1063 |
+
[
|
1064 |
+
2,
|
1065 |
+
5,
|
1066 |
+
0,
|
1067 |
+
3,
|
1068 |
+
3,
|
1069 |
+
"LATENT"
|
1070 |
+
],
|
1071 |
+
[
|
1072 |
+
7,
|
1073 |
+
3,
|
1074 |
+
0,
|
1075 |
+
8,
|
1076 |
+
0,
|
1077 |
+
"LATENT"
|
1078 |
+
],
|
1079 |
+
[
|
1080 |
+
19,
|
1081 |
+
8,
|
1082 |
+
0,
|
1083 |
+
15,
|
1084 |
+
0,
|
1085 |
+
"IMAGE"
|
1086 |
+
],
|
1087 |
+
[
|
1088 |
+
122,
|
1089 |
+
4,
|
1090 |
+
1,
|
1091 |
+
39,
|
1092 |
+
0,
|
1093 |
+
"CLIP"
|
1094 |
+
],
|
1095 |
+
[
|
1096 |
+
123,
|
1097 |
+
4,
|
1098 |
+
1,
|
1099 |
+
40,
|
1100 |
+
0,
|
1101 |
+
"CLIP"
|
1102 |
+
],
|
1103 |
+
[
|
1104 |
+
197,
|
1105 |
+
11,
|
1106 |
+
0,
|
1107 |
+
60,
|
1108 |
+
0,
|
1109 |
+
"INSTANTID"
|
1110 |
+
],
|
1111 |
+
[
|
1112 |
+
198,
|
1113 |
+
38,
|
1114 |
+
0,
|
1115 |
+
60,
|
1116 |
+
1,
|
1117 |
+
"FACEANALYSIS"
|
1118 |
+
],
|
1119 |
+
[
|
1120 |
+
199,
|
1121 |
+
16,
|
1122 |
+
0,
|
1123 |
+
60,
|
1124 |
+
2,
|
1125 |
+
"CONTROL_NET"
|
1126 |
+
],
|
1127 |
+
[
|
1128 |
+
203,
|
1129 |
+
39,
|
1130 |
+
0,
|
1131 |
+
60,
|
1132 |
+
5,
|
1133 |
+
"CONDITIONING"
|
1134 |
+
],
|
1135 |
+
[
|
1136 |
+
204,
|
1137 |
+
40,
|
1138 |
+
0,
|
1139 |
+
60,
|
1140 |
+
6,
|
1141 |
+
"CONDITIONING"
|
1142 |
+
],
|
1143 |
+
[
|
1144 |
+
206,
|
1145 |
+
4,
|
1146 |
+
0,
|
1147 |
+
60,
|
1148 |
+
4,
|
1149 |
+
"MODEL"
|
1150 |
+
],
|
1151 |
+
[
|
1152 |
+
214,
|
1153 |
+
13,
|
1154 |
+
0,
|
1155 |
+
60,
|
1156 |
+
3,
|
1157 |
+
"IMAGE"
|
1158 |
+
],
|
1159 |
+
[
|
1160 |
+
238,
|
1161 |
+
11,
|
1162 |
+
0,
|
1163 |
+
77,
|
1164 |
+
0,
|
1165 |
+
"INSTANTID"
|
1166 |
+
],
|
1167 |
+
[
|
1168 |
+
239,
|
1169 |
+
38,
|
1170 |
+
0,
|
1171 |
+
77,
|
1172 |
+
1,
|
1173 |
+
"FACEANALYSIS"
|
1174 |
+
],
|
1175 |
+
[
|
1176 |
+
240,
|
1177 |
+
16,
|
1178 |
+
0,
|
1179 |
+
77,
|
1180 |
+
2,
|
1181 |
+
"CONTROL_NET"
|
1182 |
+
],
|
1183 |
+
[
|
1184 |
+
246,
|
1185 |
+
78,
|
1186 |
+
0,
|
1187 |
+
77,
|
1188 |
+
3,
|
1189 |
+
"IMAGE"
|
1190 |
+
],
|
1191 |
+
[
|
1192 |
+
247,
|
1193 |
+
77,
|
1194 |
+
1,
|
1195 |
+
79,
|
1196 |
+
0,
|
1197 |
+
"CONDITIONING"
|
1198 |
+
],
|
1199 |
+
[
|
1200 |
+
248,
|
1201 |
+
60,
|
1202 |
+
1,
|
1203 |
+
79,
|
1204 |
+
1,
|
1205 |
+
"CONDITIONING"
|
1206 |
+
],
|
1207 |
+
[
|
1208 |
+
249,
|
1209 |
+
79,
|
1210 |
+
0,
|
1211 |
+
3,
|
1212 |
+
1,
|
1213 |
+
"CONDITIONING"
|
1214 |
+
],
|
1215 |
+
[
|
1216 |
+
253,
|
1217 |
+
4,
|
1218 |
+
2,
|
1219 |
+
81,
|
1220 |
+
0,
|
1221 |
+
"*"
|
1222 |
+
],
|
1223 |
+
[
|
1224 |
+
254,
|
1225 |
+
81,
|
1226 |
+
0,
|
1227 |
+
8,
|
1228 |
+
1,
|
1229 |
+
"VAE"
|
1230 |
+
],
|
1231 |
+
[
|
1232 |
+
255,
|
1233 |
+
60,
|
1234 |
+
0,
|
1235 |
+
77,
|
1236 |
+
4,
|
1237 |
+
"MODEL"
|
1238 |
+
],
|
1239 |
+
[
|
1240 |
+
256,
|
1241 |
+
77,
|
1242 |
+
0,
|
1243 |
+
3,
|
1244 |
+
0,
|
1245 |
+
"MODEL"
|
1246 |
+
],
|
1247 |
+
[
|
1248 |
+
257,
|
1249 |
+
82,
|
1250 |
+
0,
|
1251 |
+
60,
|
1252 |
+
7,
|
1253 |
+
"IMAGE"
|
1254 |
+
],
|
1255 |
+
[
|
1256 |
+
258,
|
1257 |
+
82,
|
1258 |
+
0,
|
1259 |
+
84,
|
1260 |
+
0,
|
1261 |
+
"IMAGE"
|
1262 |
+
],
|
1263 |
+
[
|
1264 |
+
259,
|
1265 |
+
84,
|
1266 |
+
0,
|
1267 |
+
77,
|
1268 |
+
7,
|
1269 |
+
"IMAGE"
|
1270 |
+
],
|
1271 |
+
[
|
1272 |
+
260,
|
1273 |
+
85,
|
1274 |
+
0,
|
1275 |
+
87,
|
1276 |
+
0,
|
1277 |
+
"MASK"
|
1278 |
+
],
|
1279 |
+
[
|
1280 |
+
261,
|
1281 |
+
86,
|
1282 |
+
0,
|
1283 |
+
87,
|
1284 |
+
1,
|
1285 |
+
"MASK"
|
1286 |
+
],
|
1287 |
+
[
|
1288 |
+
262,
|
1289 |
+
87,
|
1290 |
+
0,
|
1291 |
+
60,
|
1292 |
+
8,
|
1293 |
+
"MASK"
|
1294 |
+
],
|
1295 |
+
[
|
1296 |
+
263,
|
1297 |
+
87,
|
1298 |
+
0,
|
1299 |
+
88,
|
1300 |
+
0,
|
1301 |
+
"MASK"
|
1302 |
+
],
|
1303 |
+
[
|
1304 |
+
264,
|
1305 |
+
88,
|
1306 |
+
0,
|
1307 |
+
77,
|
1308 |
+
8,
|
1309 |
+
"MASK"
|
1310 |
+
],
|
1311 |
+
[
|
1312 |
+
266,
|
1313 |
+
4,
|
1314 |
+
1,
|
1315 |
+
89,
|
1316 |
+
0,
|
1317 |
+
"CLIP"
|
1318 |
+
],
|
1319 |
+
[
|
1320 |
+
272,
|
1321 |
+
89,
|
1322 |
+
0,
|
1323 |
+
77,
|
1324 |
+
5,
|
1325 |
+
"CONDITIONING"
|
1326 |
+
],
|
1327 |
+
[
|
1328 |
+
278,
|
1329 |
+
40,
|
1330 |
+
0,
|
1331 |
+
77,
|
1332 |
+
6,
|
1333 |
+
"CONDITIONING"
|
1334 |
+
],
|
1335 |
+
[
|
1336 |
+
287,
|
1337 |
+
60,
|
1338 |
+
2,
|
1339 |
+
80,
|
1340 |
+
1,
|
1341 |
+
"CONDITIONING"
|
1342 |
+
],
|
1343 |
+
[
|
1344 |
+
288,
|
1345 |
+
80,
|
1346 |
+
0,
|
1347 |
+
3,
|
1348 |
+
2,
|
1349 |
+
"CONDITIONING"
|
1350 |
+
],
|
1351 |
+
[
|
1352 |
+
290,
|
1353 |
+
77,
|
1354 |
+
2,
|
1355 |
+
80,
|
1356 |
+
0,
|
1357 |
+
"CONDITIONING"
|
1358 |
+
]
|
1359 |
+
],
|
1360 |
+
"groups": [],
|
1361 |
+
"config": {},
|
1362 |
+
"extra": {},
|
1363 |
+
"version": 0.4
|
1364 |
+
}
|
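Note: the multi-id workflow above chains two ApplyInstantID nodes (ids 60 and 77), each masked to its own region of the canvas via SolidMask, MaskComposite, and MaskFlip+ before the conditionings are merged with ConditioningCombine. Because hand-editing such a graph easily leaves a link pointing at a deleted node, a small checker like the sketch below can verify that every link endpoint still exists. The file path is an assumption; the check only uses the graph-export structure shown in this diff.

```python
import json
from pathlib import Path

# Assumed path to the workflow export shown above.
WORKFLOW = Path("ComfyUI/custom_nodes/ComfyUI_InstantID/examples/InstantID_multi_id.json")

def check_links(path: Path) -> int:
    graph = json.loads(path.read_text())
    nodes = {node["id"] for node in graph["nodes"]}
    problems = 0
    # Each link is [link_id, src_node, src_slot, dst_node, dst_slot, type].
    for link_id, src, _src_slot, dst, _dst_slot, kind in graph["links"]:
        for endpoint in (src, dst):
            if endpoint not in nodes:
                print(f"link {link_id} ({kind}) references missing node {endpoint}")
                problems += 1
    print(f"{path.name}: {len(graph['links'])} links checked, {problems} problem(s)")
    return problems

if __name__ == "__main__":
    raise SystemExit(1 if check_links(WORKFLOW) else 0)
```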
ComfyUI/custom_nodes/ComfyUI_InstantID/examples/InstantID_posed.json
ADDED
@@ -0,0 +1,704 @@
|
1 |
+
{
|
2 |
+
"last_node_id": 67,
|
3 |
+
"last_link_id": 221,
|
4 |
+
"nodes": [
|
5 |
+
{
|
6 |
+
"id": 11,
|
7 |
+
"type": "InstantIDModelLoader",
|
8 |
+
"pos": [
|
9 |
+
560,
|
10 |
+
70
|
11 |
+
],
|
12 |
+
"size": {
|
13 |
+
"0": 238.72393798828125,
|
14 |
+
"1": 58
|
15 |
+
},
|
16 |
+
"flags": {},
|
17 |
+
"order": 0,
|
18 |
+
"mode": 0,
|
19 |
+
"outputs": [
|
20 |
+
{
|
21 |
+
"name": "INSTANTID",
|
22 |
+
"type": "INSTANTID",
|
23 |
+
"links": [
|
24 |
+
197
|
25 |
+
],
|
26 |
+
"shape": 3,
|
27 |
+
"slot_index": 0
|
28 |
+
}
|
29 |
+
],
|
30 |
+
"properties": {
|
31 |
+
"Node name for S&R": "InstantIDModelLoader"
|
32 |
+
},
|
33 |
+
"widgets_values": [
|
34 |
+
"ip-adapter.bin"
|
35 |
+
]
|
36 |
+
},
|
37 |
+
{
|
38 |
+
"id": 38,
|
39 |
+
"type": "InstantIDFaceAnalysis",
|
40 |
+
"pos": [
|
41 |
+
570,
|
42 |
+
180
|
43 |
+
],
|
44 |
+
"size": {
|
45 |
+
"0": 227.09793090820312,
|
46 |
+
"1": 58
|
47 |
+
},
|
48 |
+
"flags": {},
|
49 |
+
"order": 1,
|
50 |
+
"mode": 0,
|
51 |
+
"outputs": [
|
52 |
+
{
|
53 |
+
"name": "FACEANALYSIS",
|
54 |
+
"type": "FACEANALYSIS",
|
55 |
+
"links": [
|
56 |
+
198
|
57 |
+
],
|
58 |
+
"shape": 3,
|
59 |
+
"slot_index": 0
|
60 |
+
}
|
61 |
+
],
|
62 |
+
"properties": {
|
63 |
+
"Node name for S&R": "InstantIDFaceAnalysis"
|
64 |
+
},
|
65 |
+
"widgets_values": [
|
66 |
+
"CPU"
|
67 |
+
]
|
68 |
+
},
|
69 |
+
{
|
70 |
+
"id": 16,
|
71 |
+
"type": "ControlNetLoader",
|
72 |
+
"pos": [
|
73 |
+
560,
|
74 |
+
290
|
75 |
+
],
|
76 |
+
"size": {
|
77 |
+
"0": 250.07241821289062,
|
78 |
+
"1": 58
|
79 |
+
},
|
80 |
+
"flags": {},
|
81 |
+
"order": 2,
|
82 |
+
"mode": 0,
|
83 |
+
"outputs": [
|
84 |
+
{
|
85 |
+
"name": "CONTROL_NET",
|
86 |
+
"type": "CONTROL_NET",
|
87 |
+
"links": [
|
88 |
+
199
|
89 |
+
],
|
90 |
+
"shape": 3,
|
91 |
+
"slot_index": 0
|
92 |
+
}
|
93 |
+
],
|
94 |
+
"properties": {
|
95 |
+
"Node name for S&R": "ControlNetLoader"
|
96 |
+
},
|
97 |
+
"widgets_values": [
|
98 |
+
"instantid/diffusion_pytorch_model.safetensors"
|
99 |
+
]
|
100 |
+
},
|
101 |
+
{
|
102 |
+
"id": 15,
|
103 |
+
"type": "PreviewImage",
|
104 |
+
"pos": [
|
105 |
+
1670,
|
106 |
+
300
|
107 |
+
],
|
108 |
+
"size": {
|
109 |
+
"0": 584.0855712890625,
|
110 |
+
"1": 610.4592895507812
|
111 |
+
},
|
112 |
+
"flags": {},
|
113 |
+
"order": 12,
|
114 |
+
"mode": 0,
|
115 |
+
"inputs": [
|
116 |
+
{
|
117 |
+
"name": "images",
|
118 |
+
"type": "IMAGE",
|
119 |
+
"link": 19
|
120 |
+
}
|
121 |
+
],
|
122 |
+
"properties": {
|
123 |
+
"Node name for S&R": "PreviewImage"
|
124 |
+
}
|
125 |
+
},
|
126 |
+
{
|
127 |
+
"id": 5,
|
128 |
+
"type": "EmptyLatentImage",
|
129 |
+
"pos": [
|
130 |
+
910,
|
131 |
+
540
|
132 |
+
],
|
133 |
+
"size": {
|
134 |
+
"0": 315,
|
135 |
+
"1": 106
|
136 |
+
},
|
137 |
+
"flags": {},
|
138 |
+
"order": 3,
|
139 |
+
"mode": 0,
|
140 |
+
"outputs": [
|
141 |
+
{
|
142 |
+
"name": "LATENT",
|
143 |
+
"type": "LATENT",
|
144 |
+
"links": [
|
145 |
+
2
|
146 |
+
],
|
147 |
+
"slot_index": 0
|
148 |
+
}
|
149 |
+
],
|
150 |
+
"properties": {
|
151 |
+
"Node name for S&R": "EmptyLatentImage"
|
152 |
+
},
|
153 |
+
"widgets_values": [
|
154 |
+
1016,
|
155 |
+
1016,
|
156 |
+
1
|
157 |
+
]
|
158 |
+
},
|
159 |
+
{
|
160 |
+
"id": 8,
|
161 |
+
"type": "VAEDecode",
|
162 |
+
"pos": [
|
163 |
+
1670,
|
164 |
+
210
|
165 |
+
],
|
166 |
+
"size": {
|
167 |
+
"0": 210,
|
168 |
+
"1": 46
|
169 |
+
},
|
170 |
+
"flags": {},
|
171 |
+
"order": 11,
|
172 |
+
"mode": 0,
|
173 |
+
"inputs": [
|
174 |
+
{
|
175 |
+
"name": "samples",
|
176 |
+
"type": "LATENT",
|
177 |
+
"link": 7
|
178 |
+
},
|
179 |
+
{
|
180 |
+
"name": "vae",
|
181 |
+
"type": "VAE",
|
182 |
+
"link": 8
|
183 |
+
}
|
184 |
+
],
|
185 |
+
"outputs": [
|
186 |
+
{
|
187 |
+
"name": "IMAGE",
|
188 |
+
"type": "IMAGE",
|
189 |
+
"links": [
|
190 |
+
19
|
191 |
+
],
|
192 |
+
"slot_index": 0
|
193 |
+
}
|
194 |
+
],
|
195 |
+
"properties": {
|
196 |
+
"Node name for S&R": "VAEDecode"
|
197 |
+
}
|
198 |
+
},
|
199 |
+
{
|
200 |
+
"id": 60,
|
201 |
+
"type": "ApplyInstantID",
|
202 |
+
"pos": [
|
203 |
+
910,
|
204 |
+
210
|
205 |
+
],
|
206 |
+
"size": {
|
207 |
+
"0": 315,
|
208 |
+
"1": 266
|
209 |
+
},
|
210 |
+
"flags": {},
|
211 |
+
"order": 9,
|
212 |
+
"mode": 0,
|
213 |
+
"inputs": [
|
214 |
+
{
|
215 |
+
"name": "instantid",
|
216 |
+
"type": "INSTANTID",
|
217 |
+
"link": 197
|
218 |
+
},
|
219 |
+
{
|
220 |
+
"name": "insightface",
|
221 |
+
"type": "FACEANALYSIS",
|
222 |
+
"link": 198
|
223 |
+
},
|
224 |
+
{
|
225 |
+
"name": "control_net",
|
226 |
+
"type": "CONTROL_NET",
|
227 |
+
"link": 199
|
228 |
+
},
|
229 |
+
{
|
230 |
+
"name": "image",
|
231 |
+
"type": "IMAGE",
|
232 |
+
"link": 214
|
233 |
+
},
|
234 |
+
{
|
235 |
+
"name": "model",
|
236 |
+
"type": "MODEL",
|
237 |
+
"link": 206
|
238 |
+
},
|
239 |
+
{
|
240 |
+
"name": "positive",
|
241 |
+
"type": "CONDITIONING",
|
242 |
+
"link": 203
|
243 |
+
},
|
244 |
+
{
|
245 |
+
"name": "negative",
|
246 |
+
"type": "CONDITIONING",
|
247 |
+
"link": 204
|
248 |
+
},
|
249 |
+
{
|
250 |
+
"name": "image_kps",
|
251 |
+
"type": "IMAGE",
|
252 |
+
"link": 221
|
253 |
+
},
|
254 |
+
{
|
255 |
+
"name": "mask",
|
256 |
+
"type": "MASK",
|
257 |
+
"link": null
|
258 |
+
}
|
259 |
+
],
|
260 |
+
"outputs": [
|
261 |
+
{
|
262 |
+
"name": "MODEL",
|
263 |
+
"type": "MODEL",
|
264 |
+
"links": [
|
265 |
+
220
|
266 |
+
],
|
267 |
+
"shape": 3,
|
268 |
+
"slot_index": 0
|
269 |
+
},
|
270 |
+
{
|
271 |
+
"name": "POSITIVE",
|
272 |
+
"type": "CONDITIONING",
|
273 |
+
"links": [
|
274 |
+
200
|
275 |
+
],
|
276 |
+
"shape": 3,
|
277 |
+
"slot_index": 1
|
278 |
+
},
|
279 |
+
{
|
280 |
+
"name": "NEGATIVE",
|
281 |
+
"type": "CONDITIONING",
|
282 |
+
"links": [
|
283 |
+
201
|
284 |
+
],
|
285 |
+
"shape": 3,
|
286 |
+
"slot_index": 2
|
287 |
+
}
|
288 |
+
],
|
289 |
+
"properties": {
|
290 |
+
"Node name for S&R": "ApplyInstantID"
|
291 |
+
},
|
292 |
+
"widgets_values": [
|
293 |
+
0.8,
|
294 |
+
0,
|
295 |
+
1
|
296 |
+
]
|
297 |
+
},
|
298 |
+
{
|
299 |
+
"id": 39,
|
300 |
+
"type": "CLIPTextEncode",
|
301 |
+
"pos": [
|
302 |
+
520,
|
303 |
+
430
|
304 |
+
],
|
305 |
+
"size": {
|
306 |
+
"0": 291.9967346191406,
|
307 |
+
"1": 128.62518310546875
|
308 |
+
},
|
309 |
+
"flags": {},
|
310 |
+
"order": 7,
|
311 |
+
"mode": 0,
|
312 |
+
"inputs": [
|
313 |
+
{
|
314 |
+
"name": "clip",
|
315 |
+
"type": "CLIP",
|
316 |
+
"link": 122
|
317 |
+
}
|
318 |
+
],
|
319 |
+
"outputs": [
|
320 |
+
{
|
321 |
+
"name": "CONDITIONING",
|
322 |
+
"type": "CONDITIONING",
|
323 |
+
"links": [
|
324 |
+
203
|
325 |
+
],
|
326 |
+
"shape": 3,
|
327 |
+
"slot_index": 0
|
328 |
+
}
|
329 |
+
],
|
330 |
+
"properties": {
|
331 |
+
"Node name for S&R": "CLIPTextEncode"
|
332 |
+
},
|
333 |
+
"widgets_values": [
|
334 |
+
"comic character. graphic illustration, comic art, graphic novel art, vibrant, highly detailed"
|
335 |
+
]
|
336 |
+
},
|
337 |
+
{
|
338 |
+
"id": 40,
|
339 |
+
"type": "CLIPTextEncode",
|
340 |
+
"pos": [
|
341 |
+
520,
|
342 |
+
620
|
343 |
+
],
|
344 |
+
"size": {
|
345 |
+
"0": 286.3603515625,
|
346 |
+
"1": 112.35245513916016
|
347 |
+
},
|
348 |
+
"flags": {},
|
349 |
+
"order": 8,
|
350 |
+
"mode": 0,
|
351 |
+
"inputs": [
|
352 |
+
{
|
353 |
+
"name": "clip",
|
354 |
+
"type": "CLIP",
|
355 |
+
"link": 123
|
356 |
+
}
|
357 |
+
],
|
358 |
+
"outputs": [
|
359 |
+
{
|
360 |
+
"name": "CONDITIONING",
|
361 |
+
"type": "CONDITIONING",
|
362 |
+
"links": [
|
363 |
+
204
|
364 |
+
],
|
365 |
+
"shape": 3,
|
366 |
+
"slot_index": 0
|
367 |
+
}
|
368 |
+
],
|
369 |
+
"properties": {
|
370 |
+
"Node name for S&R": "CLIPTextEncode"
|
371 |
+
},
|
372 |
+
"widgets_values": [
|
373 |
+
"photograph, deformed, glitch, noisy, realistic, stock photo"
|
374 |
+
]
|
375 |
+
},
|
376 |
+
{
|
377 |
+
"id": 4,
|
378 |
+
"type": "CheckpointLoaderSimple",
|
379 |
+
"pos": [
|
380 |
+
70,
|
381 |
+
520
|
382 |
+
],
|
383 |
+
"size": {
|
384 |
+
"0": 315,
|
385 |
+
"1": 98
|
386 |
+
},
|
387 |
+
"flags": {},
|
388 |
+
"order": 4,
|
389 |
+
"mode": 0,
|
390 |
+
"outputs": [
|
391 |
+
{
|
392 |
+
"name": "MODEL",
|
393 |
+
"type": "MODEL",
|
394 |
+
"links": [
|
395 |
+
206
|
396 |
+
],
|
397 |
+
"slot_index": 0
|
398 |
+
},
|
399 |
+
{
|
400 |
+
"name": "CLIP",
|
401 |
+
"type": "CLIP",
|
402 |
+
"links": [
|
403 |
+
122,
|
404 |
+
123
|
405 |
+
],
|
406 |
+
"slot_index": 1
|
407 |
+
},
|
408 |
+
{
|
409 |
+
"name": "VAE",
|
410 |
+
"type": "VAE",
|
411 |
+
"links": [
|
412 |
+
8
|
413 |
+
],
|
414 |
+
"slot_index": 2
|
415 |
+
}
|
416 |
+
],
|
417 |
+
"properties": {
|
418 |
+
"Node name for S&R": "CheckpointLoaderSimple"
|
419 |
+
},
|
420 |
+
"widgets_values": [
|
421 |
+
"sdxl/AlbedoBaseXL.safetensors"
|
422 |
+
]
|
423 |
+
},
|
424 |
+
{
|
425 |
+
"id": 13,
|
426 |
+
"type": "LoadImage",
|
427 |
+
"pos": [
|
428 |
+
290,
|
429 |
+
70
|
430 |
+
],
|
431 |
+
"size": {
|
432 |
+
"0": 210,
|
433 |
+
"1": 314
|
434 |
+
},
|
435 |
+
"flags": {},
|
436 |
+
"order": 5,
|
437 |
+
"mode": 0,
|
438 |
+
"outputs": [
|
439 |
+
{
|
440 |
+
"name": "IMAGE",
|
441 |
+
"type": "IMAGE",
|
442 |
+
"links": [
|
443 |
+
214
|
444 |
+
],
|
445 |
+
"shape": 3,
|
446 |
+
"slot_index": 0
|
447 |
+
},
|
448 |
+
{
|
449 |
+
"name": "MASK",
|
450 |
+
"type": "MASK",
|
451 |
+
"links": null,
|
452 |
+
"shape": 3
|
453 |
+
}
|
454 |
+
],
|
455 |
+
"properties": {
|
456 |
+
"Node name for S&R": "LoadImage"
|
457 |
+
},
|
458 |
+
"widgets_values": [
|
459 |
+
"joseph-gonzalez-iFgRcqHznqg-unsplash.jpg",
|
460 |
+
"image"
|
461 |
+
]
|
462 |
+
},
|
463 |
+
{
|
464 |
+
"id": 67,
|
465 |
+
"type": "LoadImage",
|
466 |
+
"pos": [
|
467 |
+
592,
|
468 |
+
781
|
469 |
+
],
|
470 |
+
"size": {
|
471 |
+
"0": 210,
|
472 |
+
"1": 314
|
473 |
+
},
|
474 |
+
"flags": {},
|
475 |
+
"order": 6,
|
476 |
+
"mode": 0,
|
477 |
+
"outputs": [
|
478 |
+
{
|
479 |
+
"name": "IMAGE",
|
480 |
+
"type": "IMAGE",
|
481 |
+
"links": [
|
482 |
+
221
|
483 |
+
],
|
484 |
+
"shape": 3,
|
485 |
+
"slot_index": 0
|
486 |
+
},
|
487 |
+
{
|
488 |
+
"name": "MASK",
|
489 |
+
"type": "MASK",
|
490 |
+
"links": null,
|
491 |
+
"shape": 3
|
492 |
+
}
|
493 |
+
],
|
494 |
+
"properties": {
|
495 |
+
"Node name for S&R": "LoadImage"
|
496 |
+
},
|
497 |
+
"widgets_values": [
|
498 |
+
"miranda.jpg",
|
499 |
+
"image"
|
500 |
+
]
|
501 |
+
},
|
502 |
+
{
|
503 |
+
"id": 3,
|
504 |
+
"type": "KSampler",
|
505 |
+
"pos": [
|
506 |
+
1300,
|
507 |
+
210
|
508 |
+
],
|
509 |
+
"size": {
|
510 |
+
"0": 315,
|
511 |
+
"1": 262
|
512 |
+
},
|
513 |
+
"flags": {},
|
514 |
+
"order": 10,
|
515 |
+
"mode": 0,
|
516 |
+
"inputs": [
|
517 |
+
{
|
518 |
+
"name": "model",
|
519 |
+
"type": "MODEL",
|
520 |
+
"link": 220
|
521 |
+
},
|
522 |
+
{
|
523 |
+
"name": "positive",
|
524 |
+
"type": "CONDITIONING",
|
525 |
+
"link": 200
|
526 |
+
},
|
527 |
+
{
|
528 |
+
"name": "negative",
|
529 |
+
"type": "CONDITIONING",
|
530 |
+
"link": 201
|
531 |
+
},
|
532 |
+
{
|
533 |
+
"name": "latent_image",
|
534 |
+
"type": "LATENT",
|
535 |
+
"link": 2
|
536 |
+
}
|
537 |
+
],
|
538 |
+
"outputs": [
|
539 |
+
{
|
540 |
+
"name": "LATENT",
|
541 |
+
"type": "LATENT",
|
542 |
+
"links": [
|
543 |
+
7
|
544 |
+
],
|
545 |
+
"slot_index": 0
|
546 |
+
}
|
547 |
+
],
|
548 |
+
"properties": {
|
549 |
+
"Node name for S&R": "KSampler"
|
550 |
+
},
|
551 |
+
"widgets_values": [
|
552 |
+
1631591431,
|
553 |
+
"fixed",
|
554 |
+
30,
|
555 |
+
4.5,
|
556 |
+
"ddpm",
|
557 |
+
"karras",
|
558 |
+
1
|
559 |
+
]
|
560 |
+
}
|
561 |
+
],
|
562 |
+
"links": [
|
563 |
+
[
|
564 |
+
2,
|
565 |
+
5,
|
566 |
+
0,
|
567 |
+
3,
|
568 |
+
3,
|
569 |
+
"LATENT"
|
570 |
+
],
|
571 |
+
[
|
572 |
+
7,
|
573 |
+
3,
|
574 |
+
0,
|
575 |
+
8,
|
576 |
+
0,
|
577 |
+
"LATENT"
|
578 |
+
],
|
579 |
+
[
|
580 |
+
8,
|
581 |
+
4,
|
582 |
+
2,
|
583 |
+
8,
|
584 |
+
1,
|
585 |
+
"VAE"
|
586 |
+
],
|
587 |
+
[
|
588 |
+
19,
|
589 |
+
8,
|
590 |
+
0,
|
591 |
+
15,
|
592 |
+
0,
|
593 |
+
"IMAGE"
|
594 |
+
],
|
595 |
+
[
|
596 |
+
122,
|
597 |
+
4,
|
598 |
+
1,
|
599 |
+
39,
|
600 |
+
0,
|
601 |
+
"CLIP"
|
602 |
+
],
|
603 |
+
[
|
604 |
+
123,
|
605 |
+
4,
|
606 |
+
1,
|
607 |
+
40,
|
608 |
+
0,
|
609 |
+
"CLIP"
|
610 |
+
],
|
611 |
+
[
|
612 |
+
197,
|
613 |
+
11,
|
614 |
+
0,
|
615 |
+
60,
|
616 |
+
0,
|
617 |
+
"INSTANTID"
|
618 |
+
],
|
619 |
+
[
|
620 |
+
198,
|
621 |
+
38,
|
622 |
+
0,
|
623 |
+
60,
|
624 |
+
1,
|
625 |
+
"FACEANALYSIS"
|
626 |
+
],
|
627 |
+
[
|
628 |
+
199,
|
629 |
+
16,
|
630 |
+
0,
|
631 |
+
60,
|
632 |
+
2,
|
633 |
+
"CONTROL_NET"
|
634 |
+
],
|
635 |
+
[
|
636 |
+
200,
|
637 |
+
60,
|
638 |
+
1,
|
639 |
+
3,
|
640 |
+
1,
|
641 |
+
"CONDITIONING"
|
642 |
+
],
|
643 |
+
[
|
644 |
+
201,
|
645 |
+
60,
|
646 |
+
2,
|
647 |
+
3,
|
648 |
+
2,
|
649 |
+
"CONDITIONING"
|
650 |
+
],
|
651 |
+
[
|
652 |
+
203,
|
653 |
+
39,
|
654 |
+
0,
|
655 |
+
60,
|
656 |
+
5,
|
657 |
+
"CONDITIONING"
|
658 |
+
],
|
659 |
+
[
|
660 |
+
204,
|
661 |
+
40,
|
662 |
+
0,
|
663 |
+
60,
|
664 |
+
6,
|
665 |
+
"CONDITIONING"
|
666 |
+
],
|
667 |
+
[
|
668 |
+
206,
|
669 |
+
4,
|
670 |
+
0,
|
671 |
+
60,
|
672 |
+
4,
|
673 |
+
"MODEL"
|
674 |
+
],
|
675 |
+
[
|
676 |
+
214,
|
677 |
+
13,
|
678 |
+
0,
|
679 |
+
60,
|
680 |
+
3,
|
681 |
+
"IMAGE"
|
682 |
+
],
|
683 |
+
[
|
684 |
+
220,
|
685 |
+
60,
|
686 |
+
0,
|
687 |
+
3,
|
688 |
+
0,
|
689 |
+
"MODEL"
|
690 |
+
],
|
691 |
+
[
|
692 |
+
221,
|
693 |
+
67,
|
694 |
+
0,
|
695 |
+
60,
|
696 |
+
7,
|
697 |
+
"IMAGE"
|
698 |
+
]
|
699 |
+
],
|
700 |
+
"groups": [],
|
701 |
+
"config": {},
|
702 |
+
"extra": {},
|
703 |
+
"version": 0.4
|
704 |
+
}
|
ComfyUI/custom_nodes/ComfyUI_InstantID/examples/daydreaming.jpg
ADDED
![]() |
ComfyUI/custom_nodes/ComfyUI_InstantID/examples/instant_id_ipadapter.jpg
ADDED
![]() |
ComfyUI/custom_nodes/ComfyUI_InstantID/examples/instantid_basic_workflow.jpg
ADDED
![]() |
ComfyUI/custom_nodes/ComfyUI_InstantID/examples/instantid_multi_id.jpg
ADDED
![]() |
ComfyUI/custom_nodes/ComfyUI_InstantID/pyproject.toml
ADDED
@@ -0,0 +1,15 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
[project]
|
2 |
+
name = "comfyui_instantid"
|
3 |
+
description = "Native InstantID support for ComfyUI. This extension differs from the many already available as it doesn't use diffusers but instead implements InstantID natively and it fully integrates with ComfyUI."
|
4 |
+
version = "1.0.0"
|
5 |
+
license = "LICENSE"
|
6 |
+
dependencies = ["insightface", "onnxruntime", "onnxruntime-gpu"]
|
7 |
+
|
8 |
+
[project.urls]
|
9 |
+
Repository = "https://github.com/cubiq/ComfyUI_InstantID"
|
10 |
+
# Used by Comfy Registry https://comfyregistry.org
|
11 |
+
|
12 |
+
[tool.comfy]
|
13 |
+
PublisherId = "matteo"
|
14 |
+
DisplayName = "ComfyUI_InstantID"
|
15 |
+
Icon = ""
|
ComfyUI/custom_nodes/ComfyUI_InstantID/requirements.txt
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
insightface
|
2 |
+
onnxruntime
|
3 |
+
onnxruntime-gpu
|
ComfyUI/custom_nodes/ComfyUI_InstantID/resampler.py
ADDED
@@ -0,0 +1,121 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# modified from https://github.com/mlfoundations/open_flamingo/blob/main/open_flamingo/src/helpers.py
|
2 |
+
import math
|
3 |
+
|
4 |
+
import torch
|
5 |
+
import torch.nn as nn
|
6 |
+
|
7 |
+
|
8 |
+
# FFN
|
9 |
+
def FeedForward(dim, mult=4):
|
10 |
+
inner_dim = int(dim * mult)
|
11 |
+
return nn.Sequential(
|
12 |
+
nn.LayerNorm(dim),
|
13 |
+
nn.Linear(dim, inner_dim, bias=False),
|
14 |
+
nn.GELU(),
|
15 |
+
nn.Linear(inner_dim, dim, bias=False),
|
16 |
+
)
|
17 |
+
|
18 |
+
|
19 |
+
def reshape_tensor(x, heads):
|
20 |
+
bs, length, width = x.shape
|
21 |
+
#(bs, length, width) --> (bs, length, n_heads, dim_per_head)
|
22 |
+
x = x.view(bs, length, heads, -1)
|
23 |
+
# (bs, length, n_heads, dim_per_head) --> (bs, n_heads, length, dim_per_head)
|
24 |
+
x = x.transpose(1, 2)
|
25 |
+
# (bs, n_heads, length, dim_per_head) --> (bs*n_heads, length, dim_per_head)
|
26 |
+
x = x.reshape(bs, heads, length, -1)
|
27 |
+
return x
|
28 |
+
|
29 |
+
|
30 |
+
class PerceiverAttention(nn.Module):
|
31 |
+
def __init__(self, *, dim, dim_head=64, heads=8):
|
32 |
+
super().__init__()
|
33 |
+
self.scale = dim_head**-0.5
|
34 |
+
self.dim_head = dim_head
|
35 |
+
self.heads = heads
|
36 |
+
inner_dim = dim_head * heads
|
37 |
+
|
38 |
+
self.norm1 = nn.LayerNorm(dim)
|
39 |
+
self.norm2 = nn.LayerNorm(dim)
|
40 |
+
|
41 |
+
self.to_q = nn.Linear(dim, inner_dim, bias=False)
|
42 |
+
self.to_kv = nn.Linear(dim, inner_dim * 2, bias=False)
|
43 |
+
self.to_out = nn.Linear(inner_dim, dim, bias=False)
|
44 |
+
|
45 |
+
|
46 |
+
def forward(self, x, latents):
|
47 |
+
"""
|
48 |
+
Args:
|
49 |
+
x (torch.Tensor): image features
|
50 |
+
shape (b, n1, D)
|
51 |
+
latent (torch.Tensor): latent features
|
52 |
+
shape (b, n2, D)
|
53 |
+
"""
|
54 |
+
x = self.norm1(x)
|
55 |
+
latents = self.norm2(latents)
|
56 |
+
|
57 |
+
b, l, _ = latents.shape
|
58 |
+
|
59 |
+
q = self.to_q(latents)
|
60 |
+
kv_input = torch.cat((x, latents), dim=-2)
|
61 |
+
k, v = self.to_kv(kv_input).chunk(2, dim=-1)
|
62 |
+
|
63 |
+
q = reshape_tensor(q, self.heads)
|
64 |
+
k = reshape_tensor(k, self.heads)
|
65 |
+
v = reshape_tensor(v, self.heads)
|
66 |
+
|
67 |
+
# attention
|
68 |
+
scale = 1 / math.sqrt(math.sqrt(self.dim_head))
|
69 |
+
weight = (q * scale) @ (k * scale).transpose(-2, -1) # More stable with f16 than dividing afterwards
|
70 |
+
weight = torch.softmax(weight.float(), dim=-1).type(weight.dtype)
|
71 |
+
out = weight @ v
|
72 |
+
|
73 |
+
out = out.permute(0, 2, 1, 3).reshape(b, l, -1)
|
74 |
+
|
75 |
+
return self.to_out(out)
|
76 |
+
|
77 |
+
|
78 |
+
class Resampler(nn.Module):
|
79 |
+
def __init__(
|
80 |
+
self,
|
81 |
+
dim=1024,
|
82 |
+
depth=8,
|
83 |
+
dim_head=64,
|
84 |
+
heads=16,
|
85 |
+
num_queries=8,
|
86 |
+
embedding_dim=768,
|
87 |
+
output_dim=1024,
|
88 |
+
ff_mult=4,
|
89 |
+
):
|
90 |
+
super().__init__()
|
91 |
+
|
92 |
+
self.latents = nn.Parameter(torch.randn(1, num_queries, dim) / dim**0.5)
|
93 |
+
|
94 |
+
self.proj_in = nn.Linear(embedding_dim, dim)
|
95 |
+
|
96 |
+
self.proj_out = nn.Linear(dim, output_dim)
|
97 |
+
self.norm_out = nn.LayerNorm(output_dim)
|
98 |
+
|
99 |
+
self.layers = nn.ModuleList([])
|
100 |
+
for _ in range(depth):
|
101 |
+
self.layers.append(
|
102 |
+
nn.ModuleList(
|
103 |
+
[
|
104 |
+
PerceiverAttention(dim=dim, dim_head=dim_head, heads=heads),
|
105 |
+
FeedForward(dim=dim, mult=ff_mult),
|
106 |
+
]
|
107 |
+
)
|
108 |
+
)
|
109 |
+
|
110 |
+
def forward(self, x):
|
111 |
+
|
112 |
+
latents = self.latents.repeat(x.size(0), 1, 1)
|
113 |
+
|
114 |
+
x = self.proj_in(x)
|
115 |
+
|
116 |
+
for attn, ff in self.layers:
|
117 |
+
latents = attn(x, latents) + latents
|
118 |
+
latents = ff(latents) + latents
|
119 |
+
|
120 |
+
latents = self.proj_out(latents)
|
121 |
+
return self.norm_out(latents)
|
ComfyUI/custom_nodes/ComfyUI_InstantID/utils.py
ADDED
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import torch
|
2 |
+
|
3 |
+
def tensor_to_size(source, dest_size):
|
4 |
+
if isinstance(dest_size, torch.Tensor):
|
5 |
+
dest_size = dest_size.shape[0]
|
6 |
+
source_size = source.shape[0]
|
7 |
+
|
8 |
+
if source_size < dest_size:
|
9 |
+
shape = [dest_size - source_size] + [1]*(source.dim()-1)
|
10 |
+
source = torch.cat((source, source[-1:].repeat(shape)), dim=0)
|
11 |
+
elif source_size > dest_size:
|
12 |
+
source = source[:dest_size]
|
13 |
+
|
14 |
+
return source
|
15 |
+
|
16 |
+
def tensor_to_image(tensor):
|
17 |
+
image = tensor.mul(255).clamp(0, 255).byte().cpu()
|
18 |
+
image = image[..., [2, 1, 0]].numpy()
|
19 |
+
return image
|
20 |
+
|
21 |
+
def image_to_tensor(image):
|
22 |
+
tensor = torch.clamp(torch.from_numpy(image).float() / 255., 0, 1)
|
23 |
+
tensor = tensor[..., [2, 1, 0]]
|
24 |
+
return tensor
|
ComfyUI/custom_nodes/comfyui_controlnet_aux/.github/workflows/publish.yml
ADDED
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
name: Publish to Comfy registry
|
2 |
+
on:
|
3 |
+
workflow_dispatch:
|
4 |
+
push:
|
5 |
+
branches:
|
6 |
+
- main
|
7 |
+
paths:
|
8 |
+
- "pyproject.toml"
|
9 |
+
|
10 |
+
jobs:
|
11 |
+
publish-node:
|
12 |
+
name: Publish Custom Node to registry
|
13 |
+
runs-on: ubuntu-latest
|
14 |
+
steps:
|
15 |
+
- name: Check out code
|
16 |
+
uses: actions/checkout@v4
|
17 |
+
- name: Publish Custom Node
|
18 |
+
uses: Comfy-Org/publish-node-action@main
|
19 |
+
with:
|
20 |
+
## Add your own personal access token to your Github Repository secrets and reference it here.
|
21 |
+
personal_access_token: ${{ secrets.REGISTRY_ACCESS_TOKEN }}
|
ComfyUI/custom_nodes/comfyui_controlnet_aux/.gitignore
ADDED
@@ -0,0 +1,183 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Initially taken from Github's Python gitignore file
|
2 |
+
|
3 |
+
# Byte-compiled / optimized / DLL files
|
4 |
+
__pycache__/
|
5 |
+
*.py[cod]
|
6 |
+
*$py.class
|
7 |
+
|
8 |
+
# C extensions
|
9 |
+
*.so
|
10 |
+
|
11 |
+
# tests and logs
|
12 |
+
tests/fixtures/cached_*_text.txt
|
13 |
+
logs/
|
14 |
+
lightning_logs/
|
15 |
+
lang_code_data/
|
16 |
+
tests/outputs
|
17 |
+
|
18 |
+
# Distribution / packaging
|
19 |
+
.Python
|
20 |
+
build/
|
21 |
+
develop-eggs/
|
22 |
+
dist/
|
23 |
+
downloads/
|
24 |
+
eggs/
|
25 |
+
.eggs/
|
26 |
+
lib/
|
27 |
+
lib64/
|
28 |
+
parts/
|
29 |
+
sdist/
|
30 |
+
var/
|
31 |
+
wheels/
|
32 |
+
*.egg-info/
|
33 |
+
.installed.cfg
|
34 |
+
*.egg
|
35 |
+
MANIFEST
|
36 |
+
|
37 |
+
# PyInstaller
|
38 |
+
# Usually these files are written by a python script from a template
|
39 |
+
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
40 |
+
*.manifest
|
41 |
+
*.spec
|
42 |
+
|
43 |
+
# Installer logs
|
44 |
+
pip-log.txt
|
45 |
+
pip-delete-this-directory.txt
|
46 |
+
|
47 |
+
# Unit test / coverage reports
|
48 |
+
htmlcov/
|
49 |
+
.tox/
|
50 |
+
.nox/
|
51 |
+
.coverage
|
52 |
+
.coverage.*
|
53 |
+
.cache
|
54 |
+
nosetests.xml
|
55 |
+
coverage.xml
|
56 |
+
*.cover
|
57 |
+
.hypothesis/
|
58 |
+
.pytest_cache/
|
59 |
+
|
60 |
+
# Translations
|
61 |
+
*.mo
|
62 |
+
*.pot
|
63 |
+
|
64 |
+
# Django stuff:
|
65 |
+
*.log
|
66 |
+
local_settings.py
|
67 |
+
db.sqlite3
|
68 |
+
|
69 |
+
# Flask stuff:
|
70 |
+
instance/
|
71 |
+
.webassets-cache
|
72 |
+
|
73 |
+
# Scrapy stuff:
|
74 |
+
.scrapy
|
75 |
+
|
76 |
+
# Sphinx documentation
|
77 |
+
docs/_build/
|
78 |
+
|
79 |
+
# PyBuilder
|
80 |
+
target/
|
81 |
+
|
82 |
+
# Jupyter Notebook
|
83 |
+
.ipynb_checkpoints
|
84 |
+
|
85 |
+
# IPython
|
86 |
+
profile_default/
|
87 |
+
ipython_config.py
|
88 |
+
|
89 |
+
# pyenv
|
90 |
+
.python-version
|
91 |
+
|
92 |
+
# celery beat schedule file
|
93 |
+
celerybeat-schedule
|
94 |
+
|
95 |
+
# SageMath parsed files
|
96 |
+
*.sage.py
|
97 |
+
|
98 |
+
# Environments
|
99 |
+
.env
|
100 |
+
.venv
|
101 |
+
env/
|
102 |
+
venv/
|
103 |
+
ENV/
|
104 |
+
env.bak/
|
105 |
+
venv.bak/
|
106 |
+
|
107 |
+
# Spyder project settings
|
108 |
+
.spyderproject
|
109 |
+
.spyproject
|
110 |
+
|
111 |
+
# Rope project settings
|
112 |
+
.ropeproject
|
113 |
+
|
114 |
+
# mkdocs documentation
|
115 |
+
/site
|
116 |
+
|
117 |
+
# mypy
|
118 |
+
.mypy_cache/
|
119 |
+
.dmypy.json
|
120 |
+
dmypy.json
|
121 |
+
|
122 |
+
# Pyre type checker
|
123 |
+
.pyre/
|
124 |
+
|
125 |
+
# vscode
|
126 |
+
.vs
|
127 |
+
.vscode
|
128 |
+
|
129 |
+
# Pycharm
|
130 |
+
.idea
|
131 |
+
|
132 |
+
# TF code
|
133 |
+
tensorflow_code
|
134 |
+
|
135 |
+
# Models
|
136 |
+
proc_data
|
137 |
+
|
138 |
+
# examples
|
139 |
+
runs
|
140 |
+
/runs_old
|
141 |
+
/wandb
|
142 |
+
/examples/runs
|
143 |
+
/examples/**/*.args
|
144 |
+
/examples/rag/sweep
|
145 |
+
|
146 |
+
# data
|
147 |
+
/data
|
148 |
+
serialization_dir
|
149 |
+
|
150 |
+
# emacs
|
151 |
+
*.*~
|
152 |
+
debug.env
|
153 |
+
|
154 |
+
# vim
|
155 |
+
.*.swp
|
156 |
+
|
157 |
+
#ctags
|
158 |
+
tags
|
159 |
+
|
160 |
+
# pre-commit
|
161 |
+
.pre-commit*
|
162 |
+
|
163 |
+
# .lock
|
164 |
+
*.lock
|
165 |
+
|
166 |
+
# DS_Store (MacOS)
|
167 |
+
.DS_Store
|
168 |
+
# RL pipelines may produce mp4 outputs
|
169 |
+
*.mp4
|
170 |
+
|
171 |
+
# dependencies
|
172 |
+
/transformers
|
173 |
+
|
174 |
+
# ruff
|
175 |
+
.ruff_cache
|
176 |
+
|
177 |
+
wandb
|
178 |
+
|
179 |
+
ckpts/
|
180 |
+
|
181 |
+
test.ipynb
|
182 |
+
config.yaml
|
183 |
+
test.ipynb
|
ComfyUI/custom_nodes/comfyui_controlnet_aux/LICENSE.txt
ADDED
@@ -0,0 +1,201 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
Apache License
|
2 |
+
Version 2.0, January 2004
|
3 |
+
http://www.apache.org/licenses/
|
4 |
+
|
5 |
+
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
6 |
+
|
7 |
+
1. Definitions.
|
8 |
+
|
9 |
+
"License" shall mean the terms and conditions for use, reproduction,
|
10 |
+
and distribution as defined by Sections 1 through 9 of this document.
|
11 |
+
|
12 |
+
"Licensor" shall mean the copyright owner or entity authorized by
|
13 |
+
the copyright owner that is granting the License.
|
14 |
+
|
15 |
+
"Legal Entity" shall mean the union of the acting entity and all
|
16 |
+
other entities that control, are controlled by, or are under common
|
17 |
+
control with that entity. For the purposes of this definition,
|
18 |
+
"control" means (i) the power, direct or indirect, to cause the
|
19 |
+
direction or management of such entity, whether by contract or
|
20 |
+
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
21 |
+
outstanding shares, or (iii) beneficial ownership of such entity.
|
22 |
+
|
23 |
+
"You" (or "Your") shall mean an individual or Legal Entity
|
24 |
+
exercising permissions granted by this License.
|
25 |
+
|
26 |
+
"Source" form shall mean the preferred form for making modifications,
|
27 |
+
including but not limited to software source code, documentation
|
28 |
+
source, and configuration files.
|
29 |
+
|
30 |
+
"Object" form shall mean any form resulting from mechanical
|
31 |
+
transformation or translation of a Source form, including but
|
32 |
+
not limited to compiled object code, generated documentation,
|
33 |
+
and conversions to other media types.
|
34 |
+
|
35 |
+
"Work" shall mean the work of authorship, whether in Source or
|
36 |
+
Object form, made available under the License, as indicated by a
|
37 |
+
copyright notice that is included in or attached to the work
|
38 |
+
(an example is provided in the Appendix below).
|
39 |
+
|
40 |
+
"Derivative Works" shall mean any work, whether in Source or Object
|
41 |
+
form, that is based on (or derived from) the Work and for which the
|
42 |
+
editorial revisions, annotations, elaborations, or other modifications
|
43 |
+
represent, as a whole, an original work of authorship. For the purposes
|
44 |
+
of this License, Derivative Works shall not include works that remain
|
45 |
+
separable from, or merely link (or bind by name) to the interfaces of,
|
46 |
+
the Work and Derivative Works thereof.
|
47 |
+
|
48 |
+
"Contribution" shall mean any work of authorship, including
|
49 |
+
the original version of the Work and any modifications or additions
|
50 |
+
to that Work or Derivative Works thereof, that is intentionally
|
51 |
+
submitted to Licensor for inclusion in the Work by the copyright owner
|
52 |
+
or by an individual or Legal Entity authorized to submit on behalf of
|
53 |
+
the copyright owner. For the purposes of this definition, "submitted"
|
54 |
+
means any form of electronic, verbal, or written communication sent
|
55 |
+
to the Licensor or its representatives, including but not limited to
|
56 |
+
communication on electronic mailing lists, source code control systems,
|
57 |
+
and issue tracking systems that are managed by, or on behalf of, the
|
58 |
+
Licensor for the purpose of discussing and improving the Work, but
|
59 |
+
excluding communication that is conspicuously marked or otherwise
|
60 |
+
designated in writing by the copyright owner as "Not a Contribution."
|
61 |
+
|
62 |
+
"Contributor" shall mean Licensor and any individual or Legal Entity
|
63 |
+
on behalf of whom a Contribution has been received by Licensor and
|
64 |
+
subsequently incorporated within the Work.
|
65 |
+
|
66 |
+
2. Grant of Copyright License. Subject to the terms and conditions of
|
67 |
+
this License, each Contributor hereby grants to You a perpetual,
|
68 |
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
69 |
+
copyright license to reproduce, prepare Derivative Works of,
|
70 |
+
publicly display, publicly perform, sublicense, and distribute the
|
71 |
+
Work and such Derivative Works in Source or Object form.
|
72 |
+
|
73 |
+
3. Grant of Patent License. Subject to the terms and conditions of
|
74 |
+
this License, each Contributor hereby grants to You a perpetual,
|
75 |
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
76 |
+
(except as stated in this section) patent license to make, have made,
|
77 |
+
use, offer to sell, sell, import, and otherwise transfer the Work,
|
78 |
+
where such license applies only to those patent claims licensable
|
79 |
+
by such Contributor that are necessarily infringed by their
|
80 |
+
Contribution(s) alone or by combination of their Contribution(s)
|
81 |
+
with the Work to which such Contribution(s) was submitted. If You
|
82 |
+
institute patent litigation against any entity (including a
|
83 |
+
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
84 |
+
or a Contribution incorporated within the Work constitutes direct
|
85 |
+
or contributory patent infringement, then any patent licenses
|
86 |
+
granted to You under this License for that Work shall terminate
|
87 |
+
as of the date such litigation is filed.
|
88 |
+
|
89 |
+
4. Redistribution. You may reproduce and distribute copies of the
|
90 |
+
Work or Derivative Works thereof in any medium, with or without
|
91 |
+
modifications, and in Source or Object form, provided that You
|
92 |
+
meet the following conditions:
|
93 |
+
|
94 |
+
(a) You must give any other recipients of the Work or
|
95 |
+
Derivative Works a copy of this License; and
|
96 |
+
|
97 |
+
(b) You must cause any modified files to carry prominent notices
|
98 |
+
stating that You changed the files; and
|
99 |
+
|
100 |
+
(c) You must retain, in the Source form of any Derivative Works
|
101 |
+
that You distribute, all copyright, patent, trademark, and
|
102 |
+
attribution notices from the Source form of the Work,
|
103 |
+
excluding those notices that do not pertain to any part of
|
104 |
+
the Derivative Works; and
|
105 |
+
|
106 |
+
(d) If the Work includes a "NOTICE" text file as part of its
|
107 |
+
distribution, then any Derivative Works that You distribute must
|
108 |
+
include a readable copy of the attribution notices contained
|
109 |
+
within such NOTICE file, excluding those notices that do not
|
110 |
+
pertain to any part of the Derivative Works, in at least one
|
111 |
+
of the following places: within a NOTICE text file distributed
|
112 |
+
as part of the Derivative Works; within the Source form or
|
113 |
+
documentation, if provided along with the Derivative Works; or,
|
114 |
+
within a display generated by the Derivative Works, if and
|
115 |
+
wherever such third-party notices normally appear. The contents
|
116 |
+
of the NOTICE file are for informational purposes only and
|
117 |
+
do not modify the License. You may add Your own attribution
|
118 |
+
notices within Derivative Works that You distribute, alongside
|
119 |
+
or as an addendum to the NOTICE text from the Work, provided
|
120 |
+
that such additional attribution notices cannot be construed
|
121 |
+
as modifying the License.
|
122 |
+
|
123 |
+
You may add Your own copyright statement to Your modifications and
|
124 |
+
may provide additional or different license terms and conditions
|
125 |
+
for use, reproduction, or distribution of Your modifications, or
|
126 |
+
for any such Derivative Works as a whole, provided Your use,
|
127 |
+
reproduction, and distribution of the Work otherwise complies with
|
128 |
+
the conditions stated in this License.
|
129 |
+
|
130 |
+
5. Submission of Contributions. Unless You explicitly state otherwise,
|
131 |
+
any Contribution intentionally submitted for inclusion in the Work
|
132 |
+
by You to the Licensor shall be under the terms and conditions of
|
133 |
+
this License, without any additional terms or conditions.
|
134 |
+
Notwithstanding the above, nothing herein shall supersede or modify
|
135 |
+
the terms of any separate license agreement you may have executed
|
136 |
+
with Licensor regarding such Contributions.
|
137 |
+
|
138 |
+
6. Trademarks. This License does not grant permission to use the trade
|
139 |
+
names, trademarks, service marks, or product names of the Licensor,
|
140 |
+
except as required for reasonable and customary use in describing the
|
141 |
+
origin of the Work and reproducing the content of the NOTICE file.
|
142 |
+
|
143 |
+
7. Disclaimer of Warranty. Unless required by applicable law or
|
144 |
+
agreed to in writing, Licensor provides the Work (and each
|
145 |
+
Contributor provides its Contributions) on an "AS IS" BASIS,
|
146 |
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
147 |
+
implied, including, without limitation, any warranties or conditions
|
148 |
+
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
149 |
+
PARTICULAR PURPOSE. You are solely responsible for determining the
|
150 |
+
appropriateness of using or redistributing the Work and assume any
|
151 |
+
risks associated with Your exercise of permissions under this License.
|
152 |
+
|
153 |
+
8. Limitation of Liability. In no event and under no legal theory,
|
154 |
+
whether in tort (including negligence), contract, or otherwise,
|
155 |
+
unless required by applicable law (such as deliberate and grossly
|
156 |
+
negligent acts) or agreed to in writing, shall any Contributor be
|
157 |
+
liable to You for damages, including any direct, indirect, special,
|
158 |
+
incidental, or consequential damages of any character arising as a
|
159 |
+
result of this License or out of the use or inability to use the
|
160 |
+
Work (including but not limited to damages for loss of goodwill,
|
161 |
+
work stoppage, computer failure or malfunction, or any and all
|
162 |
+
other commercial damages or losses), even if such Contributor
|
163 |
+
has been advised of the possibility of such damages.
|
164 |
+
|
165 |
+
9. Accepting Warranty or Additional Liability. While redistributing
|
166 |
+
the Work or Derivative Works thereof, You may choose to offer,
|
167 |
+
and charge a fee for, acceptance of support, warranty, indemnity,
|
168 |
+
or other liability obligations and/or rights consistent with this
|
169 |
+
License. However, in accepting such obligations, You may act only
|
170 |
+
on Your own behalf and on Your sole responsibility, not on behalf
|
171 |
+
of any other Contributor, and only if You agree to indemnify,
|
172 |
+
defend, and hold each Contributor harmless for any liability
|
173 |
+
incurred by, or claims asserted against, such Contributor by reason
|
174 |
+
of your accepting any such warranty or additional liability.
|
175 |
+
|
176 |
+
END OF TERMS AND CONDITIONS
|
177 |
+
|
178 |
+
APPENDIX: How to apply the Apache License to your work.
|
179 |
+
|
180 |
+
To apply the Apache License to your work, attach the following
|
181 |
+
boilerplate notice, with the fields enclosed by brackets "[]"
|
182 |
+
replaced with your own identifying information. (Don't include
|
183 |
+
the brackets!) The text should be enclosed in the appropriate
|
184 |
+
comment syntax for the file format. We also recommend that a
|
185 |
+
file or class name and description of purpose be included on the
|
186 |
+
same "printed page" as the copyright notice for easier
|
187 |
+
identification within third-party archives.
|
188 |
+
|
189 |
+
Copyright [yyyy] [name of copyright owner]
|
190 |
+
|
191 |
+
Licensed under the Apache License, Version 2.0 (the "License");
|
192 |
+
you may not use this file except in compliance with the License.
|
193 |
+
You may obtain a copy of the License at
|
194 |
+
|
195 |
+
http://www.apache.org/licenses/LICENSE-2.0
|
196 |
+
|
197 |
+
Unless required by applicable law or agreed to in writing, software
|
198 |
+
distributed under the License is distributed on an "AS IS" BASIS,
|
199 |
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
200 |
+
See the License for the specific language governing permissions and
|
201 |
+
limitations under the License.
|
ComfyUI/custom_nodes/comfyui_controlnet_aux/README.md
ADDED
@@ -0,0 +1,315 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# ComfyUI's ControlNet Auxiliary Preprocessors
|
2 |
+

|
3 |
+
Plug-and-play [ComfyUI](https://github.com/comfyanonymous/ComfyUI) node sets for making [ControlNet](https://github.com/lllyasviel/ControlNet/) hint images
|
4 |
+
|
5 |
+
The code is copy-pasted from the respective folders in https://github.com/lllyasviel/ControlNet/tree/main/annotator and connected to [the 🤗 Hub](https://huggingface.co/lllyasviel/Annotators).
|
6 |
+
|
7 |
+
All credit & copyright goes to https://github.com/lllyasviel.
|
8 |
+
|
9 |
+
# Marigold
|
10 |
+
Check out Marigold Depth Estimator which can generate very detailed and sharp depth map from high-resolution still images. The mesh created by it is even 3D-printable. Due to diffusers, it can't be implemented in this extension but there is an Comfy implementation by Kijai
|
11 |
+
https://github.com/kijai/ComfyUI-Marigold
|
12 |
+
|
13 |
+

|
14 |
+

|
15 |
+
|
16 |
+
# Updates
|
17 |
+
Go to [Update page](./UPDATES.md) to follow updates
|
18 |
+
|
19 |
+
# Installation:
|
20 |
+
## Using ComfyUI Manager (recommended):
|
21 |
+
Install [ComfyUI Manager](https://github.com/ltdrdata/ComfyUI-Manager) and do steps introduced there to install this repo.
|
22 |
+
|
23 |
+
## Alternative:
|
24 |
+
If you're running on Linux, or non-admin account on windows you'll want to ensure `/ComfyUI/custom_nodes` and `comfyui_controlnet_aux` has write permissions.
|
25 |
+
|
26 |
+
There is now a **install.bat** you can run to install to portable if detected. Otherwise it will default to system and assume you followed ConfyUI's manual installation steps.
|
27 |
+
|
28 |
+
If you can't run **install.bat** (e.g. you are a Linux user). Open the CMD/Shell and do the following:
|
29 |
+
- Navigate to your `/ComfyUI/custom_nodes/` folder
|
30 |
+
- Run `git clone https://github.com/Fannovel16/comfyui_controlnet_aux/`
|
31 |
+
- Navigate to your `comfyui_controlnet_aux` folder
|
32 |
+
- Portable/venv:
|
33 |
+
- Run `path/to/ComfUI/python_embeded/python.exe -s -m pip install -r requirements.txt`
|
34 |
+
- With system python
|
35 |
+
- Run `pip install -r requirements.txt`
|
36 |
+
- Start ComfyUI
|
37 |
+
|
38 |
+
# Nodes
|
39 |
+
Please note that this repo only supports preprocessors making hint images (e.g. stickman, canny edge, etc).
|
40 |
+
All preprocessors except Inpaint are intergrated into `AIO Aux Preprocessor` node.
|
41 |
+
This node allow you to quickly get the preprocessor but a preprocessor's own threshold parameters won't be able to set.
|
42 |
+
You need to use its node directly to set thresholds.
|
43 |
+
|
44 |
+
# Nodes (sections are categories in Comfy menu)
|
45 |
+
## Line Extractors
|
46 |
+
| Preprocessor Node | sd-webui-controlnet/other | ControlNet/T2I-Adapter |
|
47 |
+
|-----------------------------|---------------------------|-------------------------------------------|
|
48 |
+
| Binary Lines | binary | control_scribble |
|
49 |
+
| Canny Edge | canny | control_v11p_sd15_canny <br> control_canny <br> t2iadapter_canny |
|
50 |
+
| HED Soft-Edge Lines | hed | control_v11p_sd15_softedge <br> control_hed |
|
51 |
+
| Standard Lineart | standard_lineart | control_v11p_sd15_lineart |
|
52 |
+
| Realistic Lineart | lineart (or `lineart_coarse` if `coarse` is enabled) | control_v11p_sd15_lineart |
|
53 |
+
| Anime Lineart | lineart_anime | control_v11p_sd15s2_lineart_anime |
|
54 |
+
| Manga Lineart | lineart_anime_denoise | control_v11p_sd15s2_lineart_anime |
|
55 |
+
| M-LSD Lines | mlsd | control_v11p_sd15_mlsd <br> control_mlsd |
|
56 |
+
| PiDiNet Soft-Edge Lines | pidinet | control_v11p_sd15_softedge <br> control_scribble |
|
57 |
+
| Scribble Lines | scribble | control_v11p_sd15_scribble <br> control_scribble |
|
58 |
+
| Scribble XDoG Lines | scribble_xdog | control_v11p_sd15_scribble <br> control_scribble |
|
59 |
+
| Fake Scribble Lines | scribble_hed | control_v11p_sd15_scribble <br> control_scribble |
|
60 |
+
| TEED Soft-Edge Lines | teed | [controlnet-sd-xl-1.0-softedge-dexined](https://huggingface.co/SargeZT/controlnet-sd-xl-1.0-softedge-dexined/blob/main/controlnet-sd-xl-1.0-softedge-dexined.safetensors) <br> control_v11p_sd15_softedge (Theoretically)
|
61 |
+
| Scribble PiDiNet Lines | scribble_pidinet | control_v11p_sd15_scribble <br> control_scribble |
|
62 |
+
| AnyLine Lineart | | mistoLine_fp16.safetensors <br> mistoLine_rank256 <br> control_v11p_sd15s2_lineart_anime <br> control_v11p_sd15_lineart |
|
63 |
+
|
64 |
+
## Normal and Depth Estimators
|
65 |
+
| Preprocessor Node | sd-webui-controlnet/other | ControlNet/T2I-Adapter |
|
66 |
+
|-----------------------------|---------------------------|-------------------------------------------|
|
67 |
+
| MiDaS Depth Map | (normal) depth | control_v11f1p_sd15_depth <br> control_depth <br> t2iadapter_depth |
|
68 |
+
| LeReS Depth Map | depth_leres | control_v11f1p_sd15_depth <br> control_depth <br> t2iadapter_depth |
|
69 |
+
| Zoe Depth Map | depth_zoe | control_v11f1p_sd15_depth <br> control_depth <br> t2iadapter_depth |
|
70 |
+
| MiDaS Normal Map | normal_map | control_normal |
|
71 |
+
| BAE Normal Map | normal_bae | control_v11p_sd15_normalbae |
|
72 |
+
| MeshGraphormer Hand Refiner ([HandRefinder](https://github.com/wenquanlu/HandRefiner)) | depth_hand_refiner | [control_sd15_inpaint_depth_hand_fp16](https://huggingface.co/hr16/ControlNet-HandRefiner-pruned/blob/main/control_sd15_inpaint_depth_hand_fp16.safetensors) |
|
73 |
+
| Depth Anything | depth_anything | [Depth-Anything](https://huggingface.co/spaces/LiheYoung/Depth-Anything/blob/main/checkpoints_controlnet/diffusion_pytorch_model.safetensors) |
|
74 |
+
| Zoe Depth Anything <br> (Basically Zoe but the encoder is replaced with DepthAnything) | depth_anything | [Depth-Anything](https://huggingface.co/spaces/LiheYoung/Depth-Anything/blob/main/checkpoints_controlnet/diffusion_pytorch_model.safetensors) |
|
75 |
+
| Normal DSINE | | control_normal/control_v11p_sd15_normalbae |
|
76 |
+
| Metric3D Depth | | control_v11f1p_sd15_depth <br> control_depth <br> t2iadapter_depth |
|
77 |
+
| Metric3D Normal | | control_v11p_sd15_normalbae |
|
78 |
+
| Depth Anything V2 | | [Depth-Anything](https://huggingface.co/spaces/LiheYoung/Depth-Anything/blob/main/checkpoints_controlnet/diffusion_pytorch_model.safetensors) |
|
79 |
+
|
80 |
+
## Faces and Poses Estimators
|
81 |
+
| Preprocessor Node | sd-webui-controlnet/other | ControlNet/T2I-Adapter |
|
82 |
+
|-----------------------------|---------------------------|-------------------------------------------|
|
83 |
+
| DWPose Estimator | dw_openpose_full | control_v11p_sd15_openpose <br> control_openpose <br> t2iadapter_openpose |
|
84 |
+
| OpenPose Estimator | openpose (detect_body) <br> openpose_hand (detect_body + detect_hand) <br> openpose_faceonly (detect_face) <br> openpose_full (detect_hand + detect_body + detect_face) | control_v11p_sd15_openpose <br> control_openpose <br> t2iadapter_openpose |
|
85 |
+
| MediaPipe Face Mesh | mediapipe_face | controlnet_sd21_laion_face_v2 |
|
86 |
+
| Animal Estimator | animal_openpose | [control_sd15_animal_openpose_fp16](https://huggingface.co/huchenlei/animal_openpose/blob/main/control_sd15_animal_openpose_fp16.pth) |
|
87 |
+
|
88 |
+
## Optical Flow Estimators
|
89 |
+
| Preprocessor Node | sd-webui-controlnet/other | ControlNet/T2I-Adapter |
|
90 |
+
|-----------------------------|---------------------------|-------------------------------------------|
|
91 |
+
| Unimatch Optical Flow | | [DragNUWA](https://github.com/ProjectNUWA/DragNUWA) |
|
92 |
+
|
93 |
+
### How to get OpenPose-format JSON?
|
94 |
+
#### User-side
|
95 |
+
This workflow will save images to ComfyUI's output folder (the same location as output images). If you haven't found `Save Pose Keypoints` node, update this extension
|
96 |
+

|
97 |
+
|
98 |
+
#### Dev-side
|
99 |
+
An array of [OpenPose-format JSON](https://github.com/CMU-Perceptual-Computing-Lab/openpose/blob/master/doc/02_output.md#json-output-format) corresponsding to each frame in an IMAGE batch can be gotten from DWPose and OpenPose using `app.nodeOutputs` on the UI or `/history` API endpoint. JSON output from AnimalPose uses a kinda similar format to OpenPose JSON:
|
100 |
+
```
|
101 |
+
[
|
102 |
+
{
|
103 |
+
"version": "ap10k",
|
104 |
+
"animals": [
|
105 |
+
[[x1, y1, 1], [x2, y2, 1],..., [x17, y17, 1]],
|
106 |
+
[[x1, y1, 1], [x2, y2, 1],..., [x17, y17, 1]],
|
107 |
+
...
|
108 |
+
],
|
109 |
+
"canvas_height": 512,
|
110 |
+
"canvas_width": 768
|
111 |
+
},
|
112 |
+
...
|
113 |
+
]
|
114 |
+
```
|
115 |
+
|
116 |
+
For extension developers (e.g. Openpose editor):
|
117 |
+
```js
|
118 |
+
const poseNodes = app.graph._nodes.filter(node => ["OpenposePreprocessor", "DWPreprocessor", "AnimalPosePreprocessor"].includes(node.type))
|
119 |
+
for (const poseNode of poseNodes) {
|
120 |
+
const openposeResults = JSON.parse(app.nodeOutputs[poseNode.id].openpose_json[0])
|
121 |
+
console.log(openposeResults) //An array containing Openpose JSON for each frame
|
122 |
+
}
|
123 |
+
```
|
124 |
+
|
125 |
+
For API users:
|
126 |
+
Javascript
|
127 |
+
```js
|
128 |
+
import fetch from "node-fetch" //Remember to add "type": "module" to "package.json"
|
129 |
+
async function main() {
|
130 |
+
const promptId = '792c1905-ecfe-41f4-8114-83e6a4a09a9f' //Too lazy to POST /queue
|
131 |
+
let history = await fetch(`http://127.0.0.1:8188/history/${promptId}`).then(re => re.json())
|
132 |
+
history = history[promptId]
|
133 |
+
const nodeOutputs = Object.values(history.outputs).filter(output => output.openpose_json)
|
134 |
+
for (const nodeOutput of nodeOutputs) {
|
135 |
+
const openposeResults = JSON.parse(nodeOutput.openpose_json[0])
|
136 |
+
console.log(openposeResults) //An array containing Openpose JSON for each frame
|
137 |
+
}
|
138 |
+
}
|
139 |
+
main()
|
140 |
+
```
|
141 |
+
|
142 |
+
Python
|
143 |
+
```py
|
144 |
+
import json, urllib.request
|
145 |
+
|
146 |
+
server_address = "127.0.0.1:8188"
|
147 |
+
prompt_id = '' #Too lazy to POST /queue
|
148 |
+
|
149 |
+
def get_history(prompt_id):
|
150 |
+
with urllib.request.urlopen("http://{}/history/{}".format(server_address, prompt_id)) as response:
|
151 |
+
return json.loads(response.read())
|
152 |
+
|
153 |
+
history = get_history(prompt_id)[prompt_id]
|
154 |
+
for o in history['outputs']:
|
155 |
+
for node_id in history['outputs']:
|
156 |
+
node_output = history['outputs'][node_id]
|
157 |
+
if 'openpose_json' in node_output:
|
158 |
+
print(json.loads(node_output['openpose_json'][0])) #An list containing Openpose JSON for each frame
|
159 |
+
```
|
160 |
+
## Semantic Segmentation
|
161 |
+
| Preprocessor Node | sd-webui-controlnet/other | ControlNet/T2I-Adapter |
|
162 |
+
|-----------------------------|---------------------------|-------------------------------------------|
|
163 |
+
| OneFormer ADE20K Segmentor | oneformer_ade20k | control_v11p_sd15_seg |
|
164 |
+
| OneFormer COCO Segmentor | oneformer_coco | control_v11p_sd15_seg |
|
165 |
+
| UniFormer Segmentor | segmentation |control_sd15_seg <br> control_v11p_sd15_seg|
|
166 |
+
|
167 |
+
## T2IAdapter-only
|
168 |
+
| Preprocessor Node | sd-webui-controlnet/other | ControlNet/T2I-Adapter |
|
169 |
+
|-----------------------------|---------------------------|-------------------------------------------|
|
170 |
+
| Color Pallete | color | t2iadapter_color |
|
171 |
+
| Content Shuffle | shuffle | t2iadapter_style |
|
172 |
+
|
173 |
+
## Recolor
|
174 |
+
| Preprocessor Node | sd-webui-controlnet/other | ControlNet/T2I-Adapter |
|
175 |
+
|-----------------------------|---------------------------|-------------------------------------------|
|
176 |
+
| Image Luminance | recolor_luminance | [ioclab_sd15_recolor](https://huggingface.co/lllyasviel/sd_control_collection/resolve/main/ioclab_sd15_recolor.safetensors) <br> [sai_xl_recolor_256lora](https://huggingface.co/lllyasviel/sd_control_collection/resolve/main/sai_xl_recolor_256lora.safetensors) <br> [bdsqlsz_controlllite_xl_recolor_luminance](https://huggingface.co/bdsqlsz/qinglong_controlnet-lllite/resolve/main/bdsqlsz_controlllite_xl_recolor_luminance.safetensors) |
|
177 |
+
| Image Intensity | recolor_intensity | Idk. Maybe same as above? |
|
178 |
+
|
179 |
+
# Examples
|
180 |
+
> A picture is worth a thousand words
|
181 |
+
|
182 |
+
Credit to https://huggingface.co/thibaud/controlnet-sd21 for most examples below. You can get the same kind of results from preprocessor nodes of this repo.
|
183 |
+
## Line Extractors
|
184 |
+
### Canny Edge
|
185 |
+

|
186 |
+
### HED Lines
|
187 |
+

|
188 |
+
### Realistic Lineart
|
189 |
+

|
190 |
+
### Scribble/Fake Scribble
|
191 |
+

|
192 |
+
### TEED Soft-Edge Lines
|
193 |
+

|
194 |
+
### Anyline Lineart
|
195 |
+

|
196 |
+
|
197 |
+
## Normal and Depth Map
|
198 |
+
### Depth (idk the preprocessor they use)
|
199 |
+

|
200 |
+
## Zoe - Depth Map
|
201 |
+

|
202 |
+
## BAE - Normal Map
|
203 |
+

|
204 |
+
## MeshGraphormer
|
205 |
+

|
206 |
+
## Depth Anything & Zoe Depth Anything
|
207 |
+

|
208 |
+
## DSINE
|
209 |
+

|
210 |
+
## Metric3D
|
211 |
+

|
212 |
+
## Depth Anything V2
|
213 |
+

|
214 |
+
|
215 |
+
## Faces and Poses
|
216 |
+
### OpenPose
|
217 |
+

|
218 |
+

|
219 |
+
|
220 |
+
### Animal Pose (AP-10K)
|
221 |
+

|
222 |
+
|
223 |
+
### DensePose
|
224 |
+

|
225 |
+
|
226 |
+
## Semantic Segmantation
|
227 |
+
### OneFormer ADE20K Segmentor
|
228 |
+

|
229 |
+
|
230 |
+
### Anime Face Segmentor
|
231 |
+

|
232 |
+
|
233 |
+
## T2IAdapter-only
|
234 |
+
### Color Pallete for T2I-Adapter
|
235 |
+

|
236 |
+
|
237 |
+
## Optical Flow
|
238 |
+
### Unimatch
|
239 |
+

|
240 |
+
|
241 |
+
## Recolor
|
242 |
+

|
243 |
+
|
244 |
+
# Testing workflow
|
245 |
+
https://github.com/Fannovel16/comfyui_controlnet_aux/blob/master/tests/test_cn_aux_full.json
|
246 |
+

|
247 |
+
|
248 |
+
# Q&A:
|
249 |
+
## Why some nodes doesn't appear after I installed this repo?
|
250 |
+
|
251 |
+
This repo has a new mechanism which will skip any custom node can't be imported. If you meet this case, please create a issue on [Issues tab](https://github.com/Fannovel16/comfyui_controlnet_aux/issues) with the log from the command line.
|
252 |
+
|
253 |
+
## DWPose/AnimalPose only uses CPU so it's so slow. How can I make it use GPU?
|
254 |
+
There are two ways to speed-up DWPose: using TorchScript checkpoints (.torchscript.pt) checkpoints or ONNXRuntime (.onnx). TorchScript way is little bit slower than ONNXRuntime but doesn't require any additional library and still way way faster than CPU.
|
255 |
+
|
256 |
+
A torchscript bbox detector is compatiable with an onnx pose estimator and vice versa.
|
257 |
+
### TorchScript
|
258 |
+
Set `bbox_detector` and `pose_estimator` according to this picture. You can try other bbox detector endings with `.torchscript.pt` to reduce bbox detection time if input images are ideal.
|
259 |
+

|
260 |
+
### ONNXRuntime
|
261 |
+
If onnxruntime is installed successfully and the checkpoint used endings with `.onnx`, it will replace default cv2 backend to take advantage of GPU. Note that if you are using NVidia card, this method currently can only works on CUDA 11.8 (ComfyUI_windows_portable_nvidia_cu118_or_cpu.7z) unless you compile onnxruntime yourself.
|
262 |
+
|
263 |
+
1. Know your onnxruntime build:
|
264 |
+
* * NVidia CUDA 11.x or bellow/AMD GPU: `onnxruntime-gpu`
|
265 |
+
* * NVidia CUDA 12.x: `onnxruntime-gpu --extra-index-url https://aiinfra.pkgs.visualstudio.com/PublicPackages/_packaging/onnxruntime-cuda-12/pypi/simple/`
|
266 |
+
* * DirectML: `onnxruntime-directml`
|
267 |
+
* * OpenVINO: `onnxruntime-openvino`
|
268 |
+
|
269 |
+
Note that if this is your first time using ComfyUI, please test if it can run on your device before doing next steps.
|
270 |
+
|
271 |
+
2. Add it into `requirements.txt`
|
272 |
+
|
273 |
+
3. Run `install.bat` or pip command mentioned in Installation
|
274 |
+
|
275 |
+

|
276 |
+
|
277 |
+
# Assets files of preprocessors
|
278 |
+
* anime_face_segment: [bdsqlsz/qinglong_controlnet-lllite/Annotators/UNet.pth](https://huggingface.co/bdsqlsz/qinglong_controlnet-lllite/blob/main/Annotators/UNet.pth), [anime-seg/isnetis.ckpt](https://huggingface.co/skytnt/anime-seg/blob/main/isnetis.ckpt)
|
279 |
+
* densepose: [LayerNorm/DensePose-TorchScript-with-hint-image/densepose_r50_fpn_dl.torchscript](https://huggingface.co/LayerNorm/DensePose-TorchScript-with-hint-image/blob/main/densepose_r50_fpn_dl.torchscript)
|
280 |
+
* dwpose:
|
281 |
+
* * bbox_detector: Either [yzd-v/DWPose/yolox_l.onnx](https://huggingface.co/yzd-v/DWPose/blob/main/yolox_l.onnx), [hr16/yolox-onnx/yolox_l.torchscript.pt](https://huggingface.co/hr16/yolox-onnx/blob/main/yolox_l.torchscript.pt), [hr16/yolo-nas-fp16/yolo_nas_l_fp16.onnx](https://huggingface.co/hr16/yolo-nas-fp16/blob/main/yolo_nas_l_fp16.onnx), [hr16/yolo-nas-fp16/yolo_nas_m_fp16.onnx](https://huggingface.co/hr16/yolo-nas-fp16/blob/main/yolo_nas_m_fp16.onnx), [hr16/yolo-nas-fp16/yolo_nas_s_fp16.onnx](https://huggingface.co/hr16/yolo-nas-fp16/blob/main/yolo_nas_s_fp16.onnx)
|
282 |
+
* * pose_estimator: Either [hr16/DWPose-TorchScript-BatchSize5/dw-ll_ucoco_384_bs5.torchscript.pt](https://huggingface.co/hr16/DWPose-TorchScript-BatchSize5/blob/main/dw-ll_ucoco_384_bs5.torchscript.pt), [yzd-v/DWPose/dw-ll_ucoco_384.onnx](https://huggingface.co/yzd-v/DWPose/blob/main/dw-ll_ucoco_384.onnx)
|
283 |
+
* animal_pose (ap10k):
|
284 |
+
* * bbox_detector: Either [yzd-v/DWPose/yolox_l.onnx](https://huggingface.co/yzd-v/DWPose/blob/main/yolox_l.onnx), [hr16/yolox-onnx/yolox_l.torchscript.pt](https://huggingface.co/hr16/yolox-onnx/blob/main/yolox_l.torchscript.pt), [hr16/yolo-nas-fp16/yolo_nas_l_fp16.onnx](https://huggingface.co/hr16/yolo-nas-fp16/blob/main/yolo_nas_l_fp16.onnx), [hr16/yolo-nas-fp16/yolo_nas_m_fp16.onnx](https://huggingface.co/hr16/yolo-nas-fp16/blob/main/yolo_nas_m_fp16.onnx), [hr16/yolo-nas-fp16/yolo_nas_s_fp16.onnx](https://huggingface.co/hr16/yolo-nas-fp16/blob/main/yolo_nas_s_fp16.onnx)
|
285 |
+
* * pose_estimator: Either [hr16/DWPose-TorchScript-BatchSize5/rtmpose-m_ap10k_256_bs5.torchscript.pt](https://huggingface.co/hr16/DWPose-TorchScript-BatchSize5/blob/main/rtmpose-m_ap10k_256_bs5.torchscript.pt), [hr16/UnJIT-DWPose/rtmpose-m_ap10k_256.onnx](https://huggingface.co/hr16/UnJIT-DWPose/blob/main/rtmpose-m_ap10k_256.onnx)
|
286 |
+
* hed: [lllyasviel/Annotators/ControlNetHED.pth](https://huggingface.co/lllyasviel/Annotators/blob/main/ControlNetHED.pth)
|
287 |
+
* leres: [lllyasviel/Annotators/res101.pth](https://huggingface.co/lllyasviel/Annotators/blob/main/res101.pth), [lllyasviel/Annotators/latest_net_G.pth](https://huggingface.co/lllyasviel/Annotators/blob/main/latest_net_G.pth)
|
288 |
+
* lineart: [lllyasviel/Annotators/sk_model.pth](https://huggingface.co/lllyasviel/Annotators/blob/main/sk_model.pth), [lllyasviel/Annotators/sk_model2.pth](https://huggingface.co/lllyasviel/Annotators/blob/main/sk_model2.pth)
|
289 |
+
* lineart_anime: [lllyasviel/Annotators/netG.pth](https://huggingface.co/lllyasviel/Annotators/blob/main/netG.pth)
|
290 |
+
* manga_line: [lllyasviel/Annotators/erika.pth](https://huggingface.co/lllyasviel/Annotators/blob/main/erika.pth)
|
291 |
+
* mesh_graphormer: [hr16/ControlNet-HandRefiner-pruned/graphormer_hand_state_dict.bin](https://huggingface.co/hr16/ControlNet-HandRefiner-pruned/blob/main/graphormer_hand_state_dict.bin), [hr16/ControlNet-HandRefiner-pruned/hrnetv2_w64_imagenet_pretrained.pth](https://huggingface.co/hr16/ControlNet-HandRefiner-pruned/blob/main/hrnetv2_w64_imagenet_pretrained.pth)
|
292 |
+
* midas: [lllyasviel/Annotators/dpt_hybrid-midas-501f0c75.pt](https://huggingface.co/lllyasviel/Annotators/blob/main/dpt_hybrid-midas-501f0c75.pt)
|
293 |
+
* mlsd: [lllyasviel/Annotators/mlsd_large_512_fp32.pth](https://huggingface.co/lllyasviel/Annotators/blob/main/mlsd_large_512_fp32.pth)
|
294 |
+
* normalbae: [lllyasviel/Annotators/scannet.pt](https://huggingface.co/lllyasviel/Annotators/blob/main/scannet.pt)
|
295 |
+
* oneformer: [lllyasviel/Annotators/250_16_swin_l_oneformer_ade20k_160k.pth](https://huggingface.co/lllyasviel/Annotators/blob/main/250_16_swin_l_oneformer_ade20k_160k.pth)
|
296 |
+
* open_pose: [lllyasviel/Annotators/body_pose_model.pth](https://huggingface.co/lllyasviel/Annotators/blob/main/body_pose_model.pth), [lllyasviel/Annotators/hand_pose_model.pth](https://huggingface.co/lllyasviel/Annotators/blob/main/hand_pose_model.pth), [lllyasviel/Annotators/facenet.pth](https://huggingface.co/lllyasviel/Annotators/blob/main/facenet.pth)
|
297 |
+
* pidi: [lllyasviel/Annotators/table5_pidinet.pth](https://huggingface.co/lllyasviel/Annotators/blob/main/table5_pidinet.pth)
|
298 |
+
* sam: [dhkim2810/MobileSAM/mobile_sam.pt](https://huggingface.co/dhkim2810/MobileSAM/blob/main/mobile_sam.pt)
|
299 |
+
* uniformer: [lllyasviel/Annotators/upernet_global_small.pth](https://huggingface.co/lllyasviel/Annotators/blob/main/upernet_global_small.pth)
|
300 |
+
* zoe: [lllyasviel/Annotators/ZoeD_M12_N.pt](https://huggingface.co/lllyasviel/Annotators/blob/main/ZoeD_M12_N.pt)
|
301 |
+
* teed: [bdsqlsz/qinglong_controlnet-lllite/7_model.pth](https://huggingface.co/bdsqlsz/qinglong_controlnet-lllite/blob/main/Annotators/7_model.pth)
|
302 |
+
* depth_anything: Either [LiheYoung/Depth-Anything/checkpoints/depth_anything_vitl14.pth](https://huggingface.co/spaces/LiheYoung/Depth-Anything/blob/main/checkpoints/depth_anything_vitl14.pth), [LiheYoung/Depth-Anything/checkpoints/depth_anything_vitb14.pth](https://huggingface.co/spaces/LiheYoung/Depth-Anything/blob/main/checkpoints/depth_anything_vitb14.pth) or [LiheYoung/Depth-Anything/checkpoints/depth_anything_vits14.pth](https://huggingface.co/spaces/LiheYoung/Depth-Anything/blob/main/checkpoints/depth_anything_vits14.pth)
|
303 |
+
* diffusion_edge: Either [hr16/Diffusion-Edge/diffusion_edge_indoor.pt](https://huggingface.co/hr16/Diffusion-Edge/blob/main/diffusion_edge_indoor.pt), [hr16/Diffusion-Edge/diffusion_edge_urban.pt](https://huggingface.co/hr16/Diffusion-Edge/blob/main/diffusion_edge_urban.pt) or [hr16/Diffusion-Edge/diffusion_edge_natrual.pt](https://huggingface.co/hr16/Diffusion-Edge/blob/main/diffusion_edge_natrual.pt)
|
304 |
+
* unimatch: Either [hr16/Unimatch/gmflow-scale2-regrefine6-mixdata.pth](https://huggingface.co/hr16/Unimatch/blob/main/gmflow-scale2-regrefine6-mixdata.pth), [hr16/Unimatch/gmflow-scale2-mixdata.pth](https://huggingface.co/hr16/Unimatch/blob/main/gmflow-scale2-mixdata.pth) or [hr16/Unimatch/gmflow-scale1-mixdata.pth](https://huggingface.co/hr16/Unimatch/blob/main/gmflow-scale1-mixdata.pth)
|
305 |
+
* zoe_depth_anything: Either [LiheYoung/Depth-Anything/checkpoints_metric_depth/depth_anything_metric_depth_indoor.pt](https://huggingface.co/spaces/LiheYoung/Depth-Anything/blob/main/checkpoints_metric_depth/depth_anything_metric_depth_indoor.pt) or [LiheYoung/Depth-Anything/checkpoints_metric_depth/depth_anything_metric_depth_outdoor.pt](https://huggingface.co/spaces/LiheYoung/Depth-Anything/blob/main/checkpoints_metric_depth/depth_anything_metric_depth_outdoor.pt)
|
306 |
+
# 1500 Stars 😄
|
307 |
+
<a href="https://star-history.com/#Fannovel16/comfyui_controlnet_aux&Date">
|
308 |
+
<picture>
|
309 |
+
<source media="(prefers-color-scheme: dark)" srcset="https://api.star-history.com/svg?repos=Fannovel16/comfyui_controlnet_aux&type=Date&theme=dark" />
|
310 |
+
<source media="(prefers-color-scheme: light)" srcset="https://api.star-history.com/svg?repos=Fannovel16/comfyui_controlnet_aux&type=Date" />
|
311 |
+
<img alt="Star History Chart" src="https://api.star-history.com/svg?repos=Fannovel16/comfyui_controlnet_aux&type=Date" />
|
312 |
+
</picture>
|
313 |
+
</a>
|
314 |
+
|
315 |
+
Thanks for yalls supports. I never thought the graph for stars would be linear lol.
|
ComfyUI/custom_nodes/comfyui_controlnet_aux/UPDATES.md
ADDED
@@ -0,0 +1,40 @@
* `AIO Aux Preprocessor` integrating all loadable aux preprocessors as dropdown options. Easy to copy, paste and get the preprocessor faster.
* Added OpenPose-format JSON output from OpenPose Preprocessor and DWPose Preprocessor. Check [here](#faces-and-poses).
* Fixed wrong model path when downloading DWPose.
* Made hint images less blurry.
* Added `resolution` option, `PixelPerfectResolution` and `HintImageEnchance` nodes (TODO: Documentation).
* Added `RAFT Optical Flow Embedder` for TemporalNet2 (TODO: Workflow example).
* Fixed OpenCV conflicts between this extension, [ReActor](https://github.com/Gourieff/comfyui-reactor-node) and Roop. Thanks `Gourieff` for [the solution](https://github.com/Fannovel16/comfyui_controlnet_aux/issues/7#issuecomment-1734319075)!
* Removed RAFT, as the code behind it doesn't match what the original code does.
* Changed `lineart`'s display name from `Normal Lineart` to `Realistic Lineart`. This change won't affect old workflows.
* Added support for `onnxruntime` to speed up DWPose (see the Q&A).
* Fixed TypeError: expected size to be one of int or Tuple[int] or Tuple[int, int] or Tuple[int, int, int], but got size with types [<class 'numpy.int64'>, <class 'numpy.int64'>] ([Issue](https://github.com/Fannovel16/comfyui_controlnet_aux/issues/2), [PR](https://github.com/Fannovel16/comfyui_controlnet_aux/pull/71)).
* Fixed ImageGenResolutionFromImage mis-shape (https://github.com/Fannovel16/comfyui_controlnet_aux/pull/74).
* Fixed LeRes and MiDaS's incompatibility with the MPS device.
* Fixed checking the DWPose onnxruntime session multiple times (https://github.com/Fannovel16/comfyui_controlnet_aux/issues/89).
* Added `Anime Face Segmentor` (in `ControlNet Preprocessors/Semantic Segmentation`) for [ControlNet AnimeFaceSegmentV2](https://huggingface.co/bdsqlsz/qinglong_controlnet-lllite#animefacesegmentv2). Check [here](#anime-face-segmentor).
* Changed download functions and fixed a [download error](https://github.com/Fannovel16/comfyui_controlnet_aux/issues/39): [PR](https://github.com/Fannovel16/comfyui_controlnet_aux/pull/96).
* Cache the DWPose onnxruntime session during the first use of the DWPose node instead of at ComfyUI startup.
* Added alternative YOLOX models for faster speed when using DWPose.
* Added alternative DWPose models.
* Implemented the preprocessor for [AnimalPose ControlNet](https://github.com/abehonest/ControlNet_AnimalPose/tree/main). Check [Animal Pose AP-10K](#animal-pose-ap-10k).
* Added YOLO-NAS models which are drop-in replacements for YOLOX.
* Fixed OpenPose Face/Hands no longer detecting: https://github.com/Fannovel16/comfyui_controlnet_aux/issues/54
* Added TorchScript implementation of DWPose and AnimalPose.
* Added TorchScript implementation of DensePose from a [Colab notebook](https://colab.research.google.com/drive/16hcaaKs210ivpxjoyGNuvEXZD4eqOOSQ) which doesn't require detectron2. [Example](#densepose). Thanks [@LayerNome](https://github.com/Layer-norm) for fixing the related bugs.
* Added Standard Lineart Preprocessor.
* Fixed OpenPose misplacements in some cases.
* Added Mesh Graphormer - Hand Depth Map & Mask.
* Fixed the misaligned-hands bug in MeshGraphormer.
* Added more mask options for MeshGraphormer.
* Added Save Pose Keypoint node for editing.
* Added Unimatch Optical Flow.
* Added Depth Anything & Zoe Depth Anything.
* Removed the resolution field from Unimatch Optical Flow, as interpolating optical flow seems unstable.
* Added TEED Soft-Edge Preprocessor.
* Added DiffusionEdge.
* Added Image Luminance and Image Intensity.
* Added Normal DSINE.
* Added TTPlanet Tile (09/05/2024, DD/MM/YYYY).
* Added AnyLine, Metric3D (18/05/2024).
* Added Depth Anything V2 (16/06/2024).
ComfyUI/custom_nodes/comfyui_controlnet_aux/__init__.py
ADDED
@@ -0,0 +1,151 @@
import sys, os
from .utils import here, create_node_input_types
from pathlib import Path
import threading
import traceback
import warnings
import importlib
from .log import log, blue_text, cyan_text, get_summary, get_label
from .hint_image_enchance import NODE_CLASS_MAPPINGS as HIE_NODE_CLASS_MAPPINGS
from .hint_image_enchance import NODE_DISPLAY_NAME_MAPPINGS as HIE_NODE_DISPLAY_NAME_MAPPINGS
#Ref: https://github.com/comfyanonymous/ComfyUI/blob/76d53c4622fc06372975ed2a43ad345935b8a551/nodes.py#L17
sys.path.insert(0, str(Path(here, "src").resolve()))
for pkg_name in ["controlnet_aux", "custom_mmpkg"]:
    sys.path.append(str(Path(here, "src", pkg_name).resolve()))

#Enable CPU fallback for ops not being supported by MPS like upsample_bicubic2d.out
#https://github.com/pytorch/pytorch/issues/77764
#https://github.com/Fannovel16/comfyui_controlnet_aux/issues/2#issuecomment-1763579485
os.environ["PYTORCH_ENABLE_MPS_FALLBACK"] = os.getenv("PYTORCH_ENABLE_MPS_FALLBACK", '1')


def load_nodes():
    shorted_errors = []
    full_error_messages = []
    node_class_mappings = {}
    node_display_name_mappings = {}

    for filename in (here / "node_wrappers").iterdir():
        module_name = filename.stem
        if module_name.startswith('.'): continue #Skip hidden files created by the OS (e.g. [.DS_Store](https://en.wikipedia.org/wiki/.DS_Store))
        try:
            module = importlib.import_module(
                f".node_wrappers.{module_name}", package=__package__
            )
            node_class_mappings.update(getattr(module, "NODE_CLASS_MAPPINGS"))
            if hasattr(module, "NODE_DISPLAY_NAME_MAPPINGS"):
                node_display_name_mappings.update(getattr(module, "NODE_DISPLAY_NAME_MAPPINGS"))

            log.debug(f"Imported {module_name} nodes")

        except AttributeError:
            pass # wip nodes
        except Exception:
            error_message = traceback.format_exc()
            full_error_messages.append(error_message)
            error_message = error_message.splitlines()[-1]
            shorted_errors.append(
                f"Failed to import module {module_name} because {error_message}"
            )

    if len(shorted_errors) > 0:
        full_err_log = '\n\n'.join(full_error_messages)
        print(f"\n\nFull error log from comfyui_controlnet_aux: \n{full_err_log}\n\n")
        log.info(
            f"Some nodes failed to load:\n\t"
            + "\n\t".join(shorted_errors)
            + "\n\n"
            + "Check that you properly installed the dependencies.\n"
            + "If you think this is a bug, please report it on the github page (https://github.com/Fannovel16/comfyui_controlnet_aux/issues)"
        )
    return node_class_mappings, node_display_name_mappings

AUX_NODE_MAPPINGS, AUX_DISPLAY_NAME_MAPPINGS = load_nodes()

AIO_NOT_SUPPORTED = ["InpaintPreprocessor"]
#For nodes not mapping image to image

def preprocessor_options():
    auxs = list(AUX_NODE_MAPPINGS.keys())
    auxs.insert(0, "none")
    for name in AIO_NOT_SUPPORTED:
        if name in auxs:
            auxs.remove(name)
    return auxs


PREPROCESSOR_OPTIONS = preprocessor_options()

class AIO_Preprocessor:
    @classmethod
    def INPUT_TYPES(s):
        return create_node_input_types(preprocessor=(PREPROCESSOR_OPTIONS, {"default": "none"}))

    RETURN_TYPES = ("IMAGE",)
    FUNCTION = "execute"

    CATEGORY = "ControlNet Preprocessors"

    def execute(self, preprocessor, image, resolution=512):
        if preprocessor == "none":
            return (image, )
        else:
            aux_class = AUX_NODE_MAPPINGS[preprocessor]
            input_types = aux_class.INPUT_TYPES()
            input_types = {
                **input_types["required"],
                **(input_types["optional"] if "optional" in input_types else {})
            }
            params = {}
            for name, input_type in input_types.items():
                if name == "image":
                    params[name] = image
                    continue

                if name == "resolution":
                    params[name] = resolution
                    continue

                if len(input_type) == 2 and ("default" in input_type[1]):
                    params[name] = input_type[1]["default"]
                    continue

                default_values = { "INT": 0, "FLOAT": 0.0 }
                if input_type[0] in default_values:
                    params[name] = default_values[input_type[0]]

            return getattr(aux_class(), aux_class.FUNCTION)(**params)


class ControlNetPreprocessorSelector:
    @classmethod
    def INPUT_TYPES(s):
        return {
            "required": {
                "preprocessor": (PREPROCESSOR_OPTIONS,),
            }
        }

    RETURN_TYPES = (PREPROCESSOR_OPTIONS,)
    RETURN_NAMES = ("preprocessor",)
    FUNCTION = "get_preprocessor"

    CATEGORY = "ControlNet Preprocessors"

    def get_preprocessor(self, preprocessor: str):
        return (preprocessor,)


NODE_CLASS_MAPPINGS = {
    **AUX_NODE_MAPPINGS,
    "AIO_Preprocessor": AIO_Preprocessor,
    "ControlNetPreprocessorSelector": ControlNetPreprocessorSelector,
    **HIE_NODE_CLASS_MAPPINGS,
}

NODE_DISPLAY_NAME_MAPPINGS = {
    **AUX_DISPLAY_NAME_MAPPINGS,
    "AIO_Preprocessor": "AIO Aux Preprocessor",
    "ControlNetPreprocessorSelector": "Preprocessor Selector",
    **HIE_NODE_DISPLAY_NAME_MAPPINGS,
}
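To make the parameter-filling in `AIO_Preprocessor.execute` above easier to follow, here is a minimal, self-contained sketch of the same idea outside ComfyUI. `ToyCanny` and its fields are hypothetical stand-ins for a real aux node class, not something shipped by this package:

```python
# Sketch of the AIO dispatch idea: read a node class's INPUT_TYPES(),
# fill image/resolution plus declared defaults, then call its FUNCTION.
class ToyCanny:
    @classmethod
    def INPUT_TYPES(cls):
        return {
            "required": {"image": ("IMAGE",)},
            "optional": {
                "low_threshold": ("INT", {"default": 100}),
                "resolution": ("INT", {"default": 512}),
            },
        }

    FUNCTION = "detect"

    def detect(self, image, low_threshold=100, resolution=512):
        return (f"edges({image}, t={low_threshold}, res={resolution})",)


def dispatch(aux_class, image, resolution=512):
    spec = aux_class.INPUT_TYPES()
    merged = {**spec["required"], **spec.get("optional", {})}
    params = {}
    for name, input_type in merged.items():
        if name == "image":
            params[name] = image
        elif name == "resolution":
            params[name] = resolution
        elif len(input_type) == 2 and "default" in input_type[1]:
            params[name] = input_type[1]["default"]
        else:
            # Fall back to a neutral value for plain INT/FLOAT inputs.
            defaults = {"INT": 0, "FLOAT": 0.0}
            if input_type[0] in defaults:
                params[name] = defaults[input_type[0]]
    return getattr(aux_class(), aux_class.FUNCTION)(**params)


print(dispatch(ToyCanny, image="input.png", resolution=768))
```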
ComfyUI/custom_nodes/comfyui_controlnet_aux/ckpts/LiheYoung/Depth-Anything/checkpoints/depth_anything_vitl14.pth
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:6c6a383e33e51c5fdfbf31e7ebcda943973a9e6a1cbef1564afe58d7f2e8fe63
size 1341401882
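The `.pth` entries in this diff are git-lfs pointer files (version, sha256 oid, byte size) rather than the weights themselves. A small illustrative sketch, not part of the repo, for reading such a pointer:

```python
# Parse a git-lfs pointer file into its version, oid, and size fields.
def parse_lfs_pointer(text: str) -> dict:
    fields = {}
    for line in text.strip().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

pointer = """version https://git-lfs.github.com/spec/v1
oid sha256:6c6a383e33e51c5fdfbf31e7ebcda943973a9e6a1cbef1564afe58d7f2e8fe63
size 1341401882"""

info = parse_lfs_pointer(pointer)
print(info["oid"], int(info["size"]))  # sha256:6c6a... 1341401882
```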
ComfyUI/custom_nodes/comfyui_controlnet_aux/ckpts/lllyasviel/Annotators/.huggingface/.gitignore
ADDED
@@ -0,0 +1 @@
*
ComfyUI/custom_nodes/comfyui_controlnet_aux/ckpts/lllyasviel/Annotators/.huggingface/download/body_pose_model.pth.metadata
ADDED
@@ -0,0 +1,3 @@
982e7edaec38759d914a963c48c4726685de7d96
25a948c16078b0f08e236bda51a385d855ef4c153598947c28c0d47ed94bb746
1722775394.1729648
ComfyUI/custom_nodes/comfyui_controlnet_aux/ckpts/lllyasviel/Annotators/.huggingface/download/facenet.pth.metadata
ADDED
@@ -0,0 +1,3 @@
982e7edaec38759d914a963c48c4726685de7d96
8beb52e548624ffcc4aed12af7aee7dcbfaeea420c75609fee999fe7add79d43
1722775422.8091826
ComfyUI/custom_nodes/comfyui_controlnet_aux/ckpts/lllyasviel/Annotators/.huggingface/download/hand_pose_model.pth.metadata
ADDED
@@ -0,0 +1,3 @@
982e7edaec38759d914a963c48c4726685de7d96
b76b00d1750901abd07b9f9d8c98cc3385b8fe834a26d4b4f0aad439e75fc600
1722775408.2201152
ComfyUI/custom_nodes/comfyui_controlnet_aux/ckpts/lllyasviel/Annotators/body_pose_model.pth
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:25a948c16078b0f08e236bda51a385d855ef4c153598947c28c0d47ed94bb746
size 209267595
ComfyUI/custom_nodes/comfyui_controlnet_aux/ckpts/lllyasviel/Annotators/facenet.pth
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:8beb52e548624ffcc4aed12af7aee7dcbfaeea420c75609fee999fe7add79d43
size 153718792
ComfyUI/custom_nodes/comfyui_controlnet_aux/ckpts/lllyasviel/Annotators/hand_pose_model.pth
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:b76b00d1750901abd07b9f9d8c98cc3385b8fe834a26d4b4f0aad439e75fc600
size 147341049
ComfyUI/custom_nodes/comfyui_controlnet_aux/config.example.yaml
ADDED
@@ -0,0 +1,20 @@
# This is an example config.yaml file; rename it to config.yaml if you want to use it
# ###############################################################################################
# This path is the base folder for custom preprocessor models. The default is "./ckpts".
# You can also use absolute paths like: "/root/ComfyUI/custom_nodes/comfyui_controlnet_aux/ckpts" or "D:\\ComfyUI\\custom_nodes\\comfyui_controlnet_aux\\ckpts"
annotator_ckpts_path: "./ckpts"
# ###############################################################################################
# This path is for downloading temporary files.
# You SHOULD use an absolute path for this, like "D:\\temp"; DO NOT use relative paths. Leave empty for the default.
custom_temp_path:
# ###############################################################################################
# If you have already downloaded ckpts via huggingface hub into the default cache path (e.g. ~/.cache/huggingface/hub), you can set this to True to use symlinks and save space
USE_SYMLINKS: False
# ###############################################################################################
# EP_list is a list of execution providers for onnxruntime; if one of them is not available or not working well, you can delete that provider from here (config.yaml)
# You can find all available providers here: https://onnxruntime.ai/docs/execution-providers
# For example, if you have CUDA installed, you can set it to: ["CUDAExecutionProvider", "CPUExecutionProvider"]
# An empty list, or keeping only ["CPUExecutionProvider"], means cv2.dnn.readNetFromONNX is used to load onnx models
# If your onnx models can only run on the CPU or have other issues, we recommend using the pt model instead.
# The default value is ["CUDAExecutionProvider", "DirectMLExecutionProvider", "OpenVINOExecutionProvider", "ROCMExecutionProvider", "CPUExecutionProvider"]
EP_list: ["CUDAExecutionProvider", "DirectMLExecutionProvider", "OpenVINOExecutionProvider", "ROCMExecutionProvider", "CPUExecutionProvider"]
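As a rough illustration of how an `EP_list` like the default above can be reconciled with the providers a given onnxruntime build actually offers, here is a hedged sketch (not the extension's own loading code; `dwpose.onnx` is a placeholder path):

```python
# Filter a configured EP_list down to providers the installed onnxruntime supports.
import os
import onnxruntime as ort

EP_LIST = ["CUDAExecutionProvider", "DirectMLExecutionProvider",
           "OpenVINOExecutionProvider", "ROCMExecutionProvider",
           "CPUExecutionProvider"]

def pick_providers(ep_list):
    """Keep only providers this onnxruntime build actually ships with."""
    available = set(ort.get_available_providers())
    chosen = [ep for ep in ep_list if ep in available]
    return chosen or ["CPUExecutionProvider"]

providers = pick_providers(EP_LIST)
print("Using providers:", providers)

# With a real model file, a session would then be created like this:
if os.path.exists("dwpose.onnx"):
    session = ort.InferenceSession("dwpose.onnx", providers=providers)
```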
ComfyUI/custom_nodes/comfyui_controlnet_aux/dev_interface.py
ADDED
@@ -0,0 +1,6 @@
from pathlib import Path
from utils import here
import sys
sys.path.append(str(Path(here, "src")))

from controlnet_aux import *
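For development, this module re-exports the vendored detectors via the star import. A hedged sketch of using it, assuming the vendored copy mirrors the upstream `controlnet_aux` package's `CannyDetector` and with `input.png` as a placeholder image path:

```python
# Run a detector directly through dev_interface (names come from controlnet_aux).
from PIL import Image
from dev_interface import CannyDetector  # assumed re-export via `from controlnet_aux import *`

detector = CannyDetector()
edge_map = detector(Image.open("input.png"))  # placeholder input image
edge_map.save("canny_edges.png")
```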
ComfyUI/custom_nodes/comfyui_controlnet_aux/examples/example_animal_pose.png
ADDED
ComfyUI/custom_nodes/comfyui_controlnet_aux/examples/example_anime_face_segmentor.png
ADDED
ComfyUI/custom_nodes/comfyui_controlnet_aux/examples/example_anyline.png
ADDED
ComfyUI/custom_nodes/comfyui_controlnet_aux/examples/example_densepose.png
ADDED
ComfyUI/custom_nodes/comfyui_controlnet_aux/examples/example_depth_anything.png
ADDED
ComfyUI/custom_nodes/comfyui_controlnet_aux/examples/example_depth_anything_v2.png
ADDED
ComfyUI/custom_nodes/comfyui_controlnet_aux/examples/example_dsine.png
ADDED
ComfyUI/custom_nodes/comfyui_controlnet_aux/examples/example_marigold.png
ADDED
ComfyUI/custom_nodes/comfyui_controlnet_aux/examples/example_marigold_flat.jpg
ADDED
ComfyUI/custom_nodes/comfyui_controlnet_aux/examples/example_mesh_graphormer.png
ADDED
ComfyUI/custom_nodes/comfyui_controlnet_aux/examples/example_metric3d.png
ADDED
ComfyUI/custom_nodes/comfyui_controlnet_aux/examples/example_onnx.png
ADDED