Hyper-SD / comfyui / Hyper-SD15-Nsteps-lora-workflow.json
{
"last_node_id": 20,
"last_link_id": 27,
"nodes": [
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [
-31,
-31
],
"size": {
"0": 422.84503173828125,
"1": 164.31304931640625
},
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 23,
"slot_index": 0
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [
17
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"a photo of a cat"
]
},
{
"id": 19,
"type": "CLIPTextEncode",
"pos": [
6,
541
],
"size": {
"0": 422.84503173828125,
"1": 164.31304931640625
},
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 25,
"slot_index": 0
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [
27
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
""
]
},
{
"id": 20,
"type": "Note",
"pos": [
-509,
-66
],
"size": {
"0": 322.0227355957031,
"1": 164.27613830566406
},
"flags": {},
"order": 0,
"mode": 0,
"properties": {
"text": ""
},
"widgets_values": [
"Use the LoRA corresponding to the number of inference steps to obtain the best inference effect.\n\nAttention: The scheduler type must be set to sgm_uniform!!! "
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 8,
"type": "VAEDecode",
"pos": [
426,
145
],
"size": {
"0": 210,
"1": 46
},
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 8
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [
9
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
}
},
{
"id": 9,
"type": "SaveImage",
"pos": [
1274,
-187
],
"size": {
"0": 391.4791564941406,
"1": 700.6646728515625
},
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"properties": {},
"widgets_values": [
"ComfyUI"
]
},
{
"id": 4,
"type": "CheckpointLoaderSimple",
"pos": [
-568,
290
],
"size": {
"0": 315,
"1": 98
},
"flags": {},
"order": 1,
"mode": 0,
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [
20
],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [
23,
25
],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [
8
],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": [
"v1-5-pruned-emaonly.ckpt"
]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [
463,
707
],
"size": {
"0": 315,
"1": 106
},
"flags": {},
"order": 2,
"mode": 0,
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [
26
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [
512,
512,
1
]
},
{
"id": 3,
"type": "KSampler",
"pos": [
867,
250
],
"size": {
"0": 315,
"1": 262
},
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 22
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 17
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 27
},
{
"name": "latent_image",
"type": "LATENT",
"link": 26
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [
7
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
229623203641397,
"randomize",
2,
1,
"ddim",
"sgm_uniform",
1
]
},
{
"id": 15,
"type": "LoraLoaderModelOnly",
"pos": [
55,
380
],
"size": {
"0": 315,
"1": 82
},
"flags": {},
"order": 3,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 20
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [
22
],
"shape": 3,
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "LoraLoaderModelOnly"
},
"widgets_values": [
"Hyper-SD15-2steps-lora.safetensors",
1
]
}
],
"links": [
[
7,
3,
0,
8,
0,
"LATENT"
],
[
8,
4,
2,
8,
1,
"VAE"
],
[
9,
8,
0,
9,
0,
"IMAGE"
],
[
17,
6,
0,
3,
1,
"CONDITIONING"
],
[
20,
4,
0,
15,
0,
"MODEL"
],
[
22,
15,
0,
3,
0,
"MODEL"
],
[
23,
4,
1,
6,
0,
"CLIP"
],
[
25,
4,
1,
19,
0,
"CLIP"
],
[
26,
5,
0,
3,
3,
"LATENT"
],
[
27,
19,
0,
3,
2,
"CONDITIONING"
]
],
"groups": [],
"config": {},
"extra": {},
"version": 0.4
}
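
The workflow above loads Hyper-SD15-2steps-lora.safetensors into a LoraLoaderModelOnly node and drives the KSampler with 2 steps, CFG 1, the ddim sampler, and the sgm_uniform scheduler, which is exactly what the embedded note asks for. Below is a minimal sketch (plain Python, standard library only) that loads this JSON and checks those two constraints before you queue the graph. The widget ordering it assumes for KSampler (seed, control-after-generate, steps, cfg, sampler, scheduler, denoise) and for LoraLoaderModelOnly (lora_name, strength) is taken from this particular file, not from any documented ComfyUI schema, so treat it as an illustration rather than a general-purpose validator.

import json
import re

# Load the workflow file as saved from the ComfyUI graph editor.
# ASSUMPTION: widget ordering below matches this specific file.
with open("Hyper-SD15-Nsteps-lora-workflow.json") as f:
    workflow = json.load(f)

# Index nodes by type; KSampler and LoraLoaderModelOnly appear once each here.
nodes = {n["type"]: n for n in workflow["nodes"]}

sampler_widgets = nodes["KSampler"]["widgets_values"]
steps, scheduler = sampler_widgets[2], sampler_widgets[5]

lora_name = nodes["LoraLoaderModelOnly"]["widgets_values"][0]
match = re.search(r"(\d+)steps", lora_name)
lora_steps = int(match.group(1)) if match else None

# The note in the workflow asks for two things:
#   1) the scheduler must be sgm_uniform,
#   2) the LoRA's step count should match the KSampler step count.
assert scheduler == "sgm_uniform", f"scheduler is {scheduler!r}, expected 'sgm_uniform'"
assert lora_steps == steps, f"LoRA is tuned for {lora_steps} steps but KSampler uses {steps}"
print(f"OK: {lora_name} with {steps} steps, scheduler {scheduler}")

Run it from the directory containing the workflow file; if the graph is consistent it prints the LoRA name, step count, and scheduler, and the same check applies unchanged when you swap in one of the other Hyper-SD15 N-step LoRAs and adjust the KSampler steps to match.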