# https://github.com/comfyanonymous/ComfyUI/blob/master/nodes.py
import os
from ldm_patched.pfn import model_loading
from ldm_patched.modules import model_management
import torch
import ldm_patched.modules.utils
import ldm_patched.utils.path_utils


class UpscaleModelLoader:
    @classmethod
    def INPUT_TYPES(s):
        return {"required": {"model_name": (ldm_patched.utils.path_utils.get_filename_list("upscale_models"),),
                             }}
    RETURN_TYPES = ("UPSCALE_MODEL",)
    FUNCTION = "load_model"

    CATEGORY = "loaders"

    def load_model(self, model_name):
        model_path = ldm_patched.utils.path_utils.get_full_path("upscale_models", model_name)
        sd = ldm_patched.modules.utils.load_torch_file(model_path, safe_load=True)
        # Checkpoints saved from DataParallel-style training prefix every key
        # with "module."; strip it so architecture detection sees clean keys.
        if "module.layers.0.residual_group.blocks.0.norm1.weight" in sd:
            sd = ldm_patched.modules.utils.state_dict_prefix_replace(sd, {"module.": ""})
        out = model_loading.load_state_dict(sd).eval()
        return (out,)


class ImageUpscaleWithModel:
    @classmethod
    def INPUT_TYPES(s):
        return {"required": {"upscale_model": ("UPSCALE_MODEL",),
                             "image": ("IMAGE",),
                             }}
    RETURN_TYPES = ("IMAGE",)
    FUNCTION = "upscale"

    CATEGORY = "image/upscaling"

    def upscale(self, upscale_model, image):
        device = model_management.get_torch_device()
        upscale_model.to(device)
        # Node images are [B, H, W, C]; the upscale models expect [B, C, H, W].
        in_img = image.movedim(-1, -3).to(device)
        free_memory = model_management.get_free_memory(device)

        tile = 512
        overlap = 32

        # Upscale in tiles, halving the tile size on out-of-memory errors
        # until the model fits, or giving up once tiles get smaller than 128.
        oom = True
        while oom:
            try:
                steps = in_img.shape[0] * ldm_patched.modules.utils.get_tiled_scale_steps(in_img.shape[3], in_img.shape[2], tile_x=tile, tile_y=tile, overlap=overlap)
                pbar = ldm_patched.modules.utils.ProgressBar(steps)
                s = ldm_patched.modules.utils.tiled_scale(in_img, lambda a: upscale_model(a), tile_x=tile, tile_y=tile, overlap=overlap, upscale_amount=upscale_model.scale, pbar=pbar)
                oom = False
            except model_management.OOM_EXCEPTION as e:
                tile //= 2
                if tile < 128:
                    raise e

        upscale_model.cpu()
        # Move channels back to the last dim and clamp to the valid 0..1 range.
        s = torch.clamp(s.movedim(-3, -1), min=0, max=1.0)
        return (s,)


NODE_CLASS_MAPPINGS = {
    "UpscaleModelLoader": UpscaleModelLoader,
    "ImageUpscaleWithModel": ImageUpscaleWithModel
}
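

# A minimal usage sketch (not part of the upstream file): exercising both
# nodes directly, outside the node graph. Assumes at least one model file
# (e.g. an ESRGAN .pth) sits in the configured "upscale_models" directory.
if __name__ == "__main__":
    # INPUT_TYPES exposes the discoverable model filenames; take the first.
    model_names = UpscaleModelLoader.INPUT_TYPES()["required"]["model_name"][0]
    (upscale_model,) = UpscaleModelLoader().load_model(model_names[0])

    image = torch.rand(1, 256, 256, 3)  # [B, H, W, C] floats in 0..1
    (upscaled,) = ImageUpscaleWithModel().upscale(upscale_model, image)
    print(upscaled.shape)  # e.g. torch.Size([1, 1024, 1024, 3]) for a 4x model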