from typing import Tuple

import numpy as np
import torch
def resize_image_dimensions(
    original_resolution_wh: Tuple[int, int],
    maximum_dimension: int = 2048
) -> Tuple[int, int]:
    """Fit (width, height) within maximum_dimension and round each side
    down to the nearest multiple of 32."""
    width, height = original_resolution_wh

    # Already within bounds: only snap both sides down to multiples of 32.
    if width <= maximum_dimension and height <= maximum_dimension:
        width = width - (width % 32)
        height = height - (height % 32)
        return width, height

    # Scale by the longer side so the result fits within maximum_dimension.
    if width > height:
        scaling_factor = maximum_dimension / width
    else:
        scaling_factor = maximum_dimension / height

    new_width = int(width * scaling_factor)
    new_height = int(height * scaling_factor)

    # Snap the scaled dimensions down to multiples of 32.
    new_width = new_width - (new_width % 32)
    new_height = new_height - (new_height % 32)

    return new_width, new_height
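
# Example (sketch): a 3000x2000 input exceeds the 2048 limit, so both sides
# are scaled by 2048/3000 and then snapped down to multiples of 32:
#     resize_image_dimensions((3000, 2000)) -> (2048, 1344)
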
def make_inpaint_condition(init_image, mask_image):
    """Build a (1, 3, H, W) float tensor in which masked pixels are set
    to -1.0, the conditioning format used for ControlNet inpainting."""
    init_image = np.array(init_image.convert("RGB")).astype(np.float32) / 255.0
    mask_image = np.array(mask_image.convert("L")).astype(np.float32) / 255.0

    # Compare both height and width of the image and the mask.
    assert init_image.shape[:2] == mask_image.shape[:2], "image and image_mask must have the same image size"
    init_image[mask_image > 0.5] = -1.0  # set as masked pixel
    init_image = np.expand_dims(init_image, 0).transpose(0, 3, 1, 2)  # HWC -> NCHW
    init_image = torch.from_numpy(init_image)
    return init_image
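

# Minimal usage sketch (an assumption, not part of the original file):
# requires Pillow; "photo.png" and "mask.png" are hypothetical placeholder
# paths for an input image and its inpainting mask.
if __name__ == "__main__":
    from PIL import Image

    image = Image.open("photo.png")
    mask = Image.open("mask.png")

    # Fit within 2048 px and snap both sides to multiples of 32.
    width, height = resize_image_dimensions(image.size)
    image = image.resize((width, height), Image.LANCZOS)
    mask = mask.resize((width, height), Image.NEAREST)  # keep the mask binary

    # (1, 3, H, W) conditioning tensor for a ControlNet-style
    # inpainting pipeline.
    control_image = make_inpaint_condition(image, mask)
    print(control_image.shape)  # torch.Size([1, 3, height, width])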