update
- diffusion_webui/diffusion_models/controlnet/controlnet_inpaint/controlnet_inpaint_canny.py +1 -1
- diffusion_webui/diffusion_models/controlnet/controlnet_inpaint/controlnet_inpaint_depth.py +1 -2
- diffusion_webui/diffusion_models/controlnet/controlnet_inpaint/controlnet_inpaint_hed.py +1 -1
- diffusion_webui/diffusion_models/controlnet/controlnet_inpaint/controlnet_inpaint_mlsd.py +1 -1
- diffusion_webui/diffusion_models/controlnet/controlnet_inpaint/controlnet_inpaint_pose.py +1 -1
- diffusion_webui/diffusion_models/controlnet/controlnet_inpaint/controlnet_inpaint_scribble.py +1 -1
- diffusion_webui/diffusion_models/controlnet/controlnet_inpaint/controlnet_inpaint_seg.py +1 -2
diffusion_webui/diffusion_models/controlnet/controlnet_inpaint/controlnet_inpaint_canny.py
CHANGED
@@ -42,7 +42,7 @@ class StableDiffusionControlNetInpaintCannyGenerator:
 
     def load_image(self, image_path):
         image = np.array(image_path)
-        image = Image.fromarray(
+        image = Image.fromarray(image)
         return image
 
     def controlnet_canny_inpaint(
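
The same one-line fix repeats in each file below: the old side's "image = Image.fromarray(" is missing its argument and closing parenthesis (a syntax error as written), and the new line converts the NumPy array back into a PIL image before returning it. A minimal standalone sketch of the repaired helper, with the imports it relies on added here for context (the originals define it as a method on each generator class):

import numpy as np
from PIL import Image

def load_image(image_path):
    # Despite the parameter name, the callers appear to pass image data
    # rather than a filesystem path; round-tripping through NumPy
    # normalizes the input to a PIL Image.
    image = np.array(image_path)
    image = Image.fromarray(image)
    return image

Called with either a PIL image or a NumPy array (both of which np.array accepts), it returns a PIL Image suitable for handing to the inpaint pipelines.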
diffusion_webui/diffusion_models/controlnet/controlnet_inpaint/controlnet_inpaint_depth.py
CHANGED
@@ -42,10 +42,9 @@ class StableDiffusionControlInpaintNetDepthGenerator:
 
     def load_image(self, image_path):
         image = np.array(image_path)
-        image = Image.fromarray(
+        image = Image.fromarray(image)
         return image
 
-
     def controlnet_inpaint_depth(self, image_path: str):
         depth_estimator = pipeline("depth-estimation")
         image = image_path["image"].convert("RGB").resize((512, 512))
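
Besides the fromarray fix, this file also drops a stray blank line before controlnet_inpaint_depth. For context, the usual transformers/diffusers recipe for turning the depth-estimation pipeline's output into a ControlNet control image looks roughly like the sketch below; this follows the standard documented approach rather than necessarily the exact body of controlnet_inpaint_depth, and "room.png" is only a placeholder input.

import numpy as np
from PIL import Image
from transformers import pipeline

depth_estimator = pipeline("depth-estimation")

image = Image.open("room.png").convert("RGB").resize((512, 512))
depth = depth_estimator(image)["depth"]                 # single-channel PIL image
depth = np.array(depth)[:, :, None]
depth = np.concatenate([depth, depth, depth], axis=2)   # replicate to 3 channels
control_image = Image.fromarray(depth)                  # used as the ControlNet conditioning image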
diffusion_webui/diffusion_models/controlnet/controlnet_inpaint/controlnet_inpaint_hed.py
CHANGED
@@ -42,7 +42,7 @@ class StableDiffusionControlNetInpaintHedGenerator:
 
     def load_image(self, image_path):
         image = np.array(image_path)
-        image = Image.fromarray(
+        image = Image.fromarray(image)
         return image
 
 
diffusion_webui/diffusion_models/controlnet/controlnet_inpaint/controlnet_inpaint_mlsd.py
CHANGED
@@ -42,7 +42,7 @@ class StableDiffusionControlNetInpaintMlsdGenerator:
 
     def load_image(self, image_path):
         image = np.array(image_path)
-        image = Image.fromarray(
+        image = Image.fromarray(image)
         return image
 
 
diffusion_webui/diffusion_models/controlnet/controlnet_inpaint/controlnet_inpaint_pose.py
CHANGED
@@ -43,7 +43,7 @@ class StableDiffusionControlNetInpaintPoseGenerator:
 
     def load_image(self, image_path):
         image = np.array(image_path)
-        image = Image.fromarray(
+        image = Image.fromarray(image)
         return image
 
     def controlnet_pose_inpaint(self, image_path: str):
diffusion_webui/diffusion_models/controlnet/controlnet_inpaint/controlnet_inpaint_scribble.py
CHANGED
@@ -42,7 +42,7 @@ class StableDiffusionControlNetInpaintScribbleGenerator:
 
     def load_image(self, image_path):
         image = np.array(image_path)
-        image = Image.fromarray(
+        image = Image.fromarray(image)
         return image
 
     def controlnet_inpaint_scribble(self, image_path: str):
diffusion_webui/diffusion_models/controlnet/controlnet_inpaint/controlnet_inpaint_seg.py
CHANGED
@@ -203,9 +203,8 @@ class StableDiffusionControlNetInpaintSegGenerator:
 
     def load_image(self, image_path):
         image = np.array(image_path)
-        image = Image.fromarray(
+        image = Image.fromarray(image)
         return image
-
     def controlnet_seg_inpaint(self, image_path: str):
         image_processor = AutoImageProcessor.from_pretrained(
             "openmmlab/upernet-convnext-small"
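
The seg variant's controlnet_seg_inpaint builds its control image from a UperNet semantic-segmentation model. The standard transformers preprocessing for that step is sketched below; it follows the documented ControlNet-seg recipe rather than this file's exact code, "room.png" is a placeholder input, and ade_palette() is a stand-in for the full 150-entry ADE20K class-to-color table.

import numpy as np
import torch
from PIL import Image
from transformers import AutoImageProcessor, UperNetForSemanticSegmentation

image_processor = AutoImageProcessor.from_pretrained("openmmlab/upernet-convnext-small")
image_segmentor = UperNetForSemanticSegmentation.from_pretrained("openmmlab/upernet-convnext-small")

def ade_palette():
    # Placeholder: the real table maps all 150 ADE20K classes to RGB colors.
    return [(120, 120, 120), (180, 120, 120), (6, 230, 230)]

image = Image.open("room.png").convert("RGB")
pixel_values = image_processor(image, return_tensors="pt").pixel_values

with torch.no_grad():
    outputs = image_segmentor(pixel_values)

# Per-pixel class ids at the original resolution.
seg = image_processor.post_process_semantic_segmentation(
    outputs, target_sizes=[image.size[::-1]]
)[0].cpu().numpy()

# Color each class with its palette entry; classes beyond the truncated
# placeholder palette stay black in this sketch.
color_seg = np.zeros((seg.shape[0], seg.shape[1], 3), dtype=np.uint8)
for label, color in enumerate(ade_palette()):
    color_seg[seg == label, :] = color

control_image = Image.fromarray(color_seg)   # used as the ControlNet conditioning image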