Freak-ppa committed
Commit 350e203
1 Parent(s): b15e30e

Update ComfyUI/custom_nodes/ComfyUI-BrushNet/brushnet_nodes.py

ComfyUI/custom_nodes/ComfyUI-BrushNet/brushnet_nodes.py CHANGED
@@ -493,26 +493,31 @@ class BlendInpaint:
             cut_width = min(cut_width, width - x0)
             cut_height = min(cut_height, height - y0)
 
+            # Scale inpainted image to match the cut size
             scaled_inpaint = F.interpolate(inpaint[i].permute(2, 0, 1).unsqueeze(0), size=(cut_height, cut_width), mode='bilinear', align_corners=False).squeeze(0).permute(1, 2, 0)
 
-            result = original[i].clone()
-            result[y0:y0+cut_height, x0:x0+cut_width] = scaled_inpaint
-
-            # Create a new mask for blending
-            blend_mask = torch.zeros((height, width), device=mask.device, dtype=mask.dtype)
-            blend_mask[y0:y0+cut_height, x0:x0+cut_width] = 1.0
+            # Create a mask for the inpainted region
+            inpaint_mask = torch.zeros((height, width), device=mask.device, dtype=mask.dtype)
+            inpaint_mask[y0:y0+cut_height, x0:x0+cut_width] = F.interpolate(mask[i][None, None, :, :], size=(cut_height, cut_width), mode='nearest').squeeze()
 
-            # Apply Gaussian blur to the blend mask
-            blurred_mask = transform(blend_mask.unsqueeze(0).unsqueeze(0)).squeeze(0).squeeze(0)
+            # Apply Gaussian blur to the inpaint mask
+            blurred_mask = transform(inpaint_mask.unsqueeze(0).unsqueeze(0)).squeeze(0).squeeze(0)
             blurred.append(blurred_mask)
 
-            # Apply the blended mask
-            ret.append(original[i] * (1.0 - blurred_mask[:,:,None]) + result * blurred_mask[:,:,None])
+            # Create the result by blending only the masked area
+            result = original[i].clone()
+            result[y0:y0+cut_height, x0:x0+cut_width] = (
+                original[i][y0:y0+cut_height, x0:x0+cut_width] * (1 - blurred_mask[y0:y0+cut_height, x0:x0+cut_width, None]) +
+                scaled_inpaint * blurred_mask[y0:y0+cut_height, x0:x0+cut_width, None]
+            )
+
+            ret.append(result)
 
         return (torch.stack(ret), torch.stack(blurred))
 
 
 
+
 def scale_mask_and_image(image, mask, width, height):
     h0, w0 = mask.shape
     iy, ix = (mask == 1).nonzero(as_tuple=True)
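
For context, this commit replaces a hard paste of the inpainted patch (blended against a blurred all-ones rectangle over the whole image) with a blend that is confined to the cut-out window and driven by the user's mask, resized to that window. The sketch below mirrors that idea in plain PyTorch/torchvision; the helper name blend_patch, the kernel/sigma defaults, and the tensor shapes are illustrative assumptions, not the node's actual interface.

# Minimal sketch of the window-limited blending scheme (assumed shapes and
# parameters; blend_patch is a hypothetical helper, not part of BrushNet).
import torch
from torchvision.transforms import GaussianBlur

def blend_patch(original, inpaint, mask, y0, x0, kernel=11, sigma=4.0):
    """Paste inpaint into original at (y0, x0), feathered by a blurred copy of mask.

    original: (H, W, C) float tensor, full-size image
    inpaint:  (h, w, C) float tensor, patch produced for the cut-out window
    mask:     (h, w) float tensor in [0, 1], 1 where the patch should win
    Assumes the window (y0:y0+h, x0:x0+w) fits inside the image, as the node
    guarantees by clamping cut_width/cut_height beforehand.
    """
    h, w, _ = inpaint.shape
    H, W = original.shape[:2]

    # Full-size mask that is non-zero only inside the pasted window,
    # then blurred so the patch fades into its surroundings.
    full_mask = torch.zeros((H, W), dtype=original.dtype)
    full_mask[y0:y0 + h, x0:x0 + w] = mask
    blurred = GaussianBlur(kernel, sigma)(full_mask[None, None])[0, 0]

    # Blend only inside the window; pixels outside it stay untouched,
    # which is the key difference from blending over the whole image.
    result = original.clone()
    win = blurred[y0:y0 + h, x0:x0 + w, None]
    result[y0:y0 + h, x0:x0 + w] = original[y0:y0 + h, x0:x0 + w] * (1 - win) + inpaint * win
    return result, blurred

if __name__ == "__main__":
    original = torch.rand(256, 256, 3)
    inpaint = torch.rand(64, 64, 3)
    mask = torch.ones(64, 64)
    out, blurred = blend_patch(original, inpaint, mask, y0=96, x0=96)
    print(out.shape, blurred.shape)  # (256, 256, 3) and (256, 256)

Unlike the previous version of the node's loop, everything outside the pasted window is returned bit-for-bit from the original image, so any feathering halo from the Gaussian blur is limited to the window itself.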