max committed
Commit 3746f14
1 Parent(s): 89023a7
added tiled option

app.py CHANGED
@@ -283,21 +283,94 @@ class Predictor:
         minpainted = mask_to_alpha(inpainted, m)
         return inpainted, minpainted, ImageOps.invert(m)
 
+    def predict_tiled(
+        self,
+        img: Image.Image,
+        tosize=(512, 512),
+        border=5,
+        seed=42,
+        size=0.5,
+        model='places2',
+    ) -> Image:
+        i, morig = pad(
+            img,
+            size=size,  # (328, 328),
+            tosize=tosize,
+            border=border
+        )
+        i.putalpha(morig)
+        img = i
+        img.save('0.png')
+        assert img.width == img.height
+        assert img.width > 512 and img.width < 512*2
+
+        def tile_coords(image, n=2, tile_size=512):
+            assert image.width == image.height
+            offsets = np.linspace(0, image.width - tile_size, n).astype(int)
+            for i in range(n):
+                for j in range(n):
+                    left = offsets[j]
+                    upper = offsets[i]
+                    right = left + tile_size
+                    lower = upper + tile_size
+                    # tile = image.crop((left, upper, right, lower))
+                    yield [left, upper, right, lower]
+
+        for ix, tc in enumerate(tile_coords(img, n=2)):
+            i = img.crop(tc)
+            i.save(f't{ix}.png')
+            m = i.getchannel('A')
+
+            """Run a single prediction on the model"""
+            imgs = self.models[model].generate_images2(
+                dpath=[i.resize((512, 512), resample=Image.Resampling.NEAREST)],
+                mpath=[m.resize((512, 512), resample=Image.Resampling.NEAREST)],
+                seed=seed,
+            )
+            img_op_raw = imgs[0].convert('RGBA')
+            # img_op_raw = img_op_raw.resize(tosize, resample=Image.Resampling.NEAREST)
+            inpainted = img_op_raw.copy()
+
+            # paste original image to remove inpainting/scaling artifacts
+            inpainted = blend(
+                i,
+                inpainted,
+                1-(np.array(m) / 255)
+            )
+            inpainted.save(f't{ix}_op.png')
+            minpainted = mask_to_alpha(inpainted, m)
+            # continue with the partially inpainted image:
+            # since the tiles overlap, the next tile will contain (possibly inpainted) parts of the previous tile
+            img.paste(inpainted, tc)
+
+        # restore original alpha channel
+        img.putalpha(morig)
+        return img.convert('RGB'), img, ImageOps.invert(img.getchannel('A'))
 
 predictor = Predictor()
 
 # %%
 
 
-def _outpaint(img, tosize, border, seed, size, model):
-    img_op = predictor.predict(
-        img,
-        border=border,
-        seed=seed,
-        tosize=(tosize, tosize),
-        size=float(size),
-        model=model,
-    )
+def _outpaint(img, tosize, border, seed, size, model, tiled):
+    if tiled:
+        img_op = predictor.predict_tiled(
+            img,
+            border=border,
+            seed=seed,
+            tosize=(tosize, tosize),
+            size=float(size),
+            model=model,
+        )
+    else:
+        img_op = predictor.predict(
+            img,
+            border=border,
+            seed=seed,
+            tosize=(tosize, tosize),
+            size=float(size),
+            model=model,
+        )
     return img_op
 # %%
 
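For reference, the tiling scheme added above pads the input to a square canvas somewhere between 512 and 1024 px, walks a 2x2 grid of overlapping 512x512 tiles, inpaints each tile, and pastes the result back before cropping the next tile, so the overlap region of later tiles already contains earlier results. The sketch below reproduces that flow outside the app with a placeholder fill step instead of the model call; tile_boxes, fake_inpaint, and the 768x768 test canvas are illustrative assumptions, not code from app.py.

# Minimal sketch of the 2x2 overlapping-tile flow (illustrative only, placeholder model).
import numpy as np
from PIL import Image

def tile_boxes(side, n=2, tile_size=512):
    # e.g. side=768 -> offsets [0, 256], so adjacent tiles overlap by 256 px
    offsets = np.linspace(0, side - tile_size, n).astype(int)
    for upper in offsets:
        for left in offsets:
            yield (int(left), int(upper), int(left) + tile_size, int(upper) + tile_size)

def fake_inpaint(tile_rgba):
    # stand-in for the inpainting model: fill fully transparent pixels with gray
    arr = np.array(tile_rgba)
    hole = arr[..., 3] == 0
    arr[hole, :3] = 128
    return Image.fromarray(arr, 'RGBA')

def outpaint_tiled_sketch(img_rgba):
    assert img_rgba.width == img_rgba.height and 512 < img_rgba.width < 1024
    canvas = img_rgba.copy()
    for box in tile_boxes(canvas.width):
        tile = canvas.crop(box)
        alpha = np.array(tile.getchannel('A')) / 255.0        # 1 = known pixel
        filled = fake_inpaint(tile)
        # keep known pixels from the tile, take generated pixels elsewhere
        merged = np.array(tile) * alpha[..., None] + np.array(filled) * (1 - alpha[..., None])
        patched = Image.fromarray(merged.astype(np.uint8), 'RGBA')
        patched.putalpha(255)                                  # overlap now counts as known
        canvas.paste(patched, box)
    return canvas.convert('RGB')

if __name__ == '__main__':
    base = Image.new('RGBA', (768, 768), (0, 0, 0, 0))
    base.paste(Image.new('RGBA', (512, 512), (200, 50, 50, 255)), (128, 128))
    outpaint_tiled_sketch(base).save('tiled_sketch.png')
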
@@ -330,6 +403,7 @@ with gr.Blocks() as demo:
     border = gc.Slider(1, 50, 0, step=1, label='border to crop from the image before outpainting')
     seed = gc.Slider(1, 65536, 10, step=1, label='seed')
     size = gc.Slider(0, 1, .5, step=0.01,label='scale of the image before outpainting')
+    tiled = gc.Checkbox(label='tiled: run the network with 4 tiles of size 512x512 . only usable if output size >512 and <1024', value=False)
 
     model = gc.Dropdown(
         choices=['places2',
@@ -346,7 +420,7 @@ with gr.Blocks() as demo:
 
     btn.click(
         fn=_outpaint,
-        inputs=[searchimage, to_size, border, seed, size, model],
+        inputs=[searchimage, to_size, border, seed, size, model,tiled],
         outputs=[outwithoutalpha, out, mask])
 
 
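On the UI side, the change is only a gc.Checkbox plus one extra entry in the click handler's inputs list. As a standalone illustration of that wiring pattern (using plain gr.* names rather than the app's gc alias, and a dummy handler in place of _outpaint), a minimal Blocks demo could look like this:

# Hypothetical, self-contained sketch of wiring a checkbox into a click handler.
import gradio as gr

def handler(image, tiled):
    # stand-in for _outpaint: just report which code path would run
    return f"tiled={tiled}, image={'set' if image is not None else 'missing'}"

with gr.Blocks() as demo:
    image = gr.Image(type='pil', label='input image')
    tiled = gr.Checkbox(label='tiled: run the network on 4 overlapping 512x512 tiles', value=False)
    btn = gr.Button('outpaint')
    out = gr.Textbox(label='result')
    # every component listed in `inputs` is passed positionally to the handler
    btn.click(fn=handler, inputs=[image, tiled], outputs=[out])

if __name__ == '__main__':
    demo.launch()
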
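The tiled path can also be exercised without the UI. The call below assumes app.py is importable as a module named app, that the model weights Predictor loads are available, and that input.png exists; it is a sketch of the invocation, not a tested command.

# Sketch: driving the new tiled code path directly (assumptions noted above).
from PIL import Image
from app import _outpaint

img = Image.open('input.png')
# the 2x2 tiling asserts require the padded canvas to be strictly between 512 and 1024 px
rgb, rgba, mask = _outpaint(img, tosize=768, border=5, seed=42,
                            size=0.5, model='places2', tiled=True)
rgb.save('outpainted.png')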