Update app.py
app.py
CHANGED
@@ -129,11 +129,7 @@ models_rbm.generator.eval().requires_grad_(False)
 
 
 def infer(ref_style_file, style_description, caption, progress):
-    global models_rbm, models_b, device
-
-    if sam_model:
-        models_to(sam_model, device="cpu")
-        models_to(sam_model.sam, device="cpu")
+    global models_rbm, models_b, device
 
     if low_vram:
         models_to(models_rbm, device=device, excepts=["generator", "previewer"])
@@ -363,6 +359,8 @@ def infer_compo(style_description, ref_style_file, caption, ref_sub_file, progress):
         return sampled_image  # Return the sampled_image PIL image
 
     finally:
+        models_to(sam_model, device="cpu")
+        models_to(sam_model.sam, device="cpu")
         # Clear CUDA cache
         torch.cuda.empty_cache()
         gc.collect()