Update app.py
app.py CHANGED
@@ -32,6 +32,7 @@ JS = """function () {
|
|
32 |
|
33 |
if torch.cuda.is_available():
|
34 |
pipe = FluxPipeline.from_pretrained(model, torch_dtype=torch.bfloat16)
|
|
|
35 |
|
36 |
|
37 |
def scrape_lora_link(url):
|
@@ -59,13 +60,14 @@ def scrape_lora_link(url):
|
|
59 |
return None
|
60 |
|
61 |
def enable_lora(lora_scale, lora_in, lora_add):
|
|
|
62 |
if not lora_in and not lora_add:
|
63 |
return
|
64 |
else:
|
65 |
if lora_add:
|
66 |
lora_in = lora_add
|
67 |
url = f'https://huggingface.co/{lora_in}/tree/main'
|
68 |
-
lora_name = scrape_lora_link(url)
|
69 |
pipe.load_lora_weights(lora_in, weight_name=lora_name)
|
70 |
pipe.fuse_lora(lora_scale=lora_scale)
|
71 |
|
|
|
32 |
|
33 |
if torch.cuda.is_available():
|
34 |
pipe = FluxPipeline.from_pretrained(model, torch_dtype=torch.bfloat16)
|
35 |
+
pipe.enable_model_cpu_offload()
|
36 |
|
37 |
|
38 |
def scrape_lora_link(url):
|
|
|
60 |
return None
|
61 |
|
62 |
def enable_lora(lora_scale, lora_in, lora_add):
|
63 |
+
pipe.unload_lora_weights()
|
64 |
if not lora_in and not lora_add:
|
65 |
return
|
66 |
else:
|
67 |
if lora_add:
|
68 |
lora_in = lora_add
|
69 |
url = f'https://huggingface.co/{lora_in}/tree/main'
|
70 |
+
lora_name = scrape_lora_link(url)
|
71 |
pipe.load_lora_weights(lora_in, weight_name=lora_name)
|
72 |
pipe.fuse_lora(lora_scale=lora_scale)
|
73 |
|
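
For context on the two added calls, here is a minimal standalone sketch of the pattern this commit adopts, assuming diffusers' FluxPipeline. The model and LoRA repo ids below are placeholders, and app.py resolves the weight filename through its own scrape_lora_link helper rather than hard-coding it:

import torch
from diffusers import FluxPipeline

# Placeholder ids for illustration only; app.py defines its own `model`
# and receives the LoRA repo id from the UI.
model = "black-forest-labs/FLUX.1-dev"
lora_repo = "some-user/some-flux-lora"

pipe = FluxPipeline.from_pretrained(model, torch_dtype=torch.bfloat16)
# Added in this commit: keep sub-models on CPU and move each to the GPU only
# while it runs, trading some speed for a lower peak VRAM footprint.
pipe.enable_model_cpu_offload()

# Added in this commit: drop any LoRA weights currently loaded on the pipeline
# before loading a new set, so repeated calls start from a clean state.
pipe.unload_lora_weights()
pipe.load_lora_weights(lora_repo, weight_name="lora.safetensors")  # app.py looks this filename up via scrape_lora_link(url)
pipe.fuse_lora(lora_scale=0.9)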