Spaces: Running on Zero
jychen0828 committed: Update app.py
app.py CHANGED
@@ -33,7 +33,7 @@ if not path.exists(cache_path):
 pipe = FluxPipeline.from_pretrained("black-forest-labs/FLUX.1-dev", torch_dtype=torch.bfloat16)
 pipe.load_lora_weights(hf_hub_download("RED-AIGC/TDD", "FLUX.1-dev_tdd_lora_weights.safetensors"),adapter_name="TDD")
 pipe.load_lora_weights(hf_hub_download("RED-AIGC/TDD", "FLUX.1-dev_tdd_adv_lora_weights.safetensors"),adapter_name="TDD_adv")
-pipe.fuse_lora(lora_scale=0.125)
+# pipe.fuse_lora(lora_scale=0.125)
 pipe.to("cuda")

 css = """
@@ -52,8 +52,7 @@ def process_image(prompt,acc,height, width, steps, scales, seed):
     global loaded_acc
     if loaded_acc != acc:
         #pipe.load_lora_weights(ACC_lora[acc], adapter_name=acc)
-        pipe.set_adapters([acc], adapter_weights=[
-        pipe.fuse_lora(lora_scale=0.125)
+        pipe.set_adapters([acc], adapter_weights=[0.125])
         print(pipe.get_active_adapters())
         loaded_acc = acc
     with torch.inference_mode(), torch.autocast("cuda", dtype=torch.bfloat16), timer("inference"):
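For context, the commit stops fusing a single LoRA into the base weights at startup (`fuse_lora(lora_scale=0.125)` is commented out) and instead applies the scale when an adapter is activated per request, via `set_adapters([acc], adapter_weights=[0.125])`. Keeping the two adapters ("TDD" and "TDD_adv") unfused is what lets the Space switch between them without reloading or re-fusing weights. Below is a minimal standalone sketch of that pattern using diffusers' PEFT integration; the model ID, LoRA repo/filenames, adapter names, 0.125 scale, and the `loaded_acc` cache come from the diff, while the `activate` helper, the prompt, and the generation parameters (steps, guidance, resolution) are illustrative placeholders, not the Space's exact code.

```python
# Sketch of the adapter-switching pattern from this commit (not the full app.py).
import torch
from diffusers import FluxPipeline
from huggingface_hub import hf_hub_download

pipe = FluxPipeline.from_pretrained(
    "black-forest-labs/FLUX.1-dev", torch_dtype=torch.bfloat16
)
# Register both TDD LoRAs under distinct adapter names instead of fusing one of them.
pipe.load_lora_weights(
    hf_hub_download("RED-AIGC/TDD", "FLUX.1-dev_tdd_lora_weights.safetensors"),
    adapter_name="TDD",
)
pipe.load_lora_weights(
    hf_hub_download("RED-AIGC/TDD", "FLUX.1-dev_tdd_adv_lora_weights.safetensors"),
    adapter_name="TDD_adv",
)
pipe.to("cuda")

loaded_acc = None  # currently active adapter, cached to avoid redundant switches


def activate(acc: str) -> None:
    """Switch the active LoRA only when the requested adapter changes."""
    global loaded_acc
    if loaded_acc != acc:
        # Apply the 0.125 scale at activation time; this replaces the old
        # one-time pipe.fuse_lora(lora_scale=0.125) call.
        pipe.set_adapters([acc], adapter_weights=[0.125])
        print(pipe.get_active_adapters())
        loaded_acc = acc


# Hypothetical usage: generation parameters here are placeholders.
activate("TDD_adv")
with torch.inference_mode(), torch.autocast("cuda", dtype=torch.bfloat16):
    image = pipe(
        "a photo of a cat",
        num_inference_steps=8,
        guidance_scale=2.0,
        height=1024,
        width=1024,
    ).images[0]
```

The trade-off is the usual one for LoRA serving: `fuse_lora` bakes a single adapter into the base weights (slightly faster inference, but awkward to undo or swap), while `set_adapters` keeps the adapters separate so a per-request `acc` choice can toggle between "TDD" and "TDD_adv" at a fixed weight.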