Spaces: Running on Zero
Commit
•
bf027a7
1
Parent(s):
d85302e
Update app.py
Browse files
app.py
CHANGED
@@ -97,7 +97,6 @@ def generate_image(prompt_mash, steps, seed, cfg_scale, width, height, lora_scal
|
|
97 |
):
|
98 |
yield img
|
99 |
|
100 |
-
@spaces.GPU(duration=70)
|
101 |
def generate_image_to_image(prompt_mash, image_input_path, image_strength, steps, cfg_scale, width, height, lora_scale, seed):
|
102 |
generator = torch.Generator(device="cuda").manual_seed(seed)
|
103 |
pipe_i2i.to("cuda")
|
@@ -116,6 +115,7 @@ def generate_image_to_image(prompt_mash, image_input_path, image_strength, steps
|
|
116 |
).images[0]
|
117 |
return final_image
|
118 |
|
|
|
119 |
def run_lora(prompt, image_input, image_strength, cfg_scale, steps, selected_index, randomize_seed, seed, width, height, lora_scale, progress=gr.Progress(track_tqdm=True)):
|
120 |
if selected_index is None:
|
121 |
raise gr.Error("You must select a LoRA before proceeding.")
|
|
|
97 |
):
|
98 |
yield img
|
99 |
|
|
|
100 |
def generate_image_to_image(prompt_mash, image_input_path, image_strength, steps, cfg_scale, width, height, lora_scale, seed):
|
101 |
generator = torch.Generator(device="cuda").manual_seed(seed)
|
102 |
pipe_i2i.to("cuda")
|
|
|
115 |
).images[0]
|
116 |
return final_image
|
117 |
|
118 |
+
@spaces.GPU(duration=70)
|
119 |
def run_lora(prompt, image_input, image_strength, cfg_scale, steps, selected_index, randomize_seed, seed, width, height, lora_scale, progress=gr.Progress(track_tqdm=True)):
|
120 |
if selected_index is None:
|
121 |
raise gr.Error("You must select a LoRA before proceeding.")
|