Spaces: Running on Zero

Commit 8937a7d
Parent(s): 0a94726
Update app.py

app.py CHANGED
@@ -35,12 +35,6 @@ def inference(
     if randomize_seed:
         seed = random.randint(0, MAX_SEED)
     generator = torch.Generator(device=device).manual_seed(seed)
-
-    progress(0, "Starting image generation...")
-
-    for i in range(1, num_inference_steps + 1):
-        if i % (num_inference_steps // 10) == 0:
-            progress(i / num_inference_steps * 100, f"Processing step {i} of {num_inference_steps}...")
 
     image = pipeline(
         prompt=prompt,
@@ -52,8 +46,6 @@ def inference(
         joint_attention_kwargs={"scale": lora_scale},
     ).images[0]
 
-    progress(100, "Completed!")
-
     return image, seed
 
 
@@ -142,9 +134,15 @@ with gr.Blocks(css=css) as demo:
                     value=1.0,
                 )
 
-        gr.Examples(
+        gr.Examples(
+            examples=examples,
+            fn=inference,
+            inputs=[prompt],
+            outputs=[Image.open("./image.jpg"), seed],
+            cache_examples="lazy"
+        )
 
-        gr.Markdown("
+        gr.Markdown("### Disclaimer\nFree of use, but both the dataset that FLUX has been fine-tuned on, as well as the FLUX.1-dev model are licensed under a non-commercial license.")
 
     gr.on(
         triggers=[run_button.click, prompt.submit],
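
For reference, the block removed from inference() stepped a progress callback in a fixed loop before the pipeline was ever called, so it did not reflect real denoising progress. Below is a minimal sketch, assuming a recent Gradio and diffusers setup, of how actual per-step progress can be surfaced instead with gr.Progress(track_tqdm=True); the checkpoint id, defaults, and UI layout here are illustrative, not this Space's code.

import gradio as gr
import torch
from diffusers import DiffusionPipeline

# Illustrative checkpoint; the Space loads its own LoRA-tuned FLUX pipeline.
device = "cuda" if torch.cuda.is_available() else "cpu"
pipeline = DiffusionPipeline.from_pretrained(
    "black-forest-labs/FLUX.1-dev", torch_dtype=torch.bfloat16
).to(device)

def inference(prompt, num_inference_steps=28, progress=gr.Progress(track_tqdm=True)):
    # track_tqdm=True forwards the pipeline's internal tqdm step counter to the
    # Gradio UI, so no hand-written progress loop is needed around the call.
    return pipeline(prompt=prompt, num_inference_steps=num_inference_steps).images[0]

with gr.Blocks() as demo:
    prompt = gr.Textbox(label="Prompt")
    result = gr.Image(label="Result")
    gr.Button("Run").click(inference, inputs=[prompt], outputs=[result])

demo.launch()

The added gr.Examples block relies on cache_examples="lazy", which in recent Gradio releases runs fn for an example and caches its output the first time that example is selected, rather than precomputing every example at startup.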