Spaces:
Runtime error
Runtime error
osanseviero
committed on
Commit
•
2187818
1
Parent(s):
4bfdae4
Update app.py
Browse files
app.py
CHANGED
@@ -7,9 +7,9 @@ import numpy as np
|
|
7 |
pipeline = LatentDiffusionUncondPipeline.from_pretrained("CompVis/latent-diffusion-celeba-256")
|
8 |
|
9 |
|
10 |
-
def predict(seed):
|
11 |
generator = torch.manual_seed(seed)
|
12 |
-
image = pipeline(generator=generator, num_inference_steps=
|
13 |
image_processed = image.cpu().permute(0, 2, 3, 1)
|
14 |
image_processed = (image_processed + 1.0) * 127.5
|
15 |
image_processed = image_processed.clamp(0, 255).numpy().astype(np.uint8)
|
@@ -18,6 +18,7 @@ def predict(seed):
|
|
18 |
gr.Interface(
|
19 |
predict,
|
20 |
inputs=[
|
|
|
21 |
gr.inputs.Slider(0, 1000, label='Seed', default=42),
|
22 |
],
|
23 |
outputs="image",
|
|
|
7 |
pipeline = LatentDiffusionUncondPipeline.from_pretrained("CompVis/latent-diffusion-celeba-256")
|
8 |
|
9 |
|
10 |
+
def predict(steps=1, seed):
|
11 |
generator = torch.manual_seed(seed)
|
12 |
+
image = pipeline(generator=generator, num_inference_steps=steps)["sample"]
|
13 |
image_processed = image.cpu().permute(0, 2, 3, 1)
|
14 |
image_processed = (image_processed + 1.0) * 127.5
|
15 |
image_processed = image_processed.clamp(0, 255).numpy().astype(np.uint8)
|
|
|
18 |
gr.Interface(
|
19 |
predict,
|
20 |
inputs=[
|
21 |
+
gr.inputs.Slider(1, 10, label='Inference Steps', default=1),
|
22 |
gr.inputs.Slider(0, 1000, label='Seed', default=42),
|
23 |
],
|
24 |
outputs="image",
|