Spaces: Running on Zero
Update app.py
app.py
CHANGED
@@ -145,16 +145,17 @@ def generate(
     seed = int(randomize_seed_fn(seed, randomize_seed))
     generator = torch.Generator().manual_seed(seed)
 
-    if not use_negative_prompt:
-        negative_prompt = None
+    if not use_negative_prompt:
+        negative_prompt = None  # type: ignore
     prompt, negative_prompt = apply_style(style, prompt, negative_prompt)
 
     images = pipe(
         prompt=prompt,
+        negative_prompt=negative_prompt,
         width=width,
         height=height,
         negative_prompt=negative_prompt,
-        guidance_scale=
+        guidance_scale=0,
         num_inference_steps=num_inference_steps,
         generator=generator,
         num_images_per_prompt=NUM_IMAGES_PER_PROMPT,
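For context, below is a minimal sketch of how the changed generate() body could sit inside a working script. It is a sketch under assumptions, not the Space's actual code: the checkpoint name is a placeholder, the pipeline class is assumed to be a diffusers text-to-image pipeline, and randomize_seed_fn / apply_style are simplified stand-ins for the helpers the hunk references. The relevant behavior is that guidance_scale=0 turns classifier-free guidance off in diffusers pipelines, which is why the negative prompt can be dropped when use_negative_prompt is false. Note that the new side of the diff passes negative_prompt to pipe() twice; Python rejects a repeated keyword argument, so the sketch passes it once.

# Minimal, self-contained sketch; names marked "stand-in" or "placeholder"
# are assumptions, not taken from this diff.
import random

import torch
from diffusers import AutoPipelineForText2Image

MAX_SEED = 2**32 - 1
NUM_IMAGES_PER_PROMPT = 1

# Placeholder checkpoint; the real model used by the Space is not visible in this hunk.
pipe = AutoPipelineForText2Image.from_pretrained(
    "stabilityai/sdxl-turbo", torch_dtype=torch.float16
).to("cuda")


def randomize_seed_fn(seed: int, randomize_seed: bool) -> int:
    # Stand-in for the helper referenced in the diff: pick a random seed on request.
    return random.randint(0, MAX_SEED) if randomize_seed else seed


def apply_style(style: str, prompt: str, negative_prompt: str | None):
    # Stand-in: the real helper merges a style template into both prompts.
    return prompt, negative_prompt or ""


def generate(prompt, negative_prompt="", use_negative_prompt=False,
             style="(No style)", seed=0, randomize_seed=True,
             width=1024, height=1024, num_inference_steps=4):
    seed = int(randomize_seed_fn(seed, randomize_seed))
    generator = torch.Generator().manual_seed(seed)

    if not use_negative_prompt:
        negative_prompt = None  # type: ignore
    prompt, negative_prompt = apply_style(style, prompt, negative_prompt)

    images = pipe(
        prompt=prompt,
        negative_prompt=negative_prompt,
        width=width,
        height=height,
        # guidance_scale=0 disables classifier-free guidance, so the negative
        # prompt has no effect on the result anyway.
        guidance_scale=0,
        num_inference_steps=num_inference_steps,
        generator=generator,
        num_images_per_prompt=NUM_IMAGES_PER_PROMPT,
    ).images
    return images, seed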