nroggendorff committed on
Commit
4da0e12
·
verified ·
1 Parent(s): 97b0d60

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +2 -2
app.py CHANGED
@@ -9,13 +9,13 @@ pipeline = FluxPipeline.from_pretrained("black-forest-labs/FLUX.1-dev", torch_dt
9
 
10
  @spaces.GPU(duration=190)
11
  def generate(prompt, negative_prompt, width, height, sample_steps):
12
- return pipeline(prompt=f"{prompt}\nDO NOT INCLUDE {negative_prompt} FOR ANY REASON", width=width, height=height, num_inference_steps=sample_steps, generator=torch.Generator("cpu").manual_seed(127), guidance_scale=7).images[0]
13
 
14
  with gr.Blocks() as interface:
15
  with gr.Column():
16
  with gr.Row():
17
  with gr.Column():
18
- prompt = gr.Textbox(label="Prompt", info="What do you want?", value="Keanu Reeves holding an extravagant sign reading 'Hello, world!', 32k HDR, paparazzi", lines=4, interactive=True)
19
  negative_prompt = gr.Textbox(label="Negative Prompt", info="What do you want to exclude from the image?", value="ugly, low quality", lines=4, interactive=True)
20
  with gr.Column():
21
  generate_button = gr.Button("Generate")
 
9
 
10
  @spaces.GPU(duration=190)
11
  def generate(prompt, negative_prompt, width, height, sample_steps):
12
+ return pipeline(prompt=f"{prompt}\nDO NOT INCLUDE {negative_prompt}", width=width, height=height, num_inference_steps=sample_steps, generator=torch.Generator("cpu").manual_seed(127), guidance_scale=7).images[0]
13
 
14
  with gr.Blocks() as interface:
15
  with gr.Column():
16
  with gr.Row():
17
  with gr.Column():
18
+ prompt = gr.Textbox(label="Prompt", info="What do you want?", value="Keanu Reeves holding a neon sign reading 'Hello, world!', 32k HDR, paparazzi", lines=4, interactive=True)
19
  negative_prompt = gr.Textbox(label="Negative Prompt", info="What do you want to exclude from the image?", value="ugly, low quality", lines=4, interactive=True)
20
  with gr.Column():
21
  generate_button = gr.Button("Generate")