Commit 5bed75e committed by multimodalart (HF staff)
1 parent: f5eeb4a

Update app.py

Files changed (1):
  app.py: +2 -4
app.py CHANGED
@@ -16,16 +16,14 @@ pipe_15 = DiffusionPipeline.from_pretrained(model_15, vae=vae, scheduler=schedul
 #pipe.enable_model_cpu_offload()
 pipe.enable_vae_tiling()
 
-# Apply hidiffusion with a single line of code.
-apply_hidiffusion(pipe)
-apply_hidiffusion(pipe_15)
-
 @spaces.GPU
 def run_hidiffusion(prompt, negative_prompt="", progress=gr.Progress(track_tqdm=True)):
+    apply_hidiffusion(pipe)
     return pipe(prompt, guidance_scale=7.5, height=2048, width=2048, eta=1.0, negative_prompt=negative_prompt, num_inference_steps=25).images[0]
 
 @spaces.GPU
 def run_hidiffusion_15(prompt, negative_prompt="", progress=gr.Progress(track_tqdm=True)):
+    apply_hidiffusion(pipe_15)
     return pipe_15(prompt, guidance_scale=7.5, height=1024, width=1024, eta=1.0, negative_prompt=negative_prompt, num_inference_steps=25).images[0]
 
 with gr.Blocks() as demo:
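
For reference, a minimal self-contained sketch of the pattern this commit lands in app.py: apply_hidiffusion() is called inside each @spaces.GPU (ZeroGPU) function rather than once at module import time. The function bodies mirror the hunk above; everything else (the model IDs, torch_dtype, .to("cuda"), and the hidiffusion import path) is an assumption added for illustration, since the real app.py builds pipe and pipe_15 with its own vae and scheduler, as the truncated context line in the hunk header shows.

import gradio as gr
import spaces
import torch
from diffusers import DiffusionPipeline
from hidiffusion import apply_hidiffusion

# Hypothetical pipeline setup; the actual app.py passes its own vae/scheduler
# (see "DiffusionPipeline.from_pretrained(model_15, vae=vae, scheduler=..." above).
pipe = DiffusionPipeline.from_pretrained(
    "stabilityai/stable-diffusion-xl-base-1.0", torch_dtype=torch.float16
).to("cuda")
pipe_15 = DiffusionPipeline.from_pretrained(
    "runwayml/stable-diffusion-v1-5", torch_dtype=torch.float16
).to("cuda")
pipe.enable_vae_tiling()

@spaces.GPU
def run_hidiffusion(prompt, negative_prompt="", progress=gr.Progress(track_tqdm=True)):
    # Patch the SDXL pipeline inside the GPU-allocated call (this commit's change).
    apply_hidiffusion(pipe)
    return pipe(prompt, guidance_scale=7.5, height=2048, width=2048, eta=1.0,
                negative_prompt=negative_prompt, num_inference_steps=25).images[0]

@spaces.GPU
def run_hidiffusion_15(prompt, negative_prompt="", progress=gr.Progress(track_tqdm=True)):
    # Patch the SD 1.5 pipeline inside the GPU-allocated call.
    apply_hidiffusion(pipe_15)
    return pipe_15(prompt, guidance_scale=7.5, height=1024, width=1024, eta=1.0,
                   negative_prompt=negative_prompt, num_inference_steps=25).images[0]

As read from the diff, the net effect is that the HiDiffusion patch is applied on every generation call inside the ZeroGPU-allocated context instead of once at import time; the commit message itself does not state a rationale.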