alfredplpl committed on
Commit
e775d9e
1 Parent(s): bd32138

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +8 -7
app.py CHANGED
@@ -14,22 +14,23 @@ auth_token=os.environ["ACCESS_TOKEN"]
14
 
15
  scheduler = EulerAncestralDiscreteScheduler.from_pretrained(model_id, subfolder="scheduler", use_auth_token=auth_token)
16
 
17
- pipe_merged = StableDiffusionXLPipeline.from_pretrained(
18
  model_id,
19
  torch_dtype=torch.float16 if torch.cuda.is_available() else torch.float32,
20
  scheduler=scheduler, use_auth_token=auth_token)
 
21
 
22
- pipe_i2i_merged = StableDiffusionXLImg2ImgPipeline.from_pretrained(
23
  model_id,
24
  torch_dtype=torch.float16 if torch.cuda.is_available() else torch.float32,
25
  scheduler=scheduler,
26
  use_auth_token=auth_token
27
  )
28
 
29
- pipe=pipe_merged.to("cuda")
30
- pipe_i2i=pipe_i2i_merged.to("cuda")
31
- pipe.enable_xformers_memory_efficient_attention()
32
- pipe_i2i.enable_xformers_memory_efficient_attention()
33
 
34
  def error_str(error, title="Error"):
35
  return f"""#### {title}
@@ -160,7 +161,7 @@ with gr.Blocks(css=css) as demo:
160
  with gr.Group():
161
 
162
  with gr.Row():
163
- prompt = gr.Textbox(label="Prompt", show_label=False, max_lines=2,placeholder="[your prompt]").style(container=False)
164
  generate = gr.Button(value="Generate")
165
 
166
  image_out = gr.Image(height=1024,width=1024)
 
14
 
15
  scheduler = EulerAncestralDiscreteScheduler.from_pretrained(model_id, subfolder="scheduler", use_auth_token=auth_token)
16
 
17
+ pipe = StableDiffusionXLPipeline.from_pretrained(
18
  model_id,
19
  torch_dtype=torch.float16 if torch.cuda.is_available() else torch.float32,
20
  scheduler=scheduler, use_auth_token=auth_token)
21
+ pipe.unet = torch.compile(pipe.unet, mode="reduce-overhead", fullgraph=True)
22
 
23
+ pipe_i2i = StableDiffusionXLImg2ImgPipeline.from_pretrained(
24
  model_id,
25
  torch_dtype=torch.float16 if torch.cuda.is_available() else torch.float32,
26
  scheduler=scheduler,
27
  use_auth_token=auth_token
28
  )
29
 
30
+ pipe=pipe.to("cuda")
31
+ pipe.enable_model_cpu_offload()
32
+ pipe_i2i=pipe_i2i.to("cuda")
33
+
34
 
35
  def error_str(error, title="Error"):
36
  return f"""#### {title}
 
161
  with gr.Group():
162
 
163
  with gr.Row():
164
+ prompt = gr.Textbox(label="Prompt", show_label=False, max_lines=2,placeholder="[your prompt]")
165
  generate = gr.Button(value="Generate")
166
 
167
  image_out = gr.Image(height=1024,width=1024)