fallenshock committed on
Commit
d7a1a69
1 Parent(s): b28cc7a
Files changed (1) hide show
  1. app.py +32 -24
app.py CHANGED
@@ -2,11 +2,12 @@ import gradio as gr
2
  import torch
3
  from diffusers import FluxPipeline, StableDiffusion3Pipeline
4
  from PIL import Image
 
5
 
6
  import random
7
  import numpy as np
8
  import spaces
9
-
10
  from FlowEdit_utils import FlowEditSD3, FlowEditFLUX
11
 
12
 
@@ -14,10 +15,10 @@ device = "cuda" if torch.cuda.is_available() else "cpu"
14
  # device = "cpu"
15
  # model_type = 'SD3'
16
 
17
- pipe = StableDiffusion3Pipeline.from_pretrained("stabilityai/stable-diffusion-3-medium-diffusers", torch_dtype=torch.float16)
18
- scheduler = pipe.scheduler
19
- pipe = pipe.to(device)
20
- loaded_model = 'SD3'
21
 
22
 
23
  def on_model_change(model_type):
@@ -71,9 +72,13 @@ def FlowEditRun(
71
  n_min: int,
72
  n_avg: int,
73
  seed: int,
 
74
 
75
  ):
76
 
 
 
 
77
  if not len(src_prompt):
78
  raise gr.Error("source prompt cannot be empty")
79
  if not len(tar_prompt):
@@ -237,26 +242,29 @@ with gr.Blocks() as demo:
237
  seed = gr.Number(value=42, label="seed")
238
 
239
 
 
240
 
241
- submit_button.click(
242
- fn=FlowEditRun,
243
- inputs=[
244
- image_src,
245
- model_type,
246
- T_steps,
247
- src_guidance_scale,
248
- tar_guidance_scale,
249
- n_max,
250
- src_prompt,
251
- tar_prompt,
252
- n_min,
253
- n_avg,
254
- seed,
255
- ],
256
- outputs=[
257
- image_tar[0],
258
- ],
259
- )
 
 
260
 
261
  gr.Examples(
262
  label="Examples",
 
2
  import torch
3
  from diffusers import FluxPipeline, StableDiffusion3Pipeline
4
  from PIL import Image
5
+ from typing import Optional
6
 
7
  import random
8
  import numpy as np
9
  import spaces
10
+ import huggingface_hub
11
  from FlowEdit_utils import FlowEditSD3, FlowEditFLUX
12
 
13
 
 
15
  # device = "cpu"
16
  # model_type = 'SD3'
17
 
18
+ # pipe = StableDiffusion3Pipeline.from_pretrained("stabilityai/stable-diffusion-3-medium-diffusers", torch_dtype=torch.float16)
19
+ # scheduler = pipe.scheduler
20
+ # pipe = pipe.to(device)
21
+ loaded_model = 'None'
22
 
23
 
24
  def on_model_change(model_type):
 
72
  n_min: int,
73
  n_avg: int,
74
  seed: int,
75
+ oauth_token: Optional[gr.OAuthToken] = None
76
 
77
  ):
78
 
79
+ if oauth_token is None:
80
+ raise gr.Error("Please login to HF to access SD3 and FLUX models")
81
+
82
  if not len(src_prompt):
83
  raise gr.Error("source prompt cannot be empty")
84
  if not len(tar_prompt):
 
242
  seed = gr.Number(value=42, label="seed")
243
 
244
 
245
+ with gr.Row():
246
 
247
+ submit_button.click(
248
+ fn=FlowEditRun,
249
+ inputs=[
250
+ image_src,
251
+ model_type,
252
+ T_steps,
253
+ src_guidance_scale,
254
+ tar_guidance_scale,
255
+ n_max,
256
+ src_prompt,
257
+ tar_prompt,
258
+ n_min,
259
+ n_avg,
260
+ seed,
261
+ ],
262
+ outputs=[
263
+ image_tar[0],
264
+ ],
265
+ scale=3)
266
+
267
+ gr.LoginButton(value="Login to HF (For SD3 and FLUX access)", scale=1)
268
 
269
  gr.Examples(
270
  label="Examples",