rajveeritme committed
Commit d7d36a5
1 Parent(s): 77142a3

Update app.py

Files changed (1)
app.py +24 -12
app.py CHANGED
@@ -16,8 +16,18 @@ dtype = torch.bfloat16
 device = "cuda" if torch.cuda.is_available() else "cpu"
 
 taef1 = AutoencoderTiny.from_pretrained("madebyollin/taef1", torch_dtype=dtype).to(device)
-good_vae = AutoencoderKL.from_pretrained("black-forest-labs/FLUX.1-dev", subfolder="vae", torch_dtype=dtype, token=hf_token).to(device)
-pipe = DiffusionPipeline.from_pretrained("black-forest-labs/FLUX.1-dev", torch_dtype=dtype, vae=taef1, token=hf_token).to(device)
+good_vae = AutoencoderKL.from_pretrained(
+    "black-forest-labs/FLUX.1-dev",
+    subfolder="vae",
+    torch_dtype=dtype,
+    token=hf_token
+).to(device)
+pipe = DiffusionPipeline.from_pretrained(
+    "black-forest-labs/FLUX.1-dev",
+    torch_dtype=dtype,
+    vae=taef1,
+    token=hf_token
+).to(device)
 torch.cuda.empty_cache()
 
 MAX_SEED = np.iinfo(np.int32).max
@@ -26,7 +36,16 @@ MAX_IMAGE_SIZE = 2048
 pipe.flux_pipe_call_that_returns_an_iterable_of_images = flux_pipe_call_that_returns_an_iterable_of_images.__get__(pipe)
 
 @spaces.GPU(duration=75)
-def infer(prompt, seed=42, randomize_seed=False, width=1024, height=1024, guidance_scale=3.5, num_inference_steps=28, progress=gr.Progress(track_tqdm=True)):
+def infer(
+    prompt,
+    seed=42,
+    randomize_seed=False,
+    width=1024,
+    height=1024,
+    guidance_scale=3.5,
+    num_inference_steps=28,
+    progress=gr.Progress(track_tqdm=True)
+):
     if randomize_seed:
         seed = random.randint(0, MAX_SEED)
     generator = torch.Generator().manual_seed(seed)
@@ -98,15 +117,8 @@ body {
 }
 """
 
-# Define the dark theme correctly using the 'base' parameter
-dark_theme = gr.themes.Default(
-    primary_hue="blue",
-    neutral_hue="gray",
-    spacing_size="md",
-    font="default",
-    # Use 'base' instead of 'mode' to set the theme to dark
-    base="dark"
-)
+# Define the dark theme using gr.themes.Dark()
+dark_theme = gr.themes.Dark()
 
 with gr.Blocks(theme=dark_theme, css=css) as demo:
 
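
Note on the theme change: depending on the installed Gradio version, gr.themes.Dark may not be exposed as a built-in theme (the documented built-ins include Base, Default, Soft, Monochrome, and Glass). The sketch below is not from this repo's app.py; it is a hedged fallback assuming a Gradio 4.x API, and the specific .set() overrides and palette shades are illustrative only.

import gradio as gr

# Fallback sketch (assumption): approximate a dark look when gr.themes.Dark
# is not available in the installed Gradio version.
if hasattr(gr.themes, "Dark"):
    dark_theme = gr.themes.Dark()  # as used in this commit
else:
    dark_theme = gr.themes.Base(
        primary_hue="blue",
        neutral_hue="gray",
    ).set(
        body_background_fill="*neutral_950",   # dark page background
        body_text_color="*neutral_50",         # light body text
        block_background_fill="*neutral_900",  # dark component blocks
    )

with gr.Blocks(theme=dark_theme) as demo:
    gr.Markdown("Theme smoke test")

# demo.launch()

Another commonly used option, independent of any theme class, is forcing dark mode at load time by appending the ?__theme=dark query parameter to the app URL.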