stazizov committed on
Commit
da08473
·
verified ·
1 Parent(s): ad9c1d0

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +20 -20
app.py CHANGED
@@ -22,8 +22,28 @@ if hf_token:
22
  login(token=hf_token)
23
  else:
24
  print("No Hugging Face token found.")
 
 
 
 
 
 
 
 
25
 
 
26
 
 
 
 
 
 
 
 
 
 
 
 
27
  def list_dirs(path):
28
  if path is None or path == "None" or path == "":
29
  return
@@ -124,7 +144,6 @@ def create_demo(
124
  offload: bool = False,
125
  ckpt_dir: str = "",
126
  ):
127
- xflux_pipeline = XFluxPipeline(model_type, device, offload)
128
  checkpoints = sorted(Path(ckpt_dir).glob("*.safetensors"))
129
 
130
  with gr.Blocks() as demo:
@@ -208,25 +227,6 @@ def create_demo(
208
 
209
  return demo
210
 
211
- @dataclass
212
- class Config:
213
- name: str = "flux-dev"
214
- device: str = "cuda" if torch.cuda.is_available() else "cpu"
215
- offload: bool = False
216
- share: bool = False
217
- ckpt_dir: str = "."
218
-
219
- def parse_args() -> Config:
220
- parser = argparse.ArgumentParser(description="Flux")
221
- parser.add_argument("--name", type=str, default="flux-dev", help="Model name")
222
- parser.add_argument("--device", type=str, default="cuda" if torch.cuda.is_available() else "cpu", help="Device to use")
223
- parser.add_argument("--offload", action="store_true", help="Offload model to CPU when not in use")
224
- parser.add_argument("--share", action="store_true", help="Create a public link to your demo")
225
- parser.add_argument("--ckpt_dir", type=str, default=".", help="Folder with checkpoints in safetensors format")
226
-
227
- args = parser.parse_args()
228
- return Config(**vars(args))
229
-
230
  if __name__ == "__main__":
231
  import torch.multiprocessing as mp
232
  mp.set_start_method('spawn', force=True) # Corrected start method for CUDA
 
22
  login(token=hf_token)
23
  else:
24
  print("No Hugging Face token found.")
25
+
26
+ @dataclass
27
+ class Config:
28
+ name: str = "flux-dev"
29
+ device: str = "cuda" if torch.cuda.is_available() else "cpu"
30
+ offload: bool = False
31
+ share: bool = False
32
+ ckpt_dir: str = "."
33
 
34
+ xflux_pipeline = XFluxPipeline(Config.name, Config.device, Config.offload)
35
 
36
+ def parse_args() -> Config:
37
+ parser = argparse.ArgumentParser(description="Flux")
38
+ parser.add_argument("--name", type=str, default="flux-dev", help="Model name")
39
+ parser.add_argument("--device", type=str, default="cuda" if torch.cuda.is_available() else "cpu", help="Device to use")
40
+ parser.add_argument("--offload", action="store_true", help="Offload model to CPU when not in use")
41
+ parser.add_argument("--share", action="store_true", help="Create a public link to your demo")
42
+ parser.add_argument("--ckpt_dir", type=str, default=".", help="Folder with checkpoints in safetensors format")
43
+
44
+ args = parser.parse_args()
45
+ return Config(**vars(args))
46
+
47
  def list_dirs(path):
48
  if path is None or path == "None" or path == "":
49
  return
 
144
  offload: bool = False,
145
  ckpt_dir: str = "",
146
  ):
 
147
  checkpoints = sorted(Path(ckpt_dir).glob("*.safetensors"))
148
 
149
  with gr.Blocks() as demo:
 
227
 
228
  return demo
229
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
230
  if __name__ == "__main__":
231
  import torch.multiprocessing as mp
232
  mp.set_start_method('spawn', force=True) # Corrected start method for CUDA