John6666 committed on
Commit 18f4eda
1 Parent(s): 084e559

Upload mod.py

Files changed (1)
  1. mod.py +2 -0
mod.py CHANGED
@@ -13,11 +13,13 @@ from env import num_cns, model_trigger, HF_TOKEN, CIVITAI_API_KEY, DOWNLOAD_LORA
 from modutils import download_things
 
 
+subprocess.run("rm -rf /data-nvme/zerogpu-offload/*", env={}, shell=True)
 subprocess.run('pip install flash-attn --no-build-isolation', env={'FLASH_ATTENTION_SKIP_CUDA_BUILD': "TRUE"}, shell=True)
 #subprocess.run('pip cache purge', shell=True)
 device = "cuda" if torch.cuda.is_available() else "cpu"
 torch.set_grad_enabled(False)
 
+
 control_images = [None] * num_cns
 control_modes = [-1] * num_cns
 control_scales = [0] * num_cns