roubaofeipi committed
Commit: d122a2a
Parent(s): 9166377

Update app.py

Files changed (1)
  1. app.py +20 -2
app.py CHANGED
@@ -12,6 +12,8 @@ import random
 import argparse
 import gradio as gr
 import spaces
+from huggingface_hub import hf_hub_url
+import requests
 
 def parse_args():
     parser = argparse.ArgumentParser()
@@ -200,12 +202,28 @@ with gr.Blocks() as demo:
     polish_button.click(get_image, inputs=[height, width, seed, prompt, cfg, timesteps, stage_a_tiled], outputs=output_img)
     polish_button.click(clear_image, inputs=[], outputs=output_img)
 
-
+def download_model():
+    urls = [
+        'https://huggingface.co/stabilityai/StableWurst/resolve/main/stage_a.safetensors',
+        'https://huggingface.co/stabilityai/StableWurst/resolve/main/previewer.safetensors',
+        'https://huggingface.co/stabilityai/StableWurst/resolve/main/effnet_encoder.safetensors',
+        'https://huggingface.co/stabilityai/StableWurst/resolve/main/stage_b_lite_bf16.safetensors',
+        'https://huggingface.co/stabilityai/StableWurst/resolve/main/stage_c_bf16.safetensors',
+        'https://huggingface.co/roubaofeipi/UltraPixel/blob/main/ultrapixel_t2i.safetensors'
+    ]
+    for file_url in urls:
+        # Fetch each checkpoint and write it into the local 'models' directory.
+        response = requests.get(file_url)
+        if response.status_code == 200:
+            file_path = os.path.join('models', file_url.split('/')[-1])
+            with open(file_path, "wb") as f:
+                f.write(response.content)
+            print('download successful! ', file_path)
 if __name__ == "__main__":
 
     args = parse_args()
     device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
-
+    download_model()
     config_file = args.config_c
     with open(config_file, "r", encoding="utf-8") as file:
         loaded_config = yaml.safe_load(file)
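
Note that download_model() buffers each multi-gigabyte .safetensors file fully in memory via response.content, the last URL points at a /blob/ page (which serves HTML) rather than a /resolve/ download, and the hf_hub_url import added here is never used. A minimal sketch of the same step built on huggingface_hub's hf_hub_download is shown below; it is not part of the commit, and the (repo_id, filename) pairs and the "models" target directory are inferred from the URLs in the diff.

import os
from huggingface_hub import hf_hub_download

# (repo_id, filename) pairs inferred from the URLs listed in the commit.
CHECKPOINTS = [
    ("stabilityai/StableWurst", "stage_a.safetensors"),
    ("stabilityai/StableWurst", "previewer.safetensors"),
    ("stabilityai/StableWurst", "effnet_encoder.safetensors"),
    ("stabilityai/StableWurst", "stage_b_lite_bf16.safetensors"),
    ("stabilityai/StableWurst", "stage_c_bf16.safetensors"),
    ("roubaofeipi/UltraPixel", "ultrapixel_t2i.safetensors"),
]

def download_model(target_dir: str = "models") -> None:
    # hf_hub_download streams each file to disk and reuses the Hub cache,
    # so multi-GB checkpoints are never held in memory.
    os.makedirs(target_dir, exist_ok=True)
    for repo_id, filename in CHECKPOINTS:
        local_path = hf_hub_download(repo_id=repo_id, filename=filename, local_dir=target_dir)
        print("download successful!", local_path)

Equivalently, hf_hub_url(repo_id, filename) — the helper this commit imports — can build the /resolve/ URLs, combined with requests.get(url, stream=True) and response.iter_content() to write the files in chunks instead of via response.content.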