lang03383 committed on
Commit 4f3b6a3
1 Parent(s): 4d9c96e

Create downloading_en.py

Files changed (1)
  1. sd_yun/downloading_en.py +607 -0
sd_yun/downloading_en.py ADDED
@@ -0,0 +1,607 @@
##~ DOWNLOADING CODE | BY: ANXETY ~##

from directory_setup import *
from models_data import model_list, vae_list, controlnet_list

import os
import re
import time
import json
import shutil
import zipfile
import requests
import subprocess
from datetime import timedelta
from subprocess import getoutput
from IPython.utils import capture
from IPython.display import clear_output
from urllib.parse import urlparse, parse_qs


# Setup Env
env = os.getenv('ENV_NAME')
root_path = os.getenv('ROOT_PATH')
webui_path = os.getenv('WEBUI_PATH')
free_plan = os.getenv('FREE_PLAN')

UI = os.getenv('SDW_UI')
OLD_UI = os.getenv('SDW_OLD_UI')

os.chdir(root_path)

# ============ loading settings V4 =============
def load_settings(path):
    if os.path.exists(path):
        with open(path, 'r') as file:
            return json.load(file)
    return {}

settings = load_settings(f'{root_path}/settings.json')

VARIABLES = [
    'model', 'model_num', 'inpainting_model',
    'vae', 'vae_num', 'latest_webui', 'latest_exstensions',
    'change_webui', 'detailed_download', 'controlnet',
    'controlnet_num', 'commit_hash', 'huggingface_token',
    'ngrok_token', 'zrok_token', 'commandline_arguments',
    'Model_url', 'Vae_url', 'LoRA_url', 'Embedding_url',
    'Extensions_url', 'custom_file_urls'
]

locals().update({key: settings.get(key) for key in VARIABLES})

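# A minimal settings.json sketch for reference - keys mirror VARIABLES above,
# values below are purely illustrative (types inferred from how they are used later):
#   {
#     "model": "none", "model_num": "1,2", "inpainting_model": false,
#     "vae": "none", "vae_num": "", "latest_webui": true, "latest_exstensions": true,
#     "change_webui": "A1111", "detailed_download": "off",
#     "controlnet": "none", "controlnet_num": "", "commit_hash": "",
#     "huggingface_token": "", "ngrok_token": "", "zrok_token": "",
#     "commandline_arguments": "", "Model_url": "", "Vae_url": "", "LoRA_url": "",
#     "Embedding_url": "", "Extensions_url": "", "custom_file_urls": ""
#   }
# Missing keys simply resolve to None through settings.get(key).
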
# ================ LIBRARIES V2 ================
flag_file = f"{root_path}/libraries_installed.txt"

if not os.path.exists(flag_file):
    print("💿 Installing the libraries, it's going to take a while:\n")

    install_lib = {
        # "aria2": "apt -y install aria2",
        "aria2": "pip install aria2",
        "localtunnel": "npm install -g localtunnel",
    }
    if controlnet != 'none':
        install_lib["insightface"] = "pip install insightface"

    additional_libs = {
        "Google Colab": {
            "xformers": "pip install xformers==0.0.27 --no-deps"
        },
        "Kaggle": {
            "xformers": "pip install xformers==0.0.26.post1",
            # "torch": "pip install torch==2.1.2+cu121 torchvision==0.16.2+cu121 torchaudio==2.1.2 --extra-index-url https://download.pytorch.org/whl/cu121",
            # "aiohttp": "pip install trash-cli && trash-put /opt/conda/lib/python3.10/site-packages/aiohttp*" # fix install req
        }
    }
    if env in additional_libs:
        install_lib.update(additional_libs[env])

    # Loop through libraries
    for index, (package, install_cmd) in enumerate(install_lib.items(), start=1):
        print(f"\r[{index}/{len(install_lib)}] \033[32m>>\033[0m Installing \033[33m{package}\033[0m..." + " "*35, end='')
        subprocess.run(install_cmd, shell=True, capture_output=True)

    # Additional specific packages
    with capture.capture_output():
        get_ipython().system('curl -s -OL https://github.com/DEX-1101/sd-webui-notebook/raw/main/res/new_tunnel --output-dir {root_path}')
        get_ipython().system('curl -s -Lo /usr/bin/cl https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-amd64 && chmod +x /usr/bin/cl')
        get_ipython().system('curl -sLO https://github.com/openziti/zrok/releases/download/v0.4.32/zrok_0.4.32_linux_amd64.tar.gz && tar -xzf zrok_0.4.32_linux_amd64.tar.gz -C /usr/bin && rm -f zrok_0.4.32_linux_amd64.tar.gz')

    clear_output()

    # Create the flag file marking the libraries as installed
    with open(flag_file, "w") as f:
        f.write(">W<'")

    print("🍪 Libraries are installed!" + " "*35)
    time.sleep(2)
    clear_output()

# =================== OTHER ====================
# Setup Timer
start_colab = int(os.environ.get("START_COLAB", time.time() - 5))
os.environ["START_COLAB"] = str(start_colab)

def download_cfg_files(file_paths, destination_path):
    base_url = "https://huggingface.co/NagisaNao/SD-CONFIGS/resolve/main"
    for filename in file_paths:
        file_name = filename.split('/')[-1]
        get_ipython().system('wget -O {destination_path}/{file_name} {base_url}/{filename}')

def cfg_download():
    common_files = ["styles.csv"]
    a1111_files = ["A1111/config.json", "A1111/ui-config.json"]
    forge_files = ["reForge/config.json", "reForge/ui-config.json"]

    with capture.capture_output():
        download_cfg_files(common_files, webui_path)
        ui_files = a1111_files if UI == 'A1111' else forge_files
        download_cfg_files(ui_files, webui_path)

def remove_dir(directory_path):
    if directory_path and os.path.exists(directory_path):
        try:
            shutil.rmtree(directory_path)
        except Exception:
            get_ipython().system('rm -rf {directory_path}')

TEMPORARY_DIR = f'{root_path}/temp_dir'
def copy_items_with_replace(src_base, dst_base):
    items_to_copy = [
        'embeddings',
        'models/Stable-diffusion',
        'models/VAE',
        'models/Lora',
        'models/ControlNet'
    ]

    print("⌚ Moving files...", end='')
    for item in items_to_copy:
        src = os.path.join(src_base, item)
        dst = os.path.join(dst_base, item)

        if os.path.exists(src):
            if os.path.exists(dst):
                remove_dir(dst)
            os.makedirs(os.path.dirname(dst), exist_ok=True)
            shutil.move(src, dst)
    print("\r🔥 Files moved!" + " "*15)

def handle_colab_timer(webui_path, timer_colab):
    timer_file_path = os.path.join(webui_path, 'static', 'colabTimer.txt')
    if not os.path.exists(timer_file_path):
        with open(timer_file_path, 'w') as timer_file:
            timer_file.write(str(timer_colab))
    else:
        with open(timer_file_path, 'r') as timer_file:
            timer_colab = float(timer_file.read())
    return timer_colab

def unpack_webui():
    start_install = time.time()
    print(f"⌚ Unpacking Stable Diffusion{' (Forge)' if UI == 'Forge' else ''}...", end='')

    with capture.capture_output():
        download_url = "https://huggingface.co/lang03383/hisd/resolve/main/iiiiiss.zip"
        if UI == 'Forge':
            download_url = "https://huggingface.co/NagisaNao/fast_repo/resolve/main/FULL_REPO.zip"

        zip_path = f"{root_path}/repo.zip"
        get_ipython().system('aria2c --console-log-level=error -c -x 16 -s 16 -k 1M {download_url} -d {root_path} -o repo.zip')
        get_ipython().system('unzip -q -o {zip_path} -d {webui_path}')
        get_ipython().system('rm -rf {zip_path}')

    handle_colab_timer(webui_path, start_colab)

    install_time = time.time() - start_install
    minutes, seconds = divmod(int(install_time), 60)
    print(f"\r🚀 Unpacking complete! For {minutes:02}:{seconds:02} ⚡" + " "*15)

    if os.path.exists(TEMPORARY_DIR):
        copy_items_with_replace(TEMPORARY_DIR, webui_path)
        remove_dir(TEMPORARY_DIR)

# ================= MAIN CODE ==================
if os.path.exists(webui_path):
    if UI != OLD_UI:
        print(f'Switching the WebUI from \033[33m{OLD_UI}\033[0m to \033[33m{UI}\033[0m:')
        copy_items_with_replace(webui_path, TEMPORARY_DIR)
        remove_dir(webui_path)
        os.environ['SDW_OLD_UI'] = UI
        time.sleep(2)
        clear_output()

if not os.path.exists(webui_path):
    unpack_webui()
    cfg_download()
else:
    print("🚀 All unpacked... Skip. ⚡")
    timer_colab = handle_colab_timer(webui_path, start_colab)
    elapsed_time = str(timedelta(seconds=time.time() - timer_colab)).split('.')[0]
    print(f"⌚️ You have been running this session for - \033[33m{elapsed_time}\033[0m")

## Update the WebUI and extensions
if latest_webui or latest_exstensions:
    action = "WebUI and Extensions" if latest_webui and latest_exstensions else ("WebUI" if latest_webui else "Extensions")
    print(f"⌚️ Updating {action}...", end='')
    with capture.capture_output():
        get_ipython().system('git config --global user.email "you@example.com"')
        get_ipython().system('git config --global user.name "Your Name"')

        ## Update Webui
        if latest_webui:
            get_ipython().run_line_magic('cd', '{webui_path}')
            get_ipython().system('git restore .')
            get_ipython().system('git pull -X theirs --rebase --autostash')

        ## Update extensions
        if latest_exstensions:
            get_ipython().system('{\'for dir in \' + webui_path + \'/extensions/*/; do cd \\"$dir\\" && git reset --hard && git pull; done\'}')
    print(f"\r✨ Updating {action} Completed!")

# === FIXING EXTENSIONS ===
anxety_repos = "https://huggingface.co/NagisaNao/fast_repo/resolve/main"
with capture.capture_output():
    # --- Umi-Wildcard ---
    get_ipython().system("sed -i '521s/open=\\(False\\|True\\)/open=False/' {webui_path}/extensions/Umi-AI-Wildcards/scripts/wildcard_recursive.py # Closed accordion by default")
    # --- Encrypt-Image ---
    get_ipython().system("sed -i '9,37d' {webui_path}/extensions/Encrypt-Image/javascript/encrypt_images_info.js # Removes the weird text in webui")

## Version switching
if commit_hash:
    print('⏳ Time machine activation...', end="")
    with capture.capture_output():
        get_ipython().run_line_magic('cd', '{webui_path}')
        get_ipython().system('git config --global user.email "you@example.com"')
        get_ipython().system('git config --global user.name "Your Name"')
        get_ipython().system('git reset --hard {commit_hash}')
    print(f"\r⌛️ The time machine has been activated! Current commit: \033[34m{commit_hash}\033[0m")

## Downloading model and stuff | oh~ Hey! If you're freaked out by that code too, don't worry, me too!
print("📦 Downloading models and stuff...", end='')

extension_repo = []
PREFIXES = {
    "model": models_dir,
    "vae": vaes_dir,
    "lora": loras_dir,
    "embed": embeddings_dir,
    "extension": extensions_dir,
    "control": control_dir,
    "adetailer": adetailer_dir,
    "config": webui_path
}
get_ipython().system('mkdir -p {" ".join(PREFIXES.values())}')

''' Formatted Info Output '''

def center_text(text, terminal_width=45):
    padding = (terminal_width - len(text)) // 2
    return f"{' ' * padding}{text}{' ' * padding}"

def format_output(url, dst_dir, file_name, image_name=None, image_url=None):
    info = center_text(f"[{file_name.split('.')[0]}]")
    sep_line = '---' * 20

    print(f"\n\033[32m{sep_line}\033[36;1m{info}\033[32m{sep_line}\033[0m")
    print(f"\033[33mURL: {url}")
    print(f"\033[33mSAVE DIR: \033[34m{dst_dir}")
    print(f"\033[33mFILE NAME: \033[34m{file_name}\033[0m")
    if 'civitai' in url and image_url:
        print(f"\033[32m[Preview DL]:\033[0m {image_name} - {image_url}\n")

''' GET CivitAi API - DATA '''

def CivitAi_API(url, file_name=None):
    SUPPORT_TYPES = ('Checkpoint', 'TextualInversion', 'LORA')
    CIVITAI_TOKEN = "62c0c5956b2f9defbd844d754000180b"

    url = url.split('?token=')[0] if '?token=' in url else url
    url = url.replace('?type=', f'?token={CIVITAI_TOKEN}&type=') if '?type=' in url else f"{url}?token={CIVITAI_TOKEN}"

    def get_model_data(url):
        base_url = "https://civitai.com/api/v1"
        try:
            if "civitai.com/models/" in url:
                if '?modelVersionId=' in url:
                    version_id = url.split('?modelVersionId=')[1]
                else:
                    model_id = url.split('/models/')[1].split('/')[0]
                    model_data = requests.get(f"{base_url}/models/{model_id}").json()
                    version_id = model_data['modelVersions'][0].get('id')
            else:
                version_id = url.split('/models/')[1].split('/')[0]

            return requests.get(f"{base_url}/model-versions/{version_id}").json()
        except (KeyError, IndexError, requests.RequestException) as e:
            return None

    data = get_model_data(url)

    if not data:
        print("\033[31m[Data Info]:\033[0m Failed to retrieve data from the API.\n")
        return 'None', None, None, None, None, None, None

    def get_model_info(url, data):
        model_type = data['model']['type']
        model_name = data['files'][0]['name']

        if 'type=' in url:
            url_model_type = parse_qs(urlparse(url).query).get('type', [''])[0].lower()
            if 'vae' in url_model_type:
                model_type = data['files'][1]['type']
                model_name = data['files'][1]['name']

        if file_name and '.' not in file_name:
            file_extension = model_name.split('.')[-1]
            model_name = f"{file_name}.{file_extension}"
        elif file_name:
            model_name = file_name

        return model_type, model_name

    def get_download_url(data, model_type):
        if any(t.lower() in model_type.lower() for t in SUPPORT_TYPES):
            return data['files'][0]['downloadUrl']

        return data['files'][1]['downloadUrl'] if 'type' in url else data['files'][0]['downloadUrl']

    def get_image_info(data, model_type, model_name):
        if not any(t in model_type for t in SUPPORT_TYPES):
            return None, None

        for image in data.get('images', []):
            if image['nsfwLevel'] >= 4 and env == 'Kaggle':  # Filter NSFW images for Kaggle
                continue
            image_url = image['url']
            image_extension = image_url.split('.')[-1]
            image_name = f"{model_name.split('.')[0]}.preview.{image_extension}" if image_url else None
            return image_url, image_name
        return None, None

    model_type, model_name = get_model_info(url, data)
    download_url = get_download_url(data, model_type)
    image_url, image_name = get_image_info(data, model_type, model_name)

    return f"{download_url}{'&' if '?' in download_url else '?'}token={CIVITAI_TOKEN}", download_url, model_type, model_name, image_url, image_name, data

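# Illustrative call (hypothetical model URL), showing the 7-tuple returned above:
#   full_url, clean_url, model_type, model_name, image_url, image_name, data = \
#       CivitAi_API('https://civitai.com/models/12345?modelVersionId=67890', 'MyModel')
# 'full_url' carries the API token and is what aria2c ultimately downloads,
# 'clean_url' is only shown in the log output, and 'image_url'/'image_name'
# point to a preview picture when the model type is one of SUPPORT_TYPES.
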
''' Main Download Code '''

def strip_(url):
    if 'github.com' in url:
        return url.replace('/blob/', '/raw/')
    elif "huggingface.co" in url:
        url = url.replace('/blob/', '/resolve/')
        return url.split('?')[0] if '?' in url else url
    return url

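# Example of what strip_() does (illustrative URLs):
#   'https://github.com/user/repo/blob/main/file.pt'        -> '.../raw/main/file.pt'
#   'https://huggingface.co/user/repo/blob/main/model.ckpt' -> '.../resolve/main/model.ckpt'
# For huggingface links any query string (e.g. a '?download=true' suffix) is dropped.
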
def download(url):
    links_and_paths = [link_or_path.strip() for link_or_path in url.split(',') if link_or_path.strip()]

    for link_or_path in links_and_paths:
        if any(link_or_path.lower().startswith(prefix) for prefix in PREFIXES):
            handle_manual(link_or_path)
        else:
            url, dst_dir, file_name = link_or_path.split()
            manual_download(url, dst_dir, file_name)

    # Unpack ZIP files
    for directory in PREFIXES.values():
        for root, _, files in os.walk(directory):
            for file in files:
                if file.endswith(".zip"):
                    zip_path = os.path.join(root, file)
                    extract_path = os.path.splitext(zip_path)[0]
                    with zipfile.ZipFile(zip_path, 'r') as zip_ref:
                        zip_ref.extractall(extract_path)
                    os.remove(zip_path)

def handle_manual(url):
    url_parts = url.split(':', 1)
    prefix, path = url_parts[0], url_parts[1]

    file_name_match = re.search(r'\[(.*?)\]', path)
    file_name = file_name_match.group(1) if file_name_match else None
    if file_name:
        path = re.sub(r'\[.*?\]', '', path)

    if prefix in PREFIXES:
        dir = PREFIXES[prefix]
        if prefix != "extension":
            try:
                manual_download(path, dir, file_name=file_name, prefix=prefix)
            except Exception as e:
                print(f"Error downloading file: {e}")
        else:
            extension_repo.append((path, file_name))

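# Illustrative manual entries accepted by handle_manual() (hypothetical URLs):
#   model:https://civitai.com/models/12345[MyModel.safetensors]
#   lora:https://huggingface.co/user/repo/resolve/main/my_lora.safetensors
#   extension:https://github.com/user/some-extension
# The leading word must be one of the PREFIXES keys; an optional [name] sets the
# saved file name, and 'extension' entries are queued for git clone instead.
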
def manual_download(url, dst_dir, file_name, prefix=None):
    hf_header = f"--header='Authorization: Bearer {huggingface_token}'" if huggingface_token else ""
    aria2c_header = "--header='User-Agent: Mozilla/5.0' --allow-overwrite=true"
    aria2_args = "--optimize-concurrent-downloads --console-log-level=error --summary-interval=10 --stderr=true -c -x16 -s16 -k1M -j5"

    clean_url = strip_(url)

    if 'civitai' in url:
        url, clean_url, model_type, file_name, image_url, image_name, data = CivitAi_API(url, file_name)
        if image_url and image_name:
            command = ["aria2c"] + aria2_args.split() + ["-d", dst_dir, "-o", image_name, image_url]
            subprocess.run(command, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)

    elif 'github' in url or 'huggingface.co' in url:
        if file_name and '.' not in file_name:
            file_extension = f"{clean_url.split('/')[-1].split('.', 1)[1]}"
            file_name = f"{file_name}.{file_extension}"
        if not file_name:
            file_name = clean_url.split("/")[-1]

    """ Formatted info output """
    try:
        format_output(clean_url, dst_dir, file_name, image_name, image_url)
    except UnboundLocalError:
        format_output(clean_url, dst_dir, file_name, None, None)

    # =====================
    def run_aria2c(url, dst_dir, file_name=None, args="", header=""):
        file_path = os.path.join(dst_dir, file_name)  # replaces config files
        if os.path.exists(file_path) and prefix == 'config':
            os.remove(file_path)

        out = f"-o '{file_name}'" if file_name else ""
        get_ipython().system("aria2c {header} {args} -d {dst_dir} {out} '{url}'")

    # -- Google Drive --
    if 'drive.google' in url:
        if not globals().get('have_drive_link', False):
            os.system("pip install -U gdown > /dev/null")
            globals()['have_drive_link'] = True

        if 'folders' in url:
            os.system(f"gdown --folder \"{url}\" -O {dst_dir} --fuzzy -c")
        else:
            out_path = f"{dst_dir}/{file_name}" if file_name else dst_dir
            os.system(f"gdown \"{url}\" -O {out_path} --fuzzy -c")

    # -- GitHub or Hugging Face --
    elif 'github' in url or 'huggingface' in url:
        run_aria2c(clean_url, dst_dir, file_name, aria2_args, hf_header if 'huggingface' in url else '')

    # -- Other HTTP/Sources --
    elif 'http' in url:
        run_aria2c(url, dst_dir, file_name, aria2_args, aria2c_header)

''' SubModels - Added URLs '''

# Separation of merged numbers
def split_numbers(num_str, max_num):
    result = []
    i = 0
    while i < len(num_str):
        found = False
        for length in range(2, 0, -1):
            if i + length <= len(num_str):
                part = int(num_str[i:i + length])
                if part <= max_num:
                    result.append(part)
                    i += length
                    found = True
                    break
        if not found:
            break
    return result

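# Example (illustrative): with max_num = 14, split_numbers("123", 14) returns [12, 3],
# while split_numbers("123", 5) returns [1, 2, 3] - the longest 1-2 digit chunk that
# still fits within max_num is taken greedily from left to right.
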
def add_submodels(selection, num_selection, model_dict, dst_dir):
    if selection == "none":
        return []
    selected_models = []

    if selection == "ALL":
        selected_models = sum(model_dict.values(), [])
    else:
        if selection in model_dict:
            selected_models.extend(model_dict[selection])

        nums = num_selection.replace(',', ' ').split()
        max_num = len(model_dict)
        unique_nums = set()

        for num_part in nums:
            split_nums = split_numbers(num_part, max_num)
            unique_nums.update(split_nums)

        for num in unique_nums:
            if 1 <= num <= max_num:
                name = list(model_dict.keys())[num - 1]
                selected_models.extend(model_dict[name])

    unique_models = {model['name']: model for model in selected_models}.values()

    for model in unique_models:
        model['dst_dir'] = dst_dir

    return list(unique_models)

def handle_submodels(selection, num_selection, model_dict, dst_dir, url):
    submodels = add_submodels(selection, num_selection, model_dict, dst_dir)
    for submodel in submodels:
        if not inpainting_model and "inpainting" in submodel['name']:
            continue
        url += f"{submodel['url']} {submodel['dst_dir']} {submodel['name']}, "
    return url

url = ""
url = handle_submodels(model, model_num, model_list, models_dir, url)
url = handle_submodels(vae, vae_num, vae_list, vaes_dir, url)
url = handle_submodels(controlnet, controlnet_num, controlnet_list, control_dir, url)

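# At this point 'url' is a comma-separated list of "download_url dst_dir file_name"
# triplets, e.g. (hypothetical paths):
#   "https://example.com/model.safetensors /content/models model.safetensors, "
# download() later splits on commas, then on whitespace, to feed manual_download().
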
''' file.txt - added urls '''

def process_file_download(file_url, PREFIXES, unique_urls):
    files_urls = ""

    if file_url.startswith("http"):
        if "blob" in file_url:
            file_url = file_url.replace("blob", "raw")
        response = requests.get(file_url)
        lines = response.text.split('\n')
    else:
        with open(file_url, 'r') as file:
            lines = file.readlines()

    current_tag = None
    for line in lines:
        line = line.strip()
        if any(f'# {tag}' in line.lower() for tag in PREFIXES):
            current_tag = next((tag for tag in PREFIXES if tag in line.lower()))

        urls = [url.split('#')[0].strip() for url in line.split(',')]  # filter urls
        for url in urls:
            filter_url = url.split('[')[0]  # same url filter

            if url.startswith("http") and filter_url not in unique_urls:
                files_urls += f"{current_tag}:{url}, "
                unique_urls.add(filter_url)

    return files_urls

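# A custom .txt file is expected to look roughly like this (illustrative content):
#   # model
#   https://civitai.com/models/12345[MyModel.safetensors], https://example.com/other.ckpt
#   # lora
#   https://huggingface.co/user/repo/resolve/main/my_lora.safetensors
# A '# <prefix>' comment line switches the current tag; every URL after it is queued
# as '<prefix>:<url>' until the next tag line.
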
file_urls = ""
unique_urls = set()

if custom_file_urls:
    for custom_file_url in custom_file_urls.replace(',', '').split():
        if not custom_file_url.endswith('.txt'):
            custom_file_url += '.txt'
        if not custom_file_url.startswith('http'):
            if not custom_file_url.startswith(root_path):
                custom_file_url = f'{root_path}/{custom_file_url}'

        try:
            file_urls += process_file_download(custom_file_url, PREFIXES, unique_urls)
        except FileNotFoundError:
            pass

# url prefixing
urls = (Model_url, Vae_url, LoRA_url, Embedding_url, Extensions_url)
prefixed_urls = (f"{prefix}:{url}" for prefix, url in zip(PREFIXES.keys(), urls) if url for url in url.replace(',', '').split())
url += ", ".join(prefixed_urls) + ", " + file_urls

if detailed_download == "on":
    print("\n\n\033[33m# ====== Detailed Download ====== #\n\033[0m")
    download(url)
    print("\n\033[33m# =============================== #\n\033[0m")
else:
    with capture.capture_output():
        download(url)

print("\r🏁 Download Complete!" + " "*15)

# Clean up leftovers after downloading...
get_ipython().system('find {webui_path} \\( -type d \\( -name ".ipynb_checkpoints" -o -name ".aria2" \\) -o -type f -name "*.aria2" \\) -exec rm -r {{}} \\; >/dev/null 2>&1')


## Install custom extensions
if len(extension_repo) > 0:
    print("✨ Installing custom extensions...", end='')
    with capture.capture_output():
        for repo, repo_name in extension_repo:
            if not repo_name:
                repo_name = repo.split('/')[-1]
            get_ipython().system('cd {extensions_dir} && git clone {repo} {repo_name} && cd {repo_name} && git fetch')
    print(f"\r📦 Installed {len(extension_repo)} custom extensions!")


## List Models and stuff V2
if detailed_download == "off":
    print("\n\n\033[33mIf you don't see any downloaded files, enable the 'Detailed Downloads' feature in the widget.")

get_ipython().run_line_magic('run', '{root_path}/file_cell/special/dl_display_results.py # display widgets result')