# Source: kagglee/downloading_en.py — "Update downloading_en.py" (commit 81411c4, verified, by duhking)
#!/usr/bin/env python
# coding: utf-8
# In[ ]:
##~ DOWNLOADING CODE | BY: ANXETY ~##
import os
import re
import time
import json
import requests
import subprocess
from datetime import timedelta
from subprocess import getoutput
from urllib.parse import unquote
from IPython.utils import capture
from IPython.display import clear_output
# ================= DETECT ENV =================
def detect_environment():
    """Detect the hosting platform from its well-known environment variable.

    Returns:
        tuple[str, str]: (platform display name, content root path).

    Raises:
        EnvironmentError: when none of the supported platforms is detected.
            (The original silently returned None, which later crashed with an
            opaque "cannot unpack non-iterable NoneType" at the call site.)
    """
    environments = {
        'COLAB_GPU': ('Google Colab', "/content"),
        'KAGGLE_URL_BASE': ('Kaggle', "/kaggle/working/content"),
        'SAGEMAKER_INTERNAL_IMAGE_URI': ('SageMaker Studio Lab', "/home/studio-lab-user/content")
    }
    for env_var, (environment, path) in environments.items():
        if env_var in os.environ:
            return environment, path
    raise EnvironmentError(
        "Unsupported environment: expected Google Colab, Kaggle, or SageMaker Studio Lab"
    )
# Resolve the platform once at import time; every path below hangs off root_path.
env, root_path = detect_environment()
webui_path = f"{root_path}/sdw"  # Stable Diffusion WebUI install directory
# ----------------------------------------------
# === ONLY SAGEMAKER ===
# SageMaker ships older packages; upgrade the few the WebUI needs up front.
if env == "SageMaker Studio Lab":
    print("Updating dependencies, may take some time...")
    get_ipython().system('pip install -q --upgrade torchsde')
    get_ipython().system('pip install -q --upgrade pip')
    get_ipython().system('pip install -q --upgrade psutil')
    clear_output()
# ================ LIBRARIES ================
# One-time dependency install, guarded by a marker file so reruns skip it.
flag_file = f"{root_path}/libraries_installed.txt"
if not os.path.exists(flag_file):
    # A1111 update webui to 1.8.0
    # Version pins known to work together with the bundled WebUI build.
    xformers = "xformers==0.0.23.post1 triton==2.1.0"
    torch = "torch==2.1.2+cu121 torchvision==0.16.2+cu121 --extra-index-url https://download.pytorch.org/whl/cu121"
    print("Installing the libraries, it's going to take a while....", end='')
    with capture.capture_output() as cap:
        # aria2 = parallel downloader; localtunnel + cloudflared = tunnel clients.
        get_ipython().system('apt-get update && apt -y install aria2')
        get_ipython().system('npm install -g localtunnel &> /dev/null')
        get_ipython().system('curl -s -OL https://github.com/DEX-1101/sd-webui-notebook/raw/main/res/new_tunnel --output-dir {root_path}')
        get_ipython().system('curl -s -Lo /usr/bin/cl https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-amd64 && chmod +x /usr/bin/cl')
        get_ipython().system('pip install insightface')
        if env == "SageMaker Studio Lab":
            get_ipython().run_line_magic('pip', 'install -q opencv-python-headless huggingface-hub')
            get_ipython().run_line_magic('conda', 'update -q -n base conda')
            get_ipython().run_line_magic('conda', 'install -q -y aria2')
            get_ipython().run_line_magic('conda', 'install -q -y glib')
            get_ipython().system('pip install tensorflow')
            # Helper notebook used later to wipe the SageMaker instance entirely.
            get_ipython().system('wget -P /home/studio-lab-user https://huggingface.co/NagisaNao/fast_repo/resolve/main/sagemaker/FULL_DELETED_NOTEBOOK.ipynb')
        # Colab already ships a matching torch build; elsewhere install the pinned one too.
        if env == "Google Colab":
            get_ipython().system('pip install -q {xformers} -U')
        else:
            get_ipython().system('pip install -q {torch} -U')
            get_ipython().system('pip install -q {xformers} -U')
        # Marker file: its presence short-circuits this whole branch on the next run.
        with open(flag_file, "w") as f:
            f.write("hey ;3")
    del cap
    print("\rLibraries are installed!" + " "*35)
    time.sleep(2)
    clear_output()
# ================= loading settings V4 =================
def load_settings(path):
    """Read a JSON settings file; return an empty dict when the file is absent."""
    if not os.path.exists(path):
        return {}
    with open(path, 'r') as handle:
        return json.load(handle)
# Widget settings saved by the previous notebook cell; missing keys resolve to None.
settings = load_settings(f'{root_path}/settings.json')
variables = [
    'Model', 'Model_Num', 'Inpainting_Model',
    'Vae', 'Vae_Num',
    'latest_webui', 'latest_exstensions', 'detailed_download',
    'controlnet', 'controlnet_Num', 'commit_hash', 'optional_huggingface_token',
    # BUG FIX: a missing comma previously fused these two strings into the single
    # bogus name 'ngrok_tokencommandline_arguments', silently dropping both settings.
    'ngrok_token', 'commandline_arguments',
    'Model_url', 'Vae_url', 'LoRA_url', 'Embedding_url', 'Extensions_url', 'custom_file_urls'
]
# At module scope locals() is globals(); use globals() explicitly so the intent
# (inject each setting as a module-level name) is unmistakable.
globals().update({key: settings.get(key) for key in variables})
# ================= OTHER =================
# Session start timestamp: reuse start_colab when the cell is re-run in the same
# kernel, otherwise seed it slightly in the past.
try:
    start_colab
except NameError:  # was a bare `except:`, which also swallowed KeyboardInterrupt/SystemExit
    start_colab = int(time.time())-5

# CONFIG DIR — canonical destination folders inside the WebUI tree.
models_dir = f"{webui_path}/models/Stable-diffusion"
vaes_dir = f"{webui_path}/models/VAE"
embeddings_dir = f"{webui_path}/embeddings"
loras_dir = f"{webui_path}/models/Lora"
extensions_dir = f"{webui_path}/extensions"
control_dir = f"{webui_path}/models/ControlNet"
# ================= MAIN CODE =================
# --- Obsolescence warning ---
if env == "SageMaker Studio Lab":
    print("You are using the 'SageMaker' environment - this environment is outdated so many bugs will not be fixed and it will be cut in functionality. To save memory and/or to avoid bugs.\n\n")

# First run only: fetch and unpack the pre-built WebUI repo archive.
if not os.path.exists(webui_path):
    start_install = int(time.time())
    print("⌚ Unpacking Stable Diffusion...", end='')
    with capture.capture_output() as cap:
        get_ipython().system('aria2c --console-log-level=error -c -x 16 -s 16 -k 1M https://huggingface.co/NagisaNao/fast_repo/resolve/main/FULL_REPO.zip -o repo.zip')
        get_ipython().system('unzip -q -o repo.zip -d {webui_path}')
        get_ipython().system('rm -rf repo.zip')
        get_ipython().run_line_magic('cd', '{root_path}')
        os.environ["SAFETENSORS_FAST_GPU"]='1'
        os.environ["CUDA_MODULE_LOADING"]="LAZY"
        os.environ["TF_CPP_MIN_LOG_LEVEL"] = "3"
        os.environ["PYTHONWARNINGS"] = "ignore"
        # Persist the session start so later runs can report elapsed time.
        get_ipython().system('echo -n {start_colab} > {webui_path}/static/colabTimer.txt')
    del cap
    install_time = timedelta(seconds=time.time()-start_install)
    # NOTE(review): the hours field uses float division (seconds / 3600) inside %02d —
    # works because %d truncates, but verify if the format ever changes.
    print("\r🚀 Unpacking is complete! For","%02d:%02d:%02d ⚡\n" % (install_time.seconds / 3600, (install_time.seconds / 60) % 60, install_time.seconds % 60), end='', flush=True)
else:
    get_ipython().system('echo -n {start_colab} > {webui_path}/static/colabTimer.txt')
    print("🚀 All unpacked... Skip. ⚡")

# Report session age from the persisted timer file.
start_colab = float(open(f'{webui_path}/static/colabTimer.txt', 'r').read())
time_since_start = str(timedelta(seconds=time.time()-start_colab)).split('.')[0]
print(f"⌚️ You have been conducting this session for - \033[33m{time_since_start}\033[0m")
## Changes extensions and WebUi
# Optional self-update of the WebUI checkout and/or every extension repo.
if latest_webui or latest_exstensions:
    action = "Updating WebUI and Extensions" if latest_webui and latest_exstensions else ("WebUI Update" if latest_webui else "Update Extensions")
    print(f"⌚️ {action}...", end='', flush=True)
    with capture.capture_output() as cap:
        # A git identity is required for the rebase/pull operations below.
        get_ipython().system('git config --global user.email "you@example.com"')
        get_ipython().system('git config --global user.name "Your Name"')

        ## Update Webui
        if latest_webui:
            get_ipython().run_line_magic('cd', '{webui_path}')
            get_ipython().system('git restore .')
            get_ipython().system('git pull -X theirs --rebase --autostash')

        ## Update extensions
        if latest_exstensions:
            # Hard-reset each extension before pulling; SageMaker only fetches + pulls.
            if env != "SageMaker Studio Lab":
                get_ipython().system('{\'for dir in \' + webui_path + \'/extensions/*/; do cd \\"$dir\\" && git reset --hard && git pull; done\'}')
            else:
                get_ipython().system('{\'for dir in /home/studio-lab-user/content/sdw/extensions/*/; do cd \\"$dir\\" && git fetch origin && git pull; done\'}')

        # Restore the stability-ai repo so locally patched files never conflict.
        get_ipython().system('cd {webui_path}/repositories/stable-diffusion-stability-ai && git restore .')
    del cap
    print(f"\r✨ {action} Completed!")
# === FIXING ERRORS ===
# --- All ---
# --- Encrypt-Image ---
# Delete lines 9-37 of the extension's JS (presumably disables an unwanted
# info overlay — TODO confirm against the extension source).
get_ipython().system("sed -i '9,37d' {webui_path}/extensions/Encrypt-Image/javascript/encrypt_images_info.js")

# --- SageMaker ---
if env == "SageMaker Studio Lab":
    with capture.capture_output() as cap:
        # --- SuperMerger Remove ---
        # Removed on SageMaker only (reason not stated; presumably resource limits — verify).
        if os.path.exists(f"{webui_path}/extensions/supermerger"):
            get_ipython().system('rm -rf {webui_path}/extensions/supermerger')

        # --- Launch (Style) ---
        # Replace modules/styles.py with a SageMaker-patched version.
        get_ipython().system('wget -O {webui_path}/modules/styles.py https://huggingface.co/NagisaNao/fast_repo/resolve/main/sagemaker/fixing/webui/styles.py')
    del cap
## Version switching
# Pin the WebUI checkout to a user-supplied commit hash.
if commit_hash:
    print('⏳ Time machine activation...', end="", flush=True)
    with capture.capture_output() as cap:
        get_ipython().run_line_magic('cd', '{webui_path}')
        get_ipython().system('git config --global user.email "you@example.com"')
        get_ipython().system('git config --global user.name "Your Name"')
        get_ipython().system('git reset --hard {commit_hash}')
    del cap
    print(f"\r⌛️ The time machine has been activated! Current commit: \033[34m{commit_hash}\033[0m")
## Downloading model and stuff | oh yeah~ I'm starting to misunderstand my own code ( almost my own ;3 )
print("📦 Downloading models and stuff...", end='')

# Widget choice -> checkpoint download entries. "+ INP" choices bundle an
# inpainting variant alongside the base model.
model_list = {
    "1.Anime (by Xpuct) + INP": [
        {"url": "https://huggingface.co/XpucT/Anime/resolve/main/Anime_v2.safetensors", "name": "Anime_v2.safetensors"},
        {"url": "https://huggingface.co/XpucT/Anime/resolve/main/Anime_v2-inpainting.safetensors", "name": "Anime_v2-inpainting.safetensors"}
    ],
    "2.Cetus-Mix [Anime] [V4] + INP": [
        {"url": "https://civitai.com/api/download/models/130298", "name": "CetusMix_V4.safetensors"},
        {"url": "https://civitai.com/api/download/models/139882", "name": "CetusMix_V4-inpainting.safetensors"}
    ],
    "3.Counterfeit [Anime] [V3] + INP": [
        {"url": "https://civitai.com/api/download/models/125050", "name": "Counterfeit_V3.safetensors"},
        {"url": "https://civitai.com/api/download/models/137911", "name": "Counterfeit_V3-inpainting.safetensors"}
    ],
    "4.CuteColor [Anime] [V3]": [
        {"url": "https://civitai.com/api/download/models/138754", "name": "CuteColor_V3.safetensors"}
    ],
    "5.Dark-Sushi-Mix [Anime]": [
        {"url": "https://civitai.com/api/download/models/101640", "name": "DarkSushiMix_2_5D.safetensors"},
        {"url": "https://civitai.com/api/download/models/56071", "name": "DarkSushiMix_colorful.safetensors"}
    ],
    "6.Meina-Mix [Anime] [V11] + INP": [
        {"url": "https://civitai.com/api/download/models/119057", "name": "MeinaMix_V11.safetensors"},
        {"url": "https://civitai.com/api/download/models/120702", "name": "MeinaMix_V11-inpainting.safetensors"}
    ],
    "7.Mix-Pro [Anime] [V4] + INP": [
        {"url": "https://civitai.com/api/download/models/125668", "name": "MixPro_V4.safetensors"},
        {"url": "https://civitai.com/api/download/models/139878", "name": "MixPro_V4-inpainting.safetensors"}
    ],
    "8.BluMix [Anime] [V7]": [
        {"url": "https://civitai.com/api/download/models/361779", "name": "BluMix_v7.safetensors"}
    ],
    "9.pony": [
        {"url": "https://civitai.com/api/download/models/290640", "name": "pony.safetensors"}
    ]
}
# 1-4 (fp16/cleaned)
# Widget choice -> VAE download entries.
vae_list = {
    "1.Anime.vae": [
        {"url": "https://civitai.com/api/download/models/131654", "name": "Anime.vae.safetensors"},
        {"url": "https://civitai.com/api/download/models/131658", "name": "vae-ft-mse.vae.safetensors"}
    ],
    "2.Anything.vae": [{"url": "https://civitai.com/api/download/models/131656", "name": "Anything.vae.safetensors"}],
    "3.Blessed2.vae": [{"url": "https://civitai.com/api/download/models/142467", "name": "Blessed2.vae.safetensors"}],
    "4.ClearVae.vae": [{"url": "https://civitai.com/api/download/models/133362", "name": "ClearVae_23.vae.safetensors"}],
    "5.WD.vae": [{"url": "https://huggingface.co/NoCrypt/resources/resolve/main/VAE/wd.vae.safetensors", "name": "WD.vae.safetensors"}]
}
# Widget choice -> ControlNet v1.1 entries. Each model pairs a .safetensors
# weight (resolve/ URL) with its .yaml config (raw/ URL).
controlnet_list = {
    "1.canny": [
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_canny_fp16.safetensors", "name": "control_v11p_sd15_canny_fp16.safetensors"},
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_canny_fp16.yaml", "name": "control_v11p_sd15_canny_fp16.yaml"}
    ],
    "2.openpose": [
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_openpose_fp16.safetensors", "name": "control_v11p_sd15_openpose_fp16.safetensors"},
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_openpose_fp16.yaml", "name": "control_v11p_sd15_openpose_fp16.yaml"}
    ],
    "3.depth": [
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11f1p_sd15_depth_fp16.safetensors", "name": "control_v11f1p_sd15_depth_fp16.safetensors"},
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11f1p_sd15_depth_fp16.yaml", "name": "control_v11f1p_sd15_depth_fp16.yaml"},
        {"url": "https://huggingface.co/NagisaNao/models/resolve/main/ControlNet_v11/control_v11p_sd15_depth_anything_fp16.safetensors", "name": "control_v11p_sd15_depth_anything_fp16.safetensors"}
    ],
    "4.normal_map": [
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_normalbae_fp16.safetensors", "name": "control_v11p_sd15_normalbae_fp16.safetensors"},
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_normalbae_fp16.yaml", "name": "control_v11p_sd15_normalbae_fp16.yaml"}
    ],
    "5.mlsd": [
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_mlsd_fp16.safetensors", "name": "control_v11p_sd15_mlsd_fp16.safetensors"},
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_mlsd_fp16.yaml", "name": "control_v11p_sd15_mlsd_fp16.yaml"}
    ],
    "6.lineart": [
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_lineart_fp16.safetensors", "name": "control_v11p_sd15_lineart_fp16.safetensors"},
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15s2_lineart_anime_fp16.safetensors", "name": "control_v11p_sd15s2_lineart_anime_fp16.safetensors"},
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_lineart_fp16.yaml", "name": "control_v11p_sd15_lineart_fp16.yaml"},
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15s2_lineart_anime_fp16.yaml", "name": "control_v11p_sd15s2_lineart_anime_fp16.yaml"}
    ],
    "7.soft_edge": [
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_softedge_fp16.safetensors", "name": "control_v11p_sd15_softedge_fp16.safetensors"},
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_softedge_fp16.yaml", "name": "control_v11p_sd15_softedge_fp16.yaml"}
    ],
    "8.scribble": [
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_scribble_fp16.safetensors", "name": "control_v11p_sd15_scribble_fp16.safetensors"},
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_scribble_fp16.yaml", "name": "control_v11p_sd15_scribble_fp16.yaml"}
    ],
    "9.segmentation": [
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_seg_fp16.safetensors", "name": "control_v11p_sd15_seg_fp16.safetensors"},
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_seg_fp16.yaml", "name": "control_v11p_sd15_seg_fp16.yaml"}
    ],
    "10.shuffle": [
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11e_sd15_shuffle_fp16.safetensors", "name": "control_v11e_sd15_shuffle_fp16.safetensors"},
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11e_sd15_shuffle_fp16.yaml", "name": "control_v11e_sd15_shuffle_fp16.yaml"}
    ],
    "11.tile": [
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11f1e_sd15_tile_fp16.safetensors", "name": "control_v11f1e_sd15_tile_fp16.safetensors"},
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11f1e_sd15_tile_fp16.yaml", "name": "control_v11f1e_sd15_tile_fp16.yaml"}
    ],
    "12.inpaint": [
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_inpaint_fp16.safetensors", "name": "control_v11p_sd15_inpaint_fp16.safetensors"},
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_inpaint_fp16.yaml", "name": "control_v11p_sd15_inpaint_fp16.yaml"}
    ],
    "13.instruct_p2p": [
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11e_sd15_ip2p_fp16.safetensors", "name": "control_v11e_sd15_ip2p_fp16.safetensors"},
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11e_sd15_ip2p_fp16.yaml", "name": "control_v11e_sd15_ip2p_fp16.yaml"}
    ]
}
# Custom extension repos queued for cloning after downloads finish: (url, dir_name).
extension_repo = []
# Recognized prefixes for manually supplied URLs (consumed by handle_manual/download).
prefixes = [
    "model:",
    "vae:",
    "lora:",
    "embeddings:",
    "extensions:"
]
get_ipython().system('mkdir -p {models_dir} {vaes_dir} {embeddings_dir} {loras_dir} {control_dir}')

# Accumulator for the "url dst_dir file_name, ..." spec consumed by download().
url = ""
# SECURITY NOTE(review): a hard-coded fallback Hugging Face token is embedded
# below; it is public in this file and should be treated as compromised/rotated.
hf_token = optional_huggingface_token if optional_huggingface_token else "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"
user_header = f"\"Authorization: Bearer {hf_token}\""
def handle_manual(url):
    """Dispatch one prefixed URL spec ("model:", "vae:", "lora:", "embeddings:", "extensions:").

    The text after the prefix may carry an optional "[file_name]" tag and may
    bundle several URLs separated by '|'. Non-extension items are downloaded
    immediately via manual_download(); extension repos are queued in the
    module-level extension_repo list as (url, dir_name) for cloning later.

    Fixes over the original: removed leftover debug prints, stopped shadowing
    the builtin `dir` and the loop variable `url`, collapsed the duplicated
    single-URL / multi-URL branches.
    """
    original_url = url
    url = url.split(':', 1)[1]

    # Optional [name] tag selects the output file name / extension dir name.
    name_match = re.search(r'\[(.*?)\]', url)
    file_name = name_match.group(1) if name_match else None
    if file_name:
        url = re.sub(r'\[.*?\]', '', url)

    dir_mapping = {"model": models_dir, "vae": vaes_dir, "lora": loras_dir, "embeddings": embeddings_dir, "extensions": None}
    for prefix, dst_dir in dir_mapping.items():
        if not original_url.startswith(f"{prefix}:"):
            continue
        # '|' bundles several URLs under one prefix (they share file_name).
        links = [part.strip() for part in url.split('|')] if '|' in url else [url]
        for link in links:
            if prefix != "extensions":
                manual_download(link, dst_dir, file_name=file_name)
            else:
                extension_repo.append((link, file_name))
def manual_download(url, dst_dir, file_name):
    """Download one item into dst_dir, choosing a downloader by URL type.

    gdown handles Google Drive links, aria2c everything else; file_name (may be
    None) overrides the name derived from the URL's last path segment.
    """
    basename = url.split("/")[-1] if file_name is None else file_name

    # I do it at my own risk..... Fucking CivitAi >:(
    # SECURITY NOTE(review): hard-coded CivitAI API token, public in this file — rotate it.
    civitai_token = "62c0c5956b2f9defbd844d754000180b"
    if 'civitai' in url and civitai_token:
        url = f"{url}?token={civitai_token}"

    # -- GDrive --
    if 'drive.google' in url:
        if 'folders' in url:
            get_ipython().system('gdown --folder "{url}" -O {dst_dir} --fuzzy -c')
        else:
            get_ipython().system('gdown "{url}" -O {dst_dir} --fuzzy -c')
    # -- Huggin Face --
    elif 'huggingface' in url:
        if '/blob/' in url:
            url = url.replace('/blob/', '/resolve/')  # blob pages are HTML; resolve serves the file
        if file_name:
            get_ipython().system('aria2c --optimize-concurrent-downloads --console-log-level=error --summary-interval=10 --header={user_header} -c -j5 -x16 -s16 -k1M -d {dst_dir} -o {basename} {url}')
        else:
            # Feed aria2c an input list with an explicit out= name decoded from the URL.
            # NOTE(review): both an out= entry and -o {basename} are passed here — verify
            # which one aria2c honors for -i- input.
            parsed_link = '\n{}\n\tout={}'.format(url, unquote(url.split('/')[-1]))
            get_ipython().system('echo -e "{parsed_link}" | aria2c --header={user_header} --console-log-level=error --summary-interval=10 -i- -j5 -x16 -s16 -k1M -c -d "{dst_dir}" -o {basename}')
    # -- Other --
    elif 'http' in url or 'magnet' in url:
        if file_name:
            get_ipython().system('aria2c --optimize-concurrent-downloads --console-log-level=error --summary-interval=10 -j5 -x16 -s16 -k1M -c -d {dst_dir} -o {file_name} {url}')
        else:
            parsed_link = '"{}"'.format(url)
            get_ipython().system('aria2c --optimize-concurrent-downloads --console-log-level=error --summary-interval=10 -j5 -x16 -s16 -k1M -c -d {dst_dir} -Z {parsed_link}')
def download(url):
    """Process a comma-separated download spec.

    Each entry is either a prefixed spec ("model:...", "vae:...", ...) routed
    through handle_manual(), or a plain "url dst_dir file_name" triple passed
    straight to manual_download().
    """
    for entry in url.split(','):
        entry = entry.strip()
        if not entry:
            continue
        is_prefixed = any(entry.startswith(p.lower()) for p in prefixes)
        if is_prefixed:
            handle_manual(entry)
        else:
            link, dst_dir, file_name = entry.split()
            manual_download(link, dst_dir, file_name)
submodels = []

def add_submodels(selection, num_selection, model_dict, dst_dir):
    """Resolve a widget selection into a de-duplicated list of model entries.

    Args:
        selection: a key of model_dict, "ALL", or "none".
        num_selection: extra picks by 1-based position, e.g. "1,2" or "1 2".
        model_dict: mapping of display name -> list of {"url", "name"} dicts.
        dst_dir: download directory stamped onto every returned entry.

    Returns:
        List of model dicts (each given a 'dst_dir' key), unique by 'name'.
    """
    if selection == "none":
        return []
    if selection == "ALL":
        selected_models = []
        for models in model_dict.values():
            selected_models.extend(models)
    else:
        # Copy so the extend below never mutates the shared model_dict entry
        # (the original aliased the list and grew it on every call); .get also
        # tolerates an unknown selection instead of raising KeyError.
        selected_models = list(model_dict.get(selection, []))

    # BUG FIX: commas were deleted ("1,2" -> "12") instead of treated as separators.
    selected_nums = map(int, num_selection.replace(',', ' ').split())
    for num in selected_nums:
        if 1 <= num <= len(model_dict):
            name = list(model_dict)[num - 1]
            selected_models.extend(model_dict[name])

    # De-duplicate by file name (last occurrence wins) and stamp the destination.
    unique_models = list({model['name']: model for model in selected_models}.values())
    for model in unique_models:
        model['dst_dir'] = dst_dir
    return unique_models
# Queue the widget-selected model/vae/controlnet entries into the download spec.
submodels += add_submodels(Model, Model_Num, model_list, models_dir) # model
submodels += add_submodels(Vae, Vae_Num, vae_list, vaes_dir) # vae
submodels += add_submodels(controlnet, "" if controlnet == "ALL" else controlnet_Num, controlnet_list, control_dir) # controlnet

for submodel in submodels:
    # Skip bundled inpainting checkpoints unless the user asked for them.
    if not Inpainting_Model and "inpainting" in submodel['name']:
        continue
    url += f"{submodel['url']} {submodel['dst_dir']} {submodel['name']}, "
def process_file_download(file_url):
    """Parse a custom-download text file and append its URLs to the matching
    global *_url accumulators.

    The file is organized into sections introduced by comment lines such as
    "# model" or "# lora"; every http(s) link that follows a recognized header
    is appended (comma-separated) to the corresponding global variable.
    """
    global Model_url, Vae_url, LoRA_url, Embedding_url, Extensions_url
    urls_dict = {
        'model': 'Model_url',
        'vae': 'Vae_url',
        'embed': 'Embedding_url',
        'lora': 'LoRA_url',
        'extension': 'Extensions_url'
    }

    # Remote files are fetched over HTTP (GitHub 'blob' pages rewritten to raw);
    # local files are read from disk.
    if file_url.startswith("http"):
        if "blob" in file_url:
            file_url = file_url.replace("blob", "raw")
        lines = requests.get(file_url).text.split('\n')
    else:
        with open(file_url, 'r') as file:
            lines = file.readlines()

    current_tag = None
    for line in lines:
        stripped = line.strip()
        if stripped.startswith('#'):
            # A comment line switches the active section (or clears it when unrecognized).
            current_tag = next((tag for tag in urls_dict if tag in line.lower()), None)
        elif current_tag and stripped:
            for token in (piece.strip() for piece in line.split()):
                if token.startswith("http"):
                    globals()[urls_dict[current_tag]] += ", " + token
# fix all possible errors/options and function call
# Normalize the custom file path: force a .txt suffix, anchor relative local
# paths at root_path, and collapse accidentally doubled '/content' segments.
if custom_file_urls:
    if not custom_file_urls.endswith('.txt'):
        custom_file_urls += '.txt'
    if not custom_file_urls.startswith('http'):
        if not custom_file_urls.startswith(root_path):
            custom_file_urls = f'{root_path}/{custom_file_urls}'
        if custom_file_urls.count('/content') >= 2:
            custom_file_urls = re.sub(r'(/content){2,}', '/content', custom_file_urls)
    # Best effort: a missing local file is silently ignored.
    try:
        process_file_download(custom_file_urls)
    except FileNotFoundError:
        pass
# Append the widget URL fields, each tagged with its matching prefix, to the spec.
# NOTE(review): replace(',', '') relies on a space following each comma;
# "a,b" would fuse into "ab" — consider replace(',', ' ').
urls = [Model_url, Vae_url, LoRA_url, Embedding_url, Extensions_url]
for i, prefix in enumerate(prefixes):
    if urls[i]:
        prefixed_urls = [f"{prefix}{u}" for u in urls[i].replace(',', '').split()]
        if prefixed_urls:
            url += ", ".join(prefixed_urls) + ", "
print("url:",url)

# Run the whole download spec, with or without per-file console output.
if detailed_download == "on":
    print("\n\n\033[33m# ====== Detailed Download ====== #\n\033[0m")
    download(url)
    print("\n\033[33m# =============================== #\n\033[0m")
else:
    with capture.capture_output() as cap:
        download(url)
    del cap
print("\r🏁 Download Complete!" + " "*15)
# Cleaning shit after downloading...
# Drop downloader/editor litter (.ipynb_checkpoints dirs, aria2 control files).
get_ipython().system('find \\( -name ".ipynb_checkpoints" -o -name ".aria2" \\) -type d -exec rm -r {} \\; >/dev/null 2>&1')

## Install of Custom extensions
if len(extension_repo) > 0:
    print("✨ Installing custom extensions...", end='', flush=True)
    with capture.capture_output() as cap:
        for repo, repo_name in extension_repo:
            if not repo_name:
                repo_name = repo.split('/')[-1]  # default dir name: last URL segment
            get_ipython().system('cd {extensions_dir} && git clone {repo} {repo_name} && cd {repo_name} && git fetch')
    del cap
    print(f"\r📦 Installed '{len(extension_repo)}', Custom extensions!")
## List Models and stuff
# Human-readable summary of everything that landed in the model folders.
if detailed_download == "off":
    print("\n\n\033[33mIf you don't see any downloaded files, enable the 'Detailed Downloads' feature in the widget.")

if any(not file.endswith('.txt') for file in os.listdir(models_dir)):
    print("\n\033[33m➤ Models\033[0m")
    get_ipython().system("find {models_dir}/ -mindepth 1 ! -name '*.txt' -printf '%f\\n'")

if any(not file.endswith('.txt') for file in os.listdir(vaes_dir)):
    print("\n\033[33m➤ VAEs\033[0m")
    get_ipython().system("find {vaes_dir}/ -mindepth 1 ! -name '*.txt' -printf '%f\\n'")

if any(not file.endswith('.txt') and not os.path.isdir(os.path.join(embeddings_dir, file)) for file in os.listdir(embeddings_dir)):
    print("\n\033[33m➤ Embeddings\033[0m")
    get_ipython().system("find {embeddings_dir}/ -mindepth 1 -maxdepth 1 \\( -name '*.pt' -or -name '*.safetensors' \\) -printf '%f\\n'")

if any(not file.endswith('.txt') for file in os.listdir(loras_dir)):
    print("\n\033[33m➤ LoRAs\033[0m")
    get_ipython().system("find {loras_dir}/ -mindepth 1 ! -name '*.keep' -printf '%f\\n'")

print(f"\n\033[33m➤ Extensions\033[0m")
get_ipython().system("find {extensions_dir}/ -mindepth 1 -maxdepth 1 ! -name '*.txt' -printf '%f\\n'")

if any(not file.endswith(('.txt', '.yaml')) for file in os.listdir(control_dir)):
    print("\n\033[33m➤ ControlNet\033[0m")
    # sed trims the 'control_v11*_sd15_' prefix and '_fp16.safetensors' suffix for readability.
    get_ipython().system("find {control_dir}/ -mindepth 1 ! -name '*.yaml' -printf '%f\\n' | sed 's/^[^_]*_[^_]*_[^_]*_\\(.*\\)_fp16\\.safetensors$/\\1/'")
# === OTHER ===
# Download discord-tag wildcard packs for the Umi-AI-Wildcards extension.
files_umi = [
    "https://huggingface.co/NagisaNao/fast_repo/resolve/main/extensions/UmiWildacrd/discord/200_pan_gen.txt",
    "https://huggingface.co/NagisaNao/fast_repo/resolve/main/extensions/UmiWildacrd/discord/150_bra_gen.txt"
]
save_dir_path = f"{webui_path}/extensions/Umi-AI-Wildcards/wildcards/discord"

with capture.capture_output() as cap:
    for file in files_umi:
        get_ipython().system('aria2c --optimize-concurrent-downloads --console-log-level=error --summary-interval=10 -j5 -x16 -s16 -k1M -c -d {save_dir_path} {file}')
del cap
# === ONLY SAGEMAKER ===
# Reclaim disk space on SageMaker: drop unused conda envs and all package caches.
if env == "SageMaker Studio Lab":
    with capture.capture_output() as cap:
        get_ipython().system('rm -rf /home/studio-lab-user/.conda/envs/studiolab-safemode')
        get_ipython().system('rm -rf /home/studio-lab-user/.conda/envs/sagemaker-distribution')
        get_ipython().system('rm -rf /home/studio-lab-user/.conda/pkgs/cache')
        get_ipython().system('pip cache purge')
        get_ipython().system('rm -rf ~/.cache')
    del cap