# Written for my friend
# Libraries used
import os
import re
import gc
import time
import shutil
import socket
import zipfile
import requests
import threading
import subprocess
import pandas as pd
from pathlib import Path
from concurrent.futures import ProcessPoolExecutor
# import wandb
os.system("pip install nvidia-ml-py3")
os.chdir(f"/home/xlab-app-center")
os.system(f"git clone https://openi.pcl.ac.cn/2575044704/sd-v1.7.0 /home/xlab-app-center/stable-diffusion-webui")
os.system(f"cp /home/xlab-app-center/styles.csv /home/xlab-app-center/stable-diffusion-webui/styles.csv")
os.chdir(f"/home/xlab-app-center/stable-diffusion-webui")
os.system(f"git lfs install")
os.system(f"git reset --hard")
def create_directory(directory_path):
    if not os.path.exists(directory_path):
        os.makedirs(directory_path)

def download_file(url, destination_path):
    os.system(f'wget -O {destination_path} {url}')
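# Illustrative only: download_file() shells out to wget, so both arguments are
# passed to the shell as-is. A hypothetical call (placeholder URL, not part of
# this script) would look like:
#   download_file('https://hf-mirror.com/some/file.json', '/tmp/file.json')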
# Set up base paths
base_directory = '/home/xlab-app-center/stable-diffusion-webui'
configs_directory = os.path.join(base_directory, 'configs')
# Create the configs directory
create_directory(configs_directory)
# Download config files (alternative set, currently disabled)
#download_file('https://hf-mirror.com/datasets/ACCA225/openxlab/resolve/main/config-pub.json', os.path.join(configs_directory, 'config.json'))
#download_file('https://hf-mirror.com/datasets/ACCA225/openxlab/resolve/main/ui-config-pri2.json', os.path.join(configs_directory, 'ui-config-pri.json'))
#download_file('https://hf-mirror.com/datasets/ACCA225/openxlab/resolve/main/ui-config-pub.json', os.path.join(configs_directory, 'ui-config.json'))
# Download config files into the WebUI root directory
download_file('https://hf-mirror.com/datasets/ACCA225/openxlab/resolve/main/config-pub.json', os.path.join(base_directory, 'config.json'))
download_file('https://hf-mirror.com/datasets/ACCA225/openxlab/resolve/main/ui-config-pri2.json', os.path.join(base_directory, 'ui-config-pri.json'))
download_file('https://hf-mirror.com/datasets/ACCA225/openxlab/resolve/main/ui-config-pub.json', os.path.join(base_directory, 'ui-config.json'))
os.chdir(f"/home/xlab-app-center/stable-diffusion-webui/extensions")
show_shell_info = False
def run(command, cwd=None, desc=None, errdesc=None, custom_env=None, try_error: bool = True) -> str:
    global show_shell_info
    if desc is not None:
        print(desc)

    run_kwargs = {
        "args": command,
        "shell": True,
        "cwd": cwd,
        "env": os.environ if custom_env is None else custom_env,
        "encoding": 'utf8',
        "errors": 'ignore',
    }

    if not show_shell_info:
        run_kwargs["stdout"] = run_kwargs["stderr"] = subprocess.PIPE

    result = subprocess.run(**run_kwargs)

    if result.returncode != 0:
        error_bits = [
            f"{errdesc or 'Error running command'}.",
            f"Command: {command}",
            f"Error code: {result.returncode}",
        ]
        if result.stdout:
            error_bits.append(f"stdout: {result.stdout}")
        if result.stderr:
            error_bits.append(f"stderr: {result.stderr}")
        if try_error:
            print(RuntimeError("\n".join(error_bits)))
        else:
            raise RuntimeError("\n".join(error_bits))

    if show_shell_info:
        print(result.stdout or "")

    return (result.stdout or "")
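# Illustrative only: run() is used further down for shell commands such as
# mkdir -p and nginx. With the default try_error=True a failing command only
# prints the RuntimeError summary instead of stopping the script, e.g.
#   run("nvidia-smi", desc="Checking GPU", errdesc="nvidia-smi failed")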
def sdmodel():
    def download_with_aria2(urls):
        # Loop over all download links
        for url in urls:
            # Replace huggingface.co with the hf-mirror.com mirror
            url = url.replace('huggingface.co', 'hf-mirror.com')
            file_name = url.split('/')[-1].split('?')[0]
            # Build the aria2c command
            command = f'aria2c -x 16 -s 16 -c -k 1M -o "{file_name}" "{url}" -d /home/xlab-app-center/stable-diffusion-webui/models/Stable-diffusion'
            try:
                # Run the aria2c command via subprocess
                subprocess.run(command, shell=True, check=True)
            except subprocess.CalledProcessError as e:
                # Print the error, if any
                print(f'Error downloading {url}: {e}')

    # Pass the links to the function as one multi-line string
    download_urls = """
    https://huggingface.co/HiroHiroHirook/meinamix_meinaV8/resolve/main/meinamix_meinaV8.safetensors?download=true
    https://huggingface.co/datasets/ACCA225/kagglemodel/resolve/main/kaggle/temp/anything_v50.safetensors?download=true
    https://huggingface.co/datasets/ACCA225/kagglemodel/resolve/main/kaggle/temp/blindbox_v1_mix.safetensors?download=true
    https://huggingface.co/datasets/ACCA225/kagglemodel/resolve/main/kaggle/temp/cuteyukimixAdorable_naiV3style.safetensors?download=true
    https://huggingface.co/datasets/ACCA225/kagglemodel/resolve/main/kaggle/temp/ddicon_v10.ckpt?download=true
    https://huggingface.co/datasets/ACCA225/kagglemodel/resolve/main/kaggle/temp/meinamix_meinaV11.safetensors?download=true
    https://huggingface.co/datasets/ACCA225/kagglemodel/resolve/main/kaggle/temp/meinapastel_v6-inpainting.safetensors?download=true
    https://huggingface.co/datasets/ACCA225/kagglemodel/resolve/main/kaggle/temp/meinapastel_v6Pastel.safetensors?download=true
    https://huggingface.co/datasets/ACCA225/kagglemodel/resolve/main/kaggle/temp/midjourney_20230624181825.safetensors?download=true
    https://huggingface.co/datasets/ACCA225/kagglemodel/resolve/main/kaggle/temp/mixProV4_v4.safetensors?download=true
    https://huggingface.co/datasets/ACCA225/kagglemodel/resolve/main/kaggle/temp/qteamixQ_omegaFp16.safetensors?download=true
    https://huggingface.co/datasets/ACCA225/kagglemodel/resolve/main/kaggle/temp/uberRealisticPornMerge_urpmv13.safetensors?download=true
    https://huggingface.co/datasets/ACCA225/kagglemodel/resolve/main/kaggle/temp/velaMix_velaMixVersion2.safetensors?download=true
    """
    # Split the string into a list of URLs (strip per-line indentation)
    urls_list = [line.strip() for line in download_urls.strip().splitlines()]
    # Start the downloads
    download_with_aria2(urls_list)
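# The call below is intentionally commented out; uncomment it to pre-download
# the checkpoints listed above. aria2c's -c flag resumes interrupted downloads,
# so re-running it is safe.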
#sdmodel()
def mkdirs(path, exist_ok=True):
    if path and not Path(path).exists():
        os.makedirs(path, exist_ok=exist_ok)
plugins = [
    "https://gitcode.net/overbill1683/stable-diffusion-webui-localization-zh_Hans",
    "https://gitcode.net/ranting8323/multidiffusion-upscaler-for-automatic1111",
    "https://gitcode.net/ranting8323/adetailer",
    #"https://gitcode.net/ranting8323/sd-webui-prompt-all-in-one",
    "https://gitcode.net/ranting8323/sd-webui-inpaint-anything",
    "https://gitcode.net/ranting8323/a1111-sd-webui-tagcomplete",
    "https://gitcode.net/nightaway/sd-webui-infinite-image-browsing",
    "https://openi.pcl.ac.cn/2575044704/sd-extension-system-info",
    "https://openi.pcl.ac.cn/2575044704/batchlinks-webui",
    'https://openi.pcl.ac.cn/2575044704/stable-diffusion-webui-localization-zh_CN',
    #'https://openi.pcl.ac.cn/2575044704/sd-webui-supermerger',
    'https://openi.pcl.ac.cn/2575044704/sd-webui-lora-block-weight',
    'https://openi.pcl.ac.cn/2575044704/sd-skin-extension',
]
for plugin in plugins:
    os.system(f"git clone {plugin}")
os.makedirs('/home/xlab-app-center/stable-diffusion-webui/models/adetailer', exist_ok=True)
os.chdir(f"/home/xlab-app-center/stable-diffusion-webui/models/adetailer")
os.system(f"aria2c --console-log-level=error -c -x 16 -s 16 -k 1M https://hf-mirror.com/Bingsu/adetailer/resolve/main/hand_yolov8s.pt -d /home/xlab-app-center/stable-diffusion-webui/models/adetailer -o hand_yolov8s.pt")
os.system(f"aria2c --console-log-level=error -c -x 16 -s 16 -k 1M https://hf-mirror.com/Bingsu/adetailer/resolve/main/hand_yolov8n.pt -d /home/xlab-app-center/stable-diffusion-webui/models/adetailer -o hand_yolov8n.pt")
os.system(f"aria2c --console-log-level=error -c -x 16 -s 16 -k 1M https://hf-mirror.com/datasets/ACCC1380/private-model/resolve/main/kaggle/input/museum/131-half.safetensors -d /home/xlab-app-center/stable-diffusion-webui/models/Stable-diffusion -o [萌二次元]131-half.safetensors")
os.system(f"aria2c --console-log-level=error -c -x 16 -s 16 -k 1M https://hf-mirror.com/datasets/ACCC1380/private-model/resolve/main/ba.safetensors -d /home/xlab-app-center/stable-diffusion-webui/models/Lora -o ba.safetensors")
os.system(f"aria2c --console-log-level=error -c -x 16 -s 16 -k 1M https://hf-mirror.com/datasets/ACCC1380/private-model/resolve/main/racaco2.safetensors -d /home/xlab-app-center/stable-diffusion-webui/models/Lora -o racaco2.safetensors")
os.system(f"aria2c --console-log-level=error -c -x 16 -s 16 -k 1M https://hf-mirror.com/coinz/Add-detail/resolve/main/add_detail.safetensors -d /home/xlab-app-center/stable-diffusion-webui/models/Lora -o add_detail.safetensors")
os.system(f"aria2c --console-log-level=error -c -x 16 -s 16 -k 1M https://hf-mirror.com/datasets/VASVASVAS/vae/resolve/main/pastel-waifu-diffusion.vae.pt -d /home/xlab-app-center/stable-diffusion-webui/models/VAE -o pastel-waifu-diffusion.vae.pt")
# os.system(f"aria2c --console-log-level=error -c -x 16 -s 16 -k 1M https://download.openxlab.org.cn/models/camenduru/sdxl-refiner-1.0/weight//sd_xl_refiner_1.0.safetensors -d /home/xlab-app-center/stable-diffusion-webui/models/Stable-diffusion -o sd_xl_refiner_1.0.safetensors")
# os.system(f"aria2c --console-log-level=error -c -x 16 -s 16 -k 1M https://download.openxlab.org.cn/models/camenduru/cyber-realistic/weight//cyberrealistic_v32.safetensors -d /home/xlab-app-center/stable-diffusion-webui/models/Stable-diffusion -o cyberrealistic_v32.safetensors")
os.chdir(f"/home/xlab-app-center/stable-diffusion-webui")
print('webui launching...')
package_envs = [
    {"env": "STABLE_DIFFUSION_REPO", "url": os.environ.get('STABLE_DIFFUSION_REPO', "https://gitcode.net/overbill1683/stablediffusion")},
    {"env": "STABLE_DIFFUSION_XL_REPO", "url": os.environ.get('STABLE_DIFFUSION_XL_REPO', "https://gitcode.net/overbill1683/generative-models")},
    {"env": "K_DIFFUSION_REPO", "url": os.environ.get('K_DIFFUSION_REPO', "https://gitcode.net/overbill1683/k-diffusion")},
    {"env": "CODEFORMER_REPO", "url": os.environ.get('CODEFORMER_REPO', "https://gitcode.net/overbill1683/CodeFormer")},
    {"env": "BLIP_REPO", "url": os.environ.get('BLIP_REPO', "https://gitcode.net/overbill1683/BLIP")},
]
os.environ["PIP_INDEX_URL"] = "https://mirrors.aliyun.com/pypi/simple/"
for i in package_envs:
    os.environ[i["env"]] = i["url"]
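# These environment variables are read by the WebUI launcher when it clones its
# dependency repositories, so pointing them (together with PIP_INDEX_URL above)
# at gitcode.net / Aliyun mirrors keeps the first-time installation off
# huggingface.co and github.com.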
import wandb
import nvidia_smi
# Write the nginx reverse-proxy config file
def echoToFile(content: str, path: str):
    if path.find('/') >= 0:
        _path = '/'.join(path.split('/')[:-1])
        run(f'''mkdir -p {_path}''')
    with open(path, 'w') as sh:
        sh.write(content)
# Check whether a host:port is reachable
def check_service(host, port):
    try:
        socket.create_connection((host, port), timeout=5)
        return True
    except socket.error:
        return False
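# Illustrative only (not part of the original flow): check_service() could be
# used to poll until the WebUI answers on its default port (7860 unless --port
# is passed), for example:
#   while not check_service("localhost", 7860):
#       time.sleep(5)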
def localProxy():
    os.system('sudo apt install nginx -y')
    download_file('https://huggingface.co/datasets/ACCA225/openxlab/resolve/main/proxy_nginx.conf', os.path.join(base_directory, 'proxy_nginx.conf'))
    # NOTE: _server_port is never defined in this script; localProxy() is only
    # referenced from the commented-out block in start(), so it is never called.
    if not check_service('localhost', _server_port):
        run(f'''nginx -c /home/xlab-app-center/stable-diffusion-webui/proxy_nginx.conf''')
        run(f'''nginx -s reload''')
# Log in to WandB
os.system('wandb login 5c00964de1bb95ec1ab24869d4c523c59e0fb8e3')
nvidia_smi.nvmlInit()
# Initialize the WandB project
wandb.init(project="gpu-temperature-monitor")
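# monitor_gpu() is the script's main loop: it launches start() (which installs
# and runs the WebUI) in a background thread, then logs GPU temperature and
# utilization to WandB once a minute, forever.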
def monitor_gpu():
    start_thread = threading.Thread(target=start)
    start_thread.start()
    while True:
        try:
            # Get the GPU temperature (index 0 = first GPU)
            handle = nvidia_smi.nvmlDeviceGetHandleByIndex(0)
            gpu_temperature = nvidia_smi.nvmlDeviceGetTemperature(handle, nvidia_smi.NVML_TEMPERATURE_GPU)
            # Get the GPU utilization
            utilization = nvidia_smi.nvmlDeviceGetUtilizationRates(handle)
            gpu_usage = utilization.gpu
            # Log GPU temperature and utilization to WandB
            wandb.log({"GPU temperature": gpu_temperature, "GPU utilization": gpu_usage})
        except Exception as e:
            print(f"Error: {e}")
        time.sleep(60)
def zrok():
    # Not available
    pass
def start():
    os.system("pip install pyngrok")
    #try:
    #    print('Starting proxy')
    #    threading.Thread(target=localProxy, daemon=True).start()
    #except Exception as e:
    #    # Handle the exception here
    #    print(f"proxy An error occurred: {e}")
    try:
        # Install the environment: run launch.py once with --exit so it only
        # installs dependencies and then quits
        os.system(f"python launch.py --api --xformers --exit --enable-insecure-extension-access --gradio-queue --disable-safe-unpickle")
        #time.sleep(5)
        command = "python launch.py --api --xformers --enable-insecure-extension-access --gradio-queue --disable-safe-unpickle"
        command1 = "python launch.py --ui-config-file=/home/xlab-app-center/stable-diffusion-webui/ui-config-pri.json --api --xformers --enable-insecure-extension-access --gradio-queue --disable-safe-unpickle --port=7862 & ngrok http 7862 --authtoken=2bgnVJhlLqxUZhS1WyeaAtQA12F_6nQXrapUved4QSxYFn1cD"
        command2 = "python launch.py --ui-config-file=/home/xlab-app-center/stable-diffusion-webui/ui-config-pri.json --api --xformers --enable-insecure-extension-access --gradio-queue --disable-safe-unpickle --port=7863 & ngrok http 7863 --authtoken=2CXyNlT9xGfFoL5ruI6hQV20FNq_7tbmuzS9RtyNTkyEe1J6C"
        process = subprocess.Popen(command, shell=True)
        time.sleep(250)
        process = subprocess.Popen(command1, shell=True)
        time.sleep(100)
        os.system(f"{command2}")
    except Exception as e:
        # Handle the exception here
        print(f"Error while starting SD: {e}")
        try:
            #os.system(f"python launch.py --api --xformers --exit --enable-insecure-extension-access --ui-settings-file /home/xlab-app-center/config.json --ui-config-file /home/xlab-app-center/ui-config.json --gradio-queue --disable-safe-unpickle")
            os.system(f"python launch.py --api --xformers --enable-insecure-extension-access --gradio-queue --disable-safe-unpickle")
        except Exception as e:
            # Handle the exception here
            print(f"An error occurred: {e}")
net_thread = threading.Thread(target=zrok)
net_thread.start()
monitor_gpu()