Upload app20.py
Browse files
app20.py
ADDED
@@ -0,0 +1,410 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Written for a friend.
import os
# Bootstrap dependencies through the shell: monitoring libs, then the aria2
# downloader used for all model fetches, then the working directory.
os.system("pip install wandb nvidia-ml-py3")
os.system("apt update && apt install -y aria2")
os.system("mkdir -p /root/xlab-app-center/")
import threading
# Libraries used throughout the script.
from pathlib import Path
import subprocess
import pandas as pd
import shutil
import os
import time
import re
import gc
import requests
import zipfile
import threading
import time
import socket
from concurrent.futures import ProcessPoolExecutor
# import wandb
import base64
import subprocess
# NOTE(review): several modules (os, subprocess, threading, time) are imported
# more than once above; harmless but redundant.
def notbook():
    """Install JupyterLab + pyngrok and expose JupyterLab (port 8889) via ngrok.

    Side effects only: spawns two ngrok tunnel processes and one jupyter-lab
    process and returns immediately (the children keep running).
    NOTE(review): ngrok auth tokens are hard-coded in source below.
    """
    for pkg in ("jupyterlab", "pyngrok"):
        os.system(f"pip install {pkg}")
    # Command lines to launch.
    http_tunnel = "ngrok http 8889 --authtoken=2cdw5pJsqgsq1igQKeHIpNwTNG7_7LQD3jojKKQ7PzcnNkok5"
    edge_tunnel = ("ngrok tunnel --label edge=edghts_2doueG9BDi9rCXUGnsSsNbTab8H"
                   " --authtoken=2douOsr61tUyYwMVF3lfj9uZGoa_6FPJ2x1xhKPbL6z4euKkM"
                   " --region=ap http://localhost:8889")
    lab_cmd = ("jupyter-lab --no-browser --ip=0.0.0.0 --allow-root --notebook-dir=/"
               " --port=8889 --LabApp.allow_origin=* --LabApp.token= --LabApp.base_url=/8889/")
    # Start the plain HTTP tunnel, give ngrok time to settle, then bring up the
    # labeled edge tunnel and finally JupyterLab itself.
    proc_http = subprocess.Popen(http_tunnel, shell=True)
    time.sleep(30)
    proc_edge = subprocess.Popen(edge_tunnel, shell=True)
    proc_lab = subprocess.Popen(lab_cmd, shell=True)
    # (A combined ngrok+jupyter one-liner existed here and stays disabled.)
ngrok_token = "2bgXLjjKFvxfbuZFlR2NMZkvL8n_4WrK7f15FLtWb8p7v3oaF"
_ngrok_token = "2CXyNlT9xGfFoL5ruI6hQV20FNq_7tbmuzS9RtyNTkyEe1J6C"

# Fetch the webui checkout, copy in the saved prompt styles, and reset the
# working tree to a clean state.
os.system("pip install nvidia-ml-py3")
os.chdir(f"/root/xlab-app-center")
os.system(f"git clone https://openi.pcl.ac.cn/2575044704/stable-diffusion-webui-v1.8.0 /root/xlab-app-center/stable-diffusion-webui")

os.system(f"cp /root/xlab-app-center/styles.csv /root/xlab-app-center/stable-diffusion-webui/styles.csv")
os.chdir(f"/root/xlab-app-center/stable-diffusion-webui")
os.system(f"git lfs install")
os.system(f"git reset --hard")
import os

# NOTE(review): the two assignments below repeat the same token values as
# above; secrets are committed in source and should be rotated.
ngrok_token = "2bgXLjjKFvxfbuZFlR2NMZkvL8n_4WrK7f15FLtWb8p7v3oaF" # port 7862
_ngrok_token = "2CXyNlT9xGfFoL5ruI6hQV20FNq_7tbmuzS9RtyNTkyEe1J6C" # port 7863
def remove_restart():
    """Overwrite the webui footer page and settings module with patched copies.

    Side effects only: changes the process cwd twice and shells out to
    rm/wget for each file.
    """
    mirror = "https://hf-mirror.com/datasets/ACCA225/openxlab/resolve/main"
    for subdir, fname in (("html", "footer.html"), ("modules", "ui_settings.py")):
        os.chdir(f"/root/xlab-app-center/stable-diffusion-webui/{subdir}")
        os.system(f"rm ./{fname} && wget -O {fname} {mirror}/{fname}")
remove_restart()
def create_directory(directory_path):
    """Create *directory_path* (including parents) if it does not exist.

    Fix: the original exists()-then-makedirs() pattern had a race — a
    directory created between the check and the call raised FileExistsError.
    ``exist_ok=True`` makes the operation atomic and idempotent.
    """
    os.makedirs(directory_path, exist_ok=True)
def download_file(url, destination_path):
    """Download *url* to *destination_path* using wget.

    Fix: both arguments are now shell-quoted, so URLs or paths containing
    spaces/metacharacters can no longer break (or inject into) the command
    line the original f-string interpolated verbatim.
    """
    import shlex
    os.system(f'wget -O {shlex.quote(str(destination_path))} {shlex.quote(str(url))}')
# Base paths.
base_directory = '/root/xlab-app-center/stable-diffusion-webui'
configs_directory = os.path.join(base_directory, 'configs')

# Create the configs folder.
create_directory(configs_directory)

# Download configuration files.
#download_file('https://hf-mirror.com/datasets/ACCA225/openxlab/resolve/main/config-pub.json', os.path.join(configs_directory, 'config.json'))
#download_file('https://hf-mirror.com/datasets/ACCA225/openxlab/resolve/main/ui-config-pri2.json', os.path.join(configs_directory, 'ui-config-pri.json'))
#download_file('https://hf-mirror.com/datasets/ACCA225/openxlab/resolve/main/ui-config-pub.json', os.path.join(configs_directory, 'ui-config.json'))
# Download configuration files into the webui root (the configs/ variant
# above is kept disabled).
download_file('https://hf-mirror.com/datasets/ACCA225/openxlab/resolve/main/config-pub.json', os.path.join(base_directory, 'config.json'))
download_file('https://hf-mirror.com/datasets/ACCA225/openxlab/resolve/main/ui-config-pri2.json', os.path.join(base_directory, 'ui-config-pri.json'))
download_file('https://hf-mirror.com/datasets/ACCA225/openxlab/resolve/main/ui-config-pub.json', os.path.join(base_directory, 'ui-config.json'))
download_file('https://hf-mirror.com/datasets/ACCA225/openxlab/resolve/main/ui-config-pub2.json', os.path.join(base_directory, 'ui-config2.json'))

# Drop two bundled extensions before re-cloning the desired set below.
os.chdir(f"/root/xlab-app-center/stable-diffusion-webui/extensions")
os.system(f"rm -rf ./batchlinks-webui")
os.system(f"rm -rf ./sd-extension-system-info")
# When False, run() below captures child stdout/stderr instead of echoing it.
show_shell_info = False
def run(command, cwd=None, desc=None, errdesc=None, custom_env=None, try_error: bool = True) -> str:
    """Execute *command* through the shell and return its captured stdout.

    When the module-level ``show_shell_info`` flag is False the child's
    stdout/stderr are captured; otherwise they stream to the console and the
    captured result is also printed. On a non-zero exit code a RuntimeError is
    printed (``try_error=True``, the default) or raised (``try_error=False``).
    """
    global show_shell_info
    if desc is not None:
        print(desc)

    environment = custom_env if custom_env is not None else os.environ
    # None leaves the streams attached to the console (subprocess default).
    capture = None if show_shell_info else subprocess.PIPE
    result = subprocess.run(
        args=command,
        shell=True,
        cwd=cwd,
        env=environment,
        encoding='utf8',
        errors='ignore',
        stdout=capture,
        stderr=capture,
    )

    if result.returncode != 0:
        details = [
            f"{errdesc or 'Error running command'}.",
            f"Command: {command}",
            f"Error code: {result.returncode}",
        ]
        if result.stdout:
            details.append(f"stdout: {result.stdout}")
        if result.stderr:
            details.append(f"stderr: {result.stderr}")
        message = "\n".join(details)
        if try_error:
            # Best-effort mode: report the failure but keep going.
            print(RuntimeError(message))
        else:
            raise RuntimeError(message)

    if show_shell_info:
        print(result.stdout or "")
    return result.stdout or ""
130 |
+
import subprocess
|
def sdmodel():
    """Download a fixed list of Stable Diffusion checkpoints with aria2c.

    Each huggingface.co URL is rewritten to the hf-mirror.com mirror, the
    output filename is taken from the last path segment (query string
    stripped), and failures are reported without aborting the batch.
    """
    # One URL per line.
    raw_links = """
https://huggingface.co/HiroHiroHirook/meinamix_meinaV8/resolve/main/meinamix_meinaV8.safetensors?download=true
https://huggingface.co/datasets/ACCA225/kagglemodel/resolve/main/kaggle/temp/anything_v50.safetensors?download=true
https://huggingface.co/datasets/ACCA225/kagglemodel/resolve/main/kaggle/temp/blindbox_v1_mix.safetensors?download=true
https://huggingface.co/datasets/ACCA225/kagglemodel/resolve/main/kaggle/temp/cuteyukimixAdorable_naiV3style.safetensors?download=true
https://huggingface.co/datasets/ACCA225/kagglemodel/resolve/main/kaggle/temp/ddicon_v10.ckpt?download=true
https://huggingface.co/datasets/ACCA225/kagglemodel/resolve/main/kaggle/temp/meinamix_meinaV11.safetensors?download=true
https://huggingface.co/datasets/ACCA225/kagglemodel/resolve/main/kaggle/temp/meinapastel_v6-inpainting.safetensors?download=true
https://huggingface.co/datasets/ACCA225/kagglemodel/resolve/main/kaggle/temp/meinapastel_v6Pastel.safetensors?download=true
https://huggingface.co/datasets/ACCA225/kagglemodel/resolve/main/kaggle/temp/midjourney_20230624181825.safetensors?download=true
https://huggingface.co/datasets/ACCA225/kagglemodel/resolve/main/kaggle/temp/mixProV4_v4.safetensors?download=true
https://huggingface.co/datasets/ACCA225/kagglemodel/resolve/main/kaggle/temp/qteamixQ_omegaFp16.safetensors?download=true
https://huggingface.co/datasets/ACCA225/kagglemodel/resolve/main/kaggle/temp/uberRealisticPornMerge_urpmv13.safetensors?download=true
https://huggingface.co/datasets/ACCA225/kagglemodel/resolve/main/kaggle/temp/velaMix_velaMixVersion2.safetensors?download=true
"""
    dest = "/root/xlab-app-center/stable-diffusion-webui/models/Stable-diffusion"
    for link in raw_links.strip().splitlines():
        mirror_url = link.replace('huggingface.co', 'hf-mirror.com')
        out_name = mirror_url.split('/')[-1].split('?')[0]
        cmd = f'aria2c -x 16 -s 16 -c -k 1M -o "{out_name}" "{mirror_url}" -d {dest}'
        try:
            subprocess.run(cmd, shell=True, check=True)
        except subprocess.CalledProcessError as e:
            print(f'Error downloading {mirror_url}: {e}')
#sdmodel()
def mkdirs(path, exist_ok=True):
    """Create *path* (with parents) unless it is falsy or already present."""
    if not path:
        return
    if Path(path).exists():
        return
    os.makedirs(path, exist_ok=exist_ok)
# Extension repositories to clone into ./extensions (commented entries are
# deliberately disabled).
plugins = [
    "https://gitcode.net/overbill1683/stable-diffusion-webui-localization-zh_Hans",
    "https://gitcode.net/ranting8323/multidiffusion-upscaler-for-automatic1111",
    "https://gitcode.net/ranting8323/adetailer",
    "https://gitcode.net/ranting8323/sd-webui-inpaint-anything",
    "https://gitcode.net/ranting8323/a1111-sd-webui-tagcomplete",
    "https://gitcode.net/nightaway/sd-webui-infinite-image-browsing",
    "https://openi.pcl.ac.cn/2575044704/sd-extension-system-info",
    "https://openi.pcl.ac.cn/2575044704/batchlinks-webui",
    'https://openi.pcl.ac.cn/2575044704/stable-diffusion-webui-localization-zh_CN',
    'https://openi.pcl.ac.cn/2575044704/sd-webui-lora-block-weight',
    'https://openi.pcl.ac.cn/2575044704/sd-skin-extension',
    #"https://kkgithub.com/thygate/stable-diffusion-webui-depthmap-script.git",
    #"https://gitcode.net/ranting8323/sd-webui-controlnet",
    #"https://kkgithub.com/SignalFlagZ/sd-webui-civbrowser.git",
    #"https://kkgithub.com/continue-revolution/sd-webui-animatediff.git",
    #"https://kkkkgithub.com/aigc-apps/sd-webui-EasyPhoto.git",
    "https://kkgithub.com/Iyashinouta/sd-model-downloader.git",
    "https://kkgithub.com/fkunn1326/openpose-editor.git",
    #"https://kkgithub.com/zero01101/openOutpaint-webUI-extension.git",
    #"https://kkgithub.com/LonicaMewinsky/gif2gif.git",
    #"https://kkgithub.com/modelscope/facechain.git",
    "https://openi.pcl.ac.cn/2575044704/sd-webui-controlnet",
    "https://openi.pcl.ac.cn/2575044704/sd-webui-agent-scheduler",
    "https://openi.pcl.ac.cn/2575044704/sd-webui-depth-lib"
]
# Suffixes appended to the ngrok tokens to form pwd_1/pwd_2 below.
suffix_1 = "Nyan9"
suffix_2 = "BiliBili"
# Currently unused (the clone loop for it is commented out).
needed_extensions = [
    "https://openi.pcl.ac.cn/2575044704/sd-extension-system-info",
    "https://openi.pcl.ac.cn/2575044704/batchlinks-webui",
]
for plugin in plugins:
    os.system(f"git clone {plugin}")
#for plugin in needed_extensions:
#    os.system(f"git clone {plugin}")
# Auxiliary models: adetailer hand detectors, one checkpoint, LoRAs and a VAE.
os.makedirs('/root/xlab-app-center/stable-diffusion-webui/models/adetailer', exist_ok=True)
os.chdir(f"/root/xlab-app-center/stable-diffusion-webui/models/adetailer")
os.system(f"aria2c --console-log-level=error -c -x 16 -s 16 -k 1M https://hf-mirror.com/Bingsu/adetailer/resolve/main/hand_yolov8s.pt -d /root/xlab-app-center/stable-diffusion-webui/models/adetailer -o hand_yolov8s.pt")
os.system(f"aria2c --console-log-level=error -c -x 16 -s 16 -k 1M https://hf-mirror.com/Bingsu/adetailer/resolve/main/hand_yolov8n.pt -d /root/xlab-app-center/stable-diffusion-webui/models/adetailer -o hand_yolov8n.pt")
os.system(f"aria2c --console-log-level=error -c -x 16 -s 16 -k 1M https://hf-mirror.com/datasets/ACCC1380/private-model/resolve/main/kaggle/input/museum/131-half.safetensors -d /root/xlab-app-center/stable-diffusion-webui/models/Stable-diffusion -o [萌二次元]131-half.safetensors")
# Remove any checkpoint whose filename matches *porn*.
os.system(f"rm /root/xlab-app-center/stable-diffusion-webui/models/Stable-diffusion/*porn*.safetensors")

os.system(f"aria2c --console-log-level=error -c -x 16 -s 16 -k 1M https://hf-mirror.com/datasets/ACCC1380/private-model/resolve/main/ba.safetensors -d /root/xlab-app-center/stable-diffusion-webui/models/Lora -o ba.safetensors")
os.system(f"aria2c --console-log-level=error -c -x 16 -s 16 -k 1M https://hf-mirror.com/datasets/ACCC1380/private-model/resolve/main/racaco2.safetensors -d /root/xlab-app-center/stable-diffusion-webui/models/Lora -o racaco2.safetensors")
os.system(f"aria2c --console-log-level=error -c -x 16 -s 16 -k 1M https://hf-mirror.com/coinz/Add-detail/resolve/main/add_detail.safetensors -d /root/xlab-app-center/stable-diffusion-webui/models/Lora -o add_detail.safetensors")
os.system(f"aria2c --console-log-level=error -c -x 16 -s 16 -k 1M https://hf-mirror.com/datasets/VASVASVAS/vae/resolve/main/pastel-waifu-diffusion.vae.pt -d /root/xlab-app-center/stable-diffusion-webui/models/VAE -o pastel-waifu-diffusion.vae.pt")
# os.system(f"aria2c --console-log-level=error -c -x 16 -s 16 -k 1M https://download.openxlab.org.cn/models/camenduru/sdxl-refiner-1.0/weight//sd_xl_refiner_1.0.safetensors -d /root/xlab-app-center/stable-diffusion-webui/models/Stable-diffusion -o sd_xl_refiner_1.0.safetensors")
# os.system(f"aria2c --console-log-level=error -c -x 16 -s 16 -k 1M https://download.openxlab.org.cn/models/camenduru/cyber-realistic/weight//cyberrealistic_v32.safetensors -d /root/xlab-app-center/stable-diffusion-webui/models/Stable-diffusion -o cyberrealistic_v32.safetensors")
os.chdir(f"/root/xlab-app-center/stable-diffusion-webui")
print('webui launching...')
# Token + suffix concatenations; base64-encoded further below into apitoken.
pwd_1 = ngrok_token + suffix_1
pwd_2 = _ngrok_token + suffix_2
# Point the webui launcher's git dependencies at gitcode mirrors (existing
# environment values win over the mirror defaults).
package_envs = [
    {"env": "STABLE_DIFFUSION_REPO", "url": os.environ.get('STABLE_DIFFUSION_REPO', "https://gitcode.net/overbill1683/stablediffusion")},
    {"env": "STABLE_DIFFUSION_XL_REPO", "url": os.environ.get('STABLE_DIFFUSION_XL_REPO', "https://gitcode.net/overbill1683/generative-models")},
    {"env": "K_DIFFUSION_REPO", "url": os.environ.get('K_DIFFUSION_REPO', "https://gitcode.net/overbill1683/k-diffusion")},
    {"env": "CODEFORMER_REPO", "url": os.environ.get('CODEFORMER_REPO', "https://gitcode.net/overbill1683/CodeFormer")},
    {"env": "BLIP_REPO", "url": os.environ.get('BLIP_REPO', "https://gitcode.net/overbill1683/BLIP")},
]
os.environ["PIP_INDEX_URL"] = "https://mirrors.aliyun.com/pypi/simple/"
for i in package_envs:
    os.environ[i["env"]] = i["url"]

import os
import time
import wandb
import nvidia_smi
import os
import time
import wandb
+
# nginx 反向代理配置文件
|
247 |
+
def echoToFile(content:str,path:str):
|
248 |
+
if path.find('/') >= 0:
|
249 |
+
_path = '/'.join(path.split('/')[:-1])
|
250 |
+
run(f'''mkdir -p {_path}''')
|
251 |
+
with open(path,'w') as sh:
|
252 |
+
sh.write(content)
|
# Network check.
def check_service(host, port):
    """Return True if a TCP connection to (host, port) succeeds within 5 s.

    Fix: the original leaked the probe socket on success (create_connection's
    result was never closed); the context manager guarantees it is closed.
    """
    try:
        with socket.create_connection((host, port), timeout=5):
            return True
    except socket.error:
        return False
def localProxy(server_port=7860):
    """Install nginx and start it with the downloaded reverse-proxy config.

    Fix: the original read an undefined global ``_server_port`` and raised
    NameError when called; the port is now a keyword parameter (defaulting to
    7860 — NOTE(review): confirm the intended probe port against the nginx
    config). nginx is only launched if nothing already answers on that port;
    the reload runs unconditionally.
    """
    os.system('sudo apt install nginx -y')
    download_file('https://huggingface.co/datasets/ACCA225/openxlab/resolve/main/proxy_nginx.conf', os.path.join(base_directory, 'proxy_nginx.conf'))
    if not check_service('localhost', server_port):
        run('nginx -c /root/xlab-app-center/stable-diffusion-webui/proxy_nginx.conf')
    run('nginx -s reload')
# WandB login -- NOTE(review): API key is committed in source; rotate it.
os.system('wandb login 5c00964de1bb95ec1ab24869d4c523c59e0fb8e3')
# Initialize NVML so monitor_gpu() can query temperature/utilization.
nvidia_smi.nvmlInit()
# Initialize the WandB project used for GPU telemetry.
wandb.init(project="gpu-temperature-monitor")

import threading
import requests
def check_website(url):
    """GET *url*; report the result, and launch notbook() on any non-200 status.

    Request failures (DNS, refused, timeout, ...) are reported but swallowed.
    """
    try:
        response = requests.get(url)
    except requests.exceptions.RequestException as e:
        print(f"An error occurred while trying to access {url}: {e}")
        return
    if response.status_code == 200:
        print(f"The website {url} is accessible.")
    else:
        print(f"The website {url} returned status code: {response.status_code}")
        # Site unhealthy: bring up the fallback JupyterLab/ngrok stack.
        notbook()
def run_check_periodically(url, interval):
    """Check *url* immediately, then re-schedule this check every *interval* s.

    Each invocation arms a fresh one-shot Timer, so the chain runs until the
    process exits (timers are never cancelled).
    """
    check_website(url)
    timer = threading.Timer(interval, run_check_periodically, args=(url, interval))
    timer.start()
290 |
+
|
291 |
+
|
292 |
+
import os
|
293 |
+
import threading
|
294 |
+
import wandb
|
295 |
+
import time
|
def monitor_gpu():
    """Launch start() in a thread, then log GPU stats to WandB once a minute.

    Never returns. NVML errors are reported and the loop keeps running.
    """
    threading.Thread(target=start).start()
    while True:
        try:
            # First (index 0) GPU only.
            device = nvidia_smi.nvmlDeviceGetHandleByIndex(0)
            temperature = nvidia_smi.nvmlDeviceGetTemperature(device, nvidia_smi.NVML_TEMPERATURE_GPU)
            usage = nvidia_smi.nvmlDeviceGetUtilizationRates(device).gpu
            # Metric names are kept as-is (they are runtime identifiers in WandB).
            wandb.log({"GPU 温度": temperature, "GPU 使用率": usage})
        except Exception as e:
            print(f"Error: {e}")
        time.sleep(60)
def zrok():
    """Placeholder for the frp-based tunnel; all frpc commands stay disabled."""
    # os.system("wget https://hf-mirror.com/datasets/ACCA225/frp-1/resolve/main/frpc")
    # os.system("chmod +x ./frpc")
    # os.system("./frpc -f bsnehoeds17ett71i2cr79ujoa7ndkxz:13738352 & ./frpc -f bsnehoeds17ett71i2cr79ujoa7ndkxz:13738353")
    return None
# Base64-encode the two token+suffix passwords built earlier.
_pwd_1 = base64.b64encode(pwd_1.encode()).decode()
_pwd_2 = base64.b64encode(pwd_2.encode()).decode()

# "<b64(pwd_1)>:<b64(pwd_2)>" -- presumably consumed elsewhere as a credential
# pair; it is not referenced again in this file. TODO(review): confirm.
apitoken = "{}:{}".format(_pwd_1, _pwd_2)
def start():
    """Install webui deps, launch the webui on port 7861, then start ngrok.

    Side effects only. A second, plain launch (mirror HF endpoint) runs after
    the first attempt's try-block finishes or fails.
    """
    # notbook()
    # The local nginx proxy startup stayed disabled in the original:
    # try:
    #     threading.Thread(target=localProxy, daemon=True).start()
    # except Exception as e:
    #     print(f"proxy An error occurred: {e}")
    try:
        # First pass only installs the environment and exits (--exit).
        os.system("python launch.py --api --xformers --exit --enable-insecure-extension-access --gradio-queue --disable-safe-unpickle")

        main_cmd = "python launch.py --api --xformers --enable-insecure-extension-access --gradio-queue --disable-safe-unpickle --port=7861"
        # Alternate launches on 7862/7863 are defined but never started below.
        alt_cmd_7862 = "python launch.py --ui-config=/root/xlab-app-center/stable-diffusion-webui/ui-config2.json --api --xformers --enable-insecure-extension-access --gradio-queue --disable-safe-unpickle --port=7862"
        alt_cmd_7863 = "python launch.py --ui-config=/root/xlab-app-center/stable-diffusion-webui/ui-config2.json --api --xformers --enable-insecure-extension-access --gradio-queue --disable-safe-unpickle --port=7863"
        webui_proc = subprocess.Popen(main_cmd, shell=True)
        # Give the webui time to come up before tunnelling to it.
        time.sleep(250)
        # subprocess.Popen(alt_cmd_7862, shell=True)
        time.sleep(100)
        print("启动ngrok中")
        # NOTE(review): second auth token hard-coded in source.
        ngrok_cmd = f"ngrok http 8888 --authtoken={ngrok_token} --region=ap & ngrok http 7863 --authtoken=2H8A6felNlmleMqWBF0UUt2C72e_7mqBMs1nVibXbfnK28KrX --region=ap"
        # subprocess.Popen(alt_cmd_7863, shell=True)

        def launch_ngrok():
            # os.system blocks on the foreground ngrok of the pair.
            try:
                os.system(ngrok_cmd)
            except Exception as e:
                print(f"ngrok启动失败: {e}")

        launch_ngrok()
    except Exception as e:
        print(f"启动SD发生错误: {e}")
    try:
        os.system("HF_ENDPOINT=https://hf-mirror.com python launch.py --api --xformers --enable-insecure-extension-access --gradio-queue --disable-safe-unpickle")
    except Exception as e:
        print(f"An error occurred: {e}")
368 |
+
# Instance keep-alive (see session_saver)
|
369 |
+
import time
|
370 |
+
|
def session_saver():
    """Keep the GPU busy so the hosting platform does not reclaim the instance.

    Requires cupy; attempts a pip install on first ImportError and gives up if
    that also fails. Once running it never returns: five 10000x10000 GPU
    matmuls, then a ten-minute sleep, forever.
    """
    try:
        import cupy as cp
    except ImportError:
        print("cupy模块未安装,正在安装...")
        try:
            import pip
            pip.main(['install', 'cupy'])
            import cupy as cp
        except ImportError:
            print("无法安装模块,请确保已正确安装pip。")
            return

    while True:
        for _ in range(5):
            lhs = cp.random.rand(10000, 10000)
            rhs = cp.random.rand(10000, 10000)
            product = cp.dot(lhs, rhs)
            print("实例保活:", product)
            # Free the device memory before the next round.
            del lhs, rhs, product
            cp.cuda.Stream.null.synchronize()
        time.sleep(600)
# Background threads: the cupy keep-alive is disabled; the zrok thread is a
# no-op stub (its body is commented out).
#keepliving_thread = threading.Thread(target=session_saver)
net_thread = threading.Thread(target=zrok)

net_thread.start()
#keepliving_thread.start()

# Website to monitor
#website_url = "https://surely-definite-monarch.ngrok-free.app"

# Interval between checks (seconds)
#interval = 1800

# Run the first check immediately
#run_check_periodically(website_url, interval)

# Blocks forever: starts the webui thread and logs GPU stats in a loop.
monitor_gpu()
#keepliving_thread.join()