Create download.py
download.py +76 -0
download.py
ADDED
@@ -0,0 +1,76 @@
+import os
+import requests
+from tqdm import tqdm
+import time
+
+def download_file(url, directory, chunk_size=1024, max_retries=5, retry_delay=5):
+    if not os.path.exists(directory):
+        os.makedirs(directory)
+
+    local_filename = os.path.join(directory, url.split('/')[-1].split('?')[0])  # drop the "?download=true" query string
+
+    headers = {}
+    mode = 'wb'
+    initial_pos = 0
+
+    if os.path.exists(local_filename):  # resume a partial download
+        initial_pos = os.path.getsize(local_filename)
+        headers['Range'] = f'bytes={initial_pos}-'  # request only the remaining bytes
+        mode = 'ab'
+
+    for attempt in range(max_retries):
+        try:
+            with requests.get(url, headers=headers, stream=True) as r:
+                r.raise_for_status()
+                total_size = int(r.headers.get('content-length', 0)) + initial_pos  # Content-Length covers only the remaining bytes on a resumed request
+
+                with open(local_filename, mode) as f:
+                    with tqdm(
+                        desc=f"Attempt {attempt+1}/{max_retries}: {local_filename}",
+                        total=total_size,
+                        unit='iB',
+                        unit_scale=True,
+                        unit_divisor=1024,
+                        initial=initial_pos
+                    ) as progress_bar:
+                        for chunk in r.iter_content(chunk_size=chunk_size):
+                            size = f.write(chunk)
+                            progress_bar.update(size)
+                            initial_pos += size
+
+            return local_filename
+
+        except (requests.exceptions.RequestException, IOError) as e:
+            print(f"Error while downloading {url}: {str(e)}")
+            print(f"Retrying in {retry_delay} seconds...")
+            time.sleep(retry_delay)
+
+            if os.path.exists(local_filename):  # pick up from whatever was written before the failure
+                initial_pos = os.path.getsize(local_filename)
+                headers['Range'] = f'bytes={initial_pos}-'
+                mode = 'ab'
+
+    raise Exception(f"Failed to download the file after {max_retries} attempts")
+
+# List of URLs and their target directories
+files_to_download = [
+    ("https://huggingface.co/daswer123/test/resolve/main/models/ControlNet/diffusion_pytorch_model.safetensors?download=true", "models/ControlNet"),
+    ("https://huggingface.co/daswer123/test/resolve/main/models/ControlNet/ip-adapter.bin?download=true", "models/ControlNet"),
+    ("https://huggingface.co/daswer123/test/resolve/main/models/Stable-diffusion/moxieDiffusionXL_v16.safetensors?download=true", "models/Stable-diffusion"),
+    ("https://huggingface.co/daswer123/test/resolve/main/models/VAE/sdxl_vae.safetensors?download=true", "models/VAE"),
+    ("https://huggingface.co/daswer123/test/resolve/main/models/ESRGAN/4x_NMKD-Siax_2000k.pth?download=true", "models/ESRGAN")
+]
+
+# Get the current working directory
+current_dir = os.getcwd()
+
+# Download each file
+for url, directory in files_to_download:
+    full_directory = os.path.join(current_dir, directory)
+    try:
+        downloaded_file = download_file(url, full_directory)
+        print(f"Successfully downloaded: {downloaded_file}")
+    except Exception as e:
+        print(f"Failed to download {url}: {str(e)}")
+
+print("All files processed!")
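
One assumption baked into the resume logic above is that the server honors the Range header. huggingface.co does, but a host that ignores it answers 200 with the full body, and appending that to an existing partial file corrupts the result; a file that is already complete also triggers a 416 on rerun, which raise_for_status turns into a retry loop. A minimal sketch of how the request step could guard against both cases follows; the helper name open_resumable and its return convention are assumptions for illustration, not part of the original commit.

import requests

def open_resumable(url, local_filename, initial_pos):
    # Hypothetical helper, not in the commit above: start a streaming GET and
    # fall back gracefully when the server does not support resuming.
    # Returns (response, mode, initial_pos); response is None if nothing is left to fetch.
    headers = {'Range': f'bytes={initial_pos}-'} if initial_pos else {}
    r = requests.get(url, headers=headers, stream=True)
    if r.status_code == 416:
        # Requested range not satisfiable: the local file already holds all the bytes.
        r.close()
        return None, None, initial_pos
    r.raise_for_status()
    if initial_pos and r.status_code != 206:
        # Server ignored the Range header and is resending the whole file,
        # so overwrite instead of appending to avoid a corrupted download.
        return r, 'wb', 0
    return r, 'ab' if initial_pos else 'wb', initial_pos

Inside download_file, the with requests.get(...) line would call this helper instead, treating a None response as "already downloaded" and using the returned mode and initial_pos for the open() call and the tqdm progress bar.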