Add files using upload-large-folder tool
Browse files- README.md +2 -0
- loss_params.pth +3 -0
- pyproject.toml +40 -0
- requirements.txt +2 -0
- src/edge_maxxing_4090_newdream.egg-info/PKG-INFO +24 -0
- src/edge_maxxing_4090_newdream.egg-info/SOURCES.txt +11 -0
- src/edge_maxxing_4090_newdream.egg-info/dependency_links.txt +1 -0
- src/edge_maxxing_4090_newdream.egg-info/entry_points.txt +2 -0
- src/edge_maxxing_4090_newdream.egg-info/requires.txt +19 -0
- src/edge_maxxing_4090_newdream.egg-info/top_level.txt +3 -0
- src/loss.py +45 -0
- src/main.py +59 -0
- src/pipeline.py +962 -0
- src/scheduler_config.json +3 -0
- uv.lock +935 -0
README.md
ADDED
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
1 |
+
Heban olla vogola
|
2 |
+
|
loss_params.pth
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:bf0b76d04764575883fcd146c7ae8e0edb9e049ad55db61640e37deffa652fdb
|
3 |
+
size 3120
|
pyproject.toml
ADDED
@@ -0,0 +1,40 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
[build-system]
requires = ["setuptools >= 75.0"]
build-backend = "setuptools.build_meta"

[project]
name = "edge-maxxing-4090-newdream"
description = "An edge-maxxing model submission for the 4090 newdream contest"
requires-python = ">=3.10,<3.11"
version = "7"
# NOTE: "accelerate==0.31.0" was previously listed twice; the duplicate is removed.
dependencies = [
    "diffusers==0.28.2",
    "onediff==1.2.0",
    "onediffx==1.2.0",
    "accelerate==0.31.0",
    "numpy==1.26.4",
    "xformers==0.0.25.post1",
    "triton==2.2.0",
    "transformers==4.41.2",
    "omegaconf==2.3.0",
    "torch==2.2.2",
    "torchvision==0.17.2",
    "edge-maxxing-pipelines @ git+https://github.com/womboai/edge-maxxing@e713a4f52ca3ea8c1d57ff63c1c08470f4fd0a60#subdirectory=pipelines",
    "huggingface-hub==0.25.2",
    "oneflow",
    "setuptools>=75.2.0",
]

[project.scripts]
start_inference = "main:main"

[tool.edge-maxxing]
models = [
    "stablediffusionapi/newdream-sdxl-20",
    "RobertML/cached-pipe-03",
]

# oneflow has no PyPI wheel for this CUDA build; pin the release artifact directly.
[tool.uv.sources]
oneflow = { url = "https://github.com/siliconflow/oneflow_releases/releases/download/community_cu118/oneflow-0.9.1.dev20240802%2Bcu118-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl" }
|
40 |
+
|
requirements.txt
ADDED
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
1 |
+
# Specify any extra options here, like --find-links, --pre, etc. Avoid specifying dependencies here and specify them in pyproject.toml instead
|
2 |
+
https://github.com/siliconflow/oneflow_releases/releases/download/community_cu118/oneflow-0.9.1.dev20240802%2Bcu118-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl
|
src/edge_maxxing_4090_newdream.egg-info/PKG-INFO
ADDED
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
Metadata-Version: 2.1
|
2 |
+
Name: edge-maxxing-4090-newdream
|
3 |
+
Version: 7
|
4 |
+
Summary: An edge-maxxing model submission for the 4090 newdream contest
|
5 |
+
Requires-Python: <3.11,>=3.10
|
6 |
+
Requires-Dist: diffusers==0.28.2
|
7 |
+
Requires-Dist: onediff==1.2.0
|
8 |
+
Requires-Dist: onediffx==1.2.0
|
9 |
+
Requires-Dist: accelerate==0.31.0
|
10 |
+
Requires-Dist: numpy==1.26.4
|
11 |
+
Requires-Dist: xformers==0.0.25.post1
|
12 |
+
Requires-Dist: triton==2.2.0
|
13 |
+
Requires-Dist: transformers==4.41.2
|
14 |
+
Requires-Dist: accelerate==0.31.0
|
15 |
+
Requires-Dist: omegaconf==2.3.0
|
16 |
+
Requires-Dist: torch==2.2.2
|
17 |
+
Requires-Dist: torchvision==0.17.2
|
18 |
+
Requires-Dist: edge-maxxing-pipelines@ git+https://github.com/womboai/edge-maxxing@8d8ff45863416484b5b4bc547782591bbdfc696a#subdirectory=pipelines
|
19 |
+
Requires-Dist: huggingface-hub==0.25.2
|
20 |
+
Requires-Dist: oneflow
|
21 |
+
Requires-Dist: setuptools>=75.2.0
|
22 |
+
Requires-Dist: bitsandbytes>=0.44.1
|
23 |
+
Requires-Dist: stable-fast
|
24 |
+
Requires-Dist: tomesd>=0.1.3
|
src/edge_maxxing_4090_newdream.egg-info/SOURCES.txt
ADDED
@@ -0,0 +1,11 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
README.md
|
2 |
+
pyproject.toml
|
3 |
+
src/loss.py
|
4 |
+
src/main.py
|
5 |
+
src/pipeline.py
|
6 |
+
src/edge_maxxing_4090_newdream.egg-info/PKG-INFO
|
7 |
+
src/edge_maxxing_4090_newdream.egg-info/SOURCES.txt
|
8 |
+
src/edge_maxxing_4090_newdream.egg-info/dependency_links.txt
|
9 |
+
src/edge_maxxing_4090_newdream.egg-info/entry_points.txt
|
10 |
+
src/edge_maxxing_4090_newdream.egg-info/requires.txt
|
11 |
+
src/edge_maxxing_4090_newdream.egg-info/top_level.txt
|
src/edge_maxxing_4090_newdream.egg-info/dependency_links.txt
ADDED
@@ -0,0 +1 @@
|
|
|
|
|
1 |
+
|
src/edge_maxxing_4090_newdream.egg-info/entry_points.txt
ADDED
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
1 |
+
[console_scripts]
|
2 |
+
start_inference = main:main
|
src/edge_maxxing_4090_newdream.egg-info/requires.txt
ADDED
@@ -0,0 +1,19 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
diffusers==0.28.2
|
2 |
+
onediff==1.2.0
|
3 |
+
onediffx==1.2.0
|
4 |
+
accelerate==0.31.0
|
5 |
+
numpy==1.26.4
|
6 |
+
xformers==0.0.25.post1
|
7 |
+
triton==2.2.0
|
8 |
+
transformers==4.41.2
|
9 |
+
accelerate==0.31.0
|
10 |
+
omegaconf==2.3.0
|
11 |
+
torch==2.2.2
|
12 |
+
torchvision==0.17.2
|
13 |
+
edge-maxxing-pipelines@ git+https://github.com/womboai/edge-maxxing@8d8ff45863416484b5b4bc547782591bbdfc696a#subdirectory=pipelines
|
14 |
+
huggingface-hub==0.25.2
|
15 |
+
oneflow
|
16 |
+
setuptools>=75.2.0
|
17 |
+
bitsandbytes>=0.44.1
|
18 |
+
stable-fast
|
19 |
+
tomesd>=0.1.3
|
src/edge_maxxing_4090_newdream.egg-info/top_level.txt
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
loss
|
2 |
+
main
|
3 |
+
pipeline
|
src/loss.py
ADDED
@@ -0,0 +1,45 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
_A=None
|
2 |
+
import torch
|
3 |
+
from tqdm import tqdm
|
4 |
+
class LossSchedulerModel(torch.nn.Module):
    """Learned linear combination of the initial latent and past noise predictions.

    ``wx`` is a per-step weight applied to the initial latent ``xT``; row ``t``
    of ``we`` weights the noise predictions accumulated up to step ``t``.
    """

    def __init__(self, wx, we):
        super().__init__()
        # wx must be 1-D (num_steps,) and we square 2-D (num_steps, num_steps).
        assert len(wx.shape) == 1 and len(we.shape) == 2
        num_steps = wx.shape[0]
        assert num_steps == we.shape[0] and num_steps == we.shape[1]
        self.register_parameter('wx', torch.nn.Parameter(wx))
        self.register_parameter('we', torch.nn.Parameter(we))

    def forward(self, t, xT, e_prev):
        # At step index t exactly t + 1 noise predictions must have been seen.
        assert t - len(e_prev) + 1 == 0
        combined = xT * self.wx[t]
        for noise_pred, weight in zip(e_prev, self.we[t]):
            combined = combined + noise_pred * weight
        return combined.to(xT.dtype)
|
10 |
+
class LossScheduler:
    """Scheduler driven by a trained ``LossSchedulerModel``.

    Exposes the minimal diffusers-style scheduler interface
    (``set_timesteps`` / ``scale_model_input`` / ``step``) so it can be dropped
    into a diffusion sampling loop in place of a regular scheduler.
    """

    def __init__(self, timesteps, model):
        self.timesteps = timesteps
        self.model = model
        # Interface attributes expected by diffusers pipelines.
        self.init_noise_sigma = 1.0
        self.order = 1

    @staticmethod
    def load(path):
        # Checkpoint layout is (timesteps, wx, we) — mirrors save().
        timesteps, wx, we = torch.load(path, map_location='cpu')
        return LossScheduler(timesteps, LossSchedulerModel(wx, we))

    def save(self, path):
        torch.save((self.timesteps, self.model.wx, self.model.we), path)

    def set_timesteps(self, num_inference_steps, device='cuda'):
        # Reset per-run state. num_inference_steps is ignored: the timestep
        # schedule is fixed at construction time.
        self.xT = None
        self.e_prev = []
        self.t_prev = -1
        self.model = self.model.to(device)
        self.timesteps = self.timesteps.to(device)

    def scale_model_input(self, sample, *args, **kwargs):
        # Identity: the model consumes unscaled samples.
        return sample

    @torch.no_grad()
    def step(self, model_output, timestep, sample, *args, **kwargs):
        step_index = self.timesteps.tolist().index(timestep)
        # Steps must be consumed strictly in schedule order.
        assert self.t_prev == -1 or step_index == self.t_prev + 1
        if self.t_prev == -1:
            # First step of a run: remember the initial latent.
            self.xT = sample
        self.e_prev.append(model_output)
        prev_sample = self.model(step_index, self.xT, self.e_prev)
        if step_index + 1 == len(self.timesteps):
            # Final step: clear state for the next sampling run.
            self.xT = None
            self.e_prev = []
            self.t_prev = -1
        else:
            self.t_prev = step_index
        return (prev_sample,)
|
25 |
+
class SchedulerWrapper:
    """Wraps a base scheduler to either record stepping trajectories or to
    delegate stepping to a trained ``LossScheduler``.

    While ``loss_scheduler`` is None, every ``step()`` call is forwarded to the
    base scheduler and its (sample, noise prediction, output) triple is cached
    per timestep for later training; after ``load_loss_params()`` the learned
    scheduler takes over stepping entirely.
    """

    def __init__(self, scheduler, loss_params_path='loss_params.pth'):
        self.scheduler = scheduler
        # Per-timestep caches: input samples, noise predictions, step outputs.
        self.catch_x = {}
        self.catch_e = {}
        self.catch_x_ = {}
        self.loss_scheduler = None
        self.loss_params_path = loss_params_path

    def set_timesteps(self, num_inference_steps, **kwargs):
        if self.loss_scheduler is None:
            result = self.scheduler.set_timesteps(num_inference_steps, **kwargs)
            self.timesteps = self.scheduler.timesteps
        else:
            result = self.loss_scheduler.set_timesteps(num_inference_steps, **kwargs)
            self.timesteps = self.loss_scheduler.timesteps
        # Noise sigma and solver order always mirror the base scheduler.
        self.init_noise_sigma = self.scheduler.init_noise_sigma
        self.order = self.scheduler.order
        return result

    def step(self, model_output, timestep, sample, **kwargs):
        if self.loss_scheduler is not None:
            # Learned scheduler performs the update directly.
            return self.loss_scheduler.step(model_output, timestep, sample, **kwargs)
        result = self.scheduler.step(model_output, timestep, sample, **kwargs)
        # Cache this transition for training, keyed by the scalar timestep.
        key = timestep.tolist()
        if key not in self.catch_x:
            self.catch_x[key] = []
            self.catch_e[key] = []
            self.catch_x_[key] = []
        self.catch_x[key].append(sample.clone().detach().cpu())
        self.catch_e[key].append(model_output.clone().detach().cpu())
        self.catch_x_[key].append(result[0].clone().detach().cpu())
        return result

    def scale_model_input(self, sample, timestep):
        return sample

    def add_noise(self, original_samples, noise, timesteps):
        return self.scheduler.add_noise(original_samples, noise, timesteps)

    def get_path(self):
        # Timesteps in descending order (sampling runs from high t to low t).
        keys = sorted(self.catch_x, reverse=True)
        xs, es = [], []
        for key in keys:
            xs.append(torch.cat(self.catch_x[key], dim=0))
            es.append(torch.cat(self.catch_e[key], dim=0))
        # Append the outputs of the last timestep so xs carries one more entry
        # than es: the terminal state of each recorded trajectory.
        xs.append(torch.cat(self.catch_x_[keys[-1]], dim=0))
        return (
            torch.tensor(keys, dtype=torch.int32),
            torch.stack(xs),
            torch.stack(es),
        )

    def load_loss_params(self):
        timesteps, wx, we = torch.load(self.loss_params_path, map_location='cpu')
        self.loss_model = LossSchedulerModel(wx, we)
        self.loss_scheduler = LossScheduler(timesteps, self.loss_model)

    def prepare_loss(self, num_accelerate_steps=15):
        # num_accelerate_steps is accepted for interface compatibility but unused.
        self.load_loss_params()
|
src/main.py
ADDED
@@ -0,0 +1,59 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import atexit
|
2 |
+
from io import BytesIO
|
3 |
+
from multiprocessing.connection import Listener
|
4 |
+
from os import chmod, remove
|
5 |
+
from os.path import abspath, exists
|
6 |
+
from pathlib import Path
|
7 |
+
|
8 |
+
import torch
|
9 |
+
|
10 |
+
from PIL.JpegImagePlugin import JpegImageFile
|
11 |
+
from pipelines.models import TextToImageRequest
|
12 |
+
|
13 |
+
from pipeline import load_pipeline, infer
|
14 |
+
|
15 |
+
SOCKET = abspath(Path(__file__).parent.parent / "inferences.sock")
|
16 |
+
|
17 |
+
|
18 |
+
def at_exit():
    # Registered via atexit in main(): release cached GPU memory on shutdown.
    torch.cuda.empty_cache()
|
20 |
+
|
21 |
+
|
22 |
+
def _serve(connection, pipeline):
    # Answer requests on one connection until the peer closes it (EOFError).
    while True:
        try:
            raw = connection.recv_bytes()
        except EOFError:
            print(f"Inference socket exiting")

            return

        request = TextToImageRequest.model_validate_json(raw.decode("utf-8"))

        image = infer(request, pipeline)

        buffer = BytesIO()
        image.save(buffer, format=JpegImageFile.format)

        connection.send_bytes(buffer.getvalue())


def main():
    """Serve text-to-image inference over a unix-domain socket.

    Loads the pipeline once, then accepts a single connection and answers each
    JSON-encoded TextToImageRequest with JPEG-encoded image bytes until EOF.
    """
    atexit.register(at_exit)

    print(f"Loading pipeline")
    pipeline = load_pipeline()

    print(f"Pipeline loaded, creating socket at '{SOCKET}'")

    # Remove a stale socket file left over from a previous run.
    if exists(SOCKET):
        remove(SOCKET)

    with Listener(SOCKET) as listener:
        # World-accessible so a client running as another user can connect.
        chmod(SOCKET, 0o777)

        print(f"Awaiting connections")
        with listener.accept() as connection:
            print(f"Connected")
            _serve(connection, pipeline)
|
56 |
+
|
57 |
+
|
58 |
+
if __name__ == '__main__':
|
59 |
+
main()
|
src/pipeline.py
ADDED
@@ -0,0 +1,962 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import torch
|
2 |
+
from PIL import Image
|
3 |
+
from pipelines.models import TextToImageRequest
|
4 |
+
from torch import Generator
|
5 |
+
import json
|
6 |
+
from diffusers import StableDiffusionXLPipeline, DDIMScheduler
|
7 |
+
import inspect
|
8 |
+
from typing import Any, Callable, Dict, List, Optional, Tuple, Union
|
9 |
+
from loss import SchedulerWrapper
|
10 |
+
from onediffx import compile_pipe,load_pipe
|
11 |
+
# Import necessary components
|
12 |
+
from transformers import (
|
13 |
+
CLIPImageProcessor,
|
14 |
+
CLIPTextModel,
|
15 |
+
CLIPTextModelWithProjection,
|
16 |
+
CLIPTokenizer,
|
17 |
+
CLIPVisionModelWithProjection,
|
18 |
+
)
|
19 |
+
|
20 |
+
from diffusers.callbacks import MultiPipelineCallbacks, PipelineCallback
|
21 |
+
from diffusers.image_processor import PipelineImageInput, VaeImageProcessor
|
22 |
+
from diffusers.loaders import (
|
23 |
+
FromSingleFileMixin,
|
24 |
+
IPAdapterMixin,
|
25 |
+
StableDiffusionXLLoraLoaderMixin,
|
26 |
+
TextualInversionLoaderMixin,
|
27 |
+
)
|
28 |
+
from diffusers.models import AutoencoderKL, ImageProjection, UNet2DConditionModel
|
29 |
+
from diffusers.models.attention_processor import (
|
30 |
+
AttnProcessor2_0,
|
31 |
+
FusedAttnProcessor2_0,
|
32 |
+
XFormersAttnProcessor,
|
33 |
+
)
|
34 |
+
from diffusers.models.lora import adjust_lora_scale_text_encoder
|
35 |
+
from diffusers.schedulers import KarrasDiffusionSchedulers
|
36 |
+
from diffusers.utils import (
|
37 |
+
USE_PEFT_BACKEND,
|
38 |
+
deprecate,
|
39 |
+
is_invisible_watermark_available,
|
40 |
+
is_torch_xla_available,
|
41 |
+
logging,
|
42 |
+
replace_example_docstring,
|
43 |
+
scale_lora_layers,
|
44 |
+
unscale_lora_layers,
|
45 |
+
)
|
46 |
+
from diffusers.utils.torch_utils import randn_tensor
|
47 |
+
from diffusers.pipelines.pipeline_utils import DiffusionPipeline, StableDiffusionMixin
|
48 |
+
from diffusers.pipelines.stable_diffusion_xl.pipeline_output import StableDiffusionXLPipelineOutput
|
49 |
+
|
50 |
+
# Import watermark if available
|
51 |
+
if is_invisible_watermark_available():
|
52 |
+
from .watermark import StableDiffusionXLWatermarker
|
53 |
+
|
54 |
+
# Check for XLA availability
|
55 |
+
if is_torch_xla_available():
|
56 |
+
import torch_xla.core.xla_model as xm
|
57 |
+
XLA_AVAILABLE = True
|
58 |
+
else:
|
59 |
+
XLA_AVAILABLE = False
|
60 |
+
|
61 |
+
logger = logging.get_logger(__name__)
|
62 |
+
|
63 |
+
# Constants
|
64 |
+
EXAMPLE_DOC_STRING = """
|
65 |
+
Examples:
|
66 |
+
```py
|
67 |
+
>>> import torch
|
68 |
+
>>> from diffusers import StableDiffusionXLPipeline
|
69 |
+
|
70 |
+
>>> pipe = StableDiffusionXLPipeline.from_pretrained(
|
71 |
+
>>> "stabilityai/stable-diffusion-xl-base-1.0",
|
72 |
+
>>> torch_dtype=torch.float16
|
73 |
+
>>> )
|
74 |
+
>>> pipe = pipe.to("cuda")
|
75 |
+
|
76 |
+
>>> prompt = "a photo of an astronaut riding a horse on mars"
|
77 |
+
>>> image = pipe(prompt).images[0]
|
78 |
+
```
|
79 |
+
"""
|
80 |
+
|
81 |
+
# Helper functions
|
82 |
+
def rescale_noise_cfg(noise_cfg, noise_pred_text, guidance_rescale=0.0):
    """Rescale the CFG-combined noise toward the text prediction's statistics.

    The guided noise is scaled so its per-sample std matches that of the
    text-conditioned prediction, then blended back with the original by
    ``guidance_rescale`` (0.0 returns ``noise_cfg`` unchanged). Matches the
    diffusers helper of the same name.
    """
    std_text = noise_pred_text.std(dim=list(range(1, noise_pred_text.ndim)), keepdim=True)
    std_cfg = noise_cfg.std(dim=list(range(1, noise_cfg.ndim)), keepdim=True)
    rescaled = noise_cfg * (std_text / std_cfg)
    # Linear blend between the rescaled and original guided noise.
    return guidance_rescale * rescaled + (1 - guidance_rescale) * noise_cfg
|
89 |
+
|
90 |
+
# Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.retrieve_timesteps
|
91 |
+
def retrieve_timesteps(
    scheduler,
    num_inference_steps: Optional[int] = None,
    device: Optional[Union[str, torch.device]] = None,
    timesteps: Optional[List[int]] = None,
    sigmas: Optional[List[float]] = None,
    **kwargs,
):
    """Call ``scheduler.set_timesteps`` and return ``(timesteps, num_inference_steps)``.

    At most one of a custom ``timesteps`` or ``sigmas`` schedule may be given;
    otherwise ``num_inference_steps`` is forwarded as-is. A ValueError is
    raised when both customs are supplied, or when the scheduler's
    ``set_timesteps`` does not accept the requested custom argument.
    """
    if timesteps is not None and sigmas is not None:
        raise ValueError("Only one of `timesteps` or `sigmas` can be passed. Please choose one to set custom values")

    if timesteps is not None:
        if "timesteps" not in set(inspect.signature(scheduler.set_timesteps).parameters.keys()):
            raise ValueError(
                f"The current scheduler class {scheduler.__class__}'s `set_timesteps` does not support custom"
                f" timestep schedules. Please check whether you are using the correct scheduler."
            )
        scheduler.set_timesteps(timesteps=timesteps, device=device, **kwargs)
        num_inference_steps = len(scheduler.timesteps)
    elif sigmas is not None:
        if "sigmas" not in set(inspect.signature(scheduler.set_timesteps).parameters.keys()):
            raise ValueError(
                f"The current scheduler class {scheduler.__class__}'s `set_timesteps` does not support custom"
                f" sigmas schedules. Please check whether you are using the correct scheduler."
            )
        scheduler.set_timesteps(sigmas=sigmas, device=device, **kwargs)
        num_inference_steps = len(scheduler.timesteps)
    else:
        scheduler.set_timesteps(num_inference_steps, device=device, **kwargs)

    return scheduler.timesteps, num_inference_steps
|
125 |
+
|
126 |
+
|
127 |
+
class StableDiffusionXLPipeline_new(
|
128 |
+
DiffusionPipeline,
|
129 |
+
StableDiffusionMixin,
|
130 |
+
FromSingleFileMixin,
|
131 |
+
StableDiffusionXLLoraLoaderMixin,
|
132 |
+
TextualInversionLoaderMixin,
|
133 |
+
IPAdapterMixin,
|
134 |
+
):
|
135 |
+
|
136 |
+
model_cpu_offload_seq = "text_encoder->text_encoder_2->image_encoder->unet->vae"
|
137 |
+
_optional_components = [
|
138 |
+
"tokenizer",
|
139 |
+
"tokenizer_2",
|
140 |
+
"text_encoder",
|
141 |
+
"text_encoder_2",
|
142 |
+
"image_encoder",
|
143 |
+
"feature_extractor",
|
144 |
+
]
|
145 |
+
_callback_tensor_inputs = [
|
146 |
+
"latents",
|
147 |
+
"prompt_embeds",
|
148 |
+
"negative_prompt_embeds",
|
149 |
+
"add_text_embeds",
|
150 |
+
"add_time_ids",
|
151 |
+
"negative_pooled_prompt_embeds",
|
152 |
+
"negative_add_time_ids",
|
153 |
+
]
|
154 |
+
|
155 |
+
def __init__(
    self,
    vae: AutoencoderKL,
    text_encoder: CLIPTextModel,
    text_encoder_2: CLIPTextModelWithProjection,
    tokenizer: CLIPTokenizer,
    tokenizer_2: CLIPTokenizer,
    unet: UNet2DConditionModel,
    scheduler: KarrasDiffusionSchedulers,
    image_encoder: CLIPVisionModelWithProjection = None,
    feature_extractor: CLIPImageProcessor = None,
    force_zeros_for_empty_prompt: bool = True,
    add_watermarker: Optional[bool] = None,
):
    """Register SDXL components and set up image processing and watermarking."""
    super().__init__()

    self.register_modules(
        vae=vae,
        text_encoder=text_encoder,
        text_encoder_2=text_encoder_2,
        tokenizer=tokenizer,
        tokenizer_2=tokenizer_2,
        unet=unet,
        scheduler=scheduler,
        image_encoder=image_encoder,
        feature_extractor=feature_extractor,
    )
    self.register_to_config(force_zeros_for_empty_prompt=force_zeros_for_empty_prompt)
    # Latent-to-pixel scale: one factor of 2 per VAE down-block.
    self.vae_scale_factor = 2 ** (len(self.vae.config.block_out_channels) - 1)
    self.image_processor = VaeImageProcessor(vae_scale_factor=self.vae_scale_factor)

    self.default_sample_size = self.unet.config.sample_size

    # Watermark by default only when the optional dependency is installed.
    add_watermarker = add_watermarker if add_watermarker is not None else is_invisible_watermark_available()
    self.watermark = StableDiffusionXLWatermarker() if add_watermarker else None
|
194 |
+
|
195 |
+
def encode_prompt(
|
196 |
+
self,
|
197 |
+
prompt: str,
|
198 |
+
prompt_2: Optional[str] = None,
|
199 |
+
device: Optional[torch.device] = None,
|
200 |
+
num_images_per_prompt: int = 1,
|
201 |
+
do_classifier_free_guidance: bool = True,
|
202 |
+
negative_prompt: Optional[str] = None,
|
203 |
+
negative_prompt_2: Optional[str] = None,
|
204 |
+
prompt_embeds: Optional[torch.Tensor] = None,
|
205 |
+
negative_prompt_embeds: Optional[torch.Tensor] = None,
|
206 |
+
pooled_prompt_embeds: Optional[torch.Tensor] = None,
|
207 |
+
negative_pooled_prompt_embeds: Optional[torch.Tensor] = None,
|
208 |
+
lora_scale: Optional[float] = None,
|
209 |
+
clip_skip: Optional[int] = None,
|
210 |
+
):
|
211 |
+
device = device or self._execution_device
|
212 |
+
|
213 |
+
# set lora scale so that monkey patched LoRA
|
214 |
+
# function of text encoder can correctly access it
|
215 |
+
if lora_scale is not None and isinstance(self, StableDiffusionXLLoraLoaderMixin):
|
216 |
+
self._lora_scale = lora_scale
|
217 |
+
|
218 |
+
# dynamically adjust the LoRA scale
|
219 |
+
if self.text_encoder is not None:
|
220 |
+
if not USE_PEFT_BACKEND:
|
221 |
+
adjust_lora_scale_text_encoder(self.text_encoder, lora_scale)
|
222 |
+
else:
|
223 |
+
scale_lora_layers(self.text_encoder, lora_scale)
|
224 |
+
|
225 |
+
if self.text_encoder_2 is not None:
|
226 |
+
if not USE_PEFT_BACKEND:
|
227 |
+
adjust_lora_scale_text_encoder(self.text_encoder_2, lora_scale)
|
228 |
+
else:
|
229 |
+
scale_lora_layers(self.text_encoder_2, lora_scale)
|
230 |
+
|
231 |
+
prompt = [prompt] if isinstance(prompt, str) else prompt
|
232 |
+
|
233 |
+
if prompt is not None:
|
234 |
+
batch_size = len(prompt)
|
235 |
+
else:
|
236 |
+
batch_size = prompt_embeds.shape[0]
|
237 |
+
|
238 |
+
# Define tokenizers and text encoders
|
239 |
+
tokenizers = [self.tokenizer, self.tokenizer_2] if self.tokenizer is not None else [self.tokenizer_2]
|
240 |
+
text_encoders = (
|
241 |
+
[self.text_encoder, self.text_encoder_2] if self.text_encoder is not None else [self.text_encoder_2]
|
242 |
+
)
|
243 |
+
|
244 |
+
if prompt_embeds is None:
|
245 |
+
prompt_2 = prompt_2 or prompt
|
246 |
+
prompt_2 = [prompt_2] if isinstance(prompt_2, str) else prompt_2
|
247 |
+
|
248 |
+
# textual inversion: process multi-vector tokens if necessary
|
249 |
+
prompt_embeds_list = []
|
250 |
+
prompts = [prompt, prompt_2]
|
251 |
+
for prompt, tokenizer, text_encoder in zip(prompts, tokenizers, text_encoders):
|
252 |
+
if isinstance(self, TextualInversionLoaderMixin):
|
253 |
+
prompt = self.maybe_convert_prompt(prompt, tokenizer)
|
254 |
+
|
255 |
+
text_inputs = tokenizer(
|
256 |
+
prompt,
|
257 |
+
padding="max_length",
|
258 |
+
max_length=tokenizer.model_max_length,
|
259 |
+
truncation=True,
|
260 |
+
return_tensors="pt",
|
261 |
+
)
|
262 |
+
|
263 |
+
text_input_ids = text_inputs.input_ids
|
264 |
+
untruncated_ids = tokenizer(prompt, padding="longest", return_tensors="pt").input_ids
|
265 |
+
|
266 |
+
if untruncated_ids.shape[-1] >= text_input_ids.shape[-1] and not torch.equal(
|
267 |
+
text_input_ids, untruncated_ids
|
268 |
+
):
|
269 |
+
removed_text = tokenizer.batch_decode(untruncated_ids[:, tokenizer.model_max_length - 1 : -1])
|
270 |
+
logger.warning(
|
271 |
+
"The following part of your input was truncated because CLIP can only handle sequences up to"
|
272 |
+
f" {tokenizer.model_max_length} tokens: {removed_text}"
|
273 |
+
)
|
274 |
+
|
275 |
+
prompt_embeds = text_encoder(text_input_ids.to(device), output_hidden_states=True)
|
276 |
+
|
277 |
+
# We are only ALWAYS interested in the pooled output of the final text encoder
|
278 |
+
pooled_prompt_embeds = prompt_embeds[0]
|
279 |
+
if clip_skip is None:
|
280 |
+
prompt_embeds = prompt_embeds.hidden_states[-2]
|
281 |
+
else:
|
282 |
+
# "2" because SDXL always indexes from the penultimate layer.
|
283 |
+
prompt_embeds = prompt_embeds.hidden_states[-(clip_skip + 2)]
|
284 |
+
|
285 |
+
prompt_embeds_list.append(prompt_embeds)
|
286 |
+
|
287 |
+
prompt_embeds = torch.concat(prompt_embeds_list, dim=-1)
|
288 |
+
|
289 |
+
# get unconditional embeddings for classifier free guidance
|
290 |
+
zero_out_negative_prompt = negative_prompt is None and self.config.force_zeros_for_empty_prompt
|
291 |
+
if do_classifier_free_guidance and negative_prompt_embeds is None and zero_out_negative_prompt:
|
292 |
+
negative_prompt_embeds = torch.zeros_like(prompt_embeds)
|
293 |
+
negative_pooled_prompt_embeds = torch.zeros_like(pooled_prompt_embeds)
|
294 |
+
elif do_classifier_free_guidance and negative_prompt_embeds is None:
|
295 |
+
negative_prompt = negative_prompt or ""
|
296 |
+
negative_prompt_2 = negative_prompt_2 or negative_prompt
|
297 |
+
|
298 |
+
# normalize str to list
|
299 |
+
negative_prompt = batch_size * [negative_prompt] if isinstance(negative_prompt, str) else negative_prompt
|
300 |
+
negative_prompt_2 = (
|
301 |
+
batch_size * [negative_prompt_2] if isinstance(negative_prompt_2, str) else negative_prompt_2
|
302 |
+
)
|
303 |
+
|
304 |
+
uncond_tokens: List[str]
|
305 |
+
if prompt is not None and type(prompt) is not type(negative_prompt):
|
306 |
+
raise TypeError(
|
307 |
+
f"`negative_prompt` should be the same type to `prompt`, but got {type(negative_prompt)} !="
|
308 |
+
f" {type(prompt)}."
|
309 |
+
)
|
310 |
+
elif batch_size != len(negative_prompt):
|
311 |
+
raise ValueError(
|
312 |
+
f"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:"
|
313 |
+
f" {prompt} has batch size {batch_size}. Please make sure that passed `negative_prompt` matches"
|
314 |
+
" the batch size of `prompt`."
|
315 |
+
)
|
316 |
+
else:
|
317 |
+
uncond_tokens = [negative_prompt, negative_prompt_2]
|
318 |
+
|
319 |
+
negative_prompt_embeds_list = []
|
320 |
+
for negative_prompt, tokenizer, text_encoder in zip(uncond_tokens, tokenizers, text_encoders):
|
321 |
+
if isinstance(self, TextualInversionLoaderMixin):
|
322 |
+
negative_prompt = self.maybe_convert_prompt(negative_prompt, tokenizer)
|
323 |
+
|
324 |
+
max_length = prompt_embeds.shape[1]
|
325 |
+
uncond_input = tokenizer(
|
326 |
+
negative_prompt,
|
327 |
+
padding="max_length",
|
328 |
+
max_length=max_length,
|
329 |
+
truncation=True,
|
330 |
+
return_tensors="pt",
|
331 |
+
)
|
332 |
+
|
333 |
+
negative_prompt_embeds = text_encoder(
|
334 |
+
uncond_input.input_ids.to(device),
|
335 |
+
output_hidden_states=True,
|
336 |
+
)
|
337 |
+
# We are only ALWAYS interested in the pooled output of the final text encoder
|
338 |
+
negative_pooled_prompt_embeds = negative_prompt_embeds[0]
|
339 |
+
negative_prompt_embeds = negative_prompt_embeds.hidden_states[-2]
|
340 |
+
|
341 |
+
negative_prompt_embeds_list.append(negative_prompt_embeds)
|
342 |
+
|
343 |
+
negative_prompt_embeds = torch.concat(negative_prompt_embeds_list, dim=-1)
|
344 |
+
|
345 |
+
if self.text_encoder_2 is not None:
|
346 |
+
prompt_embeds = prompt_embeds.to(dtype=self.text_encoder_2.dtype, device=device)
|
347 |
+
else:
|
348 |
+
prompt_embeds = prompt_embeds.to(dtype=self.unet.dtype, device=device)
|
349 |
+
|
350 |
+
bs_embed, seq_len, _ = prompt_embeds.shape
|
351 |
+
# duplicate text embeddings for each generation per prompt, using mps friendly method
|
352 |
+
prompt_embeds = prompt_embeds.repeat(1, num_images_per_prompt, 1)
|
353 |
+
prompt_embeds = prompt_embeds.view(bs_embed * num_images_per_prompt, seq_len, -1)
|
354 |
+
|
355 |
+
if do_classifier_free_guidance:
|
356 |
+
# duplicate unconditional embeddings for each generation per prompt, using mps friendly method
|
357 |
+
seq_len = negative_prompt_embeds.shape[1]
|
358 |
+
|
359 |
+
if self.text_encoder_2 is not None:
|
360 |
+
negative_prompt_embeds = negative_prompt_embeds.to(dtype=self.text_encoder_2.dtype, device=device)
|
361 |
+
else:
|
362 |
+
negative_prompt_embeds = negative_prompt_embeds.to(dtype=self.unet.dtype, device=device)
|
363 |
+
|
364 |
+
negative_prompt_embeds = negative_prompt_embeds.repeat(1, num_images_per_prompt, 1)
|
365 |
+
negative_prompt_embeds = negative_prompt_embeds.view(batch_size * num_images_per_prompt, seq_len, -1)
|
366 |
+
|
367 |
+
pooled_prompt_embeds = pooled_prompt_embeds.repeat(1, num_images_per_prompt).view(
|
368 |
+
bs_embed * num_images_per_prompt, -1
|
369 |
+
)
|
370 |
+
if do_classifier_free_guidance:
|
371 |
+
negative_pooled_prompt_embeds = negative_pooled_prompt_embeds.repeat(1, num_images_per_prompt).view(
|
372 |
+
bs_embed * num_images_per_prompt, -1
|
373 |
+
)
|
374 |
+
|
375 |
+
if self.text_encoder is not None:
|
376 |
+
if isinstance(self, StableDiffusionXLLoraLoaderMixin) and USE_PEFT_BACKEND:
|
377 |
+
# Retrieve the original scale by scaling back the LoRA layers
|
378 |
+
unscale_lora_layers(self.text_encoder, lora_scale)
|
379 |
+
|
380 |
+
if self.text_encoder_2 is not None:
|
381 |
+
if isinstance(self, StableDiffusionXLLoraLoaderMixin) and USE_PEFT_BACKEND:
|
382 |
+
# Retrieve the original scale by scaling back the LoRA layers
|
383 |
+
unscale_lora_layers(self.text_encoder_2, lora_scale)
|
384 |
+
|
385 |
+
return prompt_embeds, negative_prompt_embeds, pooled_prompt_embeds, negative_pooled_prompt_embeds
|
386 |
+
|
387 |
+
# Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.encode_image
|
388 |
+
def encode_image(self, image, device, num_images_per_prompt, output_hidden_states=None):
    # Encode an image with the CLIP image encoder for IP-Adapter conditioning,
    # returning a (conditional, unconditional) embedding pair duplicated
    # `num_images_per_prompt` times along the batch dimension.
    dtype = next(self.image_encoder.parameters()).dtype

    # Non-tensor inputs (e.g. PIL images) are preprocessed into pixel tensors first.
    if not isinstance(image, torch.Tensor):
        image = self.feature_extractor(image, return_tensors="pt").pixel_values

    image = image.to(device=device, dtype=dtype)
    if output_hidden_states:
        # Use the penultimate hidden state rather than the pooled embedding;
        # the unconditional branch encodes an all-zeros image of the same shape.
        image_enc_hidden_states = self.image_encoder(image, output_hidden_states=True).hidden_states[-2]
        image_enc_hidden_states = image_enc_hidden_states.repeat_interleave(num_images_per_prompt, dim=0)
        uncond_image_enc_hidden_states = self.image_encoder(
            torch.zeros_like(image), output_hidden_states=True
        ).hidden_states[-2]
        uncond_image_enc_hidden_states = uncond_image_enc_hidden_states.repeat_interleave(
            num_images_per_prompt, dim=0
        )
        return image_enc_hidden_states, uncond_image_enc_hidden_states
    else:
        # Pooled image embeddings; the unconditional embedding is simply zeros.
        image_embeds = self.image_encoder(image).image_embeds
        image_embeds = image_embeds.repeat_interleave(num_images_per_prompt, dim=0)
        uncond_image_embeds = torch.zeros_like(image_embeds)

        return image_embeds, uncond_image_embeds
|
411 |
+
|
412 |
+
# Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_ip_adapter_image_embeds
|
413 |
+
def prepare_ip_adapter_image_embeds(
    self, ip_adapter_image, ip_adapter_image_embeds, device, num_images_per_prompt, do_classifier_free_guidance
):
    # Build one embedding tensor per IP-Adapter, either by encoding raw
    # `ip_adapter_image` inputs or by reusing precomputed
    # `ip_adapter_image_embeds`. With classifier-free guidance each returned
    # tensor is [negative, positive] concatenated along the batch dimension.
    image_embeds = []
    if do_classifier_free_guidance:
        negative_image_embeds = []
    if ip_adapter_image_embeds is None:
        if not isinstance(ip_adapter_image, list):
            ip_adapter_image = [ip_adapter_image]

        # One input image is required per installed IP-Adapter projection layer.
        if len(ip_adapter_image) != len(self.unet.encoder_hid_proj.image_projection_layers):
            raise ValueError(
                f"`ip_adapter_image` must have same length as the number of IP Adapters. Got {len(ip_adapter_image)} images and {len(self.unet.encoder_hid_proj.image_projection_layers)} IP Adapters."
            )

        for single_ip_adapter_image, image_proj_layer in zip(
            ip_adapter_image, self.unet.encoder_hid_proj.image_projection_layers
        ):
            # `ImageProjection` layers consume pooled embeddings; every other
            # projection type consumes encoder hidden states instead.
            output_hidden_state = not isinstance(image_proj_layer, ImageProjection)
            single_image_embeds, single_negative_image_embeds = self.encode_image(
                single_ip_adapter_image, device, 1, output_hidden_state
            )

            image_embeds.append(single_image_embeds[None, :])
            if do_classifier_free_guidance:
                negative_image_embeds.append(single_negative_image_embeds[None, :])
    else:
        for single_image_embeds in ip_adapter_image_embeds:
            if do_classifier_free_guidance:
                # Precomputed embeds are stored as [negative, positive]; split them.
                single_negative_image_embeds, single_image_embeds = single_image_embeds.chunk(2)
                negative_image_embeds.append(single_negative_image_embeds)
            image_embeds.append(single_image_embeds)

    ip_adapter_image_embeds = []
    for i, single_image_embeds in enumerate(image_embeds):
        # Duplicate embeddings for each image generated per prompt, then
        # prepend the matching negative embeddings when CFG is active.
        single_image_embeds = torch.cat([single_image_embeds] * num_images_per_prompt, dim=0)
        if do_classifier_free_guidance:
            single_negative_image_embeds = torch.cat([negative_image_embeds[i]] * num_images_per_prompt, dim=0)
            single_image_embeds = torch.cat([single_negative_image_embeds, single_image_embeds], dim=0)

        single_image_embeds = single_image_embeds.to(device=device)
        ip_adapter_image_embeds.append(single_image_embeds)

    return ip_adapter_image_embeds
|
457 |
+
|
458 |
+
# Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_extra_step_kwargs
|
459 |
+
def prepare_extra_step_kwargs(self, generator, eta):
    # Build the optional kwargs for `scheduler.step`. Not every scheduler
    # accepts `eta` (DDIM-only; the η of https://arxiv.org/abs/2010.02502,
    # expected in [0, 1]) or `generator`, so each is forwarded only when the
    # step signature declares it.
    step_params = inspect.signature(self.scheduler.step).parameters

    extra_step_kwargs = {}
    if "eta" in step_params:
        extra_step_kwargs["eta"] = eta
    if "generator" in step_params:
        extra_step_kwargs["generator"] = generator
    return extra_step_kwargs
|
475 |
+
|
476 |
+
# Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_latents
|
477 |
+
def prepare_latents(self, batch_size, num_channels_latents, height, width, dtype, device, generator, latents=None):
    # Sample (or adopt) the initial latent tensor and scale it to the
    # scheduler's starting noise level.
    latent_height = int(height) // self.vae_scale_factor
    latent_width = int(width) // self.vae_scale_factor
    shape = (batch_size, num_channels_latents, latent_height, latent_width)

    # A list of generators must map one-to-one onto the effective batch.
    if isinstance(generator, list) and len(generator) != batch_size:
        raise ValueError(
            f"You have passed a list of generators of length {len(generator)}, but requested an effective batch"
            f" size of {batch_size}. Make sure the batch size matches the length of the generators."
        )

    if latents is None:
        latents = randn_tensor(shape, generator=generator, device=device, dtype=dtype)
    else:
        latents = latents.to(device)

    # scale the initial noise by the standard deviation required by the scheduler
    return latents * self.scheduler.init_noise_sigma
|
498 |
+
|
499 |
+
def _get_add_time_ids(
    self, original_size, crops_coords_top_left, target_size, dtype, text_encoder_projection_dim=None
):
    # Assemble SDXL's micro-conditioning vector: original size, crop origin
    # and target size concatenated in that order, as a (1, 6) tensor.
    add_time_ids = list(original_size + crops_coords_top_left + target_size)

    # Sanity-check that the UNet's additional-embedding layer was built for
    # exactly this many conditioning values plus the pooled text embedding.
    passed_add_embed_dim = (
        self.unet.config.addition_time_embed_dim * len(add_time_ids) + text_encoder_projection_dim
    )
    expected_add_embed_dim = self.unet.add_embedding.linear_1.in_features

    if expected_add_embed_dim != passed_add_embed_dim:
        raise ValueError(
            f"Model expects an added time embedding vector of length {expected_add_embed_dim}, but a vector of {passed_add_embed_dim} was created. The model has an incorrect config. Please check `unet.config.time_embedding_type` and `text_encoder_2.config.projection_dim`."
        )

    add_time_ids = torch.tensor([add_time_ids], dtype=dtype)
    return add_time_ids
|
516 |
+
|
517 |
+
def upcast_vae(self):
    # Move the VAE to float32 to avoid fp16 overflow during decoding, while
    # keeping memory-heavy decoder pieces in the original dtype when the
    # attention implementation tolerates it.
    dtype = self.vae.dtype
    self.vae.to(dtype=torch.float32)
    use_torch_2_0_or_xformers = isinstance(
        self.vae.decoder.mid_block.attentions[0].processor,
        (
            AttnProcessor2_0,
            XFormersAttnProcessor,
            FusedAttnProcessor2_0,
        ),
    )
    # if xformers or torch_2_0 is used attention block does not need
    # to be in float32 which can save lots of memory
    if use_torch_2_0_or_xformers:
        self.vae.post_quant_conv.to(dtype)
        self.vae.decoder.conv_in.to(dtype)
        self.vae.decoder.mid_block.to(dtype)
|
534 |
+
|
535 |
+
# Copied from diffusers.pipelines.latent_consistency_models.pipeline_latent_consistency_text2img.LatentConsistencyModelPipeline.get_guidance_scale_embedding
|
536 |
+
def get_guidance_scale_embedding(
    self, w: torch.Tensor, embedding_dim: int = 512, dtype: torch.dtype = torch.float32
) -> torch.Tensor:
    """
    Sinusoidal embedding of the guidance scale, as in
    https://github.com/google-research/vdm/blob/dc27b98a554f65cdc654b800da5aa1846545d41b/model_vdm.py#L298

    Args:
        w (`torch.Tensor`):
            1-D tensor of guidance-scale values, one per batch element.
        embedding_dim (`int`, *optional*, defaults to 512):
            Width of the produced embedding.
        dtype (`torch.dtype`, *optional*, defaults to `torch.float32`):
            Data type of the produced embedding.

    Returns:
        `torch.Tensor`: Embedding vectors with shape `(len(w), embedding_dim)`.
    """
    assert len(w.shape) == 1
    scaled = w * 1000.0

    half_dim = embedding_dim // 2
    freq_step = torch.log(torch.tensor(10000.0)) / (half_dim - 1)
    freqs = torch.exp(-freq_step * torch.arange(half_dim, dtype=dtype))
    angles = scaled.to(dtype)[:, None] * freqs[None, :]
    emb = torch.cat([torch.sin(angles), torch.cos(angles)], dim=1)
    if embedding_dim % 2 == 1:  # odd width: pad a single zero column
        emb = torch.nn.functional.pad(emb, (0, 1))
    assert emb.shape == (w.shape[0], embedding_dim)
    return emb
|
565 |
+
|
566 |
+
@property
def guidance_scale(self):
    # Classifier-free guidance weight captured at the start of `__call__`.
    return self._guidance_scale

@property
def guidance_rescale(self):
    # Noise-rescale factor (section 3.4 of https://arxiv.org/pdf/2305.08891.pdf).
    return self._guidance_rescale

@property
def clip_skip(self):
    # Value forwarded to `encode_prompt` during the current `__call__`.
    return self._clip_skip

# here `guidance_scale` is defined analog to the guidance weight `w` of equation (2)
# of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1`
# corresponds to doing no classifier free guidance.
@property
def do_classifier_free_guidance(self):
    # CFG is disabled for guidance-distilled UNets (time_cond_proj_dim set).
    return self._guidance_scale > 1 and self.unet.config.time_cond_proj_dim is None

@property
def cross_attention_kwargs(self):
    # Extra kwargs forwarded to the attention processors by the UNet call.
    return self._cross_attention_kwargs

@property
def denoising_end(self):
    # Optional fraction (0, 1) at which denoising stops early (step 8.1 of `__call__`).
    return self._denoising_end

@property
def num_timesteps(self):
    # Number of timesteps used by the most recent `__call__`.
    return self._num_timesteps

@property
def interrupt(self):
    # When True, remaining denoising iterations are skipped.
    return self._interrupt
|
600 |
+
|
601 |
+
@torch.no_grad()
def __call__(
    self,
    prompt: Union[str, List[str]] = None,
    prompt_2: Optional[Union[str, List[str]]] = None,
    height: Optional[int] = None,
    width: Optional[int] = None,
    num_inference_steps: int = 50,
    timesteps: List[int] = None,
    sigmas: List[float] = None,
    denoising_end: Optional[float] = None,
    guidance_scale: float = 5.0,
    negative_prompt: Optional[Union[str, List[str]]] = None,
    negative_prompt_2: Optional[Union[str, List[str]]] = None,
    num_images_per_prompt: Optional[int] = 1,
    eta: float = 0.0,
    generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,
    latents: Optional[torch.Tensor] = None,
    prompt_embeds: Optional[torch.Tensor] = None,
    negative_prompt_embeds: Optional[torch.Tensor] = None,
    pooled_prompt_embeds: Optional[torch.Tensor] = None,
    negative_pooled_prompt_embeds: Optional[torch.Tensor] = None,
    ip_adapter_image: Optional[PipelineImageInput] = None,
    ip_adapter_image_embeds: Optional[List[torch.Tensor]] = None,
    output_type: Optional[str] = "pil",
    return_dict: bool = True,
    cross_attention_kwargs: Optional[Dict[str, Any]] = None,
    guidance_rescale: float = 0.0,
    end_cfg: float = 0.9,
    original_size: Optional[Tuple[int, int]] = None,
    crops_coords_top_left: Tuple[int, int] = (0, 0),
    target_size: Optional[Tuple[int, int]] = None,
    negative_original_size: Optional[Tuple[int, int]] = None,
    negative_crops_coords_top_left: Tuple[int, int] = (0, 0),
    negative_target_size: Optional[Tuple[int, int]] = None,
    clip_skip: Optional[int] = None,
    callback_on_step_end: Optional[
        Union[Callable[[int, int, Dict], None], PipelineCallback, MultiPipelineCallbacks]
    ] = None,
    callback_on_step_end_tensor_inputs: List[str] = ["latents"],
    **kwargs,
):
    # SDXL text-to-image generation. This is a trimmed variant of diffusers'
    # StableDiffusionXLPipeline.__call__; notable deviations visible here:
    #   * there is no `check_inputs` validation pass, so callers are trusted;
    #   * `end_cfg`: once i / num_inference_steps exceeds this fraction,
    #     classifier-free guidance is switched off for the remaining steps and
    #     only the conditional halves of the embeddings are kept (scaled by
    #     fixed factors 1.5 / 1.25), halving the UNet batch for those steps.
    callback = kwargs.pop("callback", None)
    callback_steps = kwargs.pop("callback_steps", None)

    if callback is not None:
        deprecate(
            "callback",
            "1.0.0",
            "Passing `callback` as an input argument to `__call__` is deprecated, consider use `callback_on_step_end`",
        )
    if callback_steps is not None:
        deprecate(
            "callback_steps",
            "1.0.0",
            "Passing `callback_steps` as an input argument to `__call__` is deprecated, consider use `callback_on_step_end`",
        )

    if isinstance(callback_on_step_end, (PipelineCallback, MultiPipelineCallbacks)):
        callback_on_step_end_tensor_inputs = callback_on_step_end.tensor_inputs

    # 0. Default height and width to unet
    height = height or self.default_sample_size * self.vae_scale_factor
    width = width or self.default_sample_size * self.vae_scale_factor

    original_size = original_size or (height, width)
    target_size = target_size or (height, width)

    # 1. Stash call parameters on the instance (exposed via the properties above).
    self._guidance_scale = guidance_scale
    self._guidance_rescale = guidance_rescale
    self._clip_skip = clip_skip
    self._cross_attention_kwargs = cross_attention_kwargs
    self._denoising_end = denoising_end
    self._interrupt = False

    # 2. Define call parameters
    if prompt is not None and isinstance(prompt, str):
        batch_size = 1
    elif prompt is not None and isinstance(prompt, list):
        batch_size = len(prompt)
    else:
        batch_size = prompt_embeds.shape[0]

    device = self._execution_device

    # 3. Encode input prompt
    lora_scale = (
        self.cross_attention_kwargs.get("scale", None) if self.cross_attention_kwargs is not None else None
    )

    (
        prompt_embeds,
        negative_prompt_embeds,
        pooled_prompt_embeds,
        negative_pooled_prompt_embeds,
    ) = self.encode_prompt(
        prompt=prompt,
        prompt_2=prompt_2,
        device=device,
        num_images_per_prompt=num_images_per_prompt,
        do_classifier_free_guidance=self.do_classifier_free_guidance,
        negative_prompt=negative_prompt,
        negative_prompt_2=negative_prompt_2,
        prompt_embeds=prompt_embeds,
        negative_prompt_embeds=negative_prompt_embeds,
        pooled_prompt_embeds=pooled_prompt_embeds,
        negative_pooled_prompt_embeds=negative_pooled_prompt_embeds,
        lora_scale=lora_scale,
        clip_skip=self.clip_skip,
    )

    # 4. Prepare timesteps
    timesteps, num_inference_steps = retrieve_timesteps(
        self.scheduler, num_inference_steps, device, timesteps, sigmas
    )

    # 5. Prepare latent variables
    num_channels_latents = self.unet.config.in_channels
    latents = self.prepare_latents(
        batch_size * num_images_per_prompt,
        num_channels_latents,
        height,
        width,
        prompt_embeds.dtype,
        device,
        generator,
        latents,
    )

    # 6. Prepare extra step kwargs. TODO: Logic should ideally just be moved out of the pipeline
    extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta)

    # 7. Prepare added time ids & embeddings
    add_text_embeds = pooled_prompt_embeds
    if self.text_encoder_2 is None:
        text_encoder_projection_dim = int(pooled_prompt_embeds.shape[-1])
    else:
        text_encoder_projection_dim = self.text_encoder_2.config.projection_dim

    add_time_ids = self._get_add_time_ids(
        original_size,
        crops_coords_top_left,
        target_size,
        dtype=prompt_embeds.dtype,
        text_encoder_projection_dim=text_encoder_projection_dim,
    )
    if negative_original_size is not None and negative_target_size is not None:
        negative_add_time_ids = self._get_add_time_ids(
            negative_original_size,
            negative_crops_coords_top_left,
            negative_target_size,
            dtype=prompt_embeds.dtype,
            text_encoder_projection_dim=text_encoder_projection_dim,
        )
    else:
        negative_add_time_ids = add_time_ids

    # With CFG the negative halves are prepended so one UNet call covers both.
    if self.do_classifier_free_guidance:
        prompt_embeds = torch.cat([negative_prompt_embeds, prompt_embeds], dim=0)
        add_text_embeds = torch.cat([negative_pooled_prompt_embeds, add_text_embeds], dim=0)
        add_time_ids = torch.cat([negative_add_time_ids, add_time_ids], dim=0)

    prompt_embeds = prompt_embeds.to(device)
    add_text_embeds = add_text_embeds.to(device)
    add_time_ids = add_time_ids.to(device).repeat(batch_size * num_images_per_prompt, 1)

    if ip_adapter_image is not None or ip_adapter_image_embeds is not None:
        image_embeds = self.prepare_ip_adapter_image_embeds(
            ip_adapter_image,
            ip_adapter_image_embeds,
            device,
            batch_size * num_images_per_prompt,
            self.do_classifier_free_guidance,
        )

    # 8. Denoising loop
    num_warmup_steps = max(len(timesteps) - num_inference_steps * self.scheduler.order, 0)

    # 8.1 Apply denoising_end
    if (
        self.denoising_end is not None
        and isinstance(self.denoising_end, float)
        and self.denoising_end > 0
        and self.denoising_end < 1
    ):
        discrete_timestep_cutoff = int(
            round(
                self.scheduler.config.num_train_timesteps
                - (self.denoising_end * self.scheduler.config.num_train_timesteps)
            )
        )
        num_inference_steps = len(list(filter(lambda ts: ts >= discrete_timestep_cutoff, timesteps)))
        timesteps = timesteps[:num_inference_steps]

    # 9. Optionally get Guidance Scale Embedding
    timestep_cond = None
    if self.unet.config.time_cond_proj_dim is not None:
        guidance_scale_tensor = torch.tensor(self.guidance_scale - 1).repeat(batch_size * num_images_per_prompt)
        timestep_cond = self.get_guidance_scale_embedding(
            guidance_scale_tensor, embedding_dim=self.unet.config.time_cond_proj_dim
        ).to(device=device, dtype=latents.dtype)

    self._num_timesteps = len(timesteps)
    with self.progress_bar(total=num_inference_steps) as progress_bar:
        # Local flag: once `end_cfg` disables CFG it stays off for the rest of
        # the loop, unlike the instance-level property which never changes.
        do_classifier_free_guidance = self.do_classifier_free_guidance
        for i, t in enumerate(timesteps):
            if self.interrupt:
                continue
            if end_cfg is not None and i / num_inference_steps > end_cfg and do_classifier_free_guidance:
                do_classifier_free_guidance = False
                # Keep only the conditional halves; the constant factors
                # (1.5 / 1.25) are tuned scalings specific to this submission.
                prompt_embeds = 1.5*torch.chunk(prompt_embeds, 2, dim=0)[-1]
                add_text_embeds = 1.5*torch.chunk(add_text_embeds, 2, dim=0)[-1]
                add_time_ids = 1.25*torch.chunk(add_time_ids, 2, dim=0)[-1]
            # expand the latents if we are doing classifier free guidance
            latent_model_input = torch.cat([latents] * 2) if do_classifier_free_guidance else latents

            latent_model_input = self.scheduler.scale_model_input(latent_model_input, t)

            # predict the noise residual
            added_cond_kwargs = {"text_embeds": add_text_embeds, "time_ids": add_time_ids}
            if ip_adapter_image is not None or ip_adapter_image_embeds is not None:
                added_cond_kwargs["image_embeds"] = image_embeds
            noise_pred = self.unet(
                latent_model_input,
                t,
                encoder_hidden_states=prompt_embeds,
                timestep_cond=timestep_cond,
                cross_attention_kwargs=self.cross_attention_kwargs,
                added_cond_kwargs=added_cond_kwargs,
                return_dict=False,
            )[0]

            # perform guidance
            if do_classifier_free_guidance:
                noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)
                noise_pred = noise_pred_uncond + self.guidance_scale * (noise_pred_text - noise_pred_uncond)

            if do_classifier_free_guidance and self.guidance_rescale > 0.0:
                # Based on 3.4. in https://arxiv.org/pdf/2305.08891.pdf
                noise_pred = rescale_noise_cfg(noise_pred, noise_pred_text, guidance_rescale=self.guidance_rescale)

            # compute the previous noisy sample x_t -> x_t-1
            latents_dtype = latents.dtype
            latents = self.scheduler.step(noise_pred, t, latents, **extra_step_kwargs, return_dict=False)[0]
            if latents.dtype != latents_dtype:
                if torch.backends.mps.is_available():
                    # some platforms (eg. apple mps) misbehave due to a pytorch bug: https://github.com/pytorch/pytorch/pull/99272
                    latents = latents.to(latents_dtype)

            if callback_on_step_end is not None:
                callback_kwargs = {}
                # NOTE(review): relies on `locals()` lookup, so every name in
                # `callback_on_step_end_tensor_inputs` must be a local here.
                for k in callback_on_step_end_tensor_inputs:
                    callback_kwargs[k] = locals()[k]
                callback_outputs = callback_on_step_end(self, i, t, callback_kwargs)

                latents = callback_outputs.pop("latents", latents)
                prompt_embeds = callback_outputs.pop("prompt_embeds", prompt_embeds)
                negative_prompt_embeds = callback_outputs.pop("negative_prompt_embeds", negative_prompt_embeds)
                add_text_embeds = callback_outputs.pop("add_text_embeds", add_text_embeds)
                negative_pooled_prompt_embeds = callback_outputs.pop(
                    "negative_pooled_prompt_embeds", negative_pooled_prompt_embeds
                )
                add_time_ids = callback_outputs.pop("add_time_ids", add_time_ids)
                negative_add_time_ids = callback_outputs.pop("negative_add_time_ids", negative_add_time_ids)

            # call the callback, if provided
            if i == len(timesteps) - 1 or ((i + 1) > num_warmup_steps and (i + 1) % self.scheduler.order == 0):
                progress_bar.update()
                if callback is not None and i % callback_steps == 0:
                    step_idx = i // getattr(self.scheduler, "order", 1)
                    callback(step_idx, t, latents)

            if XLA_AVAILABLE:
                xm.mark_step()

    if not output_type == "latent":
        # make sure the VAE is in float32 mode, as it overflows in float16
        needs_upcasting = self.vae.dtype == torch.float16 and self.vae.config.force_upcast

        if needs_upcasting:
            self.upcast_vae()
            latents = latents.to(next(iter(self.vae.post_quant_conv.parameters())).dtype)
        elif latents.dtype != self.vae.dtype:
            if torch.backends.mps.is_available():
                # some platforms (eg. apple mps) misbehave due to a pytorch bug: https://github.com/pytorch/pytorch/pull/99272
                self.vae = self.vae.to(latents.dtype)

        # unscale/denormalize the latents
        # denormalize with the mean and std if available and not None
        has_latents_mean = hasattr(self.vae.config, "latents_mean") and self.vae.config.latents_mean is not None
        has_latents_std = hasattr(self.vae.config, "latents_std") and self.vae.config.latents_std is not None
        if has_latents_mean and has_latents_std:
            latents_mean = (
                torch.tensor(self.vae.config.latents_mean).view(1, 4, 1, 1).to(latents.device, latents.dtype)
            )
            latents_std = (
                torch.tensor(self.vae.config.latents_std).view(1, 4, 1, 1).to(latents.device, latents.dtype)
            )
            latents = latents * latents_std / self.vae.config.scaling_factor + latents_mean
        else:
            latents = latents / self.vae.config.scaling_factor

        image = self.vae.decode(latents, return_dict=False)[0]

        # cast back to fp16 if needed
        if needs_upcasting:
            self.vae.to(dtype=torch.float16)
    else:
        image = latents

    if not output_type == "latent":
        # apply watermark if available
        if self.watermark is not None:
            image = self.watermark.apply_watermark(image)

        image = self.image_processor.postprocess(image, output_type=output_type)

    # Offload all models
    self.maybe_free_model_hooks()

    if not return_dict:
        return (image,)

    return StableDiffusionXLPipelineOutput(images=image)
|
925 |
+
|
926 |
+
def load_pipeline(pipeline=None) -> StableDiffusionXLPipeline:
    """Load and prepare the pipeline."""
    # Build the SDXL pipeline in fp16 on CUDA unless one was supplied.
    if not pipeline:
        pipeline = StableDiffusionXLPipeline_new.from_pretrained(
            "stablediffusionapi/newdream-sdxl-20",
            torch_dtype=torch.float16,
        ).to("cuda")

    # Wrap a DDIM scheduler (built from the existing scheduler's config) so
    # extra loss bookkeeping can hook into each step, then compile with
    # onediff and restore a previously compiled graph from the local HF cache.
    # NOTE(review): the snapshot path is machine-specific — confirm it exists
    # in the deployment image before relying on it.
    pipeline.scheduler = SchedulerWrapper(DDIMScheduler.from_config(pipeline.scheduler.config))
    pipeline = compile_pipe(pipeline)
    load_pipe(pipeline, dir="/home/sandbox/.cache/huggingface/hub/models--RobertML--cached-pipe-03/snapshots/7fde15e48c3c8035de8ae14843673fb30520e8aa")

    # Warm-up runs
    for _ in range(5):
        pipeline(
            prompt="gynocratic, phrenoplegy, senegin, unsuspicion, coccochromatic, unbrothered, conveyer, Anniellidae",
            num_inference_steps=20
        )
    pipeline.scheduler.prepare_loss()
    return pipeline
|
946 |
+
|
947 |
+
def infer(request: TextToImageRequest, pipeline: StableDiffusionXLPipeline) -> Image:
    """Generate an image for the given text-to-image request.

    Args:
        request: Request carrying prompt, negative prompt, target size, and an
            optional seed.
        pipeline: A pipeline prepared by `load_pipeline`.

    Returns:
        The first generated PIL image.
    """
    # Bug fix: the original used `if request.seed`, which treats an explicit
    # seed of 0 as "no seed" and silently loses determinism. Compare against
    # None so every provided seed (including 0) is honored.
    generator = Generator(pipeline.device).manual_seed(request.seed) if request.seed is not None else None

    image = pipeline(
        prompt=request.prompt,
        negative_prompt=request.negative_prompt,
        width=request.width,
        height=request.height,
        generator=generator,
        num_inference_steps=15,
    ).images[0]

    return image
|
961 |
+
|
962 |
+
|
src/scheduler_config.json
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"_by": "RobertML"
|
3 |
+
}
|
uv.lock
ADDED
@@ -0,0 +1,935 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
version = 1
|
2 |
+
requires-python = "==3.10.*"
|
3 |
+
|
4 |
+
[[package]]
|
5 |
+
name = "accelerate"
|
6 |
+
version = "0.31.0"
|
7 |
+
source = { registry = "https://pypi.org/simple" }
|
8 |
+
dependencies = [
|
9 |
+
{ name = "huggingface-hub" },
|
10 |
+
{ name = "numpy" },
|
11 |
+
{ name = "packaging" },
|
12 |
+
{ name = "psutil" },
|
13 |
+
{ name = "pyyaml" },
|
14 |
+
{ name = "safetensors" },
|
15 |
+
{ name = "torch" },
|
16 |
+
]
|
17 |
+
sdist = { url = "https://files.pythonhosted.org/packages/89/e2/94937840162a87baa6b56c82247bbb06690b290ad3da0f083192d7b539a9/accelerate-0.31.0.tar.gz", hash = "sha256:b5199865b26106ccf9205acacbe8e4b3b428ad585e7c472d6a46f6fb75b6c176", size = 307110 }
|
18 |
+
wheels = [
|
19 |
+
{ url = "https://files.pythonhosted.org/packages/f0/62/9ebaf1fdd3d3c737a8814f9ae409d4ac04bc93b26a46a7dab456bb7e16f8/accelerate-0.31.0-py3-none-any.whl", hash = "sha256:0fc608dc49584f64d04711a39711d73cb0ad4ef3d21cddee7ef2216e29471144", size = 309428 },
|
20 |
+
]
|
21 |
+
|
22 |
+
[[package]]
|
23 |
+
name = "annotated-types"
|
24 |
+
version = "0.7.0"
|
25 |
+
source = { registry = "https://pypi.org/simple" }
|
26 |
+
sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081 }
|
27 |
+
wheels = [
|
28 |
+
{ url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643 },
|
29 |
+
]
|
30 |
+
|
31 |
+
[[package]]
|
32 |
+
name = "antlr4-python3-runtime"
|
33 |
+
version = "4.9.3"
|
34 |
+
source = { registry = "https://pypi.org/simple" }
|
35 |
+
sdist = { url = "https://files.pythonhosted.org/packages/3e/38/7859ff46355f76f8d19459005ca000b6e7012f2f1ca597746cbcd1fbfe5e/antlr4-python3-runtime-4.9.3.tar.gz", hash = "sha256:f224469b4168294902bb1efa80a8bf7855f24c99aef99cbefc1bcd3cce77881b", size = 117034 }
|
36 |
+
|
37 |
+
[[package]]
|
38 |
+
name = "certifi"
|
39 |
+
version = "2024.8.30"
|
40 |
+
source = { registry = "https://pypi.org/simple" }
|
41 |
+
sdist = { url = "https://files.pythonhosted.org/packages/b0/ee/9b19140fe824b367c04c5e1b369942dd754c4c5462d5674002f75c4dedc1/certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9", size = 168507 }
|
42 |
+
wheels = [
|
43 |
+
{ url = "https://files.pythonhosted.org/packages/12/90/3c9ff0512038035f59d279fddeb79f5f1eccd8859f06d6163c58798b9487/certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8", size = 167321 },
|
44 |
+
]
|
45 |
+
|
46 |
+
[[package]]
|
47 |
+
name = "charset-normalizer"
|
48 |
+
version = "3.4.0"
|
49 |
+
source = { registry = "https://pypi.org/simple" }
|
50 |
+
sdist = { url = "https://files.pythonhosted.org/packages/f2/4f/e1808dc01273379acc506d18f1504eb2d299bd4131743b9fc54d7be4df1e/charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e", size = 106620 }
|
51 |
+
wheels = [
|
52 |
+
{ url = "https://files.pythonhosted.org/packages/69/8b/825cc84cf13a28bfbcba7c416ec22bf85a9584971be15b21dd8300c65b7f/charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6", size = 196363 },
|
53 |
+
{ url = "https://files.pythonhosted.org/packages/23/81/d7eef6a99e42c77f444fdd7bc894b0ceca6c3a95c51239e74a722039521c/charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b", size = 125639 },
|
54 |
+
{ url = "https://files.pythonhosted.org/packages/21/67/b4564d81f48042f520c948abac7079356e94b30cb8ffb22e747532cf469d/charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99", size = 120451 },
|
55 |
+
{ url = "https://files.pythonhosted.org/packages/c2/72/12a7f0943dd71fb5b4e7b55c41327ac0a1663046a868ee4d0d8e9c369b85/charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca", size = 140041 },
|
56 |
+
{ url = "https://files.pythonhosted.org/packages/67/56/fa28c2c3e31217c4c52158537a2cf5d98a6c1e89d31faf476c89391cd16b/charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d", size = 150333 },
|
57 |
+
{ url = "https://files.pythonhosted.org/packages/f9/d2/466a9be1f32d89eb1554cf84073a5ed9262047acee1ab39cbaefc19635d2/charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7", size = 142921 },
|
58 |
+
{ url = "https://files.pythonhosted.org/packages/f8/01/344ec40cf5d85c1da3c1f57566c59e0c9b56bcc5566c08804a95a6cc8257/charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3", size = 144785 },
|
59 |
+
{ url = "https://files.pythonhosted.org/packages/73/8b/2102692cb6d7e9f03b9a33a710e0164cadfce312872e3efc7cfe22ed26b4/charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907", size = 146631 },
|
60 |
+
{ url = "https://files.pythonhosted.org/packages/d8/96/cc2c1b5d994119ce9f088a9a0c3ebd489d360a2eb058e2c8049f27092847/charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b", size = 140867 },
|
61 |
+
{ url = "https://files.pythonhosted.org/packages/c9/27/cde291783715b8ec30a61c810d0120411844bc4c23b50189b81188b273db/charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912", size = 149273 },
|
62 |
+
{ url = "https://files.pythonhosted.org/packages/3a/a4/8633b0fc1a2d1834d5393dafecce4a1cc56727bfd82b4dc18fc92f0d3cc3/charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95", size = 152437 },
|
63 |
+
{ url = "https://files.pythonhosted.org/packages/64/ea/69af161062166b5975ccbb0961fd2384853190c70786f288684490913bf5/charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e", size = 150087 },
|
64 |
+
{ url = "https://files.pythonhosted.org/packages/3b/fd/e60a9d9fd967f4ad5a92810138192f825d77b4fa2a557990fd575a47695b/charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe", size = 145142 },
|
65 |
+
{ url = "https://files.pythonhosted.org/packages/6d/02/8cb0988a1e49ac9ce2eed1e07b77ff118f2923e9ebd0ede41ba85f2dcb04/charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc", size = 94701 },
|
66 |
+
{ url = "https://files.pythonhosted.org/packages/d6/20/f1d4670a8a723c46be695dff449d86d6092916f9e99c53051954ee33a1bc/charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749", size = 102191 },
|
67 |
+
{ url = "https://files.pythonhosted.org/packages/bf/9b/08c0432272d77b04803958a4598a51e2a4b51c06640af8b8f0f908c18bf2/charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079", size = 49446 },
|
68 |
+
]
|
69 |
+
|
70 |
+
[[package]]
|
71 |
+
name = "colorama"
|
72 |
+
version = "0.4.6"
|
73 |
+
source = { registry = "https://pypi.org/simple" }
|
74 |
+
sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 }
|
75 |
+
wheels = [
|
76 |
+
{ url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 },
|
77 |
+
]
|
78 |
+
|
79 |
+
[[package]]
|
80 |
+
name = "diffusers"
|
81 |
+
version = "0.28.2"
|
82 |
+
source = { registry = "https://pypi.org/simple" }
|
83 |
+
dependencies = [
|
84 |
+
{ name = "filelock" },
|
85 |
+
{ name = "huggingface-hub" },
|
86 |
+
{ name = "importlib-metadata" },
|
87 |
+
{ name = "numpy" },
|
88 |
+
{ name = "pillow" },
|
89 |
+
{ name = "regex" },
|
90 |
+
{ name = "requests" },
|
91 |
+
{ name = "safetensors" },
|
92 |
+
]
|
93 |
+
sdist = { url = "https://files.pythonhosted.org/packages/45/aa/f40919df7672c278b10509bb0f648a2f4b12ebb82174c0e51a25cbf76421/diffusers-0.28.2.tar.gz", hash = "sha256:0e1e881c821bd6dc2d88252a228a2532ae3577eb4f8b351c9c4abb184aee9b3f", size = 1704805 }
|
94 |
+
wheels = [
|
95 |
+
{ url = "https://files.pythonhosted.org/packages/8c/1d/decefc6459a40df26e17631e42fb7c3814f8424037a26d4bad187800179e/diffusers-0.28.2-py3-none-any.whl", hash = "sha256:9713b7666510723ad6b7e8f4702664ef20c59e4bc241d26c7ca086be3041082e", size = 2184296 },
|
96 |
+
]
|
97 |
+
|
98 |
+
[[package]]
|
99 |
+
name = "edge-maxxing-4090-newdream"
|
100 |
+
version = "7"
|
101 |
+
source = { editable = "." }
|
102 |
+
dependencies = [
|
103 |
+
{ name = "accelerate" },
|
104 |
+
{ name = "diffusers" },
|
105 |
+
{ name = "edge-maxxing-pipelines" },
|
106 |
+
{ name = "huggingface-hub" },
|
107 |
+
{ name = "numpy" },
|
108 |
+
{ name = "omegaconf" },
|
109 |
+
{ name = "onediff" },
|
110 |
+
{ name = "onediffx" },
|
111 |
+
{ name = "oneflow" },
|
112 |
+
{ name = "setuptools" },
|
113 |
+
{ name = "torch" },
|
114 |
+
{ name = "torchvision" },
|
115 |
+
{ name = "transformers" },
|
116 |
+
{ name = "triton" },
|
117 |
+
{ name = "xformers" },
|
118 |
+
]
|
119 |
+
|
120 |
+
[package.metadata]
|
121 |
+
requires-dist = [
|
122 |
+
{ name = "accelerate", specifier = "==0.31.0" },
|
123 |
+
{ name = "diffusers", specifier = "==0.28.2" },
|
124 |
+
{ name = "edge-maxxing-pipelines", git = "https://github.com/womboai/edge-maxxing?subdirectory=pipelines&rev=e713a4f52ca3ea8c1d57ff63c1c08470f4fd0a60#e713a4f52ca3ea8c1d57ff63c1c08470f4fd0a60" },
|
125 |
+
{ name = "huggingface-hub", specifier = "==0.25.2" },
|
126 |
+
{ name = "numpy", specifier = "==1.26.4" },
|
127 |
+
{ name = "omegaconf", specifier = "==2.3.0" },
|
128 |
+
{ name = "onediff", specifier = "==1.2.0" },
|
129 |
+
{ name = "onediffx", specifier = "==1.2.0" },
|
130 |
+
{ name = "oneflow", url = "https://github.com/siliconflow/oneflow_releases/releases/download/community_cu118/oneflow-0.9.1.dev20240802%2Bcu118-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl" },
|
131 |
+
{ name = "setuptools", specifier = ">=75.2.0" },
|
132 |
+
{ name = "torch", specifier = "==2.2.2" },
|
133 |
+
{ name = "torchvision", specifier = "==0.17.2" },
|
134 |
+
{ name = "transformers", specifier = "==4.41.2" },
|
135 |
+
{ name = "triton", specifier = "==2.2.0" },
|
136 |
+
{ name = "xformers", specifier = "==0.0.25.post1" },
|
137 |
+
]
|
138 |
+
|
139 |
+
[[package]]
|
140 |
+
name = "edge-maxxing-pipelines"
|
141 |
+
version = "1.0.0"
|
142 |
+
source = { git = "https://github.com/womboai/edge-maxxing?subdirectory=pipelines&rev=e713a4f52ca3ea8c1d57ff63c1c08470f4fd0a60#e713a4f52ca3ea8c1d57ff63c1c08470f4fd0a60" }
|
143 |
+
dependencies = [
|
144 |
+
{ name = "pydantic" },
|
145 |
+
]
|
146 |
+
|
147 |
+
[[package]]
|
148 |
+
name = "filelock"
|
149 |
+
version = "3.16.1"
|
150 |
+
source = { registry = "https://pypi.org/simple" }
|
151 |
+
sdist = { url = "https://files.pythonhosted.org/packages/9d/db/3ef5bb276dae18d6ec2124224403d1d67bccdbefc17af4cc8f553e341ab1/filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435", size = 18037 }
|
152 |
+
wheels = [
|
153 |
+
{ url = "https://files.pythonhosted.org/packages/b9/f8/feced7779d755758a52d1f6635d990b8d98dc0a29fa568bbe0625f18fdf3/filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0", size = 16163 },
|
154 |
+
]
|
155 |
+
|
156 |
+
[[package]]
|
157 |
+
name = "fsspec"
|
158 |
+
version = "2024.10.0"
|
159 |
+
source = { registry = "https://pypi.org/simple" }
|
160 |
+
sdist = { url = "https://files.pythonhosted.org/packages/a0/52/f16a068ebadae42526484c31f4398e62962504e5724a8ba5dc3409483df2/fsspec-2024.10.0.tar.gz", hash = "sha256:eda2d8a4116d4f2429db8550f2457da57279247dd930bb12f821b58391359493", size = 286853 }
|
161 |
+
wheels = [
|
162 |
+
{ url = "https://files.pythonhosted.org/packages/c6/b2/454d6e7f0158951d8a78c2e1eb4f69ae81beb8dca5fee9809c6c99e9d0d0/fsspec-2024.10.0-py3-none-any.whl", hash = "sha256:03b9a6785766a4de40368b88906366755e2819e758b83705c88cd7cb5fe81871", size = 179641 },
|
163 |
+
]
|
164 |
+
|
165 |
+
[[package]]
|
166 |
+
name = "huggingface-hub"
|
167 |
+
version = "0.25.2"
|
168 |
+
source = { registry = "https://pypi.org/simple" }
|
169 |
+
dependencies = [
|
170 |
+
{ name = "filelock" },
|
171 |
+
{ name = "fsspec" },
|
172 |
+
{ name = "packaging" },
|
173 |
+
{ name = "pyyaml" },
|
174 |
+
{ name = "requests" },
|
175 |
+
{ name = "tqdm" },
|
176 |
+
{ name = "typing-extensions" },
|
177 |
+
]
|
178 |
+
sdist = { url = "https://files.pythonhosted.org/packages/df/fd/5f81bae67096c5ab50d29a0230b8374f0245916cca192f8ee2fada51f4f6/huggingface_hub-0.25.2.tar.gz", hash = "sha256:a1014ea111a5f40ccd23f7f7ba8ac46e20fa3b658ced1f86a00c75c06ec6423c", size = 365806 }
|
179 |
+
wheels = [
|
180 |
+
{ url = "https://files.pythonhosted.org/packages/64/09/a535946bf2dc88e61341f39dc507530411bb3ea4eac493e5ec833e8f35bd/huggingface_hub-0.25.2-py3-none-any.whl", hash = "sha256:1897caf88ce7f97fe0110603d8f66ac264e3ba6accdf30cd66cc0fed5282ad25", size = 436575 },
|
181 |
+
]
|
182 |
+
|
183 |
+
[[package]]
|
184 |
+
name = "idna"
|
185 |
+
version = "3.10"
|
186 |
+
source = { registry = "https://pypi.org/simple" }
|
187 |
+
sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490 }
|
188 |
+
wheels = [
|
189 |
+
{ url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 },
|
190 |
+
]
|
191 |
+
|
192 |
+
[[package]]
|
193 |
+
name = "importlib-metadata"
|
194 |
+
version = "8.5.0"
|
195 |
+
source = { registry = "https://pypi.org/simple" }
|
196 |
+
dependencies = [
|
197 |
+
{ name = "zipp" },
|
198 |
+
]
|
199 |
+
sdist = { url = "https://files.pythonhosted.org/packages/cd/12/33e59336dca5be0c398a7482335911a33aa0e20776128f038019f1a95f1b/importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7", size = 55304 }
|
200 |
+
wheels = [
|
201 |
+
{ url = "https://files.pythonhosted.org/packages/a0/d9/a1e041c5e7caa9a05c925f4bdbdfb7f006d1f74996af53467bc394c97be7/importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b", size = 26514 },
|
202 |
+
]
|
203 |
+
|
204 |
+
[[package]]
|
205 |
+
name = "jinja2"
|
206 |
+
version = "3.1.4"
|
207 |
+
source = { registry = "https://pypi.org/simple" }
|
208 |
+
dependencies = [
|
209 |
+
{ name = "markupsafe" },
|
210 |
+
]
|
211 |
+
sdist = { url = "https://files.pythonhosted.org/packages/ed/55/39036716d19cab0747a5020fc7e907f362fbf48c984b14e62127f7e68e5d/jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369", size = 240245 }
|
212 |
+
wheels = [
|
213 |
+
{ url = "https://files.pythonhosted.org/packages/31/80/3a54838c3fb461f6fec263ebf3a3a41771bd05190238de3486aae8540c36/jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d", size = 133271 },
|
214 |
+
]
|
215 |
+
|
216 |
+
[[package]]
|
217 |
+
name = "markdown-it-py"
|
218 |
+
version = "3.0.0"
|
219 |
+
source = { registry = "https://pypi.org/simple" }
|
220 |
+
dependencies = [
|
221 |
+
{ name = "mdurl" },
|
222 |
+
]
|
223 |
+
sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596 }
|
224 |
+
wheels = [
|
225 |
+
{ url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528 },
|
226 |
+
]
|
227 |
+
|
228 |
+
[[package]]
|
229 |
+
name = "markupsafe"
|
230 |
+
version = "3.0.2"
|
231 |
+
source = { registry = "https://pypi.org/simple" }
|
232 |
+
sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537 }
|
233 |
+
wheels = [
|
234 |
+
{ url = "https://files.pythonhosted.org/packages/04/90/d08277ce111dd22f77149fd1a5d4653eeb3b3eaacbdfcbae5afb2600eebd/MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8", size = 14357 },
|
235 |
+
{ url = "https://files.pythonhosted.org/packages/04/e1/6e2194baeae0bca1fae6629dc0cbbb968d4d941469cbab11a3872edff374/MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158", size = 12393 },
|
236 |
+
{ url = "https://files.pythonhosted.org/packages/1d/69/35fa85a8ece0a437493dc61ce0bb6d459dcba482c34197e3efc829aa357f/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579", size = 21732 },
|
237 |
+
{ url = "https://files.pythonhosted.org/packages/22/35/137da042dfb4720b638d2937c38a9c2df83fe32d20e8c8f3185dbfef05f7/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d", size = 20866 },
|
238 |
+
{ url = "https://files.pythonhosted.org/packages/29/28/6d029a903727a1b62edb51863232152fd335d602def598dade38996887f0/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb", size = 20964 },
|
239 |
+
{ url = "https://files.pythonhosted.org/packages/cc/cd/07438f95f83e8bc028279909d9c9bd39e24149b0d60053a97b2bc4f8aa51/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b", size = 21977 },
|
240 |
+
{ url = "https://files.pythonhosted.org/packages/29/01/84b57395b4cc062f9c4c55ce0df7d3108ca32397299d9df00fedd9117d3d/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c", size = 21366 },
|
241 |
+
{ url = "https://files.pythonhosted.org/packages/bd/6e/61ebf08d8940553afff20d1fb1ba7294b6f8d279df9fd0c0db911b4bbcfd/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171", size = 21091 },
|
242 |
+
{ url = "https://files.pythonhosted.org/packages/11/23/ffbf53694e8c94ebd1e7e491de185124277964344733c45481f32ede2499/MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50", size = 15065 },
|
243 |
+
{ url = "https://files.pythonhosted.org/packages/44/06/e7175d06dd6e9172d4a69a72592cb3f7a996a9c396eee29082826449bbc3/MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a", size = 15514 },
|
244 |
+
]
|
245 |
+
|
246 |
+
[[package]]
|
247 |
+
name = "mdurl"
|
248 |
+
version = "0.1.2"
|
249 |
+
source = { registry = "https://pypi.org/simple" }
|
250 |
+
sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729 }
|
251 |
+
wheels = [
|
252 |
+
{ url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979 },
|
253 |
+
]
|
254 |
+
|
255 |
+
[[package]]
|
256 |
+
name = "mpmath"
|
257 |
+
version = "1.3.0"
|
258 |
+
source = { registry = "https://pypi.org/simple" }
|
259 |
+
sdist = { url = "https://files.pythonhosted.org/packages/e0/47/dd32fa426cc72114383ac549964eecb20ecfd886d1e5ccf5340b55b02f57/mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f", size = 508106 }
|
260 |
+
wheels = [
|
261 |
+
{ url = "https://files.pythonhosted.org/packages/43/e3/7d92a15f894aa0c9c4b49b8ee9ac9850d6e63b03c9c32c0367a13ae62209/mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c", size = 536198 },
|
262 |
+
]
|
263 |
+
|
264 |
+
[[package]]
|
265 |
+
name = "networkx"
|
266 |
+
version = "3.4.2"
|
267 |
+
source = { registry = "https://pypi.org/simple" }
|
268 |
+
sdist = { url = "https://files.pythonhosted.org/packages/fd/1d/06475e1cd5264c0b870ea2cc6fdb3e37177c1e565c43f56ff17a10e3937f/networkx-3.4.2.tar.gz", hash = "sha256:307c3669428c5362aab27c8a1260aa8f47c4e91d3891f48be0141738d8d053e1", size = 2151368 }
|
269 |
+
wheels = [
|
270 |
+
{ url = "https://files.pythonhosted.org/packages/b9/54/dd730b32ea14ea797530a4479b2ed46a6fb250f682a9cfb997e968bf0261/networkx-3.4.2-py3-none-any.whl", hash = "sha256:df5d4365b724cf81b8c6a7312509d0c22386097011ad1abe274afd5e9d3bbc5f", size = 1723263 },
|
271 |
+
]
|
272 |
+
|
273 |
+
[[package]]
|
274 |
+
name = "numpy"
|
275 |
+
version = "1.26.4"
|
276 |
+
source = { registry = "https://pypi.org/simple" }
|
277 |
+
sdist = { url = "https://files.pythonhosted.org/packages/65/6e/09db70a523a96d25e115e71cc56a6f9031e7b8cd166c1ac8438307c14058/numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010", size = 15786129 }
|
278 |
+
wheels = [
|
279 |
+
{ url = "https://files.pythonhosted.org/packages/a7/94/ace0fdea5241a27d13543ee117cbc65868e82213fb31a8eb7fe9ff23f313/numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0", size = 20631468 },
|
280 |
+
{ url = "https://files.pythonhosted.org/packages/20/f7/b24208eba89f9d1b58c1668bc6c8c4fd472b20c45573cb767f59d49fb0f6/numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a", size = 13966411 },
|
281 |
+
{ url = "https://files.pythonhosted.org/packages/fc/a5/4beee6488160798683eed5bdb7eead455892c3b4e1f78d79d8d3f3b084ac/numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4", size = 14219016 },
|
282 |
+
{ url = "https://files.pythonhosted.org/packages/4b/d7/ecf66c1cd12dc28b4040b15ab4d17b773b87fa9d29ca16125de01adb36cd/numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f", size = 18240889 },
|
283 |
+
{ url = "https://files.pythonhosted.org/packages/24/03/6f229fe3187546435c4f6f89f6d26c129d4f5bed40552899fcf1f0bf9e50/numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a", size = 13876746 },
|
284 |
+
{ url = "https://files.pythonhosted.org/packages/39/fe/39ada9b094f01f5a35486577c848fe274e374bbf8d8f472e1423a0bbd26d/numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2", size = 18078620 },
|
285 |
+
{ url = "https://files.pythonhosted.org/packages/d5/ef/6ad11d51197aad206a9ad2286dc1aac6a378059e06e8cf22cd08ed4f20dc/numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07", size = 5972659 },
|
286 |
+
{ url = "https://files.pythonhosted.org/packages/19/77/538f202862b9183f54108557bfda67e17603fc560c384559e769321c9d92/numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5", size = 15808905 },
|
287 |
+
]
|
288 |
+
|
289 |
+
[[package]]
|
290 |
+
name = "nvidia-cublas-cu11"
|
291 |
+
version = "11.11.3.6"
|
292 |
+
source = { registry = "https://pypi.org/simple" }
|
293 |
+
wheels = [
|
294 |
+
{ url = "https://files.pythonhosted.org/packages/46/be/c222e33e60d28ecd496a46fc4d78ccae0ee28e1fd7dc705b6288b4cad27e/nvidia_cublas_cu11-11.11.3.6-py3-none-manylinux1_x86_64.whl", hash = "sha256:39fb40e8f486dd8a2ddb8fdeefe1d5b28f5b99df01c87ab3676f057a74a5a6f3", size = 417870452 },
|
295 |
+
{ url = "https://files.pythonhosted.org/packages/96/df/c5ac9ac5096355c47c606e613ecc7aa50fbccf5e0145df857d11da6464b1/nvidia_cublas_cu11-11.11.3.6-py3-none-manylinux2014_aarch64.whl", hash = "sha256:5ccae9e069a2c6be9af9cb5a0b0c6928c19c7915e390d15f598a1eead2a01a7a", size = 291428448 },
|
296 |
+
{ url = "https://files.pythonhosted.org/packages/ea/2e/9d99c60771d275ecf6c914a612e9a577f740a615bc826bec132368e1d3ae/nvidia_cublas_cu11-11.11.3.6-py3-none-manylinux2014_x86_64.whl", hash = "sha256:60252822adea5d0b10cd990a7dc7bedf7435f30ae40083c7a624a85a43225abc", size = 417870460 },
|
297 |
+
{ url = "https://files.pythonhosted.org/packages/0b/1d/7a78cd36fd5e3da4021b3ac2c2c8b2651dd72345b7c3ecc0d3e051884f50/nvidia_cublas_cu11-11.11.3.6-py3-none-win_amd64.whl", hash = "sha256:6ab12b1302bef8ac1ff4414edd1c059e57f4833abef9151683fb8f4de25900be", size = 427234740 },
|
298 |
+
]
|
299 |
+
|
300 |
+
[[package]]
|
301 |
+
name = "nvidia-cublas-cu12"
|
302 |
+
version = "12.1.3.1"
|
303 |
+
source = { registry = "https://pypi.org/simple" }
|
304 |
+
wheels = [
|
305 |
+
{ url = "https://files.pythonhosted.org/packages/37/6d/121efd7382d5b0284239f4ab1fc1590d86d34ed4a4a2fdb13b30ca8e5740/nvidia_cublas_cu12-12.1.3.1-py3-none-manylinux1_x86_64.whl", hash = "sha256:ee53ccca76a6fc08fb9701aa95b6ceb242cdaab118c3bb152af4e579af792728", size = 410594774 },
|
306 |
+
]
|
307 |
+
|
308 |
+
[[package]]
|
309 |
+
name = "nvidia-cuda-cupti-cu12"
|
310 |
+
version = "12.1.105"
|
311 |
+
source = { registry = "https://pypi.org/simple" }
|
312 |
+
wheels = [
|
313 |
+
{ url = "https://files.pythonhosted.org/packages/7e/00/6b218edd739ecfc60524e585ba8e6b00554dd908de2c9c66c1af3e44e18d/nvidia_cuda_cupti_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:e54fde3983165c624cb79254ae9818a456eb6e87a7fd4d56a2352c24ee542d7e", size = 14109015 },
|
314 |
+
]
|
315 |
+
|
316 |
+
[[package]]
|
317 |
+
name = "nvidia-cuda-nvrtc-cu12"
|
318 |
+
version = "12.1.105"
|
319 |
+
source = { registry = "https://pypi.org/simple" }
|
320 |
+
wheels = [
|
321 |
+
{ url = "https://files.pythonhosted.org/packages/b6/9f/c64c03f49d6fbc56196664d05dba14e3a561038a81a638eeb47f4d4cfd48/nvidia_cuda_nvrtc_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:339b385f50c309763ca65456ec75e17bbefcbbf2893f462cb8b90584cd27a1c2", size = 23671734 },
|
322 |
+
]
|
323 |
+
|
324 |
+
[[package]]
|
325 |
+
name = "nvidia-cuda-runtime-cu12"
|
326 |
+
version = "12.1.105"
|
327 |
+
source = { registry = "https://pypi.org/simple" }
|
328 |
+
wheels = [
|
329 |
+
{ url = "https://files.pythonhosted.org/packages/eb/d5/c68b1d2cdfcc59e72e8a5949a37ddb22ae6cade80cd4a57a84d4c8b55472/nvidia_cuda_runtime_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:6e258468ddf5796e25f1dc591a31029fa317d97a0a94ed93468fc86301d61e40", size = 823596 },
|
330 |
+
]
|
331 |
+
|
332 |
+
[[package]]
|
333 |
+
name = "nvidia-cudnn-cu11"
|
334 |
+
version = "9.5.0.50"
|
335 |
+
source = { registry = "https://pypi.org/simple" }
|
336 |
+
dependencies = [
|
337 |
+
{ name = "nvidia-cublas-cu11" },
|
338 |
+
]
|
339 |
+
wheels = [
|
340 |
+
{ url = "https://files.pythonhosted.org/packages/a6/d6/ec0bf8fd29c907e68de69248e5dbc3e78b63a613163d54ebfc4ca9362421/nvidia_cudnn_cu11-9.5.0.50-py3-none-manylinux2014_x86_64.whl", hash = "sha256:15ddd1c8e2904f35debb4a0452f4df57eeb9d363e760de5a5c2ca0daba8e545a", size = 558442361 },
|
341 |
+
{ url = "https://files.pythonhosted.org/packages/2e/01/e7aeb8f9f2336a44c946e629eeb2d204ec672ecd94a47d3b1abaa10a05f1/nvidia_cudnn_cu11-9.5.0.50-py3-none-win_amd64.whl", hash = "sha256:ecf46a9ca869c1628f0ce91c160d53cbd28f0805dca856f85f3e65b4ef3496b1", size = 553719005 },
|
342 |
+
]
|
343 |
+
|
344 |
+
[[package]]
|
345 |
+
name = "nvidia-cudnn-cu12"
|
346 |
+
version = "8.9.2.26"
|
347 |
+
source = { registry = "https://pypi.org/simple" }
|
348 |
+
dependencies = [
|
349 |
+
{ name = "nvidia-cublas-cu12" },
|
350 |
+
]
|
351 |
+
wheels = [
|
352 |
+
{ url = "https://files.pythonhosted.org/packages/ff/74/a2e2be7fb83aaedec84f391f082cf765dfb635e7caa9b49065f73e4835d8/nvidia_cudnn_cu12-8.9.2.26-py3-none-manylinux1_x86_64.whl", hash = "sha256:5ccb288774fdfb07a7e7025ffec286971c06d8d7b4fb162525334616d7629ff9", size = 731725872 },
|
353 |
+
]
|
354 |
+
|
355 |
+
[[package]]
|
356 |
+
name = "nvidia-cufft-cu11"
|
357 |
+
version = "10.9.0.58"
|
358 |
+
source = { registry = "https://pypi.org/simple" }
|
359 |
+
wheels = [
|
360 |
+
{ url = "https://files.pythonhosted.org/packages/74/79/b912a77e38e41f15a0581a59f5c3548d1ddfdda3225936fb67c342719e7a/nvidia_cufft_cu11-10.9.0.58-py3-none-manylinux1_x86_64.whl", hash = "sha256:222f9da70c80384632fd6035e4c3f16762d64ea7a843829cb278f98b3cb7dd81", size = 168405414 },
|
361 |
+
{ url = "https://files.pythonhosted.org/packages/71/7a/a2ad9951d57c3cc23f4fa6d84b146afd9f375ffbc744b38935930ac4393f/nvidia_cufft_cu11-10.9.0.58-py3-none-manylinux2014_aarch64.whl", hash = "sha256:34b7315104e615b230dc3c2d1861f13bff9ec465c5d3b4bb65b4986d03a1d8d4", size = 111231060 },
|
362 |
+
{ url = "https://files.pythonhosted.org/packages/64/c8/133717b43182ba063803e983e7680a94826a9f4ff5734af0ca315803f1b3/nvidia_cufft_cu11-10.9.0.58-py3-none-manylinux2014_x86_64.whl", hash = "sha256:e21037259995243cc370dd63c430d77ae9280bedb68d5b5a18226bfc92e5d748", size = 168405419 },
|
363 |
+
{ url = "https://files.pythonhosted.org/packages/f8/b4/e432a74f8db0e84f734dc14d36c0e529225132bf7e239da21f55893351a6/nvidia_cufft_cu11-10.9.0.58-py3-none-win_amd64.whl", hash = "sha256:c4d316f17c745ec9c728e30409612eaf77a8404c3733cdf6c9c1569634d1ca03", size = 172237004 },
|
364 |
+
]
|
365 |
+
|
366 |
+
[[package]]
|
367 |
+
name = "nvidia-cufft-cu12"
|
368 |
+
version = "11.0.2.54"
|
369 |
+
source = { registry = "https://pypi.org/simple" }
|
370 |
+
wheels = [
|
371 |
+
{ url = "https://files.pythonhosted.org/packages/86/94/eb540db023ce1d162e7bea9f8f5aa781d57c65aed513c33ee9a5123ead4d/nvidia_cufft_cu12-11.0.2.54-py3-none-manylinux1_x86_64.whl", hash = "sha256:794e3948a1aa71fd817c3775866943936774d1c14e7628c74f6f7417224cdf56", size = 121635161 },
|
372 |
+
]
|
373 |
+
|
374 |
+
[[package]]
|
375 |
+
name = "nvidia-curand-cu12"
|
376 |
+
version = "10.3.2.106"
|
377 |
+
source = { registry = "https://pypi.org/simple" }
|
378 |
+
wheels = [
|
379 |
+
{ url = "https://files.pythonhosted.org/packages/44/31/4890b1c9abc496303412947fc7dcea3d14861720642b49e8ceed89636705/nvidia_curand_cu12-10.3.2.106-py3-none-manylinux1_x86_64.whl", hash = "sha256:9d264c5036dde4e64f1de8c50ae753237c12e0b1348738169cd0f8a536c0e1e0", size = 56467784 },
|
380 |
+
]
|
381 |
+
|
382 |
+
[[package]]
|
383 |
+
name = "nvidia-cusolver-cu12"
|
384 |
+
version = "11.4.5.107"
|
385 |
+
source = { registry = "https://pypi.org/simple" }
|
386 |
+
dependencies = [
|
387 |
+
{ name = "nvidia-cublas-cu12" },
|
388 |
+
{ name = "nvidia-cusparse-cu12" },
|
389 |
+
{ name = "nvidia-nvjitlink-cu12" },
|
390 |
+
]
|
391 |
+
wheels = [
|
392 |
+
{ url = "https://files.pythonhosted.org/packages/bc/1d/8de1e5c67099015c834315e333911273a8c6aaba78923dd1d1e25fc5f217/nvidia_cusolver_cu12-11.4.5.107-py3-none-manylinux1_x86_64.whl", hash = "sha256:8a7ec542f0412294b15072fa7dab71d31334014a69f953004ea7a118206fe0dd", size = 124161928 },
|
393 |
+
]
|
394 |
+
|
395 |
+
[[package]]
|
396 |
+
name = "nvidia-cusparse-cu11"
|
397 |
+
version = "11.7.5.86"
|
398 |
+
source = { registry = "https://pypi.org/simple" }
|
399 |
+
wheels = [
|
400 |
+
{ url = "https://files.pythonhosted.org/packages/c1/e0/21b829c535d569831835a4ca5d049a19ba00d3e91f3e12ab4ad27bd7385f/nvidia_cusparse_cu11-11.7.5.86-py3-none-manylinux1_x86_64.whl", hash = "sha256:4ae709fe78d3f23f60acaba8c54b8ad556cf16ca486e0cc1aa92dca7555d2d2b", size = 204126221 },
|
401 |
+
{ url = "https://files.pythonhosted.org/packages/a2/6e/4eb2842e7ab1804072bca43030a8b92731e5a35f6a4a1b8f1aa8fa5f411c/nvidia_cusparse_cu11-11.7.5.86-py3-none-manylinux2014_aarch64.whl", hash = "sha256:6c7da46abee7567e619d4aa2e90a1b032cfcbd1211d429853b1a6e87514a14b2", size = 203917797 },
|
402 |
+
{ url = "https://files.pythonhosted.org/packages/ed/5c/b0333b07c51ced77397c2fb0d9826072cea0da9d421aa7e792aa0f8ecc72/nvidia_cusparse_cu11-11.7.5.86-py3-none-manylinux2014_x86_64.whl", hash = "sha256:8d7cf1628fd8d462b5d2ba6678fae34733a48ecb80495b9c68672ec6a6dde5ef", size = 204126227 },
|
403 |
+
{ url = "https://files.pythonhosted.org/packages/b8/36/a670e8ca1deccd3c63be4d0286491cf5c6375253f0d948e7cc5167fe1da9/nvidia_cusparse_cu11-11.7.5.86-py3-none-win_amd64.whl", hash = "sha256:a0f6ee81cd91be606fc2f55992d06b09cd4e86d74b6ae5e8dd1631cf7f5a8706", size = 203420667 },
|
404 |
+
]
|
405 |
+
|
406 |
+
[[package]]
|
407 |
+
name = "nvidia-cusparse-cu12"
|
408 |
+
version = "12.1.0.106"
|
409 |
+
source = { registry = "https://pypi.org/simple" }
|
410 |
+
dependencies = [
|
411 |
+
{ name = "nvidia-nvjitlink-cu12" },
|
412 |
+
]
|
413 |
+
wheels = [
|
414 |
+
{ url = "https://files.pythonhosted.org/packages/65/5b/cfaeebf25cd9fdec14338ccb16f6b2c4c7fa9163aefcf057d86b9cc248bb/nvidia_cusparse_cu12-12.1.0.106-py3-none-manylinux1_x86_64.whl", hash = "sha256:f3b50f42cf363f86ab21f720998517a659a48131e8d538dc02f8768237bd884c", size = 195958278 },
|
415 |
+
]
|
416 |
+
|
417 |
+
[[package]]
|
418 |
+
name = "nvidia-nccl-cu11"
|
419 |
+
version = "2.21.5"
|
420 |
+
source = { registry = "https://pypi.org/simple" }
|
421 |
+
wheels = [
|
422 |
+
{ url = "https://files.pythonhosted.org/packages/ac/9a/8b6a28b3b87d5fddab0e92cd835339eb8fbddaa71ae67518c8c1b3d05bae/nvidia_nccl_cu11-2.21.5-py3-none-manylinux2014_x86_64.whl", hash = "sha256:49d8350629c7888701d1fd200934942671cb5c728f49acc5a0b3a768820bed29", size = 147811630 },
|
423 |
+
]
|
424 |
+
|
425 |
+
[[package]]
|
426 |
+
name = "nvidia-nccl-cu12"
|
427 |
+
version = "2.19.3"
|
428 |
+
source = { registry = "https://pypi.org/simple" }
|
429 |
+
wheels = [
|
430 |
+
{ url = "https://files.pythonhosted.org/packages/38/00/d0d4e48aef772ad5aebcf70b73028f88db6e5640b36c38e90445b7a57c45/nvidia_nccl_cu12-2.19.3-py3-none-manylinux1_x86_64.whl", hash = "sha256:a9734707a2c96443331c1e48c717024aa6678a0e2a4cb66b2c364d18cee6b48d", size = 165987969 },
|
431 |
+
]
|
432 |
+
|
433 |
+
[[package]]
|
434 |
+
name = "nvidia-nvjitlink-cu12"
|
435 |
+
version = "12.6.77"
|
436 |
+
source = { registry = "https://pypi.org/simple" }
|
437 |
+
wheels = [
|
438 |
+
{ url = "https://files.pythonhosted.org/packages/11/8c/386018fdffdce2ff8d43fedf192ef7d14cab7501cbf78a106dd2e9f1fc1f/nvidia_nvjitlink_cu12-12.6.77-py3-none-manylinux2014_aarch64.whl", hash = "sha256:3bf10d85bb1801e9c894c6e197e44dd137d2a0a9e43f8450e9ad13f2df0dd52d", size = 19270432 },
|
439 |
+
{ url = "https://files.pythonhosted.org/packages/fe/e4/486de766851d58699bcfeb3ba6a3beb4d89c3809f75b9d423b9508a8760f/nvidia_nvjitlink_cu12-12.6.77-py3-none-manylinux2014_x86_64.whl", hash = "sha256:9ae346d16203ae4ea513be416495167a0101d33d2d14935aa9c1829a3fb45142", size = 19745114 },
|
440 |
+
]
|
441 |
+
|
442 |
+
[[package]]
|
443 |
+
name = "nvidia-nvtx-cu12"
|
444 |
+
version = "12.1.105"
|
445 |
+
source = { registry = "https://pypi.org/simple" }
|
446 |
+
wheels = [
|
447 |
+
{ url = "https://files.pythonhosted.org/packages/da/d3/8057f0587683ed2fcd4dbfbdfdfa807b9160b809976099d36b8f60d08f03/nvidia_nvtx_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:dc21cf308ca5691e7c04d962e213f8a4aa9bbfa23d95412f452254c2caeb09e5", size = 99138 },
|
448 |
+
]
|
449 |
+
|
450 |
+
[[package]]
|
451 |
+
name = "omegaconf"
|
452 |
+
version = "2.3.0"
|
453 |
+
source = { registry = "https://pypi.org/simple" }
|
454 |
+
dependencies = [
|
455 |
+
{ name = "antlr4-python3-runtime" },
|
456 |
+
{ name = "pyyaml" },
|
457 |
+
]
|
458 |
+
sdist = { url = "https://files.pythonhosted.org/packages/09/48/6388f1bb9da707110532cb70ec4d2822858ddfb44f1cdf1233c20a80ea4b/omegaconf-2.3.0.tar.gz", hash = "sha256:d5d4b6d29955cc50ad50c46dc269bcd92c6e00f5f90d23ab5fee7bfca4ba4cc7", size = 3298120 }
|
459 |
+
wheels = [
|
460 |
+
{ url = "https://files.pythonhosted.org/packages/e3/94/1843518e420fa3ed6919835845df698c7e27e183cb997394e4a670973a65/omegaconf-2.3.0-py3-none-any.whl", hash = "sha256:7b4df175cdb08ba400f45cae3bdcae7ba8365db4d165fc65fd04b050ab63b46b", size = 79500 },
|
461 |
+
]
|
462 |
+
|
463 |
+
[[package]]
|
464 |
+
name = "onediff"
|
465 |
+
version = "1.2.0"
|
466 |
+
source = { registry = "https://pypi.org/simple" }
|
467 |
+
dependencies = [
|
468 |
+
{ name = "accelerate" },
|
469 |
+
{ name = "diffusers" },
|
470 |
+
{ name = "torch" },
|
471 |
+
{ name = "transformers" },
|
472 |
+
]
|
473 |
+
sdist = { url = "https://files.pythonhosted.org/packages/d5/30/b493cbca73c5cdd2499a3ec37e451fa4c826e02be6bb6bc66c44d98293cc/onediff-1.2.0.tar.gz", hash = "sha256:8655b18698ca252093c06db53d4620de4065d1e507f9d7b1cbe3f123ed17bd26", size = 73823417 }
|
474 |
+
wheels = [
|
475 |
+
{ url = "https://files.pythonhosted.org/packages/3c/22/5b6de9dae4b8ed161bfcab9a7d6d7660cca4ee1d7db733950b67905f7f66/onediff-1.2.0-py3-none-any.whl", hash = "sha256:84bb2285ae7f38e450229779ab96b591e4f530e1f22b7320f41b06f7aec181a3", size = 104932 },
|
476 |
+
]
|
477 |
+
|
478 |
+
[[package]]
|
479 |
+
name = "onediffx"
|
480 |
+
version = "1.2.0"
|
481 |
+
source = { registry = "https://pypi.org/simple" }
|
482 |
+
dependencies = [
|
483 |
+
{ name = "accelerate" },
|
484 |
+
{ name = "diffusers" },
|
485 |
+
{ name = "omegaconf" },
|
486 |
+
{ name = "onefx" },
|
487 |
+
{ name = "torch" },
|
488 |
+
{ name = "transformers" },
|
489 |
+
]
|
490 |
+
sdist = { url = "https://files.pythonhosted.org/packages/b9/e1/b0797217862a9eb48bdb5f3e53ed8c9b69fe582de1e7c3a4b948f7ed862a/onediffx-1.2.0.tar.gz", hash = "sha256:3e0934e71f966d41d04951826ae36ebfb25be8c23ca7f5e6f7b177fd92f0791b", size = 95945 }
|
491 |
+
wheels = [
|
492 |
+
{ url = "https://files.pythonhosted.org/packages/f4/dd/412be6d3646355b5e5b75066f715fc34d0bbdcfbf18d0934b9f107c59239/onediffx-1.2.0-py3-none-any.whl", hash = "sha256:86dea026ac30bc04a2f4cd25ac4a511b1c4b4154657bc48c7e05f4a238085f14", size = 67899 },
|
493 |
+
]
|
494 |
+
|
495 |
+
[[package]]
|
496 |
+
name = "oneflow"
|
497 |
+
version = "0.9.1.dev20240802+cu118"
|
498 |
+
source = { url = "https://github.com/siliconflow/oneflow_releases/releases/download/community_cu118/oneflow-0.9.1.dev20240802%2Bcu118-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl" }
|
499 |
+
dependencies = [
|
500 |
+
{ name = "numpy" },
|
501 |
+
{ name = "nvidia-cublas-cu11" },
|
502 |
+
{ name = "nvidia-cudnn-cu11" },
|
503 |
+
{ name = "nvidia-cufft-cu11" },
|
504 |
+
{ name = "nvidia-cusparse-cu11" },
|
505 |
+
{ name = "nvidia-nccl-cu11" },
|
506 |
+
{ name = "pillow" },
|
507 |
+
{ name = "protobuf" },
|
508 |
+
{ name = "requests" },
|
509 |
+
{ name = "rich" },
|
510 |
+
{ name = "tqdm" },
|
511 |
+
]
|
512 |
+
wheels = [
|
513 |
+
{ url = "https://github.com/siliconflow/oneflow_releases/releases/download/community_cu118/oneflow-0.9.1.dev20240802%2Bcu118-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a9de80a3c059ec78bec7dfa6952f5618aaa05f29182b49634e0211195281e19" },
|
514 |
+
]
|
515 |
+
|
516 |
+
[package.metadata]
|
517 |
+
requires-dist = [
|
518 |
+
{ name = "numpy", specifier = ">=1.22.1" },
|
519 |
+
{ name = "nvidia-cublas-cu11" },
|
520 |
+
{ name = "nvidia-cudnn-cu11" },
|
521 |
+
{ name = "nvidia-cufft-cu11" },
|
522 |
+
{ name = "nvidia-cusparse-cu11" },
|
523 |
+
{ name = "nvidia-nccl-cu11" },
|
524 |
+
{ name = "pillow" },
|
525 |
+
{ name = "protobuf", specifier = ">=3.9.2" },
|
526 |
+
{ name = "requests" },
|
527 |
+
{ name = "rich" },
|
528 |
+
{ name = "tqdm" },
|
529 |
+
]
|
530 |
+
|
531 |
+
[[package]]
|
532 |
+
name = "onefx"
|
533 |
+
version = "0.0.3"
|
534 |
+
source = { registry = "https://pypi.org/simple" }
|
535 |
+
sdist = { url = "https://files.pythonhosted.org/packages/3c/c4/2dc5fd8ab613d32e24d54204b4d448b315fa50e5f1ce3197938f2219fa09/onefx-0.0.3.tar.gz", hash = "sha256:d3f3f816891cdfdb6d69e4a78b72d5cafd7d40e165ce419f89f855f47a3dbe34", size = 147260 }
|
536 |
+
|
537 |
+
[[package]]
|
538 |
+
name = "packaging"
|
539 |
+
version = "24.1"
|
540 |
+
source = { registry = "https://pypi.org/simple" }
|
541 |
+
sdist = { url = "https://files.pythonhosted.org/packages/51/65/50db4dda066951078f0a96cf12f4b9ada6e4b811516bf0262c0f4f7064d4/packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002", size = 148788 }
|
542 |
+
wheels = [
|
543 |
+
{ url = "https://files.pythonhosted.org/packages/08/aa/cc0199a5f0ad350994d660967a8efb233fe0416e4639146c089643407ce6/packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124", size = 53985 },
|
544 |
+
]
|
545 |
+
|
546 |
+
[[package]]
|
547 |
+
name = "pillow"
|
548 |
+
version = "11.0.0"
|
549 |
+
source = { registry = "https://pypi.org/simple" }
|
550 |
+
sdist = { url = "https://files.pythonhosted.org/packages/a5/26/0d95c04c868f6bdb0c447e3ee2de5564411845e36a858cfd63766bc7b563/pillow-11.0.0.tar.gz", hash = "sha256:72bacbaf24ac003fea9bff9837d1eedb6088758d41e100c1552930151f677739", size = 46737780 }
|
551 |
+
wheels = [
|
552 |
+
{ url = "https://files.pythonhosted.org/packages/98/fb/a6ce6836bd7fd93fbf9144bf54789e02babc27403b50a9e1583ee877d6da/pillow-11.0.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:6619654954dc4936fcff82db8eb6401d3159ec6be81e33c6000dfd76ae189947", size = 3154708 },
|
553 |
+
{ url = "https://files.pythonhosted.org/packages/6a/1d/1f51e6e912d8ff316bb3935a8cda617c801783e0b998bf7a894e91d3bd4c/pillow-11.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b3c5ac4bed7519088103d9450a1107f76308ecf91d6dabc8a33a2fcfb18d0fba", size = 2979223 },
|
554 |
+
{ url = "https://files.pythonhosted.org/packages/90/83/e2077b0192ca8a9ef794dbb74700c7e48384706467067976c2a95a0f40a1/pillow-11.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a65149d8ada1055029fcb665452b2814fe7d7082fcb0c5bed6db851cb69b2086", size = 4183167 },
|
555 |
+
{ url = "https://files.pythonhosted.org/packages/0e/74/467af0146970a98349cdf39e9b79a6cc8a2e7558f2c01c28a7b6b85c5bda/pillow-11.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88a58d8ac0cc0e7f3a014509f0455248a76629ca9b604eca7dc5927cc593c5e9", size = 4283912 },
|
556 |
+
{ url = "https://files.pythonhosted.org/packages/85/b1/d95d4f7ca3a6c1ae120959605875a31a3c209c4e50f0029dc1a87566cf46/pillow-11.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:c26845094b1af3c91852745ae78e3ea47abf3dbcd1cf962f16b9a5fbe3ee8488", size = 4195815 },
|
557 |
+
{ url = "https://files.pythonhosted.org/packages/41/c3/94f33af0762ed76b5a237c5797e088aa57f2b7fa8ee7932d399087be66a8/pillow-11.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:1a61b54f87ab5786b8479f81c4b11f4d61702830354520837f8cc791ebba0f5f", size = 4366117 },
|
558 |
+
{ url = "https://files.pythonhosted.org/packages/ba/3c/443e7ef01f597497268899e1cca95c0de947c9bbf77a8f18b3c126681e5d/pillow-11.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:674629ff60030d144b7bca2b8330225a9b11c482ed408813924619c6f302fdbb", size = 4278607 },
|
559 |
+
{ url = "https://files.pythonhosted.org/packages/26/95/1495304448b0081e60c0c5d63f928ef48bb290acee7385804426fa395a21/pillow-11.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:598b4e238f13276e0008299bd2482003f48158e2b11826862b1eb2ad7c768b97", size = 4410685 },
|
560 |
+
{ url = "https://files.pythonhosted.org/packages/45/da/861e1df971ef0de9870720cb309ca4d553b26a9483ec9be3a7bf1de4a095/pillow-11.0.0-cp310-cp310-win32.whl", hash = "sha256:9a0f748eaa434a41fccf8e1ee7a3eed68af1b690e75328fd7a60af123c193b50", size = 2249185 },
|
561 |
+
{ url = "https://files.pythonhosted.org/packages/d5/4e/78f7c5202ea2a772a5ab05069c1b82503e6353cd79c7e474d4945f4b82c3/pillow-11.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:a5629742881bcbc1f42e840af185fd4d83a5edeb96475a575f4da50d6ede337c", size = 2566726 },
|
562 |
+
{ url = "https://files.pythonhosted.org/packages/77/e4/6e84eada35cbcc646fc1870f72ccfd4afacb0fae0c37ffbffe7f5dc24bf1/pillow-11.0.0-cp310-cp310-win_arm64.whl", hash = "sha256:ee217c198f2e41f184f3869f3e485557296d505b5195c513b2bfe0062dc537f1", size = 2254585 },
|
563 |
+
{ url = "https://files.pythonhosted.org/packages/36/57/42a4dd825eab762ba9e690d696d894ba366e06791936056e26e099398cda/pillow-11.0.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1187739620f2b365de756ce086fdb3604573337cc28a0d3ac4a01ab6b2d2a6d2", size = 3119239 },
|
564 |
+
{ url = "https://files.pythonhosted.org/packages/98/f7/25f9f9e368226a1d6cf3507081a1a7944eddd3ca7821023377043f5a83c8/pillow-11.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:fbbcb7b57dc9c794843e3d1258c0fbf0f48656d46ffe9e09b63bbd6e8cd5d0a2", size = 2950803 },
|
565 |
+
{ url = "https://files.pythonhosted.org/packages/59/01/98ead48a6c2e31e6185d4c16c978a67fe3ccb5da5c2ff2ba8475379bb693/pillow-11.0.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d203af30149ae339ad1b4f710d9844ed8796e97fda23ffbc4cc472968a47d0b", size = 3281098 },
|
566 |
+
{ url = "https://files.pythonhosted.org/packages/51/c0/570255b2866a0e4d500a14f950803a2ec273bac7badc43320120b9262450/pillow-11.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21a0d3b115009ebb8ac3d2ebec5c2982cc693da935f4ab7bb5c8ebe2f47d36f2", size = 3323665 },
|
567 |
+
{ url = "https://files.pythonhosted.org/packages/0e/75/689b4ec0483c42bfc7d1aacd32ade7a226db4f4fac57c6fdcdf90c0731e3/pillow-11.0.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:73853108f56df97baf2bb8b522f3578221e56f646ba345a372c78326710d3830", size = 3310533 },
|
568 |
+
{ url = "https://files.pythonhosted.org/packages/3d/30/38bd6149cf53da1db4bad304c543ade775d225961c4310f30425995cb9ec/pillow-11.0.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e58876c91f97b0952eb766123bfef372792ab3f4e3e1f1a2267834c2ab131734", size = 3414886 },
|
569 |
+
{ url = "https://files.pythonhosted.org/packages/ec/3d/c32a51d848401bd94cabb8767a39621496491ee7cd5199856b77da9b18ad/pillow-11.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:224aaa38177597bb179f3ec87eeefcce8e4f85e608025e9cfac60de237ba6316", size = 2567508 },
|
570 |
+
]
|
571 |
+
|
572 |
+
[[package]]
|
573 |
+
name = "protobuf"
|
574 |
+
version = "5.28.3"
|
575 |
+
source = { registry = "https://pypi.org/simple" }
|
576 |
+
sdist = { url = "https://files.pythonhosted.org/packages/74/6e/e69eb906fddcb38f8530a12f4b410699972ab7ced4e21524ece9d546ac27/protobuf-5.28.3.tar.gz", hash = "sha256:64badbc49180a5e401f373f9ce7ab1d18b63f7dd4a9cdc43c92b9f0b481cef7b", size = 422479 }
|
577 |
+
wheels = [
|
578 |
+
{ url = "https://files.pythonhosted.org/packages/d1/c5/05163fad52d7c43e124a545f1372d18266db36036377ad29de4271134a6a/protobuf-5.28.3-cp310-abi3-win32.whl", hash = "sha256:0c4eec6f987338617072592b97943fdbe30d019c56126493111cf24344c1cc24", size = 419624 },
|
579 |
+
{ url = "https://files.pythonhosted.org/packages/9c/4c/4563ebe001ff30dca9d7ed12e471fa098d9759712980cde1fd03a3a44fb7/protobuf-5.28.3-cp310-abi3-win_amd64.whl", hash = "sha256:91fba8f445723fcf400fdbe9ca796b19d3b1242cd873907979b9ed71e4afe868", size = 431464 },
|
580 |
+
{ url = "https://files.pythonhosted.org/packages/1c/f2/baf397f3dd1d3e4af7e3f5a0382b868d25ac068eefe1ebde05132333436c/protobuf-5.28.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a3f6857551e53ce35e60b403b8a27b0295f7d6eb63d10484f12bc6879c715687", size = 414743 },
|
581 |
+
{ url = "https://files.pythonhosted.org/packages/85/50/cd61a358ba1601f40e7d38bcfba22e053f40ef2c50d55b55926aecc8fec7/protobuf-5.28.3-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:3fa2de6b8b29d12c61911505d893afe7320ce7ccba4df913e2971461fa36d584", size = 316511 },
|
582 |
+
{ url = "https://files.pythonhosted.org/packages/5d/ae/3257b09328c0b4e59535e497b0c7537d4954038bdd53a2f0d2f49d15a7c4/protobuf-5.28.3-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:712319fbdddb46f21abb66cd33cb9e491a5763b2febd8f228251add221981135", size = 316624 },
|
583 |
+
{ url = "https://files.pythonhosted.org/packages/ad/c3/2377c159e28ea89a91cf1ca223f827ae8deccb2c9c401e5ca233cd73002f/protobuf-5.28.3-py3-none-any.whl", hash = "sha256:cee1757663fa32a1ee673434fcf3bf24dd54763c79690201208bafec62f19eed", size = 169511 },
|
584 |
+
]
|
585 |
+
|
586 |
+
[[package]]
|
587 |
+
name = "psutil"
|
588 |
+
version = "6.1.0"
|
589 |
+
source = { registry = "https://pypi.org/simple" }
|
590 |
+
sdist = { url = "https://files.pythonhosted.org/packages/26/10/2a30b13c61e7cf937f4adf90710776b7918ed0a9c434e2c38224732af310/psutil-6.1.0.tar.gz", hash = "sha256:353815f59a7f64cdaca1c0307ee13558a0512f6db064e92fe833784f08539c7a", size = 508565 }
|
591 |
+
wheels = [
|
592 |
+
{ url = "https://files.pythonhosted.org/packages/01/9e/8be43078a171381953cfee33c07c0d628594b5dbfc5157847b85022c2c1b/psutil-6.1.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6e2dcd475ce8b80522e51d923d10c7871e45f20918e027ab682f94f1c6351688", size = 247762 },
|
593 |
+
{ url = "https://files.pythonhosted.org/packages/1d/cb/313e80644ea407f04f6602a9e23096540d9dc1878755f3952ea8d3d104be/psutil-6.1.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:0895b8414afafc526712c498bd9de2b063deaac4021a3b3c34566283464aff8e", size = 248777 },
|
594 |
+
{ url = "https://files.pythonhosted.org/packages/65/8e/bcbe2025c587b5d703369b6a75b65d41d1367553da6e3f788aff91eaf5bd/psutil-6.1.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9dcbfce5d89f1d1f2546a2090f4fcf87c7f669d1d90aacb7d7582addece9fb38", size = 284259 },
|
595 |
+
{ url = "https://files.pythonhosted.org/packages/58/4d/8245e6f76a93c98aab285a43ea71ff1b171bcd90c9d238bf81f7021fb233/psutil-6.1.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:498c6979f9c6637ebc3a73b3f87f9eb1ec24e1ce53a7c5173b8508981614a90b", size = 287255 },
|
596 |
+
{ url = "https://files.pythonhosted.org/packages/27/c2/d034856ac47e3b3cdfa9720d0e113902e615f4190d5d1bdb8df4b2015fb2/psutil-6.1.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d905186d647b16755a800e7263d43df08b790d709d575105d419f8b6ef65423a", size = 288804 },
|
597 |
+
{ url = "https://files.pythonhosted.org/packages/ea/55/5389ed243c878725feffc0d6a3bc5ef6764312b6fc7c081faaa2cfa7ef37/psutil-6.1.0-cp37-abi3-win32.whl", hash = "sha256:1ad45a1f5d0b608253b11508f80940985d1d0c8f6111b5cb637533a0e6ddc13e", size = 250386 },
|
598 |
+
{ url = "https://files.pythonhosted.org/packages/11/91/87fa6f060e649b1e1a7b19a4f5869709fbf750b7c8c262ee776ec32f3028/psutil-6.1.0-cp37-abi3-win_amd64.whl", hash = "sha256:a8fb3752b491d246034fa4d279ff076501588ce8cbcdbb62c32fd7a377d996be", size = 254228 },
|
599 |
+
]
|
600 |
+
|
601 |
+
[[package]]
|
602 |
+
name = "pydantic"
|
603 |
+
version = "2.9.2"
|
604 |
+
source = { registry = "https://pypi.org/simple" }
|
605 |
+
dependencies = [
|
606 |
+
{ name = "annotated-types" },
|
607 |
+
{ name = "pydantic-core" },
|
608 |
+
{ name = "typing-extensions" },
|
609 |
+
]
|
610 |
+
sdist = { url = "https://files.pythonhosted.org/packages/a9/b7/d9e3f12af310e1120c21603644a1cd86f59060e040ec5c3a80b8f05fae30/pydantic-2.9.2.tar.gz", hash = "sha256:d155cef71265d1e9807ed1c32b4c8deec042a44a50a4188b25ac67ecd81a9c0f", size = 769917 }
|
611 |
+
wheels = [
|
612 |
+
{ url = "https://files.pythonhosted.org/packages/df/e4/ba44652d562cbf0bf320e0f3810206149c8a4e99cdbf66da82e97ab53a15/pydantic-2.9.2-py3-none-any.whl", hash = "sha256:f048cec7b26778210e28a0459867920654d48e5e62db0958433636cde4254f12", size = 434928 },
|
613 |
+
]
|
614 |
+
|
615 |
+
[[package]]
|
616 |
+
name = "pydantic-core"
|
617 |
+
version = "2.23.4"
|
618 |
+
source = { registry = "https://pypi.org/simple" }
|
619 |
+
dependencies = [
|
620 |
+
{ name = "typing-extensions" },
|
621 |
+
]
|
622 |
+
sdist = { url = "https://files.pythonhosted.org/packages/e2/aa/6b6a9b9f8537b872f552ddd46dd3da230367754b6f707b8e1e963f515ea3/pydantic_core-2.23.4.tar.gz", hash = "sha256:2584f7cf844ac4d970fba483a717dbe10c1c1c96a969bf65d61ffe94df1b2863", size = 402156 }
|
623 |
+
wheels = [
|
624 |
+
{ url = "https://files.pythonhosted.org/packages/5c/8b/d3ae387f66277bd8104096d6ec0a145f4baa2966ebb2cad746c0920c9526/pydantic_core-2.23.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b10bd51f823d891193d4717448fab065733958bdb6a6b351967bd349d48d5c9b", size = 1867835 },
|
625 |
+
{ url = "https://files.pythonhosted.org/packages/46/76/f68272e4c3a7df8777798282c5e47d508274917f29992d84e1898f8908c7/pydantic_core-2.23.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4fc714bdbfb534f94034efaa6eadd74e5b93c8fa6315565a222f7b6f42ca1166", size = 1776689 },
|
626 |
+
{ url = "https://files.pythonhosted.org/packages/cc/69/5f945b4416f42ea3f3bc9d2aaec66c76084a6ff4ff27555bf9415ab43189/pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63e46b3169866bd62849936de036f901a9356e36376079b05efa83caeaa02ceb", size = 1800748 },
|
627 |
+
{ url = "https://files.pythonhosted.org/packages/50/ab/891a7b0054bcc297fb02d44d05c50e68154e31788f2d9d41d0b72c89fdf7/pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed1a53de42fbe34853ba90513cea21673481cd81ed1be739f7f2efb931b24916", size = 1806469 },
|
628 |
+
{ url = "https://files.pythonhosted.org/packages/31/7c/6e3fa122075d78f277a8431c4c608f061881b76c2b7faca01d317ee39b5d/pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cfdd16ab5e59fc31b5e906d1a3f666571abc367598e3e02c83403acabc092e07", size = 2002246 },
|
629 |
+
{ url = "https://files.pythonhosted.org/packages/ad/6f/22d5692b7ab63fc4acbc74de6ff61d185804a83160adba5e6cc6068e1128/pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255a8ef062cbf6674450e668482456abac99a5583bbafb73f9ad469540a3a232", size = 2659404 },
|
630 |
+
{ url = "https://files.pythonhosted.org/packages/11/ac/1e647dc1121c028b691028fa61a4e7477e6aeb5132628fde41dd34c1671f/pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a7cd62e831afe623fbb7aabbb4fe583212115b3ef38a9f6b71869ba644624a2", size = 2053940 },
|
631 |
+
{ url = "https://files.pythonhosted.org/packages/91/75/984740c17f12c3ce18b5a2fcc4bdceb785cce7df1511a4ce89bca17c7e2d/pydantic_core-2.23.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f09e2ff1f17c2b51f2bc76d1cc33da96298f0a036a137f5440ab3ec5360b624f", size = 1921437 },
|
632 |
+
{ url = "https://files.pythonhosted.org/packages/a0/74/13c5f606b64d93f0721e7768cd3e8b2102164866c207b8cd6f90bb15d24f/pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e38e63e6f3d1cec5a27e0afe90a085af8b6806ee208b33030e65b6516353f1a3", size = 1966129 },
|
633 |
+
{ url = "https://files.pythonhosted.org/packages/18/03/9c4aa5919457c7b57a016c1ab513b1a926ed9b2bb7915bf8e506bf65c34b/pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0dbd8dbed2085ed23b5c04afa29d8fd2771674223135dc9bc937f3c09284d071", size = 2110908 },
|
634 |
+
{ url = "https://files.pythonhosted.org/packages/92/2c/053d33f029c5dc65e5cf44ff03ceeefb7cce908f8f3cca9265e7f9b540c8/pydantic_core-2.23.4-cp310-none-win32.whl", hash = "sha256:6531b7ca5f951d663c339002e91aaebda765ec7d61b7d1e3991051906ddde119", size = 1735278 },
|
635 |
+
{ url = "https://files.pythonhosted.org/packages/de/81/7dfe464eca78d76d31dd661b04b5f2036ec72ea8848dd87ab7375e185c23/pydantic_core-2.23.4-cp310-none-win_amd64.whl", hash = "sha256:7c9129eb40958b3d4500fa2467e6a83356b3b61bfff1b414c7361d9220f9ae8f", size = 1917453 },
|
636 |
+
{ url = "https://files.pythonhosted.org/packages/13/a9/5d582eb3204464284611f636b55c0a7410d748ff338756323cb1ce721b96/pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f455ee30a9d61d3e1a15abd5068827773d6e4dc513e795f380cdd59932c782d5", size = 1857135 },
|
637 |
+
{ url = "https://files.pythonhosted.org/packages/2c/57/faf36290933fe16717f97829eabfb1868182ac495f99cf0eda9f59687c9d/pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1e90d2e3bd2c3863d48525d297cd143fe541be8bbf6f579504b9712cb6b643ec", size = 1740583 },
|
638 |
+
{ url = "https://files.pythonhosted.org/packages/91/7c/d99e3513dc191c4fec363aef1bf4c8af9125d8fa53af7cb97e8babef4e40/pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e203fdf807ac7e12ab59ca2bfcabb38c7cf0b33c41efeb00f8e5da1d86af480", size = 1793637 },
|
639 |
+
{ url = "https://files.pythonhosted.org/packages/29/18/812222b6d18c2d13eebbb0f7cdc170a408d9ced65794fdb86147c77e1982/pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e08277a400de01bc72436a0ccd02bdf596631411f592ad985dcee21445bd0068", size = 1941963 },
|
640 |
+
{ url = "https://files.pythonhosted.org/packages/0f/36/c1f3642ac3f05e6bb4aec3ffc399fa3f84895d259cf5f0ce3054b7735c29/pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f220b0eea5965dec25480b6333c788fb72ce5f9129e8759ef876a1d805d00801", size = 1915332 },
|
641 |
+
{ url = "https://files.pythonhosted.org/packages/f7/ca/9c0854829311fb446020ebb540ee22509731abad886d2859c855dd29b904/pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d06b0c8da4f16d1d1e352134427cb194a0a6e19ad5db9161bf32b2113409e728", size = 1957926 },
|
642 |
+
{ url = "https://files.pythonhosted.org/packages/c0/1c/7836b67c42d0cd4441fcd9fafbf6a027ad4b79b6559f80cf11f89fd83648/pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ba1a0996f6c2773bd83e63f18914c1de3c9dd26d55f4ac302a7efe93fb8e7433", size = 2100342 },
|
643 |
+
{ url = "https://files.pythonhosted.org/packages/a9/f9/b6bcaf874f410564a78908739c80861a171788ef4d4f76f5009656672dfe/pydantic_core-2.23.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:9a5bce9d23aac8f0cf0836ecfc033896aa8443b501c58d0602dbfd5bd5b37753", size = 1920344 },
|
644 |
+
]
|
645 |
+
|
646 |
+
[[package]]
|
647 |
+
name = "pygments"
|
648 |
+
version = "2.18.0"
|
649 |
+
source = { registry = "https://pypi.org/simple" }
|
650 |
+
sdist = { url = "https://files.pythonhosted.org/packages/8e/62/8336eff65bcbc8e4cb5d05b55faf041285951b6e80f33e2bff2024788f31/pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199", size = 4891905 }
|
651 |
+
wheels = [
|
652 |
+
{ url = "https://files.pythonhosted.org/packages/f7/3f/01c8b82017c199075f8f788d0d906b9ffbbc5a47dc9918a945e13d5a2bda/pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a", size = 1205513 },
|
653 |
+
]
|
654 |
+
|
655 |
+
[[package]]
|
656 |
+
name = "pyyaml"
|
657 |
+
version = "6.0.2"
|
658 |
+
source = { registry = "https://pypi.org/simple" }
|
659 |
+
sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631 }
|
660 |
+
wheels = [
|
661 |
+
{ url = "https://files.pythonhosted.org/packages/9b/95/a3fac87cb7158e231b5a6012e438c647e1a87f09f8e0d123acec8ab8bf71/PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086", size = 184199 },
|
662 |
+
{ url = "https://files.pythonhosted.org/packages/c7/7a/68bd47624dab8fd4afbfd3c48e3b79efe09098ae941de5b58abcbadff5cb/PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf", size = 171758 },
|
663 |
+
{ url = "https://files.pythonhosted.org/packages/49/ee/14c54df452143b9ee9f0f29074d7ca5516a36edb0b4cc40c3f280131656f/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237", size = 718463 },
|
664 |
+
{ url = "https://files.pythonhosted.org/packages/4d/61/de363a97476e766574650d742205be468921a7b532aa2499fcd886b62530/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b", size = 719280 },
|
665 |
+
{ url = "https://files.pythonhosted.org/packages/6b/4e/1523cb902fd98355e2e9ea5e5eb237cbc5f3ad5f3075fa65087aa0ecb669/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed", size = 751239 },
|
666 |
+
{ url = "https://files.pythonhosted.org/packages/b7/33/5504b3a9a4464893c32f118a9cc045190a91637b119a9c881da1cf6b7a72/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180", size = 695802 },
|
667 |
+
{ url = "https://files.pythonhosted.org/packages/5c/20/8347dcabd41ef3a3cdc4f7b7a2aff3d06598c8779faa189cdbf878b626a4/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68", size = 720527 },
|
668 |
+
{ url = "https://files.pythonhosted.org/packages/be/aa/5afe99233fb360d0ff37377145a949ae258aaab831bde4792b32650a4378/PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99", size = 144052 },
|
669 |
+
{ url = "https://files.pythonhosted.org/packages/b5/84/0fa4b06f6d6c958d207620fc60005e241ecedceee58931bb20138e1e5776/PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e", size = 161774 },
|
670 |
+
]
|
671 |
+
|
672 |
+
[[package]]
|
673 |
+
name = "regex"
|
674 |
+
version = "2024.9.11"
|
675 |
+
source = { registry = "https://pypi.org/simple" }
|
676 |
+
sdist = { url = "https://files.pythonhosted.org/packages/f9/38/148df33b4dbca3bd069b963acab5e0fa1a9dbd6820f8c322d0dd6faeff96/regex-2024.9.11.tar.gz", hash = "sha256:6c188c307e8433bcb63dc1915022deb553b4203a70722fc542c363bf120a01fd", size = 399403 }
|
677 |
+
wheels = [
|
678 |
+
{ url = "https://files.pythonhosted.org/packages/63/12/497bd6599ce8a239ade68678132296aec5ee25ebea45fc8ba91aa60fceec/regex-2024.9.11-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1494fa8725c285a81d01dc8c06b55287a1ee5e0e382d8413adc0a9197aac6408", size = 482488 },
|
679 |
+
{ url = "https://files.pythonhosted.org/packages/c1/24/595ddb9bec2a9b151cdaf9565b0c9f3da9f0cb1dca6c158bc5175332ddf8/regex-2024.9.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0e12c481ad92d129c78f13a2a3662317e46ee7ef96c94fd332e1c29131875b7d", size = 287443 },
|
680 |
+
{ url = "https://files.pythonhosted.org/packages/69/a8/b2fb45d9715b1469383a0da7968f8cacc2f83e9fbbcd6b8713752dd980a6/regex-2024.9.11-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:16e13a7929791ac1216afde26f712802e3df7bf0360b32e4914dca3ab8baeea5", size = 284561 },
|
681 |
+
{ url = "https://files.pythonhosted.org/packages/88/87/1ce4a5357216b19b7055e7d3b0efc75a6e426133bf1e7d094321df514257/regex-2024.9.11-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46989629904bad940bbec2106528140a218b4a36bb3042d8406980be1941429c", size = 783177 },
|
682 |
+
{ url = "https://files.pythonhosted.org/packages/3c/65/b9f002ab32f7b68e7d1dcabb67926f3f47325b8dbc22cc50b6a043e1d07c/regex-2024.9.11-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a906ed5e47a0ce5f04b2c981af1c9acf9e8696066900bf03b9d7879a6f679fc8", size = 823193 },
|
683 |
+
{ url = "https://files.pythonhosted.org/packages/22/91/8339dd3abce101204d246e31bc26cdd7ec07c9f91598472459a3a902aa41/regex-2024.9.11-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9a091b0550b3b0207784a7d6d0f1a00d1d1c8a11699c1a4d93db3fbefc3ad35", size = 809950 },
|
684 |
+
{ url = "https://files.pythonhosted.org/packages/cb/19/556638aa11c2ec9968a1da998f07f27ec0abb9bf3c647d7c7985ca0b8eea/regex-2024.9.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ddcd9a179c0a6fa8add279a4444015acddcd7f232a49071ae57fa6e278f1f71", size = 782661 },
|
685 |
+
{ url = "https://files.pythonhosted.org/packages/d1/e9/7a5bc4c6ef8d9cd2bdd83a667888fc35320da96a4cc4da5fa084330f53db/regex-2024.9.11-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6b41e1adc61fa347662b09398e31ad446afadff932a24807d3ceb955ed865cc8", size = 772348 },
|
686 |
+
{ url = "https://files.pythonhosted.org/packages/f1/0b/29f2105bfac3ed08e704914c38e93b07c784a6655f8a015297ee7173e95b/regex-2024.9.11-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ced479f601cd2f8ca1fd7b23925a7e0ad512a56d6e9476f79b8f381d9d37090a", size = 697460 },
|
687 |
+
{ url = "https://files.pythonhosted.org/packages/71/3a/52ff61054d15a4722605f5872ad03962b319a04c1ebaebe570b8b9b7dde1/regex-2024.9.11-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:635a1d96665f84b292e401c3d62775851aedc31d4f8784117b3c68c4fcd4118d", size = 769151 },
|
688 |
+
{ url = "https://files.pythonhosted.org/packages/97/07/37e460ab5ca84be8e1e197c3b526c5c86993dcc9e13cbc805c35fc2463c1/regex-2024.9.11-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:c0256beda696edcf7d97ef16b2a33a8e5a875affd6fa6567b54f7c577b30a137", size = 777478 },
|
689 |
+
{ url = "https://files.pythonhosted.org/packages/65/7b/953075723dd5ab00780043ac2f9de667306ff9e2a85332975e9f19279174/regex-2024.9.11-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:3ce4f1185db3fbde8ed8aa223fc9620f276c58de8b0d4f8cc86fd1360829edb6", size = 845373 },
|
690 |
+
{ url = "https://files.pythonhosted.org/packages/40/b8/3e9484c6230b8b6e8f816ab7c9a080e631124991a4ae2c27a81631777db0/regex-2024.9.11-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:09d77559e80dcc9d24570da3745ab859a9cf91953062e4ab126ba9d5993688ca", size = 845369 },
|
691 |
+
{ url = "https://files.pythonhosted.org/packages/b7/99/38434984d912edbd2e1969d116257e869578f67461bd7462b894c45ed874/regex-2024.9.11-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7a22ccefd4db3f12b526eccb129390942fe874a3a9fdbdd24cf55773a1faab1a", size = 773935 },
|
692 |
+
{ url = "https://files.pythonhosted.org/packages/ab/67/43174d2b46fa947b7b9dfe56b6c8a8a76d44223f35b1d64645a732fd1d6f/regex-2024.9.11-cp310-cp310-win32.whl", hash = "sha256:f745ec09bc1b0bd15cfc73df6fa4f726dcc26bb16c23a03f9e3367d357eeedd0", size = 261624 },
|
693 |
+
{ url = "https://files.pythonhosted.org/packages/c4/2a/4f9c47d9395b6aff24874c761d8d620c0232f97c43ef3cf668c8b355e7a7/regex-2024.9.11-cp310-cp310-win_amd64.whl", hash = "sha256:01c2acb51f8a7d6494c8c5eafe3d8e06d76563d8a8a4643b37e9b2dd8a2ff623", size = 274020 },
|
694 |
+
]
|
695 |
+
|
696 |
+
[[package]]
|
697 |
+
name = "requests"
|
698 |
+
version = "2.32.3"
|
699 |
+
source = { registry = "https://pypi.org/simple" }
|
700 |
+
dependencies = [
|
701 |
+
{ name = "certifi" },
|
702 |
+
{ name = "charset-normalizer" },
|
703 |
+
{ name = "idna" },
|
704 |
+
{ name = "urllib3" },
|
705 |
+
]
|
706 |
+
sdist = { url = "https://files.pythonhosted.org/packages/63/70/2bf7780ad2d390a8d301ad0b550f1581eadbd9a20f896afe06353c2a2913/requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", size = 131218 }
|
707 |
+
wheels = [
|
708 |
+
{ url = "https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928 },
|
709 |
+
]
|
710 |
+
|
711 |
+
[[package]]
|
712 |
+
name = "rich"
|
713 |
+
version = "13.9.3"
|
714 |
+
source = { registry = "https://pypi.org/simple" }
|
715 |
+
dependencies = [
|
716 |
+
{ name = "markdown-it-py" },
|
717 |
+
{ name = "pygments" },
|
718 |
+
{ name = "typing-extensions" },
|
719 |
+
]
|
720 |
+
sdist = { url = "https://files.pythonhosted.org/packages/d9/e9/cf9ef5245d835065e6673781dbd4b8911d352fb770d56cf0879cf11b7ee1/rich-13.9.3.tar.gz", hash = "sha256:bc1e01b899537598cf02579d2b9f4a415104d3fc439313a7a2c165d76557a08e", size = 222889 }
|
721 |
+
wheels = [
|
722 |
+
{ url = "https://files.pythonhosted.org/packages/9a/e2/10e9819cf4a20bd8ea2f5dabafc2e6bf4a78d6a0965daeb60a4b34d1c11f/rich-13.9.3-py3-none-any.whl", hash = "sha256:9836f5096eb2172c9e77df411c1b009bace4193d6a481d534fea75ebba758283", size = 242157 },
|
723 |
+
]
|
724 |
+
|
725 |
+
[[package]]
|
726 |
+
name = "safetensors"
|
727 |
+
version = "0.4.5"
|
728 |
+
source = { registry = "https://pypi.org/simple" }
|
729 |
+
sdist = { url = "https://files.pythonhosted.org/packages/cb/46/a1c56ed856c6ac3b1a8b37abe5be0cac53219367af1331e721b04d122577/safetensors-0.4.5.tar.gz", hash = "sha256:d73de19682deabb02524b3d5d1f8b3aaba94c72f1bbfc7911b9b9d5d391c0310", size = 65702 }
|
730 |
+
wheels = [
|
731 |
+
{ url = "https://files.pythonhosted.org/packages/38/10/0798ec2c8704c2d172620d8a3725bed92cdd75516357b1a3e64d4229ea4e/safetensors-0.4.5-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:a63eaccd22243c67e4f2b1c3e258b257effc4acd78f3b9d397edc8cf8f1298a7", size = 392312 },
|
732 |
+
{ url = "https://files.pythonhosted.org/packages/2b/9e/9648d8dbb485c40a4a0212b7537626ae440b48156cc74601ca0b7a7615e0/safetensors-0.4.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:23fc9b4ec7b602915cbb4ec1a7c1ad96d2743c322f20ab709e2c35d1b66dad27", size = 381858 },
|
733 |
+
{ url = "https://files.pythonhosted.org/packages/8b/67/49556aeacc00df353767ed31d68b492fecf38c3f664c52692e4d92aa0032/safetensors-0.4.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6885016f34bef80ea1085b7e99b3c1f92cb1be78a49839203060f67b40aee761", size = 441382 },
|
734 |
+
{ url = "https://files.pythonhosted.org/packages/5d/ce/e9f4869a37bb11229e6cdb4e73a6ef23b4f360eee9dca5f7e40982779704/safetensors-0.4.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:133620f443450429322f238fda74d512c4008621227fccf2f8cf4a76206fea7c", size = 439001 },
|
735 |
+
{ url = "https://files.pythonhosted.org/packages/a0/27/aee8cf031b89c34caf83194ec6b7f2eed28d053fff8b6da6d00c85c56035/safetensors-0.4.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4fb3e0609ec12d2a77e882f07cced530b8262027f64b75d399f1504ffec0ba56", size = 478026 },
|
736 |
+
{ url = "https://files.pythonhosted.org/packages/da/33/1d9fc4805c623636e7d460f28eec92ebd1856f7a552df8eb78398a1ef4de/safetensors-0.4.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d0f1dd769f064adc33831f5e97ad07babbd728427f98e3e1db6902e369122737", size = 495545 },
|
737 |
+
{ url = "https://files.pythonhosted.org/packages/b9/df/6f766b56690709d22e83836e4067a1109a7d84ea152a6deb5692743a2805/safetensors-0.4.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6d156bdb26732feada84f9388a9f135528c1ef5b05fae153da365ad4319c4c5", size = 435016 },
|
738 |
+
{ url = "https://files.pythonhosted.org/packages/90/fa/7bc3f18086201b1e55a42c88b822ae197d0158e12c54cd45c887305f1b7e/safetensors-0.4.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9e347d77e2c77eb7624400ccd09bed69d35c0332f417ce8c048d404a096c593b", size = 456273 },
|
739 |
+
{ url = "https://files.pythonhosted.org/packages/3e/59/2ae50150d37a65c1c5f01aec74dc737707b8bbecdc76307e5a1a12c8a376/safetensors-0.4.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9f556eea3aec1d3d955403159fe2123ddd68e880f83954ee9b4a3f2e15e716b6", size = 619669 },
|
740 |
+
{ url = "https://files.pythonhosted.org/packages/fe/43/10f0bb597aef62c9c154152e265057089f3c729bdd980e6c32c3ec2407a4/safetensors-0.4.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9483f42be3b6bc8ff77dd67302de8ae411c4db39f7224dec66b0eb95822e4163", size = 605212 },
|
741 |
+
{ url = "https://files.pythonhosted.org/packages/7c/75/ede6887ea0ceaba55730988bfc7668dc147a8758f907fa6db26fbb681b8e/safetensors-0.4.5-cp310-none-win32.whl", hash = "sha256:7389129c03fadd1ccc37fd1ebbc773f2b031483b04700923c3511d2a939252cc", size = 272652 },
|
742 |
+
{ url = "https://files.pythonhosted.org/packages/ba/f0/919c72a9eef843781e652d0650f2819039943e69b69d5af2d0451a23edc3/safetensors-0.4.5-cp310-none-win_amd64.whl", hash = "sha256:e98ef5524f8b6620c8cdef97220c0b6a5c1cef69852fcd2f174bb96c2bb316b1", size = 285879 },
|
743 |
+
{ url = "https://files.pythonhosted.org/packages/cf/ff/037ae4c0ee32db496669365e66079b6329906c6814722b159aa700e67208/safetensors-0.4.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:fdadf66b5a22ceb645d5435a0be7a0292ce59648ca1d46b352f13cff3ea80410", size = 392951 },
|
744 |
+
{ url = "https://files.pythonhosted.org/packages/f1/d6/6621e16b35bf83ae099eaab07338f04991a26c9aa43879d05f19f35e149c/safetensors-0.4.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d42ffd4c2259f31832cb17ff866c111684c87bd930892a1ba53fed28370c918c", size = 383417 },
|
745 |
+
{ url = "https://files.pythonhosted.org/packages/ae/88/3068e1bb16f5e9f9068901de3cf7b3db270b9bfe6e7d51d4b55c1da0425d/safetensors-0.4.5-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd8a1f6d2063a92cd04145c7fd9e31a1c7d85fbec20113a14b487563fdbc0597", size = 442311 },
|
746 |
+
{ url = "https://files.pythonhosted.org/packages/f7/15/a2bb77ebbaa76b61ec2e9f731fe4db7f9473fd855d881957c51b3a168892/safetensors-0.4.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:951d2fcf1817f4fb0ef0b48f6696688a4e852a95922a042b3f96aaa67eedc920", size = 436678 },
|
747 |
+
{ url = "https://files.pythonhosted.org/packages/ec/79/9608c4546cdbfe3860dd7aa59e3562c9289113398b1a0bd89b68ce0a9d41/safetensors-0.4.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ac85d9a8c1af0e3132371d9f2d134695a06a96993c2e2f0bbe25debb9e3f67a", size = 457316 },
|
748 |
+
{ url = "https://files.pythonhosted.org/packages/0f/23/b17b483f2857835962ad33e38014efd4911791187e177bc23b057d35bee8/safetensors-0.4.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e3cec4a29eb7fe8da0b1c7988bc3828183080439dd559f720414450de076fcab", size = 620565 },
|
749 |
+
{ url = "https://files.pythonhosted.org/packages/19/46/5d11dc300feaad285c2f1bd784ff3f689f5e0ab6be49aaf568f3a77019eb/safetensors-0.4.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:21742b391b859e67b26c0b2ac37f52c9c0944a879a25ad2f9f9f3cd61e7fda8f", size = 606660 },
|
750 |
+
]
|
751 |
+
|
752 |
+
[[package]]
|
753 |
+
name = "setuptools"
|
754 |
+
version = "75.2.0"
|
755 |
+
source = { registry = "https://pypi.org/simple" }
|
756 |
+
sdist = { url = "https://files.pythonhosted.org/packages/07/37/b31be7e4b9f13b59cde9dcaeff112d401d49e0dc5b37ed4a9fc8fb12f409/setuptools-75.2.0.tar.gz", hash = "sha256:753bb6ebf1f465a1912e19ed1d41f403a79173a9acf66a42e7e6aec45c3c16ec", size = 1350308 }
|
757 |
+
wheels = [
|
758 |
+
{ url = "https://files.pythonhosted.org/packages/31/2d/90165d51ecd38f9a02c6832198c13a4e48652485e2ccf863ebb942c531b6/setuptools-75.2.0-py3-none-any.whl", hash = "sha256:a7fcb66f68b4d9e8e66b42f9876150a3371558f98fa32222ffaa5bced76406f8", size = 1249825 },
|
759 |
+
]
|
760 |
+
|
761 |
+
[[package]]
|
762 |
+
name = "sympy"
|
763 |
+
version = "1.13.3"
|
764 |
+
source = { registry = "https://pypi.org/simple" }
|
765 |
+
dependencies = [
|
766 |
+
{ name = "mpmath" },
|
767 |
+
]
|
768 |
+
sdist = { url = "https://files.pythonhosted.org/packages/11/8a/5a7fd6284fa8caac23a26c9ddf9c30485a48169344b4bd3b0f02fef1890f/sympy-1.13.3.tar.gz", hash = "sha256:b27fd2c6530e0ab39e275fc9b683895367e51d5da91baa8d3d64db2565fec4d9", size = 7533196 }
|
769 |
+
wheels = [
|
770 |
+
{ url = "https://files.pythonhosted.org/packages/99/ff/c87e0622b1dadea79d2fb0b25ade9ed98954c9033722eb707053d310d4f3/sympy-1.13.3-py3-none-any.whl", hash = "sha256:54612cf55a62755ee71824ce692986f23c88ffa77207b30c1368eda4a7060f73", size = 6189483 },
|
771 |
+
]
|
772 |
+
|
773 |
+
[[package]]
|
774 |
+
name = "tokenizers"
|
775 |
+
version = "0.19.1"
|
776 |
+
source = { registry = "https://pypi.org/simple" }
|
777 |
+
dependencies = [
|
778 |
+
{ name = "huggingface-hub" },
|
779 |
+
]
|
780 |
+
sdist = { url = "https://files.pythonhosted.org/packages/48/04/2071c150f374aab6d5e92aaec38d0f3c368d227dd9e0469a1f0966ac68d1/tokenizers-0.19.1.tar.gz", hash = "sha256:ee59e6680ed0fdbe6b724cf38bd70400a0c1dd623b07ac729087270caeac88e3", size = 321039 }
|
781 |
+
wheels = [
|
782 |
+
{ url = "https://files.pythonhosted.org/packages/c1/60/91cac8d496b304ec5a22f07606893cad35ea8e1a8406dc8909e365f97a80/tokenizers-0.19.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:952078130b3d101e05ecfc7fc3640282d74ed26bcf691400f872563fca15ac97", size = 2533301 },
|
783 |
+
{ url = "https://files.pythonhosted.org/packages/4c/12/9cb68762ff5fee1efd51aefe2f62cb225f26f060a68a3779e1060bbc7a59/tokenizers-0.19.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:82c8b8063de6c0468f08e82c4e198763e7b97aabfe573fd4cf7b33930ca4df77", size = 2440223 },
|
784 |
+
{ url = "https://files.pythonhosted.org/packages/e4/03/b2020e6a78fb994cff1ec962adc157c23109172a46b4fe451d6d0dd33fdb/tokenizers-0.19.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f03727225feaf340ceeb7e00604825addef622d551cbd46b7b775ac834c1e1c4", size = 3683779 },
|
785 |
+
{ url = "https://files.pythonhosted.org/packages/50/4e/2e5549a26dc6f9e434f83bebf16c2d7dc9dc3477cc0ec8b23ede4d465b90/tokenizers-0.19.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:453e4422efdfc9c6b6bf2eae00d5e323f263fff62b29a8c9cd526c5003f3f642", size = 3569431 },
|
786 |
+
{ url = "https://files.pythonhosted.org/packages/75/79/158626bd794e75551e0c6bb93f1cd3c9ba08ba14b181b98f09e95994f609/tokenizers-0.19.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:02e81bf089ebf0e7f4df34fa0207519f07e66d8491d963618252f2e0729e0b46", size = 3424739 },
|
787 |
+
{ url = "https://files.pythonhosted.org/packages/65/8e/5f4316976c26009f1ae0b6543f3d97af29afa5ba5dc145251e6a07314618/tokenizers-0.19.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b07c538ba956843833fee1190cf769c60dc62e1cf934ed50d77d5502194d63b1", size = 3965791 },
|
788 |
+
{ url = "https://files.pythonhosted.org/packages/6a/e1/5dbac9618709972434eea072670cd69fba1aa988e6200f16057722b4bf96/tokenizers-0.19.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e28cab1582e0eec38b1f38c1c1fb2e56bce5dc180acb1724574fc5f47da2a4fe", size = 4049879 },
|
789 |
+
{ url = "https://files.pythonhosted.org/packages/40/4f/eb78de4af3b17b589f43a369cbf0c3a7173f25c3d2cd93068852c07689aa/tokenizers-0.19.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b01afb7193d47439f091cd8f070a1ced347ad0f9144952a30a41836902fe09e", size = 3607049 },
|
790 |
+
{ url = "https://files.pythonhosted.org/packages/f5/f8/141dcb0f88e9452af8d20d14dd53aab5937222a2bb4f2c04bfed6829263c/tokenizers-0.19.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7fb297edec6c6841ab2e4e8f357209519188e4a59b557ea4fafcf4691d1b4c98", size = 9634084 },
|
791 |
+
{ url = "https://files.pythonhosted.org/packages/2e/be/debb7caa3f88ed54015170db16e07aa3a5fea2d3983d0dde92f98d888dc8/tokenizers-0.19.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2e8a3dd055e515df7054378dc9d6fa8c8c34e1f32777fb9a01fea81496b3f9d3", size = 9949480 },
|
792 |
+
{ url = "https://files.pythonhosted.org/packages/7a/e7/26bedf5d270d293d572a90bd66b0b030012aedb95d8ee87e8bcd446b76fb/tokenizers-0.19.1-cp310-none-win32.whl", hash = "sha256:7ff898780a155ea053f5d934925f3902be2ed1f4d916461e1a93019cc7250837", size = 2041462 },
|
793 |
+
{ url = "https://files.pythonhosted.org/packages/f4/85/d999b9a05fd101d48f1a365d68be0b109277bb25c89fb37a389d669f9185/tokenizers-0.19.1-cp310-none-win_amd64.whl", hash = "sha256:bea6f9947e9419c2fda21ae6c32871e3d398cba549b93f4a65a2d369662d9403", size = 2220036 },
|
794 |
+
{ url = "https://files.pythonhosted.org/packages/cf/7b/38fb7207cde3d1dc5272411cd18178e6437cdc1ef08cac5d0e8cfd57f38c/tokenizers-0.19.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3b11853f17b54c2fe47742c56d8a33bf49ce31caf531e87ac0d7d13d327c9334", size = 2532668 },
|
795 |
+
{ url = "https://files.pythonhosted.org/packages/1d/0d/2c452fe17fc17f0cdb713acb811eebb1f714b8c21d497c4672af4f491229/tokenizers-0.19.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d26194ef6c13302f446d39972aaa36a1dda6450bc8949f5eb4c27f51191375bd", size = 2438321 },
|
796 |
+
{ url = "https://files.pythonhosted.org/packages/19/e0/f9e915d028b45798723eab59c253da28040aa66b9f31dcb7cfc3be88fa37/tokenizers-0.19.1-pp310-pypy310_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:e8d1ed93beda54bbd6131a2cb363a576eac746d5c26ba5b7556bc6f964425594", size = 3682304 },
|
797 |
+
{ url = "https://files.pythonhosted.org/packages/ce/2b/db8a94608c392752681c2ca312487b7cd5bcc4f77e24a90daa4916138271/tokenizers-0.19.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca407133536f19bdec44b3da117ef0d12e43f6d4b56ac4c765f37eca501c7bda", size = 3566208 },
|
798 |
+
{ url = "https://files.pythonhosted.org/packages/d8/58/2e998462677c4c0eb5123ce386bcb488a155664d273d0283122866515f09/tokenizers-0.19.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce05fde79d2bc2e46ac08aacbc142bead21614d937aac950be88dc79f9db9022", size = 3605791 },
|
799 |
+
{ url = "https://files.pythonhosted.org/packages/83/ac/26bc2e2bb2a054dc2e51699628936f5474e093b68da6ccdde04b2fc39ab8/tokenizers-0.19.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:35583cd46d16f07c054efd18b5d46af4a2f070a2dd0a47914e66f3ff5efb2b1e", size = 9632867 },
|
800 |
+
{ url = "https://files.pythonhosted.org/packages/45/b6/36c1bb106bbe96012c9367df89ed01599cada036c0b96d38fbbdbeb75c9f/tokenizers-0.19.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:43350270bfc16b06ad3f6f07eab21f089adb835544417afda0f83256a8bf8b75", size = 9945103 },
|
801 |
+
]
|
802 |
+
|
803 |
+
[[package]]
|
804 |
+
name = "torch"
|
805 |
+
version = "2.2.2"
|
806 |
+
source = { registry = "https://pypi.org/simple" }
|
807 |
+
dependencies = [
|
808 |
+
{ name = "filelock" },
|
809 |
+
{ name = "fsspec" },
|
810 |
+
{ name = "jinja2" },
|
811 |
+
{ name = "networkx" },
|
812 |
+
{ name = "nvidia-cublas-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" },
|
813 |
+
{ name = "nvidia-cuda-cupti-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" },
|
814 |
+
{ name = "nvidia-cuda-nvrtc-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" },
|
815 |
+
{ name = "nvidia-cuda-runtime-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" },
|
816 |
+
{ name = "nvidia-cudnn-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" },
|
817 |
+
{ name = "nvidia-cufft-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" },
|
818 |
+
{ name = "nvidia-curand-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" },
|
819 |
+
{ name = "nvidia-cusolver-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" },
|
820 |
+
{ name = "nvidia-cusparse-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" },
|
821 |
+
{ name = "nvidia-nccl-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" },
|
822 |
+
{ name = "nvidia-nvtx-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" },
|
823 |
+
{ name = "sympy" },
|
824 |
+
{ name = "triton", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" },
|
825 |
+
{ name = "typing-extensions" },
|
826 |
+
]
|
827 |
+
wheels = [
|
828 |
+
{ url = "https://files.pythonhosted.org/packages/33/b3/1fcc3bccfddadfd6845dcbfe26eb4b099f1dfea5aa0e5cfb92b3c98dba5b/torch-2.2.2-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:bc889d311a855dd2dfd164daf8cc903a6b7273a747189cebafdd89106e4ad585", size = 755526581 },
|
829 |
+
{ url = "https://files.pythonhosted.org/packages/c3/7c/aeb0c5789a3f10cf909640530cd75b314959b9d9914a4996ed2c7bf8779d/torch-2.2.2-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:15dffa4cc3261fa73d02f0ed25f5fa49ecc9e12bf1ae0a4c1e7a88bbfaad9030", size = 86623646 },
|
830 |
+
{ url = "https://files.pythonhosted.org/packages/3a/81/684d99e536b20e869a7c1222cf1dd233311fb05d3628e9570992bfb65760/torch-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:11e8fe261233aeabd67696d6b993eeb0896faa175c6b41b9a6c9f0334bdad1c5", size = 198579616 },
|
831 |
+
{ url = "https://files.pythonhosted.org/packages/3b/55/7192974ab13e5e5577f45d14ce70d42f5a9a686b4f57bbe8c9ab45c4a61a/torch-2.2.2-cp310-none-macosx_10_9_x86_64.whl", hash = "sha256:b2e2200b245bd9f263a0d41b6a2dab69c4aca635a01b30cca78064b0ef5b109e", size = 150788930 },
|
832 |
+
{ url = "https://files.pythonhosted.org/packages/33/6b/21496316c9b8242749ee2a9064406271efdf979e91d440e8a3806b5e84bf/torch-2.2.2-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:877b3e6593b5e00b35bbe111b7057464e76a7dd186a287280d941b564b0563c2", size = 59707286 },
|
833 |
+
]
|
834 |
+
|
835 |
+
[[package]]
|
836 |
+
name = "torchvision"
|
837 |
+
version = "0.17.2"
|
838 |
+
source = { registry = "https://pypi.org/simple" }
|
839 |
+
dependencies = [
|
840 |
+
{ name = "numpy" },
|
841 |
+
{ name = "pillow" },
|
842 |
+
{ name = "torch" },
|
843 |
+
]
|
844 |
+
wheels = [
|
845 |
+
{ url = "https://files.pythonhosted.org/packages/a2/70/c781e0433ab7b8f6f693580e5065ae17c3785b2836200311765f99535ef8/torchvision-0.17.2-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:1f2910fe3c21ad6875b2720d46fad835b2e4b336e9553d31ca364d24c90b1d4f", size = 1666426 },
|
846 |
+
{ url = "https://files.pythonhosted.org/packages/64/3d/a0385fd301e6e2eefb1d9cf2ac97d0c33ebf4a764aba0f066e6e16324916/torchvision-0.17.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ecc1c503fa8a54fbab777e06a7c228032b8ab78efebf35b28bc8f22f544f51f1", size = 1571154 },
|
847 |
+
{ url = "https://files.pythonhosted.org/packages/e0/2f/d13cb0ffc4808f85b880ef66ab6cfef10bd35e5c151dae68ea18cf6bf636/torchvision-0.17.2-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:f400145fc108833e7c2fc28486a04989ca742146d7a2a2cc48878ebbb40cdbbd", size = 6915896 },
|
848 |
+
{ url = "https://files.pythonhosted.org/packages/fb/41/c8a440ebb1d4958baf02f08f6df56a3a30bea3fdcdc99076cb7da023babe/torchvision-0.17.2-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:e9e4bed404af33dfc92eecc2b513d21ddc4c242a7fd8708b3b09d3a26aa6f444", size = 14008514 },
|
849 |
+
{ url = "https://files.pythonhosted.org/packages/0a/b8/027b3c36e61a26d14d4e9a8413d4a09e5fd8d3e01e3efce78447ca1dc3dd/torchvision-0.17.2-cp310-cp310-win_amd64.whl", hash = "sha256:ba2e62f233eab3d42b648c122a3a29c47cc108ca314dfd5cbb59cd3a143fd623", size = 1165527 },
|
850 |
+
]
|
851 |
+
|
852 |
+
[[package]]
|
853 |
+
name = "tqdm"
|
854 |
+
version = "4.66.5"
|
855 |
+
source = { registry = "https://pypi.org/simple" }
|
856 |
+
dependencies = [
|
857 |
+
{ name = "colorama", marker = "platform_system == 'Windows'" },
|
858 |
+
]
|
859 |
+
sdist = { url = "https://files.pythonhosted.org/packages/58/83/6ba9844a41128c62e810fddddd72473201f3eacde02046066142a2d96cc5/tqdm-4.66.5.tar.gz", hash = "sha256:e1020aef2e5096702d8a025ac7d16b1577279c9d63f8375b63083e9a5f0fcbad", size = 169504 }
|
860 |
+
wheels = [
|
861 |
+
{ url = "https://files.pythonhosted.org/packages/48/5d/acf5905c36149bbaec41ccf7f2b68814647347b72075ac0b1fe3022fdc73/tqdm-4.66.5-py3-none-any.whl", hash = "sha256:90279a3770753eafc9194a0364852159802111925aa30eb3f9d85b0e805ac7cd", size = 78351 },
|
862 |
+
]
|
863 |
+
|
864 |
+
[[package]]
|
865 |
+
name = "transformers"
|
866 |
+
version = "4.41.2"
|
867 |
+
source = { registry = "https://pypi.org/simple" }
|
868 |
+
dependencies = [
|
869 |
+
{ name = "filelock" },
|
870 |
+
{ name = "huggingface-hub" },
|
871 |
+
{ name = "numpy" },
|
872 |
+
{ name = "packaging" },
|
873 |
+
{ name = "pyyaml" },
|
874 |
+
{ name = "regex" },
|
875 |
+
{ name = "requests" },
|
876 |
+
{ name = "safetensors" },
|
877 |
+
{ name = "tokenizers" },
|
878 |
+
{ name = "tqdm" },
|
879 |
+
]
|
880 |
+
sdist = { url = "https://files.pythonhosted.org/packages/50/c9/b8acdf584f19558e29f46d36b5ed80954c2d3831811f129a5a6e84c4537b/transformers-4.41.2.tar.gz", hash = "sha256:80a4db216533d573e9cc7388646c31ed9480918feb7c55eb211249cb23567f87", size = 7841904 }
|
881 |
+
wheels = [
|
882 |
+
{ url = "https://files.pythonhosted.org/packages/d9/b7/98f821d70102e2d38483bbb7013a689d2d646daa4495377bc910374ad727/transformers-4.41.2-py3-none-any.whl", hash = "sha256:05555d20e43f808de1ef211ab64803cdb513170cef70d29a888b589caebefc67", size = 9092643 },
|
883 |
+
]
|
884 |
+
|
885 |
+
[[package]]
|
886 |
+
name = "triton"
|
887 |
+
version = "2.2.0"
|
888 |
+
source = { registry = "https://pypi.org/simple" }
|
889 |
+
dependencies = [
|
890 |
+
{ name = "filelock" },
|
891 |
+
]
|
892 |
+
wheels = [
|
893 |
+
{ url = "https://files.pythonhosted.org/packages/95/05/ed974ce87fe8c8843855daa2136b3409ee1c126707ab54a8b72815c08b49/triton-2.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2294514340cfe4e8f4f9e5c66c702744c4a117d25e618bd08469d0bfed1e2e5", size = 167900779 },
|
894 |
+
]
|
895 |
+
|
896 |
+
[[package]]
|
897 |
+
name = "typing-extensions"
|
898 |
+
version = "4.12.2"
|
899 |
+
source = { registry = "https://pypi.org/simple" }
|
900 |
+
sdist = { url = "https://files.pythonhosted.org/packages/df/db/f35a00659bc03fec321ba8bce9420de607a1d37f8342eee1863174c69557/typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8", size = 85321 }
|
901 |
+
wheels = [
|
902 |
+
{ url = "https://files.pythonhosted.org/packages/26/9f/ad63fc0248c5379346306f8668cda6e2e2e9c95e01216d2b8ffd9ff037d0/typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", size = 37438 },
|
903 |
+
]
|
904 |
+
|
905 |
+
[[package]]
|
906 |
+
name = "urllib3"
|
907 |
+
version = "2.2.3"
|
908 |
+
source = { registry = "https://pypi.org/simple" }
|
909 |
+
sdist = { url = "https://files.pythonhosted.org/packages/ed/63/22ba4ebfe7430b76388e7cd448d5478814d3032121827c12a2cc287e2260/urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9", size = 300677 }
|
910 |
+
wheels = [
|
911 |
+
{ url = "https://files.pythonhosted.org/packages/ce/d9/5f4c13cecde62396b0d3fe530a50ccea91e7dfc1ccf0e09c228841bb5ba8/urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac", size = 126338 },
|
912 |
+
]
|
913 |
+
|
914 |
+
[[package]]
|
915 |
+
name = "xformers"
|
916 |
+
version = "0.0.25.post1"
|
917 |
+
source = { registry = "https://pypi.org/simple" }
|
918 |
+
dependencies = [
|
919 |
+
{ name = "numpy" },
|
920 |
+
{ name = "torch" },
|
921 |
+
]
|
922 |
+
sdist = { url = "https://files.pythonhosted.org/packages/ca/66/f5977922658ff2aea8b3222901fa0b9922778988c6d3a428cfd083892191/xformers-0.0.25.post1.tar.gz", hash = "sha256:397430bd0162fd5a75eb8bc50b0ba242200881e48fd6404a19376f853f8c0444", size = 4083274 }
|
923 |
+
wheels = [
|
924 |
+
{ url = "https://files.pythonhosted.org/packages/5f/9b/f781a50d965717a2a2ea2c8d15e0f30deec8f9751a9874e850ba9ab0fadc/xformers-0.0.25.post1-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:cdfe9560848fa5ba75fc04d3da8803658e35997adc6075ee6bbf6d67c1f0fa5e", size = 222517307 },
|
925 |
+
{ url = "https://files.pythonhosted.org/packages/25/19/301789926809dc167ac104c29e26703369b3d47e6c1a2861db9efccbdc10/xformers-0.0.25.post1-cp310-cp310-win_amd64.whl", hash = "sha256:ddc22273f2ff06b886d9e86f17997e4f1f3074fdeb5d46bcdf50b704430df528", size = 208697268 },
|
926 |
+
]
|
927 |
+
|
928 |
+
[[package]]
|
929 |
+
name = "zipp"
|
930 |
+
version = "3.20.2"
|
931 |
+
source = { registry = "https://pypi.org/simple" }
|
932 |
+
sdist = { url = "https://files.pythonhosted.org/packages/54/bf/5c0000c44ebc80123ecbdddba1f5dcd94a5ada602a9c225d84b5aaa55e86/zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29", size = 24199 }
|
933 |
+
wheels = [
|
934 |
+
{ url = "https://files.pythonhosted.org/packages/62/8b/5ba542fa83c90e09eac972fc9baca7a88e7e7ca4b221a89251954019308b/zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350", size = 9200 },
|
935 |
+
]
|