Kohaku-Blueleaf
committed on
Commit
•
ff3b374
1
Parent(s):
17f136e
remove xformers
Browse files- diff.py +0 -1
- requirements.txt +0 -1
diff.py
CHANGED
@@ -38,7 +38,6 @@ def load_model(model_id="KBlueLeaf/Kohaku-XL-Zeta", device="cuda"):
|
|
38 |
model_id, torch_dtype=torch.float16
|
39 |
).to(device)
|
40 |
unet: UNet2DConditionModel = pipe.k_diffusion_model.inner_model.model
|
41 |
-
unet.enable_xformers_memory_efficient_attention()
|
42 |
pipe.scheduler.set_timesteps = partial(
|
43 |
set_timesteps_polyexponential, pipe.scheduler, pipe.scheduler.sigmas
|
44 |
)
|
|
|
38 |
model_id, torch_dtype=torch.float16
|
39 |
).to(device)
|
40 |
unet: UNet2DConditionModel = pipe.k_diffusion_model.inner_model.model
|
|
|
41 |
pipe.scheduler.set_timesteps = partial(
|
42 |
set_timesteps_polyexponential, pipe.scheduler, pipe.scheduler.sigmas
|
43 |
)
|
requirements.txt
CHANGED
@@ -3,4 +3,3 @@ spaces
|
|
3 |
diffusers
|
4 |
k_diffusion
|
5 |
sentencepiece
|
6 |
-
xformers
|
|
|
3 |
diffusers
|
4 |
k_diffusion
|
5 |
sentencepiece
|
|