clementchadebec committed
Commit • 7ea18b4
1 Parent(s): 3d6573d
Update app.py
app.py CHANGED
@@ -7,12 +7,24 @@ import torch
 from diffusers import StableDiffusion3Pipeline, SD3Transformer2DModel, FlashFlowMatchEulerDiscreteScheduler
 from peft import PeftModel
 import os
+from huggingface_hub import snapshot_download
+
+huggingface_token = os.getenv("HUGGINGFACE_TOKEN")
+
+model_path = snapshot_download(
+    repo_id="stabilityai/stable-diffusion-3-medium",
+    revision="refs/pr/26",
+    repo_type="model",
+    ignore_patterns=["*.md", "*..gitattributes"],
+    local_dir="stable-diffusion-3-medium",
+    token=huggingface_token,  # type a new token-id.
+)
 
 device = "cuda" if torch.cuda.is_available() else "cpu"
 IS_SPACE = os.environ.get("SPACE_ID", None) is not None
 
 transformer = SD3Transformer2DModel.from_pretrained(
-
+    model_path,
     subfolder="transformer",
     torch_dtype=torch.float16,
 )
@@ -22,7 +34,7 @@ transformer = PeftModel.from_pretrained(transformer, "jasperai/flash-sd3")
 if torch.cuda.is_available():
     torch.cuda.max_memory_allocated(device=device)
     pipe = StableDiffusion3Pipeline.from_pretrained(
-
+        model_path,
         transformer=transformer,
         torch_dtype=torch.float16,
         text_encoder_3=None,
@@ -32,7 +44,7 @@ if torch.cuda.is_available():
     pipe = pipe.to(device)
 else:
     pipe = StableDiffusion3Pipeline.from_pretrained(
-
+        model_path,
         transformer=transformer,
         torch_dtype=torch.float16,
         text_encoder_3=None,
@@ -42,7 +54,7 @@ else:
 
 
 pipe.scheduler = FlashFlowMatchEulerDiscreteScheduler.from_pretrained(
-
+    model_path,
     subfolder="scheduler",
 )
 
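For context, the change downloads the gated stabilityai/stable-diffusion-3-medium repo once with snapshot_download (pinned to revision "refs/pr/26", authenticated via the HUGGINGFACE_TOKEN secret) and then points every from_pretrained call at the resulting local path (model_path) instead of the Hub repo id. The pipeline built this way is typically invoked along the lines of the minimal sketch below; it is not part of the commit, and the prompt, step count, guidance scale, and output filename are illustrative assumptions (jasperai/flash-sd3 is a few-step distillation LoRA, so inference normally uses a handful of steps with classifier-free guidance disabled; the actual values live in the Space's inference function, which this diff does not touch).

# Minimal usage sketch (not part of this commit); sampling settings are assumptions.
prompt = "a raccoon reading a book, studio lighting"  # hypothetical prompt

image = pipe(
    prompt,
    num_inference_steps=4,  # assumed few-step setting for the Flash LoRA
    guidance_scale=0,       # assumed: distilled few-step models usually skip CFG
).images[0]

image.save("flash_sd3_sample.png")  # hypothetical output path

Because snapshot_download materialises the files under local_dir="stable-diffusion-3-medium", the subsequent from_pretrained calls read from disk rather than hitting the Hub again; the token is only needed at download time for the gated repo.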