Fix HF login
app.py CHANGED
@@ -9,9 +9,10 @@ import spaces
 import torch
 from diffusers import StableDiffusion3Pipeline, DPMSolverMultistepScheduler, AutoencoderKL, StableDiffusion3Img2ImgPipeline
 from transformers import T5EncoderModel, BitsAndBytesConfig
-from huggingface_hub import
+from huggingface_hub import login
 
 huggingface_token = os.getenv("HUGGINGFACE_TOKEN")
+login(token=huggingface_token)
 
 DESCRIPTION = """# Stable Diffusion 3"""
 if not torch.cuda.is_available():
@@ -30,7 +31,6 @@ def load_pipeline():
 
     pipe = StableDiffusion3Pipeline.from_pretrained(
         model_id,
-        huggingface_token=huggingface_token,
         #device_map="balanced",
         torch_dtype=torch.float16
     )
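For reference, a minimal sketch of the login flow this commit switches to: authenticate once with huggingface_hub.login() using the token from the HUGGINGFACE_TOKEN environment variable, then call from_pretrained without the huggingface_token= keyword (which is not a from_pretrained argument; the supported keyword is token=). The model id below is an assumption for illustration, since model_id is defined elsewhere in app.py.

# Sketch of the corrected flow. Assumptions: HUGGINGFACE_TOKEN is set as a
# Space secret, and the model id below stands in for the gated SD3 checkpoint
# that app.py actually uses.
import os

import torch
from diffusers import StableDiffusion3Pipeline
from huggingface_hub import login

huggingface_token = os.getenv("HUGGINGFACE_TOKEN")
login(token=huggingface_token)  # authenticate once, before any gated download

pipe = StableDiffusion3Pipeline.from_pretrained(
    "stabilityai/stable-diffusion-3-medium-diffusers",  # assumed model id
    torch_dtype=torch.float16,
)

An alternative would be passing token=huggingface_token directly to from_pretrained on each call; the commit opts for a single global login() instead.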