Spaces: Running on A10G
Linoy Tsaban committed
Commit: 74b2d2b
Parent(s): c0ff22d
Update app.py

app.py CHANGED
@@ -23,7 +23,8 @@ device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
 
 pipe = SemanticStableDiffusionImg2ImgPipeline_DPMSolver.from_pretrained(sd_model_id,torch_dtype=torch.float16).to(device)
 # pipe.scheduler = DDIMScheduler.from_config(sd_model_id, subfolder = "scheduler")
-pipe.scheduler = DPMSolverMultistepSchedulerInject(
+pipe.scheduler = DPMSolverMultistepSchedulerInject.from_pretrained(sd_model_id, subfolder="scheduler"
+                                                    , algorithm_type="sde-dpmsolver++", solver_order=2)
 
 blip_processor = AutoProcessor.from_pretrained("Salesforce/blip-image-captioning-base")
 blip_model = BlipForConditionalGeneration.from_pretrained("Salesforce/blip-image-captioning-base",torch_dtype=torch.float16).to(device)
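
For context, a minimal sketch of what the added lines do: instead of constructing the scheduler with default arguments, the commit loads it from the model's scheduler config and switches to the stochastic SDE-DPM-Solver++ variant with a second-order solver. SemanticStableDiffusionImg2ImgPipeline_DPMSolver and DPMSolverMultistepSchedulerInject are custom classes defined in this Space; the sketch below assumes they mirror the standard diffusers API and substitutes the stock StableDiffusionImg2ImgPipeline and DPMSolverMultistepScheduler purely for illustration. The model id is a placeholder, not the value used in the Space.

# Illustration only: stock diffusers classes stand in for the Space's custom
# SemanticStableDiffusionImg2ImgPipeline_DPMSolver / DPMSolverMultistepSchedulerInject.
import torch
from diffusers import StableDiffusionImg2ImgPipeline, DPMSolverMultistepScheduler

sd_model_id = "runwayml/stable-diffusion-v1-5"  # placeholder; the Space defines its own sd_model_id
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

pipe = StableDiffusionImg2ImgPipeline.from_pretrained(
    sd_model_id, torch_dtype=torch.float16
).to(device)

# Load the scheduler from the model's own scheduler config, overriding two settings:
# use the stochastic SDE-DPM-Solver++ algorithm with a second-order solver.
pipe.scheduler = DPMSolverMultistepScheduler.from_pretrained(
    sd_model_id,
    subfolder="scheduler",
    algorithm_type="sde-dpmsolver++",
    solver_order=2,
)

Loading via from_pretrained (rather than calling the scheduler constructor directly) keeps the model's original noise-schedule settings and only overrides the explicitly passed arguments, which is what the one-line change in the diff accomplishes.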