disable float16
edit_app.py CHANGED (+3 -1)
@@ -85,10 +85,12 @@ If you're not getting what you want, there may be a few reasons:
 def main():
     model_id = "MudeHui/ip2p-warp-gpt4v"
     if torch.cuda.is_available():
-        pipe = StableDiffusionInstructPix2PixPipeline.from_pretrained(model_id, torch_dtype=torch.float16, safety_checker=None)
+        pipe = StableDiffusionInstructPix2PixPipeline.from_pretrained(model_id, torch_dtype=torch.float, safety_checker=None)
         pipe = pipe.to('cuda')
+        print("Running on GPU")
     else:
         pipe = StableDiffusionInstructPix2PixPipeline.from_pretrained(model_id, torch_dtype=torch.float, safety_checker=None)
+        print("Running on CPU")
     pipe.scheduler = EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config)
 
     image_options = {path.split("/")[-1].split(".")[0]: path for path in sorted(glob("imgs/*png"))}
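For context, torch.float is an alias for torch.float32, so after this change both the GPU and CPU branches load the pipeline in full precision; the branches now differ only in whether the pipeline is moved to CUDA and in the device message they print. Below is a minimal sketch of how the pipeline configured by this diff would typically be driven. The edit prompt, the input image path imgs/example.png, and the inference parameters are illustrative assumptions, not part of the app.

import torch
from PIL import Image
from diffusers import StableDiffusionInstructPix2PixPipeline, EulerAncestralDiscreteScheduler

model_id = "MudeHui/ip2p-warp-gpt4v"
device = "cuda" if torch.cuda.is_available() else "cpu"

# Load in full precision (torch.float == torch.float32), matching the patched code.
pipe = StableDiffusionInstructPix2PixPipeline.from_pretrained(
    model_id, torch_dtype=torch.float, safety_checker=None
).to(device)
pipe.scheduler = EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config)

# Hypothetical input image and instruction, purely for illustration.
image = Image.open("imgs/example.png").convert("RGB")
edited = pipe(
    "make it look like a watercolor painting",
    image=image,
    num_inference_steps=20,
    image_guidance_scale=1.5,
).images[0]
edited.save("edited.png")

Note that full-precision weights take roughly twice the memory of float16 weights; that is the usual trade-off when a pipeline is kept in float32 so the same code path works on both GPU and CPU.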