Spaces: Running on Zero

arthur-qiu committed
Commit fd2f5c4
Parent(s): bcfdc4b

init
app.py CHANGED

@@ -9,7 +9,10 @@ from pipeline_freescale import StableDiffusionXLPipeline
 from free_lunch_utils import register_free_upblock2d, register_free_crossattn_upblock2d
 
 @spaces.GPU(duration=120)
-def infer_gpu_part(pipe, generator, prompt, negative_prompt, ddim_steps, guidance_scale, resolutions_list, fast_mode, cosine_scale):
+def infer_gpu_part(pipe, generator, prompt, negative_prompt, ddim_steps, guidance_scale, resolutions_list, fast_mode, cosine_scale, disable_freeu):
+    if not disable_freeu:
+        register_free_upblock2d(pipe, b1=1.1, b2=1.2, s1=0.6, s2=0.4)
+        register_free_crossattn_upblock2d(pipe, b1=1.1, b2=1.2, s1=0.6, s2=0.4)
     pipe = pipe.to("cuda")
     generator = generator.to("cuda")
     resul = pipe(prompt, negative_prompt=negative_prompt, generator=generator,
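Together with the second hunk below, this change moves the FreeU registration inside the @spaces.GPU-decorated worker and gates it behind a new disable_freeu flag, so it is applied (or skipped) per request once the ZeroGPU worker has been allocated. A minimal sketch of that pattern, assuming the Hugging Face spaces package and a stock diffusers SDXL pipeline rather than the repo's pipeline_freescale; the generate function and its arguments are illustrative, and diffusers' built-in enable_freeu stands in for the repo's registration helpers:

import spaces
import torch
from diffusers import StableDiffusionXLPipeline

# Heavy setup stays on the CPU side; the pipeline only moves to CUDA
# inside the GPU-decorated call.
pipe = StableDiffusionXLPipeline.from_pretrained(
    "stabilityai/stable-diffusion-xl-base-1.0", torch_dtype=torch.float16
)

@spaces.GPU(duration=120)  # a ZeroGPU slice is attached only while this function runs
def generate(prompt, seed=0, disable_freeu=False):
    if not disable_freeu:
        # The commit uses register_free_upblock2d / register_free_crossattn_upblock2d;
        # diffusers' built-in FreeU hook is used here as a stand-in with the same values.
        pipe.enable_freeu(s1=0.6, s2=0.4, b1=1.1, b2=1.2)
    pipe.to("cuda")
    # A CPU generator keeps results reproducible across devices.
    generator = torch.Generator().manual_seed(seed)
    return pipe(prompt, generator=generator).images[0]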
@@ -40,14 +43,11 @@ def infer(prompt, output_size, ddim_steps, guidance_scale, cosine_scale, seed, o
 
     model_ckpt = "stabilityai/stable-diffusion-xl-base-1.0"
     pipe = StableDiffusionXLPipeline.from_pretrained(model_ckpt, torch_dtype=torch.float16)
-
-    register_free_upblock2d(pipe, b1=1.1, b2=1.2, s1=0.6, s2=0.4)
-    register_free_crossattn_upblock2d(pipe, b1=1.1, b2=1.2, s1=0.6, s2=0.4)
-
+
     generator = torch.Generator()
     generator = generator.manual_seed(seed)
 
-    result = infer_gpu_part(pipe, generator, prompt, negative_prompt, ddim_steps, guidance_scale, resolutions_list, fast_mode, cosine_scale)
+    result = infer_gpu_part(pipe, generator, prompt, negative_prompt, ddim_steps, guidance_scale, resolutions_list, fast_mode, cosine_scale, disable_freeu)
 
     image = result.images[0]
     save_path = 'output.png'
@@ -204,4 +204,5 @@ with gr.Blocks(css=css) as demo:
         outputs=[image_result],
         api_name="freescalehf")
 
-
+if __name__ == "__main__":
+    demo.queue(max_size=8).launch(show_api=True)
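The last hunk keeps the click handler exposed as a named API endpoint (api_name="freescalehf") and adds a __main__ guard that launches the queued demo with the API page enabled. A hedged sketch of calling that endpoint from outside the Space with gradio_client; the Space id is a placeholder and the endpoint's full input list is not visible in this diff, so inspect view_api() before calling predict:

from gradio_client import Client

client = Client("user/space-name")  # placeholder: replace with the actual Space id
client.view_api()                   # prints the /freescalehf parameters and their order

# Once the inputs are known, a call looks roughly like:
#   result = client.predict(..., api_name="/freescalehf")
# For image outputs, gradio_client typically returns a local file path.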