Spaces:
Running
on
Zero
bugfix
Browse files
app.py
CHANGED
@@ -46,7 +46,7 @@ pipe = FluxControlNetInpaintPipeline.from_pretrained(base_model, controlnet=cont
|
|
46 |
torch.backends.cuda.matmul.allow_tf32 = True
|
47 |
pipe.vae.enable_tiling()
|
48 |
pipe.vae.enable_slicing()
|
49 |
-
pipe.enable_model_cpu_offload() # for saving memory
|
50 |
|
51 |
control_mode_ids = {
|
52 |
"scribble_hed": 0,
|
@@ -184,16 +184,15 @@ def run_flux(
|
|
184 |
prompt=prompt,
|
185 |
image=image,
|
186 |
mask_image=mask,
|
187 |
-
control_image=control_image,
|
188 |
-
control_mode=control_mode,
|
189 |
-
controlnet_conditioning_scale=[0.55],
|
190 |
width=width,
|
191 |
height=height,
|
192 |
strength=strength_slider,
|
193 |
generator=generator,
|
194 |
num_inference_steps=num_inference_steps_slider,
|
195 |
# max_sequence_length=256,
|
196 |
-
cross_attention_kwargs={"scale":0.5},
|
197 |
joint_attention_kwargs={"scale": lora_scale}
|
198 |
).images[0]
|
199 |
progress(99, "Generate image success!")
|
|
|
46 |
torch.backends.cuda.matmul.allow_tf32 = True
|
47 |
pipe.vae.enable_tiling()
|
48 |
pipe.vae.enable_slicing()
|
49 |
+
# pipe.enable_model_cpu_offload() # for saving memory
|
50 |
|
51 |
control_mode_ids = {
|
52 |
"scribble_hed": 0,
|
|
|
184 |
prompt=prompt,
|
185 |
image=image,
|
186 |
mask_image=mask,
|
187 |
+
control_image=[control_image],
|
188 |
+
control_mode=[control_mode],
|
189 |
+
controlnet_conditioning_scale=[0.55],
|
190 |
width=width,
|
191 |
height=height,
|
192 |
strength=strength_slider,
|
193 |
generator=generator,
|
194 |
num_inference_steps=num_inference_steps_slider,
|
195 |
# max_sequence_length=256,
|
|
|
196 |
joint_attention_kwargs={"scale": lora_scale}
|
197 |
).images[0]
|
198 |
progress(99, "Generate image success!")
|