Spaces:
Running
on
Zero
Running
on
Zero
prithivMLmods
committed on
Commit
•
1f0bbab
1
Parent(s):
46a84c2
Update app.py
Browse files
app.py
CHANGED
@@ -550,7 +550,7 @@ def update_selection(evt: gr.SelectData, width, height):
|
|
550 |
height,
|
551 |
)
|
552 |
|
553 |
-
@spaces.GPU(duration=
|
554 |
def generate_image(prompt_mash, steps, seed, cfg_scale, width, height, lora_scale, progress):
|
555 |
pipe.to("cuda")
|
556 |
generator = torch.Generator(device="cuda").manual_seed(seed)
|
@@ -587,7 +587,7 @@ def generate_image_to_image(prompt_mash, image_input_path, image_strength, steps
|
|
587 |
).images[0]
|
588 |
return final_image
|
589 |
|
590 |
-
@spaces.GPU(duration=
|
591 |
def run_lora(prompt, image_input, image_strength, cfg_scale, steps, selected_index, randomize_seed, seed, width, height, lora_scale, progress=gr.Progress(track_tqdm=True)):
|
592 |
if selected_index is None:
|
593 |
raise gr.Error("You must select a LoRA before proceeding.")
|
|
|
550 |
height,
|
551 |
)
|
552 |
|
553 |
+
@spaces.GPU(duration=100)
|
554 |
def generate_image(prompt_mash, steps, seed, cfg_scale, width, height, lora_scale, progress):
|
555 |
pipe.to("cuda")
|
556 |
generator = torch.Generator(device="cuda").manual_seed(seed)
|
|
|
587 |
).images[0]
|
588 |
return final_image
|
589 |
|
590 |
+
@spaces.GPU(duration=100)
|
591 |
def run_lora(prompt, image_input, image_strength, cfg_scale, steps, selected_index, randomize_seed, seed, width, height, lora_scale, progress=gr.Progress(track_tqdm=True)):
|
592 |
if selected_index is None:
|
593 |
raise gr.Error("You must select a LoRA before proceeding.")
|