Spaces: Running on Zero
new update
app.py CHANGED
@@ -40,7 +40,7 @@ def pil_to_binary_mask(pil_image, threshold=0):
     return output_mask
 
 
-base_path = '
+base_path = 'Roopansh/Ailusion-VTON-DEMO-v1.1'
 example_path = os.path.join(os.path.dirname(__file__), 'example')
 
 unet = UNet2DConditionModel.from_pretrained(
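The only change in this hunk repoints base_path at the Roopansh/Ailusion-VTON-DEMO-v1.1 repo on the Hugging Face Hub, which the from_pretrained calls below it load from. A minimal sketch of that pattern follows; the subfolder name and dtype are assumptions in the usual diffusers style and are not visible in this diff:

# Sketch under assumptions: subfolder layout and dtype follow common diffusers
# conventions and are not taken from app.py itself.
import torch
from diffusers import UNet2DConditionModel

base_path = 'Roopansh/Ailusion-VTON-DEMO-v1.1'  # Hub repo id set by this commit

unet = UNet2DConditionModel.from_pretrained(
    base_path,
    subfolder="unet",           # assumed: UNet weights live in the repo's unet/ folder
    torch_dtype=torch.float16,  # assumed precision for GPU inference
)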
@@ -121,7 +121,7 @@ pipe = TryonPipeline.from_pretrained(
 )
 pipe.unet_encoder = UNet_Encoder
 
-@spaces.GPU
+@spaces.GPU(duration=120)
 def start_tryon(dict,garm_img,garment_des,is_checked,is_checked_crop,denoise_steps,seed):
     device = "cuda"
 
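The decorator change affects how ZeroGPU schedules the function: plain @spaces.GPU allocates a GPU for about a minute per call, while duration=120 requests a longer window so the try-on run is not cut off. A minimal sketch of the pattern, with a trivial body standing in for the real start_tryon:

# Minimal ZeroGPU sketch; the body is a stand-in, not the Space's actual code.
import spaces
import torch

@spaces.GPU(duration=120)            # GPU is attached only while this call runs
def run_on_gpu(x: torch.Tensor) -> torch.Tensor:
    return (x.to("cuda") * 2).cpu()  # CUDA is available inside the decorated call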
@@ -260,8 +260,8 @@ for ex_human in human_list_path:
 
 image_blocks = gr.Blocks().queue()
 with image_blocks as demo:
-    gr.Markdown("## 
-    gr.Markdown("Virtual Try-on with your image and garment image.
+    gr.Markdown("## AILUSION VTON DEMO πππ")
+    gr.Markdown("Virtual Try-on with your image and garment image.")
     with gr.Row():
         with gr.Column():
             imgs = gr.ImageEditor(sources='upload', type="pil", label='Human. Mask with pen or use auto-masking', interactive=True)
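This last hunk only finishes the two gr.Markdown headings at the top of the Blocks layout. For orientation, here is a reduced, hypothetical sketch of how the ImageEditor from the context line feeds a handler such as start_tryon: Gradio passes the editor state as a dict of PIL images (background, layers, composite), which is why the handler's first parameter is named dict. Apart from the editor line, every component and the stub body below are assumptions, not code from this diff:

import gradio as gr

def tryon_stub(editor_value, garm_img):
    human = editor_value["background"]  # uploaded person photo
    strokes = editor_value["layers"]    # pen strokes usable as a manual mask
    return human                        # placeholder; the real app runs the pipeline

with gr.Blocks() as demo:
    imgs = gr.ImageEditor(sources='upload', type="pil", interactive=True,
                          label='Human. Mask with pen or use auto-masking')
    garm = gr.Image(type="pil", label="Garment")
    out = gr.Image(label="Result")
    gr.Button("Try on").click(tryon_stub, inputs=[imgs, garm], outputs=out)

demo.launch()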