Roopansh committed on
Commit
fe331b9
β€’
1 Parent(s): f4e0db4

new update

Browse files
Files changed (1) hide show
  1. app.py +4 -4
app.py CHANGED
@@ -40,7 +40,7 @@ def pil_to_binary_mask(pil_image, threshold=0):
40
  return output_mask
41
 
42
 
43
- base_path = 'yisol/IDM-VTON'
44
  example_path = os.path.join(os.path.dirname(__file__), 'example')
45
 
46
  unet = UNet2DConditionModel.from_pretrained(
@@ -121,7 +121,7 @@ pipe = TryonPipeline.from_pretrained(
121
  )
122
  pipe.unet_encoder = UNet_Encoder
123
 
124
- @spaces.GPU
125
  def start_tryon(dict,garm_img,garment_des,is_checked,is_checked_crop,denoise_steps,seed):
126
  device = "cuda"
127
 
@@ -260,8 +260,8 @@ for ex_human in human_list_path:
260
 
261
  image_blocks = gr.Blocks().queue()
262
  with image_blocks as demo:
263
- gr.Markdown("## IDM-VTON πŸ‘•πŸ‘”πŸ‘š")
264
- gr.Markdown("Virtual Try-on with your image and garment image. Check out the [source codes](https://github.com/yisol/IDM-VTON) and the [model](https://huggingface.co/yisol/IDM-VTON)")
265
  with gr.Row():
266
  with gr.Column():
267
  imgs = gr.ImageEditor(sources='upload', type="pil", label='Human. Mask with pen or use auto-masking', interactive=True)
 
40
  return output_mask
41
 
42
 
43
+ base_path = 'Roopansh/Ailusion-VTON-DEMO-v1.1'
44
  example_path = os.path.join(os.path.dirname(__file__), 'example')
45
 
46
  unet = UNet2DConditionModel.from_pretrained(
 
121
  )
122
  pipe.unet_encoder = UNet_Encoder
123
 
124
+ @spaces.GPU(duration=120)
125
  def start_tryon(dict,garm_img,garment_des,is_checked,is_checked_crop,denoise_steps,seed):
126
  device = "cuda"
127
 
 
260
 
261
  image_blocks = gr.Blocks().queue()
262
  with image_blocks as demo:
263
+ gr.Markdown("## AILUSION VTON DEMO πŸ‘•πŸ‘”πŸ‘š")
264
+ gr.Markdown("Virtual Try-on with your image and garment image.")
265
  with gr.Row():
266
  with gr.Column():
267
  imgs = gr.ImageEditor(sources='upload', type="pil", label='Human. Mask with pen or use auto-masking', interactive=True)