fffiloni commited on
Commit
afb1563
·
verified ·
1 Parent(s): eb0fbff

increase ZeroGPU time needed for inference

Browse files
Files changed (1) hide show
  1. app.py +1 -1
app.py CHANGED
@@ -133,7 +133,7 @@ def run_inference(temp_dir, removed_bg_path):
133
  except subprocess.CalledProcessError as e:
134
  return f"Error during inference: {str(e)}"
135
 
136
- @spaces.GPU(duration=90)
137
  def process_image(input_pil, remove_bg, progress=gr.Progress(track_tqdm=True)):
138
 
139
  torch.cuda.empty_cache()
 
133
  except subprocess.CalledProcessError as e:
134
  return f"Error during inference: {str(e)}"
135
 
136
+ @spaces.GPU(duration=140)
137
  def process_image(input_pil, remove_bg, progress=gr.Progress(track_tqdm=True)):
138
 
139
  torch.cuda.empty_cache()