Charbel Malo committed on
Commit
9641cdc
1 Parent(s): 5ee28a7

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +4 -2
app.py CHANGED
@@ -88,13 +88,15 @@ if USE_CUDA:
88
  if "CUDAExecutionProvider" in available_providers:
89
  print("\n********** Running on CUDA **********\n")
90
  PROVIDER = ["CUDAExecutionProvider", "CPUExecutionProvider"]
 
91
  else:
92
  USE_CUDA = False
93
  print("\n********** CUDA unavailable running on CPU **********\n")
 
94
  else:
95
  USE_CUDA = False
96
  print("\n********** Running on CPU **********\n")
97
-
98
  device = "cuda" if USE_CUDA else "cpu"
99
  EMPTY_CACHE = lambda: torch.cuda.empty_cache() if device == "cuda" else None
100
 
@@ -136,7 +138,7 @@ load_face_swapper_model()
136
  ## ------------------------------ MAIN PROCESS ------------------------------
137
 
138
 
139
- @spaces.GPU(duration=299, enable_queue=True)
140
  def process(
141
  input_type,
142
  image_path,
 
88
  if "CUDAExecutionProvider" in available_providers:
89
  print("\n********** Running on CUDA **********\n")
90
  PROVIDER = ["CUDAExecutionProvider", "CPUExecutionProvider"]
91
+ cv2.setNumThreads(32)
92
  else:
93
  USE_CUDA = False
94
  print("\n********** CUDA unavailable running on CPU **********\n")
95
+ cv2.setNumThreads(1)
96
  else:
97
  USE_CUDA = False
98
  print("\n********** Running on CPU **********\n")
99
+ cv2.setNumThreads(1)
100
  device = "cuda" if USE_CUDA else "cpu"
101
  EMPTY_CACHE = lambda: torch.cuda.empty_cache() if device == "cuda" else None
102
 
 
138
  ## ------------------------------ MAIN PROCESS ------------------------------
139
 
140
 
141
+ @spaces.GPU(duration=200)
142
  def process(
143
  input_type,
144
  image_path,