ohayonguy committed
Commit fc1702b • 1 Parent(s): 0d4c368

fixed launch
app.py CHANGED
@@ -85,7 +85,7 @@ def enhance_face(img, face_helper, has_aligned, only_center_face=False, paste_ba
 
 @torch.inference_mode()
 @spaces.GPU()
-def inference(img, aligned, scale):
+def inference(img, aligned, scale, num_steps):
     if scale > 4:
         scale = 4  # avoid too large scale value
     try:
@@ -133,7 +133,7 @@ def inference(img, aligned, scale):
         h, w = img.shape[0:2]
         output = cv2.resize(output, (int(w * scale / 2), int(h * scale / 2)), interpolation=interpolation)
     except Exception as error:
-        print('
+        print('Wrong scale input.', error)
     if img_mode == 'RGBA':  # RGBA images should be saved in png format
         extension = 'png'
     else:
@@ -156,8 +156,12 @@ demo = gr.Interface(
         gr.Image(type="filepath", label="Input"),
         gr.Radio(['aligned', 'unaligned'], type="value", value='unaligned', label='Image Alignment'),
         gr.Number(label="Rescaling factor", value=2),
+        gr.Number(label="Number of flow steps", value=25),
     ], [
         gr.Image(type="numpy", label="Output (The whole image)"),
         gr.File(label="Download the output image")
     ],
 )
+
+
+demo.queue(max_size=20).launch()
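
For reference, below is a minimal sketch of how the pieces touched by this commit fit together in app.py: the new "Number of flow steps" input feeding the extra num_steps parameter of inference, and the demo.queue(max_size=20).launch() call that was added to actually start the app. Only the gr.Interface inputs/outputs and the launch line are taken from the diff above; the import and the placeholder inference body are assumptions for illustration, not the real restoration code.

# Minimal sketch, assuming only what the diff shows; the real inference body
# performs the face-restoration work and is omitted here.
import gradio as gr

def inference(img, aligned, scale, num_steps):
    # Placeholder: echo the input path for both outputs so the sketch runs.
    return img, img

demo = gr.Interface(
    inference,
    [
        gr.Image(type="filepath", label="Input"),
        gr.Radio(['aligned', 'unaligned'], type="value", value='unaligned', label='Image Alignment'),
        gr.Number(label="Rescaling factor", value=2),
        gr.Number(label="Number of flow steps", value=25),  # new input added by this commit
    ],
    [
        gr.Image(type="numpy", label="Output (The whole image)"),
        gr.File(label="Download the output image"),
    ],
)

demo.queue(max_size=20).launch()  # launch call restored by this commit

Note that the fourth gr.Number widget maps positionally onto the new num_steps argument, so the input list and the function signature must stay in sync.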