Huang committed • Commit 2eb5b9e • Parent(s): af241a6
add annotator
app.py CHANGED
@@ -1,17 +1,12 @@
 import importlib
 import os
+
 import gradio as gr
+from omegaconf import OmegaConf
 
 from annotator.util import resize_image, HWC3
 
-config = {
-    "canny": "CannyDetector",
-    "hed": "HedDetector",
-    "mlsd": "MLSDProcessor",
-    "midas": "MidasProcessor",
-    "openpose": "OpenposeDetector",
-    "uniformer": "UniformerDetector"
-}
+config = OmegaConf.load("config/annotator.yaml")
 
 package_annotator = "annotator"
 
@@ -38,92 +33,28 @@ def process(cls):
 block = gr.Blocks().queue()
 examples = [os.path.join(os.path.dirname(__file__), "examples/demo.jpeg")]
 with block:
-    [… removed old lines 41-63 (the Canny Edge and HED Edge tab layouts) are not captured in this page view …]
-                run_button = gr.Button(label="Run")
-                gr.Examples(examples, input_image)
-            with gr.Column():
-                gallery = gr.Gallery(label="Generated images", show_label=False).style(height="auto")
-        run_button.click(fn=process(config["hed"]), inputs=[input_image, resolution], outputs=[gallery])
-
-    with gr.Tab("MLSD Edge"):
-        with gr.Row():
-            gr.Markdown("## MLSD Edge")
-        with gr.Row():
-            with gr.Column():
-                input_image = gr.Image(source='upload', type="numpy")
-                value_threshold = gr.Slider(label="value_threshold", minimum=0.01, maximum=2.0, value=0.1, step=0.01)
-                distance_threshold = gr.Slider(label="distance_threshold", minimum=0.01, maximum=20.0, value=0.1,
-                                               step=0.01)
-                resolution = gr.Slider(label="resolution", minimum=256, maximum=1024, value=384, step=64)
-                run_button = gr.Button(label="Run")
-                gr.Examples(examples, input_image)
-            with gr.Column():
-                gallery = gr.Gallery(label="Generated images", show_label=False).style(height="auto")
-        run_button.click(fn=process(config["mlsd"]),
-                         inputs=[input_image, resolution, value_threshold, distance_threshold],
-                         outputs=[gallery])
-
-    with gr.Tab("MIDAS Depth and Normal"):
-        with gr.Row():
-            gr.Markdown("## MIDAS Depth and Normal")
-        with gr.Row():
-            with gr.Column():
-                input_image = gr.Image(source='upload', type="numpy")
-                alpha = gr.Slider(label="alpha", minimum=0.1, maximum=20.0, value=6.2, step=0.01)
-                resolution = gr.Slider(label="resolution", minimum=256, maximum=1024, value=384, step=64)
-                run_button = gr.Button(label="Run")
-                gr.Examples(examples, input_image)
-            with gr.Column():
-                gallery = gr.Gallery(label="Generated images", show_label=False).style(height="auto")
-        run_button.click(fn=process(config["midas"]), inputs=[input_image, resolution, alpha], outputs=[gallery])
-
-    with gr.Tab("Openpose"):
-        with gr.Row():
-            gr.Markdown("## Openpose")
-        with gr.Row():
-            with gr.Column():
-                input_image = gr.Image(source='upload', type="numpy")
-                hand = gr.Checkbox(label='detect hand', value=False)
-                resolution = gr.Slider(label="resolution", minimum=256, maximum=1024, value=512, step=64)
-                run_button = gr.Button(label="Run")
-                gr.Examples(examples, input_image)
-            with gr.Column():
-                gallery = gr.Gallery(label="Generated images", show_label=False).style(height="auto")
-        run_button.click(fn=process(config["openpose"]), inputs=[input_image, resolution, hand], outputs=[gallery])
-
-    with gr.Tab("Uniformer Segmentation"):
-        with gr.Row():
-            gr.Markdown("## Uniformer Segmentation")
-        with gr.Row():
-            with gr.Column():
-                input_image = gr.Image(source='upload', type="numpy")
-                resolution = gr.Slider(label="resolution", minimum=256, maximum=1024, value=512, step=64)
-                run_button = gr.Button(label="Run")
-                gr.Examples(examples, input_image)
-            with gr.Column():
-                gallery = gr.Gallery(label="Generated images", show_label=False).style(height="auto")
-        run_button.click(fn=process(config["uniformer"]), inputs=[input_image, resolution], outputs=[gallery])
+    for key in config.keys():
+        cls, input_element = config[key]["process"], config[key].get("input")
+        input_append = []
+        with gr.Tab(key):
+            with gr.Row():
+                gr.Markdown("## " + key)
+            with gr.Row():
+                with gr.Column():
+                    input_image = gr.Image(source='upload', type="numpy")
+                    resolution = gr.Slider(label="resolution", minimum=256, maximum=1024, value=512, step=64)
+
+                    if input_element is not None:
+                        for item in input_element:
+                            input_append.append(getattr(gr, item["attr"])(**item["args"]))
+
+                    run_button = gr.Button(label="Run")
+                    gr.Examples(examples, input_image)
+                with gr.Column():
+                    gallery = gr.Gallery(label="Generated images", show_label=False).style(height="auto")
+
+            run_button.click(fn=process(cls),
+                             inputs=[input_image, resolution] + input_append,
+                             outputs=[gallery])
 
 block.launch()
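Note: the commit swaps the hard-coded annotator dict for OmegaConf.load("config/annotator.yaml"), but the YAML file itself is not part of this view. From the way the new loop reads it, each top-level key names a tab, "process" holds the detector class name handed to process(), and an optional "input" list describes extra Gradio widgets as attr/args pairs. The snippet below is a minimal sketch of that assumed schema, with entries reconstructed from the removed per-tab code; the exact keys and values are assumptions, not the committed file.

# Sketch only: mirrors the schema the new app.py appears to expect from
# config/annotator.yaml. Keys, class names and widget arguments are taken from
# the removed hard-coded tabs, not read from the actual YAML file.
import gradio as gr
from omegaconf import OmegaConf

config = OmegaConf.create("""
openpose:
  process: OpenposeDetector
  input:
    - attr: Checkbox
      args: {label: detect hand, value: false}
mlsd:
  process: MLSDProcessor
  input:
    - attr: Slider
      args: {label: value_threshold, minimum: 0.01, maximum: 2.0, value: 0.1, step: 0.01}
    - attr: Slider
      args: {label: distance_threshold, minimum: 0.01, maximum: 20.0, value: 0.1, step: 0.01}
uniformer:
  process: UniformerDetector   # no extra inputs beyond image and resolution
""")

# Same mechanism as the loop in app.py: widget classes are looked up on the
# gradio module by name and instantiated from the YAML-provided kwargs.
for key in config.keys():
    cls, input_element = config[key]["process"], config[key].get("input")
    extra = [] if input_element is None else [
        getattr(gr, item["attr"])(**item["args"]) for item in input_element
    ]
    print(key, cls, [type(c).__name__ for c in extra])

Each such entry becomes one tab at build time, with input_image and resolution always present and the extra widgets appended to the click inputs.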
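The click handlers are still built by process(cls); its body sits outside the hunks shown here (only the "def process(cls):" fragment appears in the second hunk header). Given the module's imports (importlib, resize_image, HWC3) and the package_annotator constant, the following is a rough, purely illustrative guess at the shape such a factory could take; the module lookup and call signature are assumptions, not the committed implementation.

# Hypothetical sketch of process(); the real body is not shown in this diff.
import importlib

from annotator.util import resize_image, HWC3

package_annotator = "annotator"


def process(cls):
    # Assumption: the detector classes named in the config ("OpenposeDetector",
    # "MLSDProcessor", ...) are importable from the top-level annotator package.
    module = importlib.import_module(package_annotator)
    detector = getattr(module, cls)()

    def run(input_image, resolution, *extra_args):
        # Normalize to 3-channel uint8 and resize, then run the annotator;
        # the gallery output expects a list of images.
        img = resize_image(HWC3(input_image), resolution)
        return [detector(img, *extra_args)]

    return run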