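# Gradio demo: a tabbed UI for running the image annotators (edge, line, depth/normal,
# pose, segmentation) exposed by the local `annotator` package.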
import importlib
import os

import gradio as gr

from annotator.util import resize_image, HWC3
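
# Map each annotator key to the name of its detector/processor class in the `annotator` package.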
config = {
    "canny": "CannyDetector",
    "hed": "HedDetector",
    "mlsd": "MLSDProcessor",
    "midas": "MidasProcessor",
    "openpose": "OpenposeDetector",
    "uniformer": "UniformerDetector"
}

package_annotator = "annotator"
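
# Resize the input image, import the requested detector class by name from the annotator
# package, instantiate it, and run it. Always returns a list/tuple of images for the gallery.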
def process_image(cls: str, img, res, *args):
    img = resize_image(HWC3(img), res)
    # Lazily import the annotator package and look up the requested detector class.
    module_imp = importlib.import_module(package_annotator)
    model = getattr(module_imp, cls)
    image_processor = model()
    result = image_processor(img, *args)
    if isinstance(result, tuple):
        return result
    return [result]
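
# Factory that binds a detector class name to a Gradio callback taking (img, res, *args).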
def process(cls):
    def process_fc(img, res, *args):
        return process_image(cls, img, res, *args)

    return process_fc
block = gr.Blocks().queue()
examples = [os.path.join(os.path.dirname(__file__), "examples/demo.jpeg")]
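
# Lay out the demo: one tab per annotator, each with its own controls and an output gallery.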
with block:
    with gr.Tab("Canny Edge"):
        with gr.Row():
            gr.Markdown("## Canny Edge")
        with gr.Row():
            with gr.Column():
                input_image = gr.Image(source='upload', type="numpy")
                low_threshold = gr.Slider(label="low_threshold", minimum=1, maximum=255, value=100, step=1)
                high_threshold = gr.Slider(label="high_threshold", minimum=1, maximum=255, value=200, step=1)
                resolution = gr.Slider(label="resolution", minimum=256, maximum=1024, value=512, step=64)
                run_button = gr.Button(label="Run")
                gr.Examples(examples, input_image)
            with gr.Column():
                gallery = gr.Gallery(label="Generated images", show_label=False).style(height="auto")
        run_button.click(fn=process(config["canny"]),
                         inputs=[input_image, resolution, low_threshold, high_threshold],
                         outputs=[gallery])
with gr.Tab("HED Edge"): | |
with gr.Row(): | |
gr.Markdown("## HED Edge") | |
with gr.Row(): | |
with gr.Column(): | |
input_image = gr.Image(source='upload', type="numpy") | |
resolution = gr.Slider(label="resolution", minimum=256, maximum=1024, value=512, step=64) | |
run_button = gr.Button(label="Run") | |
gr.Examples(examples, input_image) | |
with gr.Column(): | |
gallery = gr.Gallery(label="Generated images", show_label=False).style(height="auto") | |
run_button.click(fn=process(config["hed"]), inputs=[input_image, resolution], outputs=[gallery]) | |
with gr.Tab("MLSD Edge"): | |
with gr.Row(): | |
gr.Markdown("## MLSD Edge") | |
with gr.Row(): | |
with gr.Column(): | |
input_image = gr.Image(source='upload', type="numpy") | |
value_threshold = gr.Slider(label="value_threshold", minimum=0.01, maximum=2.0, value=0.1, step=0.01) | |
distance_threshold = gr.Slider(label="distance_threshold", minimum=0.01, maximum=20.0, value=0.1, | |
step=0.01) | |
resolution = gr.Slider(label="resolution", minimum=256, maximum=1024, value=384, step=64) | |
run_button = gr.Button(label="Run") | |
gr.Examples(examples, input_image) | |
with gr.Column(): | |
gallery = gr.Gallery(label="Generated images", show_label=False).style(height="auto") | |
run_button.click(fn=process(config["mlsd"]), | |
inputs=[input_image, resolution, value_threshold, distance_threshold], | |
outputs=[gallery]) | |
with gr.Tab("MIDAS Depth and Normal"): | |
with gr.Row(): | |
gr.Markdown("## MIDAS Depth and Normal") | |
with gr.Row(): | |
with gr.Column(): | |
input_image = gr.Image(source='upload', type="numpy") | |
alpha = gr.Slider(label="alpha", minimum=0.1, maximum=20.0, value=6.2, step=0.01) | |
resolution = gr.Slider(label="resolution", minimum=256, maximum=1024, value=384, step=64) | |
run_button = gr.Button(label="Run") | |
gr.Examples(examples, input_image) | |
with gr.Column(): | |
gallery = gr.Gallery(label="Generated images", show_label=False).style(height="auto") | |
run_button.click(fn=process(config["midas"]), inputs=[input_image, resolution, alpha], outputs=[gallery]) | |
with gr.Tab("Openpose"): | |
with gr.Row(): | |
gr.Markdown("## Openpose") | |
with gr.Row(): | |
with gr.Column(): | |
input_image = gr.Image(source='upload', type="numpy") | |
hand = gr.Checkbox(label='detect hand', value=False) | |
resolution = gr.Slider(label="resolution", minimum=256, maximum=1024, value=512, step=64) | |
run_button = gr.Button(label="Run") | |
gr.Examples(examples, input_image) | |
with gr.Column(): | |
gallery = gr.Gallery(label="Generated images", show_label=False).style(height="auto") | |
run_button.click(fn=process(config["openpose"]), inputs=[input_image, resolution, hand], outputs=[gallery]) | |
with gr.Tab("Uniformer Segmentation"): | |
with gr.Row(): | |
gr.Markdown("## Uniformer Segmentation") | |
with gr.Row(): | |
with gr.Column(): | |
input_image = gr.Image(source='upload', type="numpy") | |
resolution = gr.Slider(label="resolution", minimum=256, maximum=1024, value=512, step=64) | |
run_button = gr.Button(label="Run") | |
gr.Examples(examples, input_image) | |
with gr.Column(): | |
gallery = gr.Gallery(label="Generated images", show_label=False).style(height="auto") | |
run_button.click(fn=process(config["uniformer"]), inputs=[input_image, resolution], outputs=[gallery]) | |
block.launch() | |