#!/usr/bin/env python

import pathlib

import gradio as gr

from model import run_model
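# run_model is expected to accept the input image path and the score threshold
# and return the annotated result as a NumPy array (this matches how it is
# wired to the Run button below).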

DESCRIPTION = (
    '# [CutS3D](https://leonsick.github.io/cuts3d/): Cutting Semantics in 3D for 2D Unsupervised Instance Segmentation\n\n'
    'This is a demo for the CutS3D Zero-Shot model. The model is trained on [ImageNet](https://image-net.org/), '
    'first with unsupervised pseudo-masks and then with one round of self-training. '
    'The first prediction will likely be slow because the model has to be downloaded; subsequent predictions will be faster.'
)

# Example images shipped with the demo, shown in the Examples gallery below.
paths = sorted(pathlib.Path('demo_imgs').glob('*.jpg'))

# Build the Gradio interface: inputs (image and score threshold) on the left,
# the predicted instance segmentation on the right.
with gr.Blocks(css='style.css') as demo:
    gr.Markdown(DESCRIPTION)
    with gr.Row():
        with gr.Column():
            image = gr.Image(label='Input image', type='filepath')
            score_threshold = gr.Slider(label='Score threshold',
                                        minimum=0,
                                        maximum=1,
                                        value=0.45,
                                        step=0.05)
            run_button = gr.Button('Run')
        with gr.Column():
            result = gr.Image(label='Result', type='numpy')
    with gr.Row():
        gr.Examples(examples=[[path.as_posix()] for path in paths],
                    inputs=image)

    # Run the model when the button is clicked; also exposed via the API as 'run'.
    run_button.click(fn=run_model,
                     inputs=[
                         image,
                         score_threshold,
                     ],
                     outputs=result,
                     api_name='run')
# Queue incoming requests (at most 60 waiting) before launching the app.
demo.queue(max_size=60).launch(debug=True)