import os

import gradio as gr

from cls import _CLS_MODELS, _DEFAULT_CLS_MODEL, _gr_classification
from monochrome import _gr_monochrome, _DEFAULT_MONO_MODEL, _MONO_MODELS

if __name__ == '__main__':
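    # Build a two-tab Gradio UI: one tab per model family (classification and monochrome detection).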
    with gr.Blocks() as demo:
        with gr.Tabs():
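            # Tab 1: classify the uploaded image with the selected model and inference size.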
            with gr.Tab('Classification'):
                with gr.Row():
                    with gr.Column():
                        gr_cls_input_image = gr.Image(type='pil', label='Original Image')
                        gr_cls_model = gr.Dropdown(_CLS_MODELS, value=_DEFAULT_CLS_MODEL, label='Model')
                        gr_cls_infer_size = gr.Slider(224, 640, value=384, step=32, label='Infer Size')
                        gr_cls_submit = gr.Button(value='Submit', variant='primary')

                    with gr.Column():
                        gr_cls_output = gr.Label(label='Classes')

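                    # Wire the Submit button to the classification inference function.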
                    gr_cls_submit.click(
                        _gr_classification,
                        inputs=[gr_cls_input_image, gr_cls_model, gr_cls_infer_size],
                        outputs=[gr_cls_output],
                    )

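            # Tab 2: monochrome detection, mirroring the layout of the classification tab.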
            with gr.Tab('Monochrome'):
                with gr.Row():
                    with gr.Column():
                        gr_mono_input_image = gr.Image(type='pil', label='Original Image')
                        gr_mono_model = gr.Dropdown(_MONO_MODELS, value=_DEFAULT_MONO_MODEL, label='Model')
                        gr_mono_infer_size = gr.Slider(224, 640, value=384, step=32, label='Infer Size')
                        gr_mono_submit = gr.Button(value='Submit', variant='primary')

                    with gr.Column():
                        gr_mono_output = gr.Label(label='Classes')

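                    # Wire the Submit button to the monochrome inference function.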
                    gr_mono_submit.click(
                        _gr_monochrome,
                        inputs=[gr_mono_input_image, gr_mono_model, gr_mono_infer_size],
                        outputs=[gr_mono_output],
                    )

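    # Queue incoming requests (concurrency scaled to the CPU count) and launch the demo.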
    demo.queue(os.cpu_count()).launch()