Spaces: Running on Zero
Commit: Update app.py (Browse files)
File: app.py — CHANGED
@@ -45,11 +45,18 @@ def run_theia(image):
|
|
45 |
|
46 |
return [(theia_decode_results, "Theia Results"), (gt_decode_results, "Ground Truth")]
|
47 |
|
48 |
-
|
49 |
-
|
50 |
-
|
51 |
-
|
52 |
-
|
53 |
-
|
54 |
-
)
|
55 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
45 |
|
46 |
return [(theia_decode_results, "Theia Results"), (gt_decode_results, "Ground Truth")]
|
47 |
|
48 |
+
with gr.Blocks() as demo:
|
49 |
+
gr.HTML(load_description("gradio_title.md"))
|
50 |
+
gr.Markdown("This space demonstrates decoding Theia-predicted VFM representations to their original teacher model outputs. For DINOv2 we apply the PCA visualization, for SAM we use its decoder to generate segmentation masks (but with SAM's pipeline of prompting), and for Depth-Anything we use its decoder head to do depth prediction.")
|
51 |
+
|
52 |
+
with gr.Row():
|
53 |
+
with gr.Column():
|
54 |
+
input_image = gr.Image(type="pil", label="Input Image")
|
55 |
+
submit_button = gr.Button("Submit")
|
56 |
+
|
57 |
+
with gr.Column():
|
58 |
+
output_gallery = gr.Gallery(label="Input, DINOv2, SAM, Depth Anything", type="numpy")
|
59 |
+
|
60 |
+
submit_button.click(run_theia, inputs=input_image, outputs=output_gallery)
|
61 |
+
|
62 |
+
demo.launch()
|