nev committed
Commit 5c8fbca · 1 Parent(s): d16f22f

Let's see.

Files changed (2):
  1. app.py +41 -2
  2. requirements.txt +0 -1
app.py CHANGED
@@ -1,16 +1,54 @@
 from depth import MidasDepth
 import gradio as gr
 import numpy as np
-import cv2
+import tempfile


 depth_estimator = MidasDepth()


 def get_depth(rgb):
+    rgb = rgb.convert("RGB")
     depth = depth_estimator.get_depth(rgb)

-    return rgb, (depth.clip(0, 64) * 1024).astype("uint16")
+    h, w, _ = rgb.shape
+    grid = np.mgrid[0:h, 0:w].transpose(1, 2, 0
+                                        ).reshape(-1, 2)[..., ::-1]
+    flat_grid = grid[:, 1] * w + grid[:, 0]
+
+    positions = np.concatenate(((grid - np.array([[w, h]])
+                                 / 2) / w * 2,
+                                depth.flatten()[flat_grid][..., np.newaxis]),
+                               axis=-1)
+    positions[:, :-1] *= positions[:, -1:]
+    positions[:, :2] *= -1
+
+    pick_edges = depth < 0
+    y, x = (t.flatten() for t in np.mgrid[0:h, 0:w])
+    faces = np.concatenate((
+        np.stack((y * w + x,
+                  (y - 1) * w + x,
+                  y * w + (x - 1)), axis=-1)
+        [(~pick_edges.flatten()) * (x > 0) * (y > 0)],
+        np.stack((y * w + x,
+                  (y + 1) * w + x,
+                  y * w + (x + 1)), axis=-1)
+        [(~pick_edges.flatten()) * (x < w - 1) * (y < im.shape[0] - 1)]
+    ))
+
+    tf = tempfile.NamedTemporaryFile(suffix=".obj").name
+    save_obj(positions, rgb.reshape(-1, 3), faces, tf)
+
+    return rgb, (depth.clip(0, 64) * 1024).astype("uint16"), tf
+
+
+def save_obj(positions, rgb, faces, filename):
+    with open(filename, "w") as f:
+        for position, color in zip(positions, rgb):
+            f.write(
+                f"v {' '.join(map(str, position))} {' '.join(map(str, color))}")
+        for face in faces:
+            f.write(f"f {' '.join(map(str, face))}")


 gr.Interface(fn=get_depth, inputs=[
@@ -18,5 +56,6 @@ gr.Interface(fn=get_depth, inputs=[
 ], outputs=[
     gr.components.Image(type="pil", label="image"),
     gr.components.Image(type="numpy", label="depth"),
+    gr.components.Model3D()

 ]).launch(share=True)
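The bulk of the new get_depth turns the MiDaS depth map into a small mesh: every pixel is back-projected to a 3D point, neighbouring pixels are stitched into triangles, and the result is written to a Wavefront OBJ that the new gr.components.Model3D() output can display. Below is a minimal sketch of the back-projection step; it assumes the image has already been converted to an (h, w, 3) NumPy array (for example via np.asarray(rgb.convert("RGB")), since a PIL image itself has no .shape), and the helper name unproject is illustrative rather than part of the commit.

import numpy as np

def unproject(depth):
    # Back-project an (h, w) depth map into an (h*w, 3) array of
    # camera-space points, following the grid/positions math in get_depth.
    h, w = depth.shape
    # Pixel coordinates as (x, y) pairs, one row per pixel, in row-major order.
    grid = np.mgrid[0:h, 0:w].transpose(1, 2, 0).reshape(-1, 2)[..., ::-1]
    # Move the origin to the image centre and normalise by the width,
    # so x spans roughly [-1, 1] and y keeps the same aspect ratio.
    xy = (grid - np.array([[w, h]]) / 2) / w * 2
    z = depth.reshape(-1, 1)
    # Scale the lateral offsets by depth (a pinhole-style unprojection),
    # then flip x and y as the commit does with positions[:, :2] *= -1.
    points = np.concatenate((xy * z, z), axis=-1)
    points[:, :2] *= -1
    return points

# Smoke test on a constant 4x6 depth map: 24 points, 3 coordinates each.
print(unproject(np.full((4, 6), 2.0)).shape)  # (24, 3)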
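Face construction then links each kept pixel to its neighbours, skipping anything masked by pick_edges. The committed bound y < im.shape[0] - 1 refers to an im that app.py never defines; the sketch below assumes the intended bound is y < h - 1, symmetric with x < w - 1. It also assumes the usual OBJ conventions of 1-based face indices and newline-terminated records, which the committed save_obj does not yet apply.

import numpy as np

def build_faces(depth, h, w):
    # Two triangles per interior pixel: one towards the up/left neighbours,
    # one towards the down/right neighbours, mirroring the commit.
    pick_edges = depth < 0
    y, x = (t.flatten() for t in np.mgrid[0:h, 0:w])
    keep = ~pick_edges.flatten()
    upper = np.stack((y * w + x, (y - 1) * w + x, y * w + (x - 1)), axis=-1)
    upper = upper[keep & (x > 0) & (y > 0)]
    lower = np.stack((y * w + x, (y + 1) * w + x, y * w + (x + 1)), axis=-1)
    lower = lower[keep & (x < w - 1) & (y < h - 1)]  # assumed fix for `im`
    return np.concatenate((upper, lower))

def save_obj(positions, colors, faces, filename):
    # Wavefront OBJ: "v x y z [r g b]" per vertex, "f i j k" per face,
    # with 1-based vertex indices and one record per line.
    with open(filename, "w") as f:
        for position, color in zip(positions, colors):
            f.write(f"v {' '.join(map(str, position))} "
                    f"{' '.join(map(str, color))}\n")
        for face in faces:
            f.write(f"f {' '.join(map(str, face + 1))}\n")

Viewers that honour per-vertex colours generally expect values in [0, 1], so passing rgb.reshape(-1, 3) / 255 rather than raw 8-bit values is probably the safer choice.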
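One smaller point: tempfile.NamedTemporaryFile(suffix=".obj").name takes the path of a file that is typically deleted as soon as the unreferenced handle is garbage-collected; the committed code still works because save_obj immediately recreates that path. A sketch of a less fragile variant, assuming it is acceptable to leave cleanup of the file to the Space's ephemeral filesystem:

import tempfile

# delete=False keeps the file on disk after the handle closes, so the
# path returned to gr.components.Model3D() remains readable.
tf = tempfile.NamedTemporaryFile(suffix=".obj", delete=False).name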
requirements.txt CHANGED
@@ -1,3 +1,2 @@
 torch
-opencv-python
 timm