kcml committed on
Commit
dfed601
·
1 Parent(s): e510cdf

mean depth

Browse files
Files changed (1) hide show
  1. handcrafted_solution.py +69 -18
handcrafted_solution.py CHANGED
@@ -122,12 +122,41 @@ def get_uv_depth(vertices, depth):
122
  vertex_depth = depth[(uv_int[:, 1] , uv_int[:, 0])]
123
  return uv, vertex_depth
124
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
125
  def get_vertices_and_edges_from_two_segmentations(ade_seg_np, gest_seg_np, edge_th = 50.0):
126
  '''Get the vertices and edges from the gestalt segmentation mask of the house'''
127
  vertices = []
128
  connections = []
129
 
130
- color_th = 10.0 # Cost ->2.6
131
 
132
  #-------------------------
133
  # combined map from ade
@@ -151,7 +180,7 @@ def get_vertices_and_edges_from_two_segmentations(ade_seg_np, gest_seg_np, edge_
151
  #print(f'apex_color= {apex_color}')
152
  #apex_mask = cv2.inRange(gest_seg_np, apex_color-0.5, apex_color+0.5)
153
  apex_mask = cv2.inRange(gest_seg_np, apex_color-color_th, apex_color+color_th) # include more pts
154
- # apex_mask = cv2.bitwise_and(apex_mask, ade_mask) # remove pts
155
  if apex_mask.sum() > 0:
156
  output = cv2.connectedComponentsWithStats(apex_mask, 8, cv2.CV_32S)
157
  (numLabels, labels, stats, centroids) = output
@@ -174,17 +203,6 @@ def get_vertices_and_edges_from_two_segmentations(ade_seg_np, gest_seg_np, edge_
174
  apex_map_on_ade[uu+ss[0], vv+ss[1]] = (255,255,255)
175
  apex_map_on_gest[uu+ss[0], vv+ss[1]] = (255,255,255)
176
 
177
- # imsave apex
178
- import random
179
- rid = random.random()
180
- filename_apex_ade = f'apex_map_on_ade_{rid}.jpg'
181
- cv2.imwrite(filename_apex_ade, apex_map_on_ade)
182
- filename_apex_gest = f'apex_map_on_gest_{rid}.jpg'
183
- cv2.imwrite(filename_apex_gest, apex_map_on_gest)
184
- filename_apex_map = f'apex_map_{rid}.jpg'
185
- cv2.imwrite(filename_apex_map, apex_map)
186
-
187
-
188
 
189
  eave_end_color = np.array(gestalt_color_mapping['eave_end_point'])
190
  #eave_end_mask = cv2.inRange(gest_seg_np, eave_end_color-0.5, eave_end_color+0.5)
@@ -198,7 +216,29 @@ def get_vertices_and_edges_from_two_segmentations(ade_seg_np, gest_seg_np, edge_
198
  for i in range(numLabels-1):
199
  vert = {"xy": centroids[i], "type": "eave_end_point"}
200
  vertices.append(vert)
201
-
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
202
  print(f'{len(vertices)} vertices detected')
203
  # Connectivity
204
  apex_pts = []
@@ -360,13 +400,23 @@ def predict(entry, visualize=False) -> Tuple[np.ndarray, List[int]]:
360
  good_entry['K'],
361
  good_entry['R'],
362
  good_entry['t']
363
- )):
 
 
 
 
 
 
 
 
 
 
364
  ade_seg = ade.resize(depth.size)
365
  ade_seg_np = np.array(ade_seg).astype(np.uint8)
366
  gest_seg = gest.resize(depth.size)
367
  gest_seg_np = np.array(gest_seg).astype(np.uint8)
368
  # Metric3D
369
- depth_np = np.array(depth) / 2.5 # 2.5 is the scale estimation coefficient
370
  #vertices, connections = get_vertices_and_edges_from_segmentation(gest_seg_np, edge_th = 20.)
371
  vertices, connections = get_vertices_and_edges_from_two_segmentations(ade_seg_np, gest_seg_np, edge_th = 20.)
372
 
@@ -374,8 +424,9 @@ def predict(entry, visualize=False) -> Tuple[np.ndarray, List[int]]:
374
  print (f'Not enough vertices or connections in image {i}')
375
  vert_edge_per_image[i] = np.empty((0, 2)), [], np.empty((0, 3))
376
  continue
377
- uv, depth_vert = get_uv_depth(vertices, depth_np)
378
- #uv, depth_vert, ade_category_vert, vertices = get_uv_dept_category(vertices, depth_np, ade_seg_np)
 
379
  # Normalize the uv to the camera intrinsics
380
  xy_local = np.ones((len(uv), 3))
381
  xy_local[:, 0] = (uv[:, 0] - K[0,2]) / K[0,0]
 
122
  vertex_depth = depth[(uv_int[:, 1] , uv_int[:, 0])]
123
  return uv, vertex_depth
124
 
125
def get_smooth_uv_depth(vertices, depth):
    '''Get per-vertex depth by averaging the depth image over a small disk
    around each vertex (a smoothed alternative to single-pixel lookup).

    Parameters
    ----------
    vertices : list of dict
        Each dict has an "xy" entry holding (x, y) pixel coordinates.
    depth : np.ndarray
        Depth image, at least 2-D; only the first two axes are used.

    Returns
    -------
    uv : np.ndarray, shape (N, 2)
        The raw (unclipped, float) vertex coordinates, in input order.
    vertex_depth : np.ndarray, shape (N,)
        Mean depth over a radius-5 disk centred on each vertex, with the
        disk intersected against the image bounds.
    '''
    uv = np.array([v['xy'] for v in vertices])
    # Guard the empty case: fancy indexing below would fail on a 1-D empty array.
    if uv.size == 0:
        return uv.reshape(0, 2), np.empty(0)

    uv_int = uv.astype(np.int32)
    H, W = depth.shape[:2]
    # Clip vertex centres into the image so every disk contains >= 1 pixel.
    xs = np.clip(uv_int[:, 0], 0, W - 1)
    ys = np.clip(uv_int[:, 1], 0, H - 1)

    r = 5  # smoothing radius in pixels
    # Precompute disk offsets once (loop-invariant). Comparing squared
    # distances to r*r is exact for integers and avoids a sqrt per pixel.
    dj, di = np.meshgrid(np.arange(-r, r + 1), np.arange(-r, r + 1), indexing='ij')
    in_disk = (di ** 2 + dj ** 2) <= r * r
    off_i = di[in_disk]  # column offsets
    off_j = dj[in_disk]  # row offsets

    vertex_depth = np.empty(len(xs), dtype=float)
    for k, (x, y) in enumerate(zip(xs, ys)):
        ii = x + off_i
        jj = y + off_j
        # Keep only disk pixels that fall inside the image.
        valid = (ii >= 0) & (ii < W) & (jj >= 0) & (jj < H)
        vertex_depth[k] = depth[jj[valid], ii[valid]].mean()

    return uv, vertex_depth
154
  def get_vertices_and_edges_from_two_segmentations(ade_seg_np, gest_seg_np, edge_th = 50.0):
155
  '''Get the vertices and edges from the gestalt segmentation mask of the house'''
156
  vertices = []
157
  connections = []
158
 
159
+ color_th = 10.0 # Cost ->2.6
160
 
161
  #-------------------------
162
  # combined map from ade
 
180
  #print(f'apex_color= {apex_color}')
181
  #apex_mask = cv2.inRange(gest_seg_np, apex_color-0.5, apex_color+0.5)
182
  apex_mask = cv2.inRange(gest_seg_np, apex_color-color_th, apex_color+color_th) # include more pts
183
+ #apex_mask = cv2.bitwise_and(apex_mask, ade_mask) # remove pts
184
  if apex_mask.sum() > 0:
185
  output = cv2.connectedComponentsWithStats(apex_mask, 8, cv2.CV_32S)
186
  (numLabels, labels, stats, centroids) = output
 
203
  apex_map_on_ade[uu+ss[0], vv+ss[1]] = (255,255,255)
204
  apex_map_on_gest[uu+ss[0], vv+ss[1]] = (255,255,255)
205
 
 
 
 
 
 
 
 
 
 
 
 
206
 
207
  eave_end_color = np.array(gestalt_color_mapping['eave_end_point'])
208
  #eave_end_mask = cv2.inRange(gest_seg_np, eave_end_color-0.5, eave_end_color+0.5)
 
216
  for i in range(numLabels-1):
217
  vert = {"xy": centroids[i], "type": "eave_end_point"}
218
  vertices.append(vert)
219
+
220
+ uu = int(centroids[i][1])
221
+ vv = int(centroids[i][0])
222
+ # plot a cross
223
+ apex_map_on_ade[uu, vv] = (255,0,0)
224
+ shift=[(1,0),(-1,0),(0,1),(0,-1), (2,0),(-2,0),(0,2),(0,-2), (3,0),(-3,0),(0,3),(0,-3)]
225
+ h,w,_ = apex_map_on_ade.shape
226
+ for ss in shift:
227
+ if uu+ss[0] >= 0 and uu+ss[0] < h and vv+ss[1] >= 0 and vv+ss[1] < w:
228
+ apex_map[uu+ss[0], vv+ss[1]] = (255,0,0)
229
+ apex_map_on_ade[uu+ss[0], vv+ss[1]] = (255,0,0)
230
+ apex_map_on_gest[uu+ss[0], vv+ss[1]] = (255,0,0)
231
+
232
+ # imsave apex and eave_end
233
+ import random
234
+ rid = random.random()
235
+ filename_apex_ade = f'apex_map_on_ade_{rid}.jpg'
236
+ cv2.imwrite(filename_apex_ade, apex_map_on_ade)
237
+ filename_apex_gest = f'apex_map_on_gest_{rid}.jpg'
238
+ cv2.imwrite(filename_apex_gest, apex_map_on_gest)
239
+ filename_apex_map = f'apex_map_{rid}.jpg'
240
+ cv2.imwrite(filename_apex_map, apex_map)
241
+
242
  print(f'{len(vertices)} vertices detected')
243
  # Connectivity
244
  apex_pts = []
 
400
  good_entry['K'],
401
  good_entry['R'],
402
  good_entry['t']
403
+ )):
404
+ ''' entry 0 suggests:
405
+ depth_scale = 1
406
+ if i==1:
407
+ depth_scale = 2.5
408
+ elif i==2: # only visualize view 0,1
409
+ continue
410
+ '''
411
+ depth_scale = 2.5
412
+
413
+
414
  ade_seg = ade.resize(depth.size)
415
  ade_seg_np = np.array(ade_seg).astype(np.uint8)
416
  gest_seg = gest.resize(depth.size)
417
  gest_seg_np = np.array(gest_seg).astype(np.uint8)
418
  # Metric3D
419
+ depth_np = np.array(depth) / depth_scale # / 2.5 # 2.5 is the scale estimation coefficient # don't use 2.5...
420
  #vertices, connections = get_vertices_and_edges_from_segmentation(gest_seg_np, edge_th = 20.)
421
  vertices, connections = get_vertices_and_edges_from_two_segmentations(ade_seg_np, gest_seg_np, edge_th = 20.)
422
 
 
424
  print (f'Not enough vertices or connections in image {i}')
425
  vert_edge_per_image[i] = np.empty((0, 2)), [], np.empty((0, 3))
426
  continue
427
+ #uv, depth_vert = get_uv_depth(vertices, depth_np)
428
+ uv, depth_vert = get_smooth_uv_depth(vertices, depth_np)
429
+
430
  # Normalize the uv to the camera intrinsics
431
  xy_local = np.ones((len(uv), 3))
432
  xy_local[:, 0] = (uv[:, 0] - K[0,2]) / K[0,0]