little update to the reader
dronescapes_reader/__init__.py CHANGED

@@ -1,13 +1,14 @@
 """init file"""
 from .multitask_dataset import MultiTaskDataset, NpzRepresentation
-from .dronescapes_representations import DepthRepresentation, OpticalFlowRepresentation, SemanticRepresentation
+from .dronescapes_representations import DepthRepresentation, OpticalFlowRepresentation, SemanticRepresentation, \
+    ColorRepresentation

 _color_map = [[0, 255, 0], [0, 127, 0], [255, 255, 0], [255, 255, 255],
               [255, 0, 0], [0, 0, 255], [0, 255, 255], [127, 127, 63]]
 _m2f_name = "semantic_mask2former_swin_mapillary_converted"
 dronescapes_task_types = {  # some pre-baked representations
-    "rgb":
-    "edges_dexined":
+    "rgb": ColorRepresentation("rgb"),
+    "edges_dexined": ColorRepresentation("edges_dexined"),
     "depth_dpt": DepthRepresentation("depth_dpt", min_depth=0, max_depth=0.999),
     "depth_sfm_manual202204": DepthRepresentation("depth_sfm_manual202204", min_depth=0, max_depth=300),
     "opticalflow_rife": OpticalFlowRepresentation,
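A quick sketch of how the updated pre-baked map can be consumed; the .npz path below is made up for illustration and not part of this commit:

# Hedged usage sketch; the frame path is hypothetical.
from pathlib import Path
from dronescapes_reader import dronescapes_task_types, ColorRepresentation

rgb_repr = dronescapes_task_types["rgb"]  # now a ColorRepresentation("rgb") instead of a bare entry
assert isinstance(rgb_repr, ColorRepresentation)
# rgb = rgb_repr.load_from_disk(Path("scene_1/rgb/frame_000.npz"))  # returns a float tensor scaled to [0, 1]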
dronescapes_reader/dronescapes_representations.py CHANGED

@@ -1,4 +1,5 @@
 """Dronescapes representations -- adds various loading/writing/image showing capabilities to dronescapes tasks"""
+from __future__ import annotations
 from pathlib import Path
 import numpy as np
 import torch as tr
@@ -6,29 +7,48 @@ import flow_vis
 from overrides import overrides
 from matplotlib.cm import hot  # pylint: disable=no-name-in-module
 from .multitask_dataset import NpzRepresentation
+from torch.nn import functional as F
+
+class ColorRepresentation(NpzRepresentation):
+    def load_from_disk(self, path: Path) -> tr.Tensor:
+        res = super().load_from_disk(path)
+        return res.float() / 255
+
+    def save_to_disk(self, data: tr.Tensor, path: Path):
+        return super().save_to_disk((data * 255).byte(), path)

 class DepthRepresentation(NpzRepresentation):
     """DepthRepresentation. Implements depth task-specific stuff, like hotmap."""
     def __init__(self, *args, min_depth: float, max_depth: float, **kwargs):
         super().__init__(*args, **kwargs)
+        assert 0 <= min_depth < max_depth, (min_depth, max_depth)
         self.min_depth = min_depth
         self.max_depth = max_depth

     @overrides
     def plot_fn(self, x: tr.Tensor) -> np.ndarray:
         x = x.detach().cpu().numpy()
-        x = np.clip(x,
-        x = np.nan_to_num((x - x.min()) / (x.max() - x.min()), 0)
+        x = np.clip(x, 0, 1)
         y = hot(x.squeeze())[..., 0:3]
         y = np.uint8(y * 255)
         return y

+    def load_from_disk(self, path: Path) -> tr.Tensor:
+        res = super().load_from_disk(path)
+        res = res.float().clip(self.min_depth, self.max_depth)
+        res = (res - self.min_depth) / (self.max_depth - self.min_depth)
+        return res
+
 class OpticalFlowRepresentation(NpzRepresentation):
     """OpticalFlowRepresentation. Implements depth task-specific stuff, like using flow_vis."""
     @overrides
     def plot_fn(self, x: tr.Tensor) -> np.ndarray:
         return flow_vis.flow_to_color(x.squeeze().nan_to_num(0).detach().cpu().numpy())

+    def load_from_disk(self, path: Path) -> tr.Tensor:
+        res = super().load_from_disk(path).float()
+        return res
+
 class SemanticRepresentation(NpzRepresentation):
     """SemanticRepresentation. Implements depth task-specific stuff, like using flow_vis."""
     def __init__(self, *args, classes: int | list[str], color_map: list[tuple[int, int, int]], **kwargs):
@@ -36,17 +56,18 @@ class SemanticRepresentation(NpzRepresentation):
         self.classes = list(range(classes)) if isinstance(classes, int) else classes
         self.n_classes = len(self.classes)
         self.color_map = color_map
-        assert len(color_map) == self.n_classes, (color_map, self.n_classes)
+        assert len(color_map) == self.n_classes and self.n_classes > 1, (color_map, self.n_classes)

     @overrides
     def load_from_disk(self, path: Path) -> tr.Tensor:
         res = super().load_from_disk(path)
         assert len(res.shape) == 2, f"Only argmaxed data supported, got: {res.shape}"
+        res = F.one_hot(res.long(), num_classes=self.n_classes).float()
         return res

     @overrides
     def plot_fn(self, x: tr.Tensor) -> np.ndarray:
-        x = x.squeeze().nan_to_num(0).detach().cpu().numpy()
+        x = x.squeeze().nan_to_num(0).detach().argmax(-1).cpu().numpy()
         new_images = np.zeros((*x.shape, 3), dtype=np.uint8)
         for i in range(self.n_classes):
             new_images[x == i] = self.color_map[i]
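For reference, the normalization that the new DepthRepresentation.load_from_disk applies, traced with made-up depth values (the 0/300 range matches the "depth_sfm_manual202204" entry above):

# Hedged trace of the clip-and-normalize step; raw values are invented.
import torch as tr

min_depth, max_depth = 0.0, 300.0
raw = tr.tensor([0.0, 150.0, 450.0])                       # hypothetical raw depths from the .npz
clipped = raw.float().clip(min_depth, max_depth)           # 450 is clamped to 300
normed = (clipped - min_depth) / (max_depth - min_depth)
print(normed)                                              # tensor([0.0000, 0.5000, 1.0000]); plot_fn's np.clip(x, 0, 1) now suffices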
dronescapes_reader/multitask_dataset.py CHANGED

@@ -2,6 +2,7 @@
 """MultiTask Dataset module compatible with torch.utils.data.Dataset & DataLoader."""
 from __future__ import annotations
 from pathlib import Path
+from typing import Dict, List, Tuple
 from argparse import ArgumentParser
 from pprint import pprint
 from natsort import natsorted
@@ -12,8 +13,8 @@ from torch.utils.data import Dataset, DataLoader
 from lovely_tensors import monkey_patch

 monkey_patch()
-BuildDatasetTuple =
-MultiTaskItem =
+BuildDatasetTuple = Tuple[Dict[str, List[Path]], List[str]]
+MultiTaskItem = Tuple[Dict[str, tr.Tensor], str, List[str]]  # [{task: data}, stem(name) | list[stem(name)], [tasks]]

 class NpzRepresentation:
     """Generic Task with data read from/saved to npz files. Tries to read data as-is from disk and store it as well"""