Seems to work. Implemented SemanticMapper in DronescapesRepresentation. Not fully generic, but good enough.
data/train_set/.task_statistics.npz CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:8f601e71980200cacfc43a1b3ed5d7d932b538b8cf94d569d74911a0d3604b54
+size 28542
dronescapes_reader/dronescapes_representations.py CHANGED
@@ -94,12 +94,16 @@ class SemanticRepresentation(NpzRepresentation):
         return new_images
 
 class SemanticMapper(SemanticRepresentation):
-    """
+    """
+    Maps one or more semantic segmentations to a final one + a merge fn. Copy-pasta from VRE.
+    TODO: allow non-semantic dependencies (must pass the correct load_fn or object instead of assuming semantic deps)
+    """
     def __init__(self, *args, original_classes: list[list[str]], mapping: list[dict[str, list[str]]],
                  color_map: list[tuple[int, int, int]],
                  merge_fn: Callable[[list[np.ndarray]], np.ndarray] | None = None, **kwargs):
         super().__init__(*args, classes=list(mapping[0].keys()), color_map=color_map, **kwargs)
-        assert len(self.dependencies) >= 1
+        assert len(self.dependencies) >= 1 and self.dependencies[0] != self.name, \
+            "No dependencies provided. Need at least one semantic segmentation to map."
         assert isinstance(mapping, list), type(mapping)
         assert len(mapping) == (B := len(self.dependencies)), (len(mapping), B)
         assert (A := len(original_classes)) == len(self.dependencies), (A, B)
@@ -233,8 +237,10 @@ tasks = [ # some pre-baked representations
     NormalsRepresentation("normals_sfm_manual202204"),
     OpticalFlowRepresentation("opticalflow_rife"),
     SemanticRepresentation("semantic_segprop8", classes=8, color_map=color_map_8classes),
-
-
+    SemanticRepresentation("semantic_mask2former_swin_mapillary_converted", classes=8, color_map=color_map_8classes),
+    SemanticMapper("semantic_mask2former_swin_mapillary_converted2", original_classes=[mapillary_classes],
+                   mapping=[m2f_mapillary_to_8_classes], color_map=color_map_8classes,
+                   dependencies=["semantic_mask2former_mapillary_49189528_0"]),
     SemanticRepresentation("semantic_mask2former_coco_47429163_0", classes=coco_classes, color_map=coco_color_map),
     SemanticRepresentation("semantic_mask2former_mapillary_49189528_0", classes=mapillary_classes,
                            color_map=mapillary_color_map),
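For context, a minimal standalone sketch (not from the repo) of what the `mapping` argument encodes: each target class maps to the list of original classes that collapse into it, and a lookup table turns an argmax segmentation over the original classes into one over the target classes. The class lists and the `remap` helper below are illustrative stand-ins; `m2f_mapillary_to_8_classes` is assumed to follow the same dict shape.

```python
import numpy as np

# Hypothetical stand-ins for the real class lists/maps defined in the repo.
original_classes = ["sky", "water", "building", "road", "vegetation"]
mapping = {  # target class -> original classes merged into it
    "land": ["road"],
    "forest": ["vegetation"],
    "residential": ["building"],
    "sky": ["sky"],
    "water": ["water"],
}

def remap(argmax_map: np.ndarray) -> np.ndarray:
    """Collapse an argmax map over original_classes into indices over mapping's keys."""
    lut = np.zeros(len(original_classes), dtype=np.int64)
    for new_ix, old_names in enumerate(mapping.values()):
        for old_name in old_names:
            lut[original_classes.index(old_name)] = new_ix
    return lut[argmax_map]  # fancy indexing applies the lookup table per pixel

pred = np.random.randint(0, len(original_classes), size=(4, 4))  # fake argmax prediction
print(remap(pred))                                               # same grid, remapped to 5 classes
```

With more than one dependency, `merge_fn` (a `Callable[[list[np.ndarray]], np.ndarray]` per the signature above) would presumably combine the per-dependency remapped maps into a single output; the entry added to `tasks` only exercises the single-dependency case.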
dronescapes_reader/multitask_dataset.py CHANGED
@@ -107,7 +107,10 @@ class MultiTaskDataset(Dataset):
         self._statistics = None if normalization is None else self._compute_statistics()
         if self._statistics is not None:
             for task_name, task in self.name_to_task.items():
-
+                try:
+                    task.set_normalization(self.normalization[task_name], self._statistics[task_name])
+                except:
+                    breakpoint()
 
     # Public methods and properties
 
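The `try`/`except: breakpoint()` wrapper is a temporary debugging aid around the call that attaches the precomputed per-task statistics. As a rough, standalone illustration of what a `set_normalization`-style call typically ends up applying (the actual statistics layout and normalization modes in the repo are not visible in this diff), here is a sketch assuming per-channel `(min, max, mean, std)` statistics and a `"standardization"` mode:

```python
import numpy as np

def normalize(x: np.ndarray, mode: str, stats: tuple[np.ndarray, ...]) -> np.ndarray:
    """Hypothetical per-task normalization from precomputed statistics.

    stats is assumed to be (min, max, mean, std), each broadcastable over x.
    """
    mins, maxs, mean, std = stats
    if mode == "min_max":
        return (x - mins) / (maxs - mins + 1e-8)
    if mode == "standardization":
        return (x - mean) / (std + 1e-8)
    raise ValueError(f"Unknown normalization mode: {mode}")

x = np.random.rand(2, 4, 4, 3).astype(np.float32)                 # fake batch of HxWx3 data
stats = (x.min((0, 1, 2)), x.max((0, 1, 2)), x.mean((0, 1, 2)), x.std((0, 1, 2)))
print(normalize(x, "standardization", stats).mean((0, 1, 2)))     # ~0 per channel
```

In the dataset itself, `self.normalization[task_name]` would supply the mode and `self._statistics[task_name]` the per-task statistics, judging from the call site above.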
scripts/dronescapes_viewer.ipynb CHANGED
(diff too large to render)
scripts/dronescapes_viewer.py CHANGED
@@ -7,6 +7,7 @@ from dronescapes_reader.dronescapes_representations import dronescapes_task_type
 from pprint import pprint
 from torch.utils.data import DataLoader
 import random
+import numpy as np
 
 def main():
     assert len(sys.argv) == 2, f"Usage ./dronescapes_viewer.py /path/to/dataset"
@@ -23,6 +24,10 @@ def main():
     data, name, repr_names = reader[rand_ix]  # get a random item
     pprint({k: v for k, v in data.items()})
 
+    img_data = {}
+    for k, v in data.items():
+        img_data[k] = reader.name_to_task[k].plot_fn(v) if v is not None else np.zeros((*reader.data_shape[k][0:2], 3))
+
     print("== Random loaded batch ==")
     batch_data, name, repr_names = reader[rand_ix: min(len(reader), rand_ix + 5)]  # get a random batch
     pprint({k: v for k, v in batch_data.items()})  # Nones are converted to 0s automagically
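The new loop builds `img_data`, one RGB image per representation via each task's `plot_fn`, with a black placeholder when the data is missing. As a possible follow-up (not in the repo), assuming every image in `img_data` has the same height and width, the dict could be tiled into a single collage for quick inspection; the `collage` helper below is illustrative only:

```python
import numpy as np

def collage(img_data: dict[str, np.ndarray], cols: int = 3) -> np.ndarray:
    """Tile same-sized HxWx3 images row by row, padding the last row with black."""
    imgs = [np.asarray(v, dtype=np.uint8) for v in img_data.values()]
    h, w, _ = imgs[0].shape
    rows = -(-len(imgs) // cols)  # ceil division
    canvas = np.zeros((rows * h, cols * w, 3), dtype=np.uint8)
    for i, img in enumerate(imgs):
        r, c = divmod(i, cols)
        canvas[r * h:(r + 1) * h, c * w:(c + 1) * w] = img
    return canvas

fake = {f"task_{i}": np.random.randint(0, 255, (32, 32, 3), dtype=np.uint8) for i in range(5)}
print(collage(fake).shape)  # (64, 96, 3)
```

The resulting array could then be written out with, e.g., `PIL.Image.fromarray(...).save(...)`.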