wangyi111 committed (verified)
Commit 3cdd994 · 1 Parent(s): 9cf1797

Upload senbench_clouds2_wrapper.py

Files changed (1)
  1. cloud_s2/senbench_clouds2_wrapper.py +155 -0
cloud_s2/senbench_clouds2_wrapper.py ADDED
import os
from collections.abc import Callable, Sequence
from datetime import date
from typing import ClassVar, TypeAlias

import kornia as K
import numpy as np
import pandas as pd
import rasterio
import torch
from pyproj import Transformer
from torch import Tensor
from torchgeo.datasets import CloudCoverDetection

Path: TypeAlias = str | os.PathLike[str]


class SenBenchCloudS2(CloudCoverDetection):
    url = None
    all_bands = ('B02', 'B03', 'B04', 'B08')
    splits: ClassVar[dict[str, str]] = {'train': 'public', 'val': 'private', 'test': 'private'}

    def __init__(
        self,
        root: Path = 'data',
        split: str = 'train',
        bands: Sequence[str] = all_bands,
        transforms: Callable[[dict[str, Tensor]], dict[str, Tensor]] | None = None,
        download: bool = False,
    ) -> None:
        # super().__init__(root=root, split=split, bands=bands, transforms=transforms, download=download)
        assert split in self.splits
        assert set(bands) <= set(self.all_bands)

        self.root = root
        self.split = split
        self.bands = bands
        self.transforms = transforms
        self.download = download

        self.csv = os.path.join(self.root, self.split, f'{self.split}_metadata.csv')
        self._verify()

        self.metadata = pd.read_csv(self.csv)

        self.reference_date = date(1970, 1, 1)
        self.patch_area = (16 * 10) ** 2  # patch size 16 px at 10 m GSD

    def __getitem__(self, index: int) -> dict[str, Tensor]:
        """Return a sample from the dataset.

        Args:
            index: index to return

        Returns:
            data, metadata (lon, lat, days, area) and label at the given index
        """
        chip_id = self.metadata.iat[index, 0]
        date_str = self.metadata.iat[index, 2]
        date_obj = date(int(date_str[:4]), int(date_str[5:7]), int(date_str[8:10]))
        # days elapsed since the reference date (1970-01-01)
        delta = (date_obj - self.reference_date).days

        image, coord = self._load_image(chip_id)
        label = self._load_target(chip_id)

        meta_info = np.array([coord[0], coord[1], delta, self.patch_area]).astype(np.float32)

        sample = {'image': image, 'mask': label, 'meta': torch.from_numpy(meta_info)}

        if self.transforms is not None:
            sample = self.transforms(sample)

        # # add metadata
        # sample['meta'] = torch.from_numpy(meta_info)

        return sample

    def _load_image(self, chip_id: str) -> tuple[Tensor, tuple[float, float]]:
        """Load all source bands for a chip.

        Args:
            chip_id: ID of the chip.

        Returns:
            a tensor of stacked source image data and the chip center coordinate (lon, lat)
        """
        path = os.path.join(self.root, self.split, f'{self.split}_features', chip_id)
        images = []
        lon, lat = None, None
        for band in self.bands:
            with rasterio.open(os.path.join(path, f'{band}.tif')) as src:
                images.append(src.read(1).astype(np.float32))
                if lon is None:
                    # center-pixel coordinate, reprojected to lon/lat if the raster is not in EPSG:4326
                    cx, cy = src.xy(src.height // 2, src.width // 2)
                    if src.crs.to_string() != 'EPSG:4326':
                        crs_transformer = Transformer.from_crs(src.crs, 'epsg:4326', always_xy=True)
                        lon, lat = crs_transformer.transform(cx, cy)
                    else:
                        lon, lat = cx, cy

        return torch.from_numpy(np.stack(images, axis=0)), (lon, lat)


class SegDataAugmentation(torch.nn.Module):
    def __init__(self, split, size):
        super().__init__()

        mean = torch.Tensor([0.0])
        std = torch.Tensor([1.0])

        self.norm = K.augmentation.Normalize(mean=mean, std=std)

        if split == "train":
            self.transform = K.augmentation.AugmentationSequential(
                K.augmentation.Resize(size=size, align_corners=True),
                K.augmentation.RandomRotation(degrees=90, p=0.5, align_corners=True),
                K.augmentation.RandomHorizontalFlip(p=0.5),
                K.augmentation.RandomVerticalFlip(p=0.5),
                data_keys=["input", "mask"],
            )
        else:
            self.transform = K.augmentation.AugmentationSequential(
                K.augmentation.Resize(size=size, align_corners=True),
                data_keys=["input", "mask"],
            )

    @torch.no_grad()
    def forward(self, batch: dict[str, Tensor]):
        """TorchGeo returns a dictionary with 'image' and 'mask' keys, but the engine expects a tuple."""
        x, mask = batch["image"], batch["mask"]
        x = self.norm(x)
        x_out, mask_out = self.transform(x, mask)
        return x_out.squeeze(0), mask_out.squeeze(0).squeeze(0), batch["meta"]


class SenBenchCloudS2Dataset:
    def __init__(self, config):
        self.dataset_config = config
        self.img_size = (config.image_resolution, config.image_resolution)
        self.root_dir = config.data_path

    def create_dataset(self):
        train_transform = SegDataAugmentation(split="train", size=self.img_size)
        eval_transform = SegDataAugmentation(split="test", size=self.img_size)

        dataset_train = SenBenchCloudS2(
            root=self.root_dir, split="train", transforms=train_transform
        )
        dataset_val = SenBenchCloudS2(
            root=self.root_dir, split="val", transforms=eval_transform
        )
        dataset_test = SenBenchCloudS2(
            root=self.root_dir, split="test", transforms=eval_transform
        )

        return dataset_train, dataset_val, dataset_test
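
For reference, a minimal usage sketch (not part of the uploaded file): the SimpleNamespace config, its attribute values, the import path, and the DataLoader settings are illustrative assumptions; only image_resolution and data_path are actually read by SenBenchCloudS2Dataset.

# Hypothetical usage example -- illustrative only, not part of senbench_clouds2_wrapper.py.
from types import SimpleNamespace

from torch.utils.data import DataLoader

# assumes the repository root is on PYTHONPATH
from cloud_s2.senbench_clouds2_wrapper import SenBenchCloudS2Dataset

# assumed config values; image_resolution and data_path are the only attributes the wrapper reads
config = SimpleNamespace(image_resolution=512, data_path='data/cloud_s2')

dataset_train, dataset_val, dataset_test = SenBenchCloudS2Dataset(config).create_dataset()

# SegDataAugmentation returns (image, mask, meta) tuples, so each batch is a 3-tuple:
# image (B, 4, H, W) float32, mask (B, H, W), meta (B, 4) = (lon, lat, days, patch area)
train_loader = DataLoader(dataset_train, batch_size=8, shuffle=True, num_workers=4)
for image, mask, meta in train_loader:
    ...  # training step goes here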