# Copernicus-Bench / lc100_s3olci / old / dataset_lc100_s3olci_seg.py
from torch.utils.data import DataLoader, Dataset
import cv2
import os
import rasterio
import torch
import numpy as np
from pyproj import Transformer
from datetime import date
# per-band scale factors applied to the 21 Sentinel-3 OLCI bands
S3_OLCI_SCALE = [0.0139465,0.0133873,0.0121481,0.0115198,0.0100953,0.0123538,0.00879161,0.00876539,
0.0095103,0.00773378,0.00675523,0.0071996,0.00749684,0.0086512,0.00526779,0.00530267,
0.00493004,0.00549962,0.00502847,0.00326378,0.00324118]
# mapping from raw CGLS-LC100 land-cover class codes to contiguous training ids (0-22)
LC100_CLSID = {
0: 0, # unknown
20: 1,
30: 2,
40: 3,
50: 4,
60: 5,
70: 6,
80: 7,
90: 8,
100: 9,
111: 10,
112: 11,
113: 12,
114: 13,
115: 14,
116: 15,
121: 16,
122: 17,
123: 18,
124: 19,
125: 20,
126: 21,
200: 22, # ocean
}
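
# Hedged helper, not part of the original file: inverse lookup from the
# contiguous training ids back to the raw CGLS-LC100 class codes, e.g. for
# exporting predictions in the native code space.
LC100_CLSID_INV = {v: k for k, v in LC100_CLSID.items()}
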
class S3OLCI_LC100SegDataset(Dataset):
    '''
    Sentinel-3 OLCI patches with CGLS-LC100 land-cover labels.
    6908/1727 train/test images, 96x96 pixels x 21 bands (upsampled to 282x282 in __getitem__)
    23-class LULC segmentation
    nodata: -inf
    time series: 1-4 time stamps per location (mode='series' pads to 4)
    '''
    def __init__(self, root_dir, mode='static', split='train', meta=False):
        self.root_dir = root_dir
        self.mode = mode  # 'static': one image per location; 'series': up to 4 time stamps
        self.meta = meta  # if True, return (lon, lat, days-since-1970, 0) per image
        self.img_dir = os.path.join(root_dir, split, 's3_olci')
        self.label_dir = os.path.join(root_dir, split, 'lc100')
        self.fnames = os.listdir(self.label_dir)

        if self.mode == 'static':
            # static_fnames.csv maps each location directory to its pre-selected image
            self.static_csv = os.path.join(root_dir, split, 'static_fnames.csv')
            with open(self.static_csv, 'r') as f:
                lines = f.readlines()
            self.static_img = {}
            for line in lines:
                dirname, img_fname = line.strip().split(',')[:2]
                self.static_img[dirname] = img_fname

        if self.meta:
            self.reference_date = date(1970, 1, 1)

    def __len__(self):
        return len(self.fnames)
    def __getitem__(self, idx):
        fname = self.fnames[idx]
        label_path = os.path.join(self.label_dir, fname)
        # each label <name>.tif has a matching directory of S3 OLCI images: s3_olci/<name>/
        s3_path = os.path.join(self.img_dir, fname.replace('.tif', ''))
        if self.mode == 'static':
            # single pre-selected image per location
            img_fname = self.static_img[fname.replace('.tif', '')]
            s3_paths = [os.path.join(s3_path, img_fname)]
        else:
            # series mode: use all available time stamps for this location
            img_fnames = os.listdir(s3_path)
            s3_paths = [os.path.join(s3_path, img_fname) for img_fname in img_fnames]
        imgs = []
        img_paths = []
        meta_infos = []
        for img_path in s3_paths:
            with rasterio.open(img_path) as src:
                img = src.read()  # all 21 bands
                # bicubic-upsample every band to 282x282 to match the label grid
                chs = []
                for b in range(21):
                    ch = cv2.resize(img[b], (282, 282), interpolation=cv2.INTER_CUBIC)
                    chs.append(ch)
                img = np.stack(chs)
                img[np.isnan(img)] = 0  # replace nodata with 0
                # apply per-band scale factors
                for b in range(21):
                    img[b] = img[b] * S3_OLCI_SCALE[b]
                img = torch.from_numpy(img).float()
                if self.meta:
                    # centre-pixel coordinates; the rasters are assumed to already be
                    # in lon/lat, so the CRS transform below is left disabled
                    cx, cy = src.xy(src.height // 2, src.width // 2)
                    # crs_transformer = Transformer.from_crs(src.crs, 'epsg:4326')
                    # lon, lat = crs_transformer.transform(cx, cy)
                    lon, lat = cx, cy
                    # acquisition date parsed from the file name; the second '_'-separated
                    # token is expected to start with YYYYMMDD
                    img_fname = os.path.basename(img_path)
                    date_str = img_fname.split('_')[1][:8]
                    date_obj = date(int(date_str[:4]), int(date_str[4:6]), int(date_str[6:8]))
                    delta = (date_obj - self.reference_date).days
                    # meta vector: lon, lat, days since 1970-01-01, plus one unused slot
                    meta_info = np.array([lon, lat, delta, 0]).astype(np.float32)
                else:
                    meta_info = np.array([np.nan, np.nan, np.nan, np.nan]).astype(np.float32)
            imgs.append(img)
            img_paths.append(img_path)
            meta_infos.append(meta_info)
        if self.mode == 'series':
            # pad the series to exactly 4 time stamps by repeating the last image
            while len(imgs) < 4:
                imgs.append(img)
                img_paths.append(img_path)
                meta_infos.append(meta_info)
        with rasterio.open(label_path) as src:
            label = src.read(1)
        label = cv2.resize(label, (282, 282), interpolation=cv2.INTER_NEAREST)
        # remap raw CGLS-LC100 class codes to contiguous ids 0-22
        label_new = np.zeros_like(label)
        for k, v in LC100_CLSID.items():
            label_new[label == k] = v
        label = torch.from_numpy(label_new.astype('int64'))

        if self.mode == 'static':
            return imgs[0], meta_infos[0], label
        elif self.mode == 'series':
            return imgs[0], imgs[1], imgs[2], imgs[3], meta_infos[0], meta_infos[1], meta_infos[2], meta_infos[3], label
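
# Hedged helper, not part of the original file: decode a meta_info vector produced
# by S3OLCI_LC100SegDataset(meta=True) back into readable fields. Assumes the
# [lon, lat, days-since-1970, unused] layout built in __getitem__; with meta=False
# the vector is all NaN and cannot be decoded.
def decode_meta_info(meta_info):
    from datetime import timedelta
    lon, lat, delta, _ = [float(x) for x in meta_info]
    acquisition_date = date(1970, 1, 1) + timedelta(days=int(delta))
    return lon, lat, acquisition_date
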
if __name__ == '__main__':
    # quick sanity check: load one batch in static mode and print tensor shapes
    dataset = S3OLCI_LC100SegDataset(root_dir='../data/downstream/cgls_lc100', mode='static', split='train', meta=True)
    dataloader = DataLoader(dataset, batch_size=64, shuffle=False, num_workers=4)
    for i, (img, meta_info, label) in enumerate(dataloader):
        print(img.shape, meta_info.shape, label.shape)
        break
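
# Hedged usage sketch, not part of the original script: consuming the 'series'
# mode output, which yields 4 images and 4 meta_info vectors per location
# (padded by repeating the last time stamp when fewer than 4 exist). The
# root_dir default mirrors the static example above and is an assumption.
def _series_mode_example(root_dir='../data/downstream/cgls_lc100'):
    dataset_ts = S3OLCI_LC100SegDataset(root_dir=root_dir, mode='series', split='train', meta=True)
    img1, img2, img3, img4, m1, m2, m3, m4, label = dataset_ts[0]
    imgs = torch.stack([img1, img2, img3, img4])  # (4, 21, 282, 282)
    print(imgs.shape, label.shape)                # label is (282, 282)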