# -*- coding: utf-8 -*-

# gms_preprocessing, spatial and spectral homogenization of satellite remote sensing data
#
# Copyright (C) 2020 Daniel Scheffler (GFZ Potsdam, daniel.scheffler@gfz-potsdam.de)
#
# This software was developed within the context of the GeoMultiSens project funded
# by the German Federal Ministry of Education and Research
# (project grant code: 01 IS 14 010 A-C).
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later version.
# Please note the following exception: `gms_preprocessing` depends on tqdm, which
# is distributed under the Mozilla Public Licence (MPL) v2.0 except for the files
# "tqdm/_tqdm.py", "setup.py", "README.rst", "MANIFEST.in" and ".gitignore".
# Details can be found here: https://github.com/tqdm/tqdm/blob/master/LICENCE.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.

import datetime
import os
import re
import warnings

import gdal
import gdalnumeric
import numpy as np
from natsort import natsorted

try:
    from pyhdf import SD
except ImportError:
    SD = None

from geoarray import GeoArray
from py_tools_ds.geo.coord_calc import calc_FullDataset_corner_positions
from py_tools_ds.geo.coord_trafo import pixelToLatLon, imYX2mapYX
from py_tools_ds.geo.map_info import mapinfo2geotransform
from py_tools_ds.geo.projection import EPSG2WKT, isProjectedOrGeographic

from ..options.config import GMS_config as CFG
from . import geoprocessing as GEOP
from ..misc.definition_dicts import get_outFillZeroSaturated, is_dataset_provided_as_fullScene
from ..misc.locks import IOLock
from ..model.gms_object import GMS_object
from ..model import metadata as META

__author__ = 'Daniel Scheffler'


class L1A_object(GMS_object):
    """Features input reader and raster-/metadata homogenization."""

    def __init__(self, image_type='', satellite='', sensor='', subsystem='', sensormode='', acq_datetime=None,
                 entity_ID='', scene_ID=-9999, filename='', dataset_ID=-9999, proc_status='', **kwargs):
        """Create a new L1A object.

        :param image_type:    image type of the dataset
        :param satellite:     satellite name
        :param sensor:        sensor name
        :param subsystem:     sensor subsystem (only for multi-subsystem sensors such as ASTER or Sentinel-2)
        :param sensormode:    sensor mode
        :param acq_datetime:  acquisition date/time (datetime.datetime)
        :param entity_ID:     entity ID of the scene
        :param scene_ID:      scene ID of the scene
        :param filename:      name of the raw data archive
        :param dataset_ID:    dataset ID
        :param proc_status:   processing status to be set (defaults to 'initialized')
        """
        # NOTE: kwargs is in here to allow instantiating with the additional argument 'proc_level'

        # load default attribute values and methods
        super(L1A_object, self).__init__()

        # unpack kwargs
        self.proc_level = 'L1A'
        self.image_type = image_type  # FIXME not needed anymore?
        self.satellite = satellite
        self.sensor = sensor
        self.subsystem = subsystem
        self.sensormode = sensormode
        self.acq_datetime = acq_datetime
        self.entity_ID = entity_ID
        self.scene_ID = scene_ID
        self.filename = filename
        self.dataset_ID = dataset_ID

        # set paths (only if there are valid init args)
        if image_type and satellite and sensor:
            self.set_pathes()  # also overwrites logfile
            self.validate_pathes()

            if self.path_archive_valid:
                self.logger.info('L1A object for %s %s%s (entity-ID %s) successfully initialized.'
                                 % (self.satellite, self.sensor,
                                    (' ' + self.subsystem) if self.subsystem not in [None, ''] else '',
                                    self.entity_ID))

        # (re)set the processing status
        if self.scene_ID in self.proc_status_all_GMSobjs:
            del self.proc_status_all_GMSobjs[self.scene_ID]

        self.proc_status = proc_status or 'initialized'  # proc_status='running' may be given by L1A_map

    def import_rasterdata(self):
        if re.search(r"ALOS", self.satellite, re.I):
            # The first 22 lines are nodata - maybe due to an issue of the GDAL CEOS driver.
            # But: the UL position of the metadata refers to [row=0, col=21]! So the imported GeoTransform is correct
            # when the first 21 columns are deleted.
            self.archive_to_rasObj(self.path_archive, self.path_InFilePreprocessor,
                                   subset=['block', [[None, None], [21, None]]])
        elif re.search(r"Terra", self.satellite, re.I):
            self.ASTER_HDF_to_rasObj(self.path_archive, path_output=self.path_InFilePreprocessor)
        else:
            self.archive_to_rasObj(self.path_archive, path_output=self.path_InFilePreprocessor)

        # set shape_fullArr
        self.shape_fullArr = self.arr.shape

    def archive_to_rasObj(self, path_archive, path_output=None, subset=None):
        from ..misc.helper_functions import convert_absPathArchive_to_GDALvsiPath

        assert subset is None or isinstance(subset, list) and len(subset) == 2, \
            "Subset argument has to be a list with 2 elements."
        if subset:
            assert subset[0] == 'block', \
                "The function 'archive_to_rasObj' only supports block subsetting. Attempted to perform '%s' " \
                "subsetting." % subset[0]

        self.logger.info('Reading %s %s %s image data...' % (self.satellite, self.sensor, self.subsystem))
        gdal_path_archive = convert_absPathArchive_to_GDALvsiPath(path_archive)
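        # NOTE: the archive is accessed through GDAL's virtual file system (e.g., /vsitar or /vsizip),
        #       i.e., it does not have to be extracted to disk first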

        project_dir = os.path.abspath(os.curdir)
        os.chdir(os.path.dirname(path_archive))
        files_in_archive = gdal.ReadDirRecursive(gdal_path_archive)  # needs ~12 sec for Landsat-8
        assert files_in_archive, 'No files in archive %s for scene %s (entity ID: %s).' \
                                 % (os.path.basename(path_archive), self.scene_ID, self.entity_ID)
        full_LayerBandsAssignment = META.get_LayerBandsAssignment(self.GMS_identifier, no_thermal=False, no_pan=False)

        ####################################################
        # get list of raster files to be read from archive #
        ####################################################

        image_files = []
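        # ALOS, Landsat and Sentinel-2 archives contain one image file per band, whereas the remaining
        # supported sensors provide a single (possibly multiband) image file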

        is_ALOS_Landsat_S2 = \
            re.search(r'ALOS', self.satellite) or re.search(r'Landsat', self.satellite) or \
            re.search(r'Sentinel-2', self.satellite)
        n_files2search = len(full_LayerBandsAssignment) if is_ALOS_Landsat_S2 else 1

        for File in natsorted(files_in_archive):
            search_res = \
                re.search(r"IMG-0[0-9]-[\s\S]*", File) if re.search(r'ALOS', self.satellite) else \
                re.search(r"[\S]*_B[1-9][0-9]?[\S]*.TIF", File) if re.search(r'Landsat', self.satellite) else \
                re.search(r"[0-9]*.tif", File) if re.search(r'RapidEye', self.satellite) else \
                re.search(r"imagery.tif", File) if re.search(r'SPOT', self.satellite) else \
                re.search(r"[\S]*.SAFE/GRANULE/%s/IMG_DATA/[\S]*_B[0-9][\S]*.jp2"
                          % self.entity_ID, File) if re.search(r'Sentinel-2', self.satellite) else None

            if search_res:
                if re.search(r'Sentinel-2', self.satellite):
                    # add only those files that correspond to the current subsystem
                    # (e.g., S2A10: fullLBA = ['2', '3', '4', '8'])
                    if 1 in [1 if re.search(r"[\S]*_B[0]?%s.jp2" % LBAn, os.path.basename(File)) else 0
                             for LBAn in full_LayerBandsAssignment]:
                        image_files.append(File)
                else:
                    image_files.append(File)

        # create and fill raster object
        if n_files2search > 1:
            #####################################
            # validate number of expected files #
            #####################################
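            # Landsat-7 ETM+ scenes acquired after the SLC failure (2003-05-31) are expected to contain twice as
            # many matching band files, presumably because the delivered archives additionally include per-band
            # gap mask files following the same naming pattern.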

            if re.search(r'ETM\+', self.sensor) and self.acq_datetime > datetime.datetime(year=2003, month=5, day=31):
                expected_files_count = 2 * len(full_LayerBandsAssignment)
            else:
                expected_files_count = len(full_LayerBandsAssignment)

            assert len(image_files) == expected_files_count, "Expected %s image files in '%s'. Found %s." \
                % (expected_files_count, path_archive, len(image_files))

            ###############################
            # get paths of files to stack #
            ###############################

            # NOTE: image_files is a SORTED list of image filenames, whereas self.LayerBandsAssignment may be
            #       sorted by central wavelength (CWL)
            filtered_files = []
            for bN in self.LayerBandsAssignment:  # unsorted, e.g., ['1', '2', '3', '4', '5', '9', '6', '7']
                for fN, b in zip(image_files, natsorted(full_LayerBandsAssignment)):  # both nicely sorted
                    if b == bN:
                        filtered_files.append(fN)

            paths_files2stack = [os.path.join(gdal_path_archive, i) for i in filtered_files]

            #########################
            # read the raster data  #
            #########################

            rasObj = GEOP.GEOPROCESSING(paths_files2stack[0], self.logger)

            # in case a subset is to be read: prepare the rasObj instance to read a subset
            if subset:
                full_dim = [0, rasObj.rowEnd, 0, rasObj.colEnd]
                sub_dim = [subset[1][0][0], subset[1][0][1], subset[1][1][0], subset[1][1][1]]  # [rS, rE, cS, cE]
                sub_dim = [sub_dim[i] if sub_dim[i] else full_dim[i] for i in range(len(sub_dim))]
                subset = ['block', [[sub_dim[0], sub_dim[1] + 1], [sub_dim[2], sub_dim[3] + 1]]]
                rasObj = GEOP.GEOPROCESSING(paths_files2stack[0], self.logger, subset=subset)

            # perform layer stack
            with IOLock(allowed_slots=CFG.max_parallel_reads_writes, logger=self.logger):
                if CFG.inmem_serialization and path_output is None:  # numpy array output
                    self.arr = rasObj.Layerstacking(paths_files2stack)
                    self.path_InFilePreprocessor = paths_files2stack[0]
                else:  # 'MEMORY' or physical output
                    rasObj.Layerstacking(paths_files2stack, path_output=path_output)  # writes an output (gdal_merge)
                    self.arr = path_output

        else:
            assert image_files != [], 'No image files found within the archive matching the expected naming scheme.'
            path_file2load = os.path.join(gdal_path_archive, image_files[0])
            rasObj = GEOP.GEOPROCESSING(path_file2load, self.logger)

            if subset:
                full_dim = [0, rasObj.rowEnd, 0, rasObj.colEnd]
                sub_dim = [subset[1][0][0], subset[1][0][1], subset[1][1][0], subset[1][1][1]]  # [rS, rE, cS, cE]
                sub_dim = [sub_dim[i] if sub_dim[i] else full_dim[i] for i in range(len(sub_dim))]
                subset = ['block', [[sub_dim[0], sub_dim[1] + 1], [sub_dim[2], sub_dim[3] + 1]]]
                rasObj = GEOP.GEOPROCESSING(path_file2load, self.logger, subset=subset)

            # read a single file
            with IOLock(allowed_slots=CFG.max_parallel_reads_writes, logger=self.logger):
                if CFG.inmem_serialization and path_output is None:  # numpy array output
                    self.arr = gdalnumeric.LoadFile(path_file2load) if subset is None else \
                        gdalnumeric.LoadFile(path_file2load, rasObj.colStart, rasObj.rowStart, rasObj.cols,
                                             rasObj.rows)
                    self.path_InFilePreprocessor = path_file2load
                else:  # 'MEMORY' or physical output
                    GEOP.ndarray2gdal(rasObj.tondarray(), path_output,
                                      geotransform=rasObj.geotransform, projection=rasObj.projection)
                    self.arr = path_output

        os.chdir(project_dir)

    def ASTER_HDF_to_rasObj(self, path_archive, path_output=None):
        subsystem_identifier = 'VNIR' if self.subsystem in ['VNIR1', 'VNIR2'] else \
            'SWIR' if self.subsystem == 'SWIR' else 'TIR'

        ds = gdal.Open(path_archive)

        if ds:
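            # each ASTER band is exposed as a separate GDAL subdataset; select those belonging to the current
            # subsystem by name and stack them into one array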

            sds_md = ds.GetMetadata('SUBDATASETS')
            data_arr = None
            for bidx, b in enumerate(self.LayerBandsAssignment):
                sds_name = [i for i in sds_md.values()
                            if '%s_Band%s:ImageData' % (subsystem_identifier, b) in str(i) or
                            '%s_Swath:ImageData%s' % (subsystem_identifier, b) in str(i)][0]
                data = gdalnumeric.LoadFile(sds_name)
                if bidx == 0:
                    data_arr = np.empty(data.shape + (len(self.LayerBandsAssignment),), data.dtype)
                data_arr[:, :, bidx] = data

            if CFG.inmem_serialization and path_output is None:  # numpy array output
                self.arr = data_arr
            else:
                GEOP.ndarray2gdal(data_arr, path_output, geotransform=ds.GetGeoTransform(),
                                  projection=ds.GetProjection(), direction=3)
                self.arr = path_output

        elif SD is not None:
            self.logger.info('Missing HDF4 support within GDAL. Reading HDF file using alternative reader.')
            hdfFile = SD.SD(path_archive, SD.SDC.READ)
            i, list_matching_dsIdx = 0, []

            while True:  # get dataset indices within the HDF file
                # noinspection PyBroadException
                try:
                    ds = hdfFile.select(i)
                    if subsystem_identifier in str(ds.dimensions()) and 'ImagePixel' in str(ds.dimensions()):
                        list_matching_dsIdx.append(i)
                    i += 1
                except Exception:
                    break

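            # ASTER VNIR comprises the nadir-looking bands 1, 2, 3N and the backward-looking band 3B: the first
            # three matching image datasets are assigned to the VNIR1 subsystem, the last one to VNIR2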

            list_matching_dsIdx = list_matching_dsIdx[:3] if self.subsystem == 'VNIR1' else \
                [list_matching_dsIdx[-1]] if self.subsystem == 'VNIR2' else list_matching_dsIdx
            data_arr = None
            for i, dsIdx in enumerate(list_matching_dsIdx):
                data = hdfFile.select(dsIdx)[:]
                if i == 0:
                    data_arr = np.empty(data.shape + (len(self.LayerBandsAssignment),), data.dtype)
                data_arr[:, :, i] = data

            if CFG.inmem_serialization and path_output is None:  # numpy array output
                self.arr = data_arr
            else:
                GEOP.ndarray2gdal(data_arr, path_output, direction=3)
                self.arr = path_output

        else:
            self.logger.error('Missing HDF4 support. Reading HDF file failed.')
            raise ImportError('No suitable library for reading HDF4 data available.')

        del ds

    def import_metadata(self):
        """Read the metadata of the given file from the given ASCII metafile.

        Works for: RapidEye (metadata.xml), SPOT (metadata.dim), LANDSAT (mtl.txt), ASTER (downloaded coremetadata),
        ALOS (summary.txt & leader file).
        """
        self.logger.info('Reading %s %s %s metadata...' % (self.satellite, self.sensor, self.subsystem))
        self.MetaObj = META.METADATA(self.GMS_identifier)
        self.MetaObj.read_meta(self.scene_ID, self.path_InFilePreprocessor,
                               self.path_MetaPreprocessor, self.LayerBandsAssignment)

        self.logger.debug("The following metadata have been read:")
        for key, val in self.MetaObj.overview.items():
            self.logger.debug("%20s : %-4s" % (key, val))

        # set some object attributes directly linked to the metadata
        self.subsystem = self.MetaObj.Subsystem
        self.arr.nodata = self.MetaObj.spec_vals['fill']

        # update the acquisition date to a complete datetime object incl. date, time and timezone
        if self.MetaObj.AcqDateTime:
            self.acq_datetime = self.MetaObj.AcqDateTime

        # set arr_desc
        self.arr_desc = \
            'DN' if self.MetaObj.PhysUnit == 'DN' else \
            'Rad' if self.MetaObj.PhysUnit == "W * m-2 * sr-1 * micrometer-1" else \
            'TOA_Ref' if re.search(r'TOA_Reflectance', self.MetaObj.PhysUnit, re.I) else \
            'BOA_Ref' if re.search(r'BOA_Reflectance', self.MetaObj.PhysUnit, re.I) else \
            'Temp' if re.search(r'Degrees Celsius', self.MetaObj.PhysUnit, re.I) else None

        assert self.arr_desc, 'GMS_obj contains an unexpected physical unit: %s' % self.MetaObj.PhysUnit

    def calc_TOARadRefTemp(self, subset=None):
        """Convert DN, Rad or TOA_Ref data to TOA Reflectance, to Radiance or to Surface Temperature
        (depending on CFG.target_radunit_optical and CFG.target_radunit_thermal).

        The function can be executed by an L1A_object representing a full scene or a tile. To process a file from
        disk in tiles, provide an item of self.tile_pos as the 'subset' argument.
        """
        proc_opt_therm = sorted(list(set(self.dict_LayerOptTherm.values())))
        assert proc_opt_therm in [['optical', 'thermal'], ['optical'], ['thermal']]

        inFill, inZero, inSaturated = \
            self.MetaObj.spec_vals['fill'], self.MetaObj.spec_vals['zero'], self.MetaObj.spec_vals['saturated']
        (rS, rE), (cS, cE) = self.arr_pos or ((0, self.shape_fullArr[0]), (0, self.shape_fullArr[1]))
        # in_mem = hasattr(self, 'arr') and isinstance(self.arr, np.ndarray)
        # if in_mem:
        #     (rS, rE), (cS, cE) = \
        #         self.arr_pos if self.arr_pos else ((0, self.shape_fullArr[0]), (0, self.shape_fullArr[1]))
        #     bands = true_bands = self.arr.shape[2] if len(self.arr.shape) == 3 else 1
        # else:
        #     subset = subset if subset else ['block', self.arr_pos] if self.arr_pos else ['cube', None]
        #     bands, rS, rE, cS, cE = \
        #         list(GEOP.get_subsetProps_from_subsetArg(self.shape_fullArr, subset).values())[2:7]
        #     ds = gdal.Open(self.MetaObj.Dataname); true_bands = ds.RasterCount; ds = None
        assert len(self.LayerBandsAssignment) == self.arr.bands, \
            "DN2RadRef input data have %s bands although %s bands are specified in self.LayerBandsAssignment." \
            % (self.arr.bands, len(self.LayerBandsAssignment))

359 ################################ 

360 # perform conversion if needed # 

361 ################################ 

362 

363 data_optical, data_thermal, optical_bandsList, thermal_bandsList = None, None, [], [] 

364 for optical_thermal in ['optical', 'thermal']: 

365 if optical_thermal not in self.dict_LayerOptTherm.values(): 

366 continue 

367 conv = getattr(CFG, 'target_radunit_%s' % optical_thermal) 

368 conv = conv if conv != 'BOA_Ref' else 'TOA_Ref' 

369 assert conv in ['Rad', 'TOA_Ref', 'Temp'], 'Unsupported conversion type: %s' % conv 

370 arr_desc = self.arr_desc.split('/')[0] if optical_thermal == 'optical' else self.arr_desc.split('/')[-1] 

371 assert arr_desc in ['DN', 'Rad', 'TOA_Ref', 'Temp'], 'Unsupported array description: %s' % arr_desc 

372 

373 bList = [i for i, lr in enumerate(self.LayerBandsAssignment) if 

374 self.dict_LayerOptTherm[lr] == optical_thermal] 

375 

376 # custom_subsetProps = [[rS,rE],[cS,cE],bList] 

377 

378 inArray = np.take(self.arr, bList, axis=2) 

379 # inArray = np.take(self.arr,bList,axis=2) if in_mem else \ 

380 # INP_R.read_ENVI_image_data_as_array(self.MetaObj.Dataname,'custom',custom_subsetProps,self.logger,q=1) 

381 inArray = inArray[:, :, 0] if len(inArray.shape) == 3 and inArray.shape[2] == 1 else inArray # 3D to 2D 

382 

            if arr_desc == 'DN':
                self.log_for_fullArr_or_firstTile('Calculating %s...' % conv, subset)

                # get input parameters #
                ########################

                off = [self.MetaObj.Offsets[b] for b in self.LayerBandsAssignment]
                gai = [self.MetaObj.Gains[b] for b in self.LayerBandsAssignment]
                irr = [self.MetaObj.SolIrradiance[b] for b in self.LayerBandsAssignment]
                zen, esd = 90 - float(self.MetaObj.SunElevation), self.MetaObj.EarthSunDist
                k1 = [self.MetaObj.ThermalConstK1[b] for b in self.LayerBandsAssignment]
                k2 = [self.MetaObj.ThermalConstK2[b] for b in self.LayerBandsAssignment]

                OFF, GAI, IRR, K1, K2 = [list(np.array(par)[bList]) for par in [off, gai, irr, k1, k2]]
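                # NOTE: DN2TOARef is assumed to implement the standard TOA reflectance conversion
                #       rho_TOA = (pi * L * d^2) / (ESUN * cos(sun_zenith)), with radiance L = gain * DN + offset,
                #       ESUN the solar irradiance (IRR) and d the Earth-Sun distance (esd)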

                # perform conversion #
                ######################
                res = \
                    GEOP.DN2Rad(inArray, OFF, GAI, inFill, inZero, inSaturated) if conv == 'Rad' else \
                    GEOP.DN2TOARef(inArray, OFF, GAI, IRR, zen, esd, inFill, inZero,
                                   inSaturated) if conv == 'TOA_Ref' else \
                    GEOP.DN2DegreesCelsius_fastforward(inArray, OFF, GAI, K1, K2, 0.95, inFill, inZero, inSaturated)
                if conv == 'TOA_Ref':
                    self.MetaObj.ScaleFactor = CFG.scale_factor_TOARef

            elif arr_desc == 'Rad':
                raise NotImplementedError("Conversion Rad to %s is currently not supported." % conv)

            elif arr_desc == 'TOA_Ref':
                if conv == 'Rad':
                    """
                    https://sentinel.esa.int/web/sentinel/technical-guides/sentinel-2-msi/level-1c/algorithm
                    https://forum.step.esa.int/t/radiometric-conversion-of-sentinel-2-images/1123/
                    rToa = (float)(DN_L1C_band / QUANTIFICATION_VALUE);
                    L = (rToa * e0__SOLAR_IRRADIANCE_For_band * cos(Z__Sun_Angles_Grid_Zenith_Values)) /
                        (PI * U__earth_sun_distance_correction_factor);
                    L = (U__earth_sun_distance_correction_factor * rToa * e0__SOLAR_IRRADIANCE_For_band * cos(
                        Z__Sun_Angles_Grid_Zenith_Values)) / PI;
                    """
                    if re.search(r'Sentinel-2', self.satellite, re.I):
                        warnings.warn('Physical gain values unclear for Sentinel-2! This may cause errors when '
                                      'calculating radiance from TOA reflectance. ESA provides only 12 gain values '
                                      'for 13 bands and it is not clear for which bands the gains are provided.')
                    raise NotImplementedError("Conversion TOA_Ref to %s is currently not supported." % conv)
                else:  # conv == 'TOA_Ref'
                    if self.MetaObj.ScaleFactor != CFG.scale_factor_TOARef:
                        res = self.rescale_array(inArray, CFG.scale_factor_TOARef, self.MetaObj.ScaleFactor)
                        self.MetaObj.ScaleFactor = CFG.scale_factor_TOARef
                        self.log_for_fullArr_or_firstTile(
                            'Rescaling Ref data to scaling factor %d.' % CFG.scale_factor_TOARef)
                    else:
                        res = inArray
                        self.log_for_fullArr_or_firstTile('The input data already represent TOA '
                                                          'reflectance with the desired scale factor of %d.'
                                                          % CFG.scale_factor_TOARef)

            else:  # arr_desc == 'Temp'
                raise NotImplementedError("Conversion Temp to %s is currently not supported." % conv)

            # ensure int16 as output data type (also relevant for auto-setting of the nodata value)
            if isinstance(res, (np.ndarray, GeoArray)):
                # change the data type to int16 and update nodata values within the array
                res = res if res.dtype == np.int16 else res.astype(np.int16)
                res[res == inFill] = get_outFillZeroSaturated(np.int16)[0]

            if optical_thermal == 'optical':
                data_optical, optical_bandsList = res, bList
            else:
                data_thermal, thermal_bandsList = res, bList

        #################################################
        # combine results from optical and thermal data #
        #################################################

        if data_optical is not None and data_thermal is not None:
            bands_opt, bands_therm = [1 if len(d.shape) == 2 else d.shape[2] for d in [data_optical, data_thermal]]
            r, c, b = data_optical.shape[0], data_optical.shape[1], bands_opt + bands_therm
            dataOut = np.empty((r, c, b), data_optical.dtype)

            for idx_out, idx_in in zip(optical_bandsList, range(bands_opt)):
                dataOut[:, :, idx_out] = data_optical[:, :, idx_in]
            for idx_out, idx_in in zip(thermal_bandsList, range(bands_therm)):
                dataOut[:, :, idx_out] = data_thermal[:, :, idx_in]
        else:
            dataOut = data_optical if data_thermal is None else data_thermal
        assert dataOut is not None

        self.update_spec_vals_according_to_dtype('int16')
        tiles_desc = '_'.join([desc for op_th, desc in zip(['optical', 'thermal'],
                                                           [CFG.target_radunit_optical,
                                                            CFG.target_radunit_thermal])
                               if op_th in self.dict_LayerOptTherm.values()])

        self.arr = dataOut
        self.arr_desc = tiles_desc

        return {'desc': tiles_desc, 'row_start': rS, 'row_end': rE, 'col_start': cS, 'col_end': cE, 'data': dataOut}

    def validate_GeoTransProj_GeoAlign(self):
        project_dir = os.path.abspath(os.curdir)
        if self.MetaObj.Dataname.startswith('/vsi'):
            os.chdir(os.path.dirname(self.path_archive))
        rasObj = GEOP.GEOPROCESSING(self.MetaObj.Dataname, self.logger)
        if rasObj.geotransform == (0.0, 1.0, 0.0, 0.0, 0.0, 1.0) and rasObj.projection == '':
            if re.search(r'ALOS', self.satellite) and self.MetaObj.ProcLCode == '1B2':
                self.GeoTransProj_ok, self.GeoAlign_ok = False, True
            else:
                self.GeoTransProj_ok, self.GeoAlign_ok = False, False
        os.chdir(project_dir)

    def reference_data(self, out_CS='UTM'):
        """Perform georeferencing of self.arr or the corresponding data on disk, respectively.

        The method is skipped if self.GeoAlign_ok and self.GeoTransProj_ok evaluate to True. All attributes connected
        with the georeference of self.arr are automatically updated.
        """
        from ..io.output_writer import add_attributes_to_ENVIhdr

        if False in [self.GeoAlign_ok, self.GeoTransProj_ok]:
            previous_dataname = self.MetaObj.Dataname
            if hasattr(self, 'arr') and isinstance(self.arr, (GeoArray, np.ndarray)) and \
                    self.MetaObj.Dataname.startswith('/vsi'):
                outP = os.path.join(self.ExtractedFolder, self.baseN + '__' + self.arr_desc)
                # FIXME inefficient, but needed as long as georeference_by_TieP_or_inherent_GCPs does not support
                # FIXME direct array inputs
                GEOP.ndarray2gdal(self.arr, outPath=outP, geotransform=mapinfo2geotransform(self.MetaObj.map_info),
                                  projection=self.MetaObj.projection,
                                  direction=3)
                assert os.path.isfile(outP), 'Writing tempfile failed.'
                self.MetaObj.Dataname = outP
            rasObj = GEOP.GEOPROCESSING(self.MetaObj.Dataname, self.logger)

            # --Georeference if necessary
            if self.GeoAlign_ok and not self.GeoTransProj_ok:
                self.logger.info('Adding georeference from metadata to image...')
                rasObj.add_GeoTransform_Projection_using_MetaData(self.MetaObj.CornerTieP_LonLat,
                                                                  CS_TYPE=self.MetaObj.CS_TYPE,
                                                                  CS_DATUM=self.MetaObj.CS_DATUM,
                                                                  CS_UTM_ZONE=self.MetaObj.CS_UTM_ZONE,
                                                                  gResolution=self.MetaObj.gResolution,
                                                                  shape_fullArr=self.shape_fullArr)
                self.add_rasterInfo_to_MetaObj(rasObj)
                add_attributes_to_ENVIhdr(
                    {'map info': self.MetaObj.map_info, 'coordinate system string': self.MetaObj.projection},
                    os.path.splitext(self.MetaObj.Dataname)[0] + '.hdr')
                self.arr = self.MetaObj.Dataname
                self.GeoTransProj_ok = True

            if not self.GeoAlign_ok:
                self.logger.info('Georeferencing image...')
                rasObj.georeference_by_TieP_or_inherent_GCPs(TieP=self.MetaObj.CornerTieP_LonLat, dst_CS=out_CS,
                                                             dst_CS_datum='WGS84', mode='GDAL', use_workspace=True,
                                                             inFill=self.MetaObj.spec_vals['fill'])

                if not CFG.inmem_serialization:
                    path_warped = os.path.join(self.ExtractedFolder, self.baseN + '__' + self.arr_desc)
                    GEOP.ndarray2gdal(rasObj.tondarray(direction=3), path_warped, importFile=rasObj.desc, direction=3)
                    self.MetaObj.Dataname = path_warped
                    self.arr = path_warped
                else:  # CFG.inmem_serialization is True
                    self.arr = rasObj.tondarray(direction=3)

                self.shape_fullArr = [rasObj.rows, rasObj.cols, rasObj.bands]
                self.add_rasterInfo_to_MetaObj()  # uses self.MetaObj.Dataname as inFile
                self.update_spec_vals_according_to_dtype()
                self.calc_mask_nodata()  # uses the nodata value from self.MetaObj.spec_vals
                self.GeoTransProj_ok, self.GeoAlign_ok = True, True

            if rasObj.get_projection_type() == 'LonLat':
                self.MetaObj.CornerTieP_LonLat = rasObj.get_corner_coordinates('LonLat')

            if rasObj.get_projection_type() == 'UTM':
                self.MetaObj.CornerTieP_UTM = rasObj.get_corner_coordinates('UTM')

            if CFG.inmem_serialization:
                self.delete_tempFiles()  # these files are needed later in Python execution mode
                self.MetaObj.Dataname = previous_dataname  # /vsi.. pointing directly to raw data archive (which exists)

    def calc_corner_positions(self):
        """Get true corner positions in the form
        [UL, UR, LL, LR] as [(ULrow,ULcol),(URrow,URcol),...],[(ULLon,ULLat),(URLon,URLat),..]."""

        # set lonlat corner positions for the outer image edges
        rows, cols = self.shape_fullArr[:2]
        CorPosXY = [(0, 0), (cols, 0), (0, rows), (cols, rows)]
        gt = self.mask_nodata.geotransform
        # using the EPSG code ensures that exactly the same WKT string is used for in-mem and disk serialization
        prj = EPSG2WKT(self.mask_nodata.epsg)
        CorLatLon = pixelToLatLon(CorPosXY, geotransform=gt, projection=prj)
        self.corner_lonlat = [tuple(reversed(i)) for i in CorLatLon]

        # set true data corner positions (image coordinates)
        assert self.arr_shape == 'cube', 'The given GMS object must represent the full image cube for calculating ' \
                                         'true corner positions. Received %s.' % self.arr_shape
        assert hasattr(self, 'mask_nodata') and self.mask_nodata is not None, \
            "The L1A object needs to have a nodata mask."
        self.logger.info('Calculating true data corner positions (image and world coordinates)...')

        # if re.search(r'ETM+', self.sensor) and self.acq_datetime > datetime.datetime(year=2003, month=5, day=31,
        #                                                                              tzinfo=datetime.timezone.utc):
        if is_dataset_provided_as_fullScene(self.GMS_identifier):
            kw = dict(algorithm='numpy', assert_four_corners=True)
        else:
            kw = dict(algorithm='shapely', assert_four_corners=False)
        self.trueDataCornerPos = calc_FullDataset_corner_positions(self.mask_nodata, **kw)

        # set true data corner positions (lon/lat coordinates)
        trueCorPosXY = [tuple(reversed(i)) for i in self.trueDataCornerPos]
        trueCorLatLon = pixelToLatLon(trueCorPosXY, geotransform=gt, projection=prj)
        self.trueDataCornerLonLat = [tuple(reversed(i)) for i in trueCorLatLon]

        # set true data corner positions (UTM coordinates)
        if isProjectedOrGeographic(prj) == 'projected':
            data_corners_utmYX = [imYX2mapYX(imYX, gt=gt) for imYX in self.trueDataCornerPos]
            self.trueDataCornerUTM = [(YX[1], YX[0]) for YX in data_corners_utmYX]

        # set full scene corner positions (image and lonlat coordinates)
        if is_dataset_provided_as_fullScene(self.GMS_identifier):
            assert len(self.trueDataCornerLonLat) == 4, \
                "Dataset %s (entity ID %s) is provided as full scene. Thus exactly 4 image corners must be present " \
                "within the dataset. Found %s corners instead." \
                % (self.scene_ID, self.entity_ID, len(self.trueDataCornerLonLat))
            self.fullSceneCornerPos = self.trueDataCornerPos
            self.fullSceneCornerLonLat = self.trueDataCornerLonLat

        else:

            if re.search(r'AVNIR', self.sensor):
                self.fullSceneCornerPos = calc_FullDataset_corner_positions(self.mask_nodata, algorithm='numpy',
                                                                            assert_four_corners=False)
                # set true data corner positions (lon/lat coordinates)
                trueCorPosXY = [tuple(reversed(i)) for i in self.trueDataCornerPos]
                trueCorLatLon = pixelToLatLon(trueCorPosXY, geotransform=gt, projection=prj)
                self.fullSceneCornerLonLat = [tuple(reversed(i)) for i in trueCorLatLon]

            else:
                # RapidEye or Sentinel-2 data

                if re.search(r'Sentinel-2', self.satellite):
                    # get fullScene corner coordinates by database query
                    # -> calculate footprints for all granules of the same S2 datatake
                    # -> merge them and calculate overall corner positions
                    self.fullSceneCornerPos = [tuple(reversed(i)) for i in CorPosXY]  # outer corner positions
                    self.fullSceneCornerLonLat = self.corner_lonlat  # outer corner positions
                else:
                    # RapidEye
                    raise NotImplementedError  # FIXME

    def calc_center_AcqTime(self):
        """Calculate the scene acquisition time if not given in the provider metadata."""
        if self.MetaObj.AcqTime == '':
            self.MetaObj.calc_center_acquisition_time(self.fullSceneCornerLonLat, self.logger)

        # update the timestamp
        self.acq_datetime = self.MetaObj.AcqDateTime

    def calc_orbit_overpassParams(self):
        """Calculate orbit parameters (overpass duration and scene length)."""
        if is_dataset_provided_as_fullScene(self.GMS_identifier):
            self.MetaObj.overpassDurationSec, self.MetaObj.scene_length = \
                self.MetaObj.get_overpassDuration_SceneLength(
                    self.fullSceneCornerLonLat, self.fullSceneCornerPos, self.shape_fullArr, self.logger)

    def add_rasterInfo_to_MetaObj(self, custom_rasObj=None):
        """Add the attributes 'rows', 'cols', 'bands', 'map_info', 'projection' and 'physUnit' to self.MetaObj."""
        # TODO is this info still needed in MetaObj?
        project_dir = os.path.abspath(os.curdir)
        if self.MetaObj.Dataname.startswith('/vsi'):
            os.chdir(os.path.dirname(self.path_archive))

        rasObj = custom_rasObj if custom_rasObj else GEOP.GEOPROCESSING(self.MetaObj.Dataname, self.logger)
        self.MetaObj.add_rasObj_dims_projection_physUnit(rasObj, self.dict_LayerOptTherm, self.logger)

        prj_type = rasObj.get_projection_type()
        assert prj_type, "Projections other than LonLat or UTM are currently not supported. Got: %s." % prj_type
        if prj_type == 'LonLat':
            self.MetaObj.CornerTieP_LonLat = rasObj.get_corner_coordinates('LonLat')
        else:  # UTM
            self.MetaObj.CornerTieP_UTM = rasObj.get_corner_coordinates('UTM')

        if self.MetaObj.Dataname.startswith('/vsi'):  # only if everything is kept in memory
            os.chdir(project_dir)
            self.MetaObj.bands = 1 if len(self.arr.shape) == 2 else self.arr.shape[2]

        self.arr.gt = mapinfo2geotransform(self.MetaObj.map_info)
        if not self.arr.prj:
            self.arr.prj = self.MetaObj.projection

        # must be set here because the nodata mask has been computed from self.arr without geoinfos:
        self.mask_nodata.gt = self.arr.gt
        self.mask_nodata.prj = self.arr.prj

    def update_spec_vals_according_to_dtype(self, dtype=None):
        """Update self.MetaObj.spec_vals.

        :param dtype:   <str> or <numpy data type> The data type to be used for updating.
                        If not specified, the data type of self.arr is used.
        """
        if dtype is None:
            if hasattr(self, 'arr') and isinstance(self.arr, np.ndarray):
                dtype = self.arr.dtype
            else:
                arr = gdalnumeric.LoadFile(self.arr, 0, 0, 1, 1) \
                    if hasattr(self, 'arr') and isinstance(self.arr, str) else \
                    gdalnumeric.LoadFile(self.MetaObj.Dataname, 0, 0, 1, 1)
                assert arr is not None
                dtype = arr.dtype

        self.MetaObj.spec_vals['fill'], self.MetaObj.spec_vals['zero'], self.MetaObj.spec_vals['saturated'] = \
            get_outFillZeroSaturated(dtype)
        self.arr.nodata = self.MetaObj.spec_vals['fill']

    def calc_mean_VAA(self):
        """Calculate the mean viewing azimuth angle using the sensor flight line derived from the full scene corner
        coordinates."""
        if is_dataset_provided_as_fullScene(self.GMS_identifier):
            self.VAA_mean = \
                GEOP.calc_VAA_using_fullSceneCornerLonLat(self.fullSceneCornerLonLat, self.MetaObj.orbitParams)
        else:
            # e.g., Sentinel-2 / RapidEye
            self.logger.debug('No precise calculation of the mean viewing azimuth angle is possible because the '
                              'orbit track cannot be reconstructed from the dataset since the full scene corner '
                              'positions are unknown. The mean VAA is filled with the mean value of the viewing '
                              'azimuth array provided in the metadata.')
            self.VAA_mean = self.MetaObj.IncidenceAngle

        self.logger.info('Calculation of mean VAA...: %s' % round(self.VAA_mean, 2))

712 self.logger.info('Calculation of mean VAA...: %s' % round(self.VAA_mean, 2))