Hide keyboard shortcuts

Hot-keys on this page

r m x p   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

1# -*- coding: utf-8 -*- 

2 

3# gms_preprocessing, spatial and spectral homogenization of satellite remote sensing data 

4# 

5# Copyright (C) 2020 Daniel Scheffler (GFZ Potsdam, daniel.scheffler@gfz-potsdam.de) 

6# 

7# This software was developed within the context of the GeoMultiSens project funded 

8# by the German Federal Ministry of Education and Research 

9# (project grant code: 01 IS 14 010 A-C). 

10# 

11# This program is free software: you can redistribute it and/or modify it under 

12# the terms of the GNU General Public License as published by the Free Software 

13# Foundation, either version 3 of the License, or (at your option) any later version. 

14# Please note the following exception: `gms_preprocessing` depends on tqdm, which 

15# is distributed under the Mozilla Public Licence (MPL) v2.0 except for the files 

16# "tqdm/_tqdm.py", "setup.py", "README.rst", "MANIFEST.in" and ".gitignore". 

17# Details can be found here: https://github.com/tqdm/tqdm/blob/master/LICENCE. 

18# 

19# This program is distributed in the hope that it will be useful, but WITHOUT 

20# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS 

21# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more 

22# details. 

23# 

24# You should have received a copy of the GNU Lesser General Public License along 

25# with this program. If not, see <http://www.gnu.org/licenses/>. 

26 

"""
GEOPROCESSING OBJECT

GIS operations to manipulate spatial data.
Takes an input dataset, performs an operation on that dataset
and returns the result of the operation as an output dataset.
"""

33 

34import collections 

35import datetime 

36import math 

37import os 

38import re 

39import subprocess 

40import time 

41import warnings 

42 

43import numpy as np 

44from matplotlib import dates as mdates 

45 

46# custom 

47try: 

48 from osgeo import osr 

49 from osgeo import gdal 

50 from osgeo import gdalnumeric 

51except ImportError: 

52 import osr 

53 import gdal 

54 import gdalnumeric 

55from gdalconst import GA_ReadOnly 

56 

57import pyproj 

58from pyorbital import astronomy 

59import ephem 

60from shapely.geometry import MultiPoint, Polygon 

61from shapely.ops import cascaded_union 

62 

63from geoarray import GeoArray 

64from py_tools_ds.geo.coord_grid import snap_bounds_to_pixGrid 

65from py_tools_ds.geo.coord_trafo import transform_utm_to_wgs84, transform_wgs84_to_utm, mapXY2imXY, imXY2mapXY 

66 

67from ..options.config import GMS_config as CFG 

68from ..misc.definition_dicts import get_outFillZeroSaturated 

69from ..misc.logging import close_logger 

70 

71__author__ = 'Daniel Scheffler', 'Robert Behling' 

72 

73 

74class GEOPROCESSING(object): 

75 

76 def __init__(self, geodata, logger, workspace=None, subset=None, v=None): 

77 self.logger = logger 

78 self.subset = subset 

79 self.target_radunit_optical = '' 

80 self.target_radunit_thermal = '' 

81 self.outpath_conv = None 

82 # check gdal environment 

83 if v is not None: 

84 self.logger.debug("\n--") 

85 self.logger.debug("GDAL Environment:") 

86 self.logger.debug("\tGDAL-version: ", gdal.VersionInfo()) 

87 self.logger.debug("\tmaximum amount of memory (as bytes) for caching within GDAL: ", gdal.GetCacheMax()) 

88 self.logger.debug("\tthe amount of memory currently used for caching within GDAL: ", gdal.GetCacheUsed()) 

89 self.logger.debug("\tPopFinderLocation: ", gdal.PopFinderLocation()) 

90 

91 # load Raster file to an GDAL Object 

92 # ----register drivers 

93 # • A driver is an object that knows how to interact with a certain data type (such as a shapefile) 

94 # • Need an appropriate driver in order to read or write data (need it explicitly for write) 

95 # register all drivers for reading 

96 

97 gdal.AllRegister() 

98 

99 assert type(geodata) in [str, gdal.Dataset], \ 

100 "GEOP: The argument 'geodata' has to be a path or a gdal.Dataset. Got %s." % type(geodata) 

101 if isinstance(geodata, str): 

102 # ----open files Open(<filename>, <GDALAccess>) 

103 # open raster to gdal object 

104 self.inDs = (gdal.Open(geodata, GA_ReadOnly)) 

105 self.filepath, self.filename = os.path.split(geodata) 

106 self.shortname, self.extension = os.path.splitext(self.filename) 

107 if v: 

108 self.logger.debug("\n--") 

109 self.logger.debug("load data to GDAL:\n\tfilepath: %s\n\tfilename: %s" % (self.filepath, self.filename)) 

110 if not geodata.startswith('/vsi'): 

111 path2check = geodata 

112 else: 

113 # gdal_prefix_dict = {'/vsizip': '.zip', '/vsitar': '.tar', '/vsitar': '.tar.gz', 

114 # '/vsitar' '.gz': '/vsigzip'} 

115 p1 = [geodata.split(i)[0] + i for i in ['.zip', '.tar', '.tar.gz', '.gz', '.tgz'] 

116 if len(geodata.split(i)) > 1 and geodata.split(i)[1].startswith('/')][0] 

117 path2check = os.path.abspath('.' + re.search(r'/vsi[\s\S]*(/[\s\S,.]*)', p1, re.I).group(1)) 

118 assert os.path.exists(path2check), "ERROR: data %s does not exist!" % path2check 

119 assert self.inDs is not None, "ERROR: Could not open %s!" % self.filename 

120 elif isinstance(geodata, gdal.Dataset): 

121 self.inDs = geodata 

122 del geodata 

123 self.filepath, self.filename = None, self.inDs.GetDescription() 

124 self.shortname, self.extension = os.path.splitext(self.filename) 

125 

126 # ****OBJECT ATTRIBUTES*************************************************** 

127 self.workspace = os.path.join(CFG.path_tempdir, 'GEOPROCESSING_temp') if workspace is None else workspace 

128 if v: 

129 self.logger.debug("\n--") 

130 self.logger.debug("\ttemporary geoprocessing workspace", self.workspace) 

131 

132 # --Getting columns, rows and number of bands of inputdataset 

133 self.desc = self.inDs.GetDescription() 

134 if self.subset is None or self.subset[0] == 'cube': 

135 self.cols = self.inDs.RasterXSize 

136 self.rows = self.inDs.RasterYSize 

137 self.bands = self.inDs.RasterCount 

138 self.colStart = 0 

139 self.rowStart = 0 

140 self.bandStart = 0 

141 self.colEnd = self.cols - 1 

142 self.rowEnd = self.rows - 1 

143 self.bandEnd = self.bands - 1 

144 self.bandsList = range(self.bandStart, self.bandEnd + 1) 

145 else: 

146 assert isinstance(self.subset, list) and len(self.subset) == 2, \ 

147 "Unable to initialize GEOP object due to bad arguments at subset keyword. Got %s." % self.subset 

148 self.subset_kwargs_to_cols_rows_bands_colStart_rowStart_bandStart() 

149 

150 self.DataProp = [self.cols, self.rows, self.bands, self.colStart, self.rowStart, self.bandStart] 

151 if v is not None: 

152 self.logger.info("""--\nproperties of the inputdataset\n\tcolumns[cols]: %s\n\trows[rows]: 

153 %s\n\tbands[bands]: %s\n\tDescription[desc]: %s""" % (self.cols, self.rows, self.bands, self.desc)) 

154 

155 # --Getting driver infos of inputdataset 

156 self.drname_s = self.inDs.GetDriver().ShortName 

157 self.drname_l = self.inDs.GetDriver().LongName 

158 self.drhelp = self.inDs.GetDriver().HelpTopic 

159 self.DriverProp = [self.drname_s, self.drname_l, self.drhelp] 

160 

161 if v is not None: 

162 self.logger.info( 

163 "\tDriverShortName[drname_s]: %s\n\tDriverLongName[drnam_l]: %s\n\tDriverHelpWebsite[drhelp]: %s" % ( 

164 self.drname_s, self.drname_l, self.drhelp)) 

165 

166 # --Getting Georeference info of inputdataset 

167 self.geotransform = self.inDs.GetGeoTransform() 

168 self.projection = self.inDs.GetProjection() 

169 self.originX = self.geotransform[0] 

170 self.originY = self.geotransform[3] 

171 self.pixelWidth = self.geotransform[1] 

172 self.pixelHeight = self.geotransform[5] 

173 self.rot1 = self.geotransform[2] 

174 self.rot2 = self.geotransform[4] 

175 self.extent = [self.originX, self.originY, self.originX + (self.cols * self.pixelWidth), 

176 self.originY + (self.rows * self.pixelHeight)] # [ulx, uly, lrx, lry] 

177 self.GeoProp = [self.originX, self.originY, self.pixelWidth, self.pixelHeight, self.rot1, self.rot2] 

178 

179 if v is not None: 

180 self.logger.info("\toriginX[originX]:", self.originX, "\n\toriginY[originY]:", self.originY, 

181 "\n\tresolution: X[pixelWidth]:" + str(self.pixelWidth), 

182 "Y[pixelHeight]:" + str(self.pixelHeight), "\n\trotation: a[rot1]:" + str(self.rot1), 

183 "b[rot2]:" + str(self.rot2)) 

184 

185 self.Prop = {'DataProp': self.DataProp, 'DriverProp': self.DriverProp, 'GeoProp': self.GeoProp} 

186 

187 """****OBJECT METHODS******************************************************""" 

188 

189 def __getstate__(self): 

190 """Defines how the attributes of GEOPROCESSING instances are pickled.""" 

191 close_logger(self.logger) 

192 self.logger = None 

193 

194 return self.__dict__ 

195 

196 def __del__(self): 

197 close_logger(self.logger) 

198 self.logger = None 

199 

    def subset_kwargs_to_cols_rows_bands_colStart_rowStart_bandStart(self):
        """Derive rows/cols/bands and their start/end indices from ``self.subset``.

        NOTE(review): the multi-target unpacking below relies on the insertion order
        of the dict returned by get_subsetProps_from_subsetArg() — verify that its
        key order matches the attribute order here.
        """
        # full-array shape as [rows, cols, bands]
        shape_fullArr = [self.inDs.RasterYSize, self.inDs.RasterXSize, self.inDs.RasterCount]
        self.rows, self.cols, self.bands, self.rowStart, self.rowEnd, self.colStart, self.colEnd, self.bandStart, \
            self.bandEnd, self.bandsList = get_subsetProps_from_subsetArg(shape_fullArr, self.subset).values()
        # a 'cube' subset is equivalent to the full dataset -> reset subset to None
        self.subset = self.subset if self.subset[0] != 'cube' else None

205 

    def update_dataset_related_attributes(self):
        """Refresh all attributes that are derived from ``self.inDs``.

        Must be called whenever self.inDs has been replaced (e.g. after warping or
        layer stacking) so that size, driver and georeference attributes stay in sync.
        """
        self.desc = self.inDs.GetDescription()
        self.filepath, self.filename = os.path.split(self.desc)
        self.shortname, self.extension = os.path.splitext(self.filename)

        # NOTE(review): __init__ tests 'self.subset[0] == "cube"' whereas this method
        # tests 'self.subset == "cube"' — confirm which convention is intended.
        if self.subset is None or self.subset == 'cube':
            self.cols = self.inDs.RasterXSize
            self.rows = self.inDs.RasterYSize
            self.bands = self.inDs.RasterCount
            self.colStart = 0
            self.rowStart = 0
            self.bandStart = 0
            self.colEnd = self.cols - 1
            self.rowEnd = self.rows - 1
            self.bandEnd = self.bands - 1
            self.bandsList = range(self.bandStart, self.bandEnd + 1)
        else:
            self.subset_kwargs_to_cols_rows_bands_colStart_rowStart_bandStart()

        self.DataProp = [self.cols, self.rows, self.bands, self.colStart, self.rowStart, self.bandStart]
        # driver infos
        self.drname_s = self.inDs.GetDriver().ShortName
        self.drname_l = self.inDs.GetDriver().LongName
        self.drhelp = self.inDs.GetDriver().HelpTopic
        self.DriverProp = [self.drname_s, self.drname_l, self.drhelp]

        # georeference infos
        self.geotransform = self.inDs.GetGeoTransform()
        self.projection = self.inDs.GetProjection()
        self.originX = self.geotransform[0]
        self.originY = self.geotransform[3]
        self.pixelWidth = self.geotransform[1]
        self.pixelHeight = self.geotransform[5]
        self.rot1 = self.geotransform[2]  # FIXME check
        self.rot2 = self.geotransform[4]
        self.extent = [self.originX, self.originY, self.originX + (self.cols * self.pixelWidth),
                       self.originY + (self.rows * self.pixelHeight)]  # [ulx, uly, lrx, lry]
        self.GeoProp = [self.originX, self.originY, self.pixelWidth, self.pixelHeight, self.rot1, self.rot2]

        self.Prop = {'DataProp': self.DataProp, 'DriverProp': self.DriverProp, 'GeoProp': self.GeoProp}

244 

245 def gdalinfo(self): 

246 """get properties of the Inputdatasets via gdalinfo 

247 

248 (die Infos die er hier ausschreibt in subrocess.Popen können einer variable als Text übergeben werden. Aus 

249 diesem Text kann ich dann die Infos als Attribute ausschreiben. 

250 als ersatz für die jetzige Attributerzeugung 

251 Muss die Attributerzeugung wirklich sein? kann ich nicht alle irgendwie über das GDAL-objekt abfragen! 

252 bis jetzt hab ich aber nur die Infos die oben stehen als Abfrage ermöglichen können 

253 """ 

254 subprocess.Popen(["gdalinfo", os.path.join(self.filepath, self.filename)]).wait() 

255 

    def add_GeoTransform_Projection_using_MetaData(self, CornerTieP_LonLat, CS_EPSG=None, CS_TYPE=None, CS_DATUM=None,
                                                   CS_UTM_ZONE=None, gResolution=None, shape_fullArr=None):
        """Set projection and geotransform of self.inDs from metadata values.

        Method assumes that north is up. Map rotations are not respected.

        :param CornerTieP_LonLat: corner tie points as [[lon, lat], ...], UL corner first
        :param CS_EPSG:           EPSG code of the coordinate system, if known
        :param CS_TYPE:           'UTM' or 'LonLat'; only evaluated if CS_EPSG is not given
        :param CS_DATUM:          e.g. 'WGS84'; only evaluated if CS_EPSG is not given
        :param CS_UTM_ZONE:       UTM zone number; derived from the UL corner longitude if not given
        :param gResolution:       ground sampling distance; derived from the corner
                                  coordinates and shape_fullArr if not given
        :param shape_fullArr:     full-array shape, where index 1 (number of columns) is used
        """
        if CornerTieP_LonLat == [] or CornerTieP_LonLat is None:
            self.logger.error('Failed to add projection. L1A object must have corner coordinates.')
        else:
            # Projection
            srs = osr.SpatialReference()
            if CS_EPSG is not None:
                srs.ImportFromEPSG(CS_EPSG)
                self.inDs.SetProjection(srs.ExportToWkt())
            elif CS_TYPE is not None and CS_DATUM is not None:
                if CS_TYPE == 'UTM' and CS_DATUM == 'WGS84':
                    # derive the UTM zone from the UL corner longitude if not provided
                    UTM_zone = int(1 + (CornerTieP_LonLat[0][0] + 180.0) / 6.0) if CS_UTM_ZONE is None else CS_UTM_ZONE
                    # EPSG 326xx = WGS84 / UTM north; 327xx = WGS84 / UTM south
                    EPSG_code = int('326' + str(UTM_zone)) if CornerTieP_LonLat[0][1] > 0.0 else int(
                        '327' + str(UTM_zone))
                elif CS_TYPE == 'LonLat' and CS_DATUM == 'WGS84':
                    EPSG_code = 4326
                else:
                    self.logger.error('Coordinate system type %s is not yet implemented. Since corner coordinates in '
                                      'Lon/Lat are available, it has been set to Lon/Lat.' % CS_TYPE)
                    EPSG_code, CS_TYPE, CS_DATUM = (4326, 'LonLat', 'WGS84')
                srs.ImportFromEPSG(EPSG_code)
                self.inDs.SetProjection(srs.ExportToWkt())
            else:
                # no CRS information available at all -> fall back to Lon/Lat WGS84
                CS_TYPE, CS_DATUM, CS_EPSG = ('LonLat', 'WGS84', 4326)
                srs.ImportFromEPSG(CS_EPSG)
                self.inDs.SetProjection(srs.ExportToWkt())
            self.projection = self.inDs.GetProjection()
            # GeoTransform: origin is the UL tie point, converted to UTM if needed
            self.originX, self.originY = CornerTieP_LonLat[0] if CS_TYPE == 'LonLat' else transform_wgs84_to_utm(
                CornerTieP_LonLat[0][0], CornerTieP_LonLat[0][1])
            if gResolution is None or gResolution == -99.:
                if shape_fullArr is None:
                    self.logger.error("Failed to add projection. Please provide at least one of the arguments "
                                      "'gResolution' or 'shape_fullArr'!")
                else:
                    # GSD = geodesic distance UL->UR divided by the number of columns
                    gResolution = float(pyproj.Geod(ellps=CS_DATUM).inv(CornerTieP_LonLat[0][0],
                                                                        CornerTieP_LonLat[0][1],
                                                                        CornerTieP_LonLat[1][0],
                                                                        CornerTieP_LonLat[1][1])[2]) / shape_fullArr[1]
                    # distance in [m]/cols
                    self.logger.info(
                        "While adding projection the ground sampling distance had to be calculated. It has "
                        "been set to %s meters." % gResolution)
            # north-up image: negative pixel height, zero rotation terms
            self.pixelWidth, self.pixelHeight, self.rot1, self.rot2 = (gResolution, gResolution * -1., 0, 0)
            GT = [self.originX, self.pixelWidth, self.rot1, self.originY, self.rot2, self.pixelHeight]
            self.inDs.SetGeoTransform(GT)
            self.geotransform = self.inDs.GetGeoTransform()

314 

    def georeference_by_TieP_or_inherent_GCPs(self, use_inherent_GCPs=False, TieP=None, dst_EPSG_code=None,
                                              dst_CS='UTM', dst_CS_datum='WGS84', mode='GDAL', use_workspace=True,
                                              inFill=None):
        """Warp image to new Projection.

        :param use_inherent_GCPs: use the GCP list already attached to the dataset instead of TieP
        :param TieP:              Corner Tie Points - always LonLat.
        :param dst_EPSG_code:     EPSG-Code defines LonLat or UTM coordinates.
        :param dst_CS:            target CS type ('UTM' or 'LonLat'); only used if dst_EPSG_code is not given
        :param dst_CS_datum:      target datum; currently only 'WGS84' is handled
        :param mode:              'GDAL' (external gdal_translate/gdalwarp) or 'rasterio' (not implemented)
        :param use_workspace:     write intermediate results as ENVI files instead of VRTs
        :param inFill:            nodata value of the input image; derived from the data type if None
        """
        if use_inherent_GCPs and TieP is None:
            self.logger.info('Georeferencing dataset using inherent GCP list...')
        if use_inherent_GCPs and TieP:
            self.logger.info("\n\nWARNING: User defined tie points are provided though 'use_inherent_GCPs' is true. "
                             "Georeferencing dataset using inherent GCP list...")
        if not use_inherent_GCPs and TieP:
            self.logger.info('Georeferencing dataset by given tiepoints...')
            gcp_ul = [1, 1, TieP[0][0], TieP[0][1]]  # col/row/map_x/map_y
            gcp_ur = [self.cols, 1, TieP[1][0], TieP[1][1]]
            gcp_ll = [1, self.rows, TieP[2][0], TieP[2][1]]
            gcp_lr = [self.cols, self.rows, TieP[3][0], TieP[3][1]]

        # derive the target EPSG code from the tie points if not given
        if dst_EPSG_code is None or dst_EPSG_code == -99.:
            if dst_CS == 'UTM' and dst_CS_datum == 'WGS84':
                UTM_zone = int(1 + (TieP[0][0] + 180.0) / 6.0)
                dst_EPSG_code = int('326' + str(UTM_zone)) if TieP[0][1] > 0.0 else int('327' + str(UTM_zone))
            if dst_CS == 'LonLat' and dst_CS_datum == 'WGS84':
                dst_EPSG_code = 4326

        assert mode in ['rasterio', 'GDAL'], "The 'mode' argument must be set to 'rasterio' or 'GDAL'."

        if mode == 'rasterio':
            """needs no temporary files but does not support multiprocessing"""
            raise NotImplementedError()
            # out_prj = EPSG2WKT(dst_EPSG_code)
            # proj_geo = isProjectedOrGeographic(self.projection)
            #
            # assert proj_geo in ['projected', 'geographic']
            # TieP = TieP if proj_geo == 'geographic' else \
            #     [transform_wgs84_to_utm(LonLat[0], LonLat[1], get_UTMzone(prj=self.projection)) for LonLat in TieP]
            # xmin, xmax, ymin, ymax = HLP_F.corner_coord_to_minmax(TieP)
            # t0 = time.time()
            # in_arr = np.swapaxes(np.swapaxes(gdalnumeric.LoadFile(self.desc), 0, 2), 0, 1)
            # print('reading time', time.time() - t0)
            # if inFill is None:
            #     inFill = get_outFillZeroSaturated(in_arr.dtype)[0]
            # out_nodataVal = get_outFillZeroSaturated(in_arr.dtype)[0]
            # t0 = time.time()

            # warped, gt, prj = warp_ndarray(in_arr, self.geotransform, self.projection, out_prj,
            #                                out_bounds=([xmin, ymin, xmax, ymax]), rspAlg='cubic',
            #                                in_nodata=inFill, out_nodata=out_nodataVal)[0]

            # multiprocessing: not implemented further because multiproceesing within Python Mappers is not possible
            # args = [(in_arr[:,:,i],self.geotransform,self.projection,out_prj,([xmin, ymin, xmax, ymax]),
            #         2,in_nodataVal) for i in range(in_arr.shape[2])]
            # import multiprocessing
            # with multiprocessing.Pool() as pool:
            #     results = pool.map(warp_mp,args)

            # print('warping time', time.time() - t0)
            # from spectral.io import envi
            # envi.save_image('/dev/shm/test_warped.hdr',warped,dtype=str(np.dtype(warped.dtype)),
            #                 force=True,ext='.bsq',interleave='bsq')

        else:  # mode == 'GDAL'
            """needs temporary files but does support multiprocessing and configuring cache size"""
            t0 = time.time()
            # read a single pixel only to determine the data type
            in_dtype = gdalnumeric.LoadFile(self.desc, 0, 0, 1, 1).dtype
            if inFill is None:
                inFill = get_outFillZeroSaturated(in_dtype)[0]
            out_nodataVal = get_outFillZeroSaturated(in_dtype)[0]
            # NOTE(review): gcp_ul/gcp_ur/gcp_ll/gcp_lr are only bound in the
            # 'TieP given' branch above; with use_inherent_GCPs=True and no TieP this
            # line raises NameError — confirm the intended call paths.
            gcps = ' '.join(['-gcp %s %s %s %s' % tuple(gcp) for gcp in [gcp_ul, gcp_ur, gcp_ll, gcp_lr]])

            if use_workspace:
                inFile = self.desc
                translatedFile = os.path.splitext(inFile)[0] + '_translate' if not use_inherent_GCPs else inFile
                warpedFile = os.path.splitext(inFile)[0] + '_warp'
                if not use_inherent_GCPs:
                    # attach the GCPs and the source SRS to an intermediate ENVI file
                    os.system('gdal_translate -of ENVI %s \
                    -a_srs EPSG:%s -q %s %s' % (gcps, dst_EPSG_code, inFile, translatedFile))

                # os.system('gdalwarp -of ENVI --config GDAL_CACHEMAX 2048 -wm 2048 -ot Int16 -t_srs EPSG:%s -tps
                #     -r cubic -srcnodata -%s -dstnodata %s -overwrite -q %s %s' \
                #     %(dst_EPSG_code, in_nodataVal,out_nodataVal, translatedFile, warpedFile))
                # thin-plate-spline warp based on the attached GCPs
                os.system('gdalwarp -of ENVI --config GDAL_CACHEMAX 2048 -wm 2048 -t_srs EPSG:%s -tps -r \
                    cubic -srcnodata %s -dstnodata %s -multi -overwrite -wo NUM_THREADS=%s -q %s %s'
                          % (dst_EPSG_code, inFill, out_nodataVal, CFG.CPUs, translatedFile, warpedFile))
                # import shutil
                # only for bugfixing:
                # shutil.copy(translatedFile, \
                #     '//misc/hy5/scheffler/Skripte_Models/python/gms_preprocessing/sandbox/out/')
                # shutil.copy(translatedFile+'.hdr',\
                #     '//misc/hy5/scheffler/Skripte_Models/python/gms_preprocessing/sandbox/out/')
                # shutil.copy(warpedFile, \
                #     '//misc/hy5/scheffler/Skripte_Models/python/gms_preprocessing/sandbox/out/')
                # shutil.copy(warpedFile+'.hdr', \
                #     '//misc/hy5/scheffler/Skripte_Models/python/gms_preprocessing/sandbox/out/')
            else:
                # VRT-based variant: avoids materializing intermediate rasters on disk
                VRT_name = os.path.splitext(os.path.basename(self.desc))[0] + '.vrt'
                # dst_ds = gdal.GetDriverByName('VRT').CreateCopy(VRT_name, self.inDs, 0)
                # del dst_ds
                inFile = VRT_name
                translatedFile = os.path.splitext(inFile)[0] + '_translate.vrt' if not use_inherent_GCPs else self.desc
                warpedFile = os.path.splitext(inFile)[0] + '_warp.vrt'
                if not use_inherent_GCPs:
                    os.system('gdal_translate -of VRT %s \
                    -a_srs EPSG:%s -q %s %s' % (gcps, dst_EPSG_code, inFile, translatedFile))
                # os.system('gdalwarp -of VRT --config GDAL_CACHEMAX 500 -wm 500 -ot Int16 -t_srs EPSG:%s -tps -r \
                #     cubic -srcnodata %s -dstnodata %s -multi -overwrite -wo NUM_THREADS=10 -q %s %s' \
                #     %(dst_EPSG_code,in_nodataVal,out_nodataVal,translatedFile,warpedFile))
                os.system('gdalwarp -of VRT --config GDAL_CACHEMAX 2048 -wm 2048 -ot Int16 -t_srs EPSG:%s -tps -r \
                    cubic -srcnodata %s -dstnodata %s -overwrite -multi -wo NUM_THREADS=%s -q %s %s'
                          % (dst_EPSG_code, inFill, out_nodataVal, CFG.CPUs, translatedFile, warpedFile))
                # print('warped')
            print('GDAL warping time', time.time() - t0)

            # VRTs must be opened in shared mode so that band references stay valid
            self.inDs = gdal.OpenShared(warpedFile, GA_ReadOnly) \
                if inFile.endswith('.vrt') else gdal.Open(warpedFile, GA_ReadOnly)
            # print('read successful')
            self.update_dataset_related_attributes()

440 

441 # def get_row_column_bounds(self,arr_shape = None, arr_pos = None): 

442 # if arr_shape == None and arr_pos == None: 

443 # arr_shape = 'cube' 

444 # try: 

445 # if arr_shape == 'row': # arr_pos = int 

446 # row_start,row_end,col_start,col_end, rows,cols = arr_pos,arr_pos+1,0,self.cols, 1,self.cols 

447 # elif arr_shape == 'col': # arr_pos = int 

448 # row_start,row_end,col_start,col_end, rows,cols = 0,self.rows,arr_pos,arr_pos+1, self.rows,1 

449 # elif arr_shape == 'band' or arr_shape == 'cube': # arr_pos = None 

450 # row_start,row_end,col_start,col_end, rows,cols = 0,self.rows-1,0,self.cols-1, self.rows,self.cols 

451 # elif arr_shape == 'block' or arr_shape == 'custom': 

452 # # arr_pos = [ (row_start,row_end),(col_start,col_end),(band_start,band_end) ] 

453 # row_start,row_end,col_start,col_end, rows,cols = \ 

454 # arr_pos[0][0],arr_pos[0][1],arr_pos[1][0],arr_pos[1][1], \ 

455 # arr_pos[0][1]+1-arr_pos[0][0],arr_pos[1][1]+1-arr_pos[1][0] 

456 # elif arr_shape == 'pixel': # arr_pos = (x,y) 

457 # row_start,row_end,col_start,col_end, rows,cols = arr_pos[0],arr_pos[0]+1,arr_pos[1],arr_pos[1]+1, 1,1 

458 # return row_start,row_end,col_start,col_end,rows,cols 

459 # except: 

460 # self.logger.error( 

461 # 'Error while setting row and column bounds. Got arr_shape = %s and arr_pos = %s' %(arr_shape,arr_pos)) 

462 

463 def get_projection_type(self): 

464 return 'LonLat' if osr.SpatialReference(self.projection).IsGeographic() else 'UTM' if osr.SpatialReference( 

465 self.projection).IsProjected() else None 

466 

467 def get_corner_coordinates(self, targetProj): 

468 """Returns corner coordinates of the entire GEOP object in lon/lat or UTM. 

469 

470 ATTENTION: coordinates represent PIXEL CORNERS: 

471 UL=UL-coordinate of (0,0) 

472 UR=UR-coordinate of (0,self.cols-1) 

473 => lonlat_arr always contains UL-coordinate of each pixel 

474 

475 :param targetProj: 'LonLat' or 'UTM' 

476 """ 

477 UL_LonLat = self.pixelToWorldCoord_using_geotransform_and_projection((0, 0), targetProj) 

478 # self.cols statt self.cols-1, um Koordinate am rechten Pixelrand zu berechnen: 

479 UR_LonLat = self.pixelToWorldCoord_using_geotransform_and_projection((0, self.cols), targetProj) 

480 LL_LonLat = self.pixelToWorldCoord_using_geotransform_and_projection((self.rows, 0), targetProj) 

481 LR_LonLat = self.pixelToWorldCoord_using_geotransform_and_projection((self.rows, self.cols), targetProj) 

482 UL_LonLat = tuple([round(i, 10) for i in UL_LonLat]) 

483 UR_LonLat = tuple([round(i, 10) for i in UR_LonLat]) 

484 LL_LonLat = tuple([round(i, 10) for i in LL_LonLat]) 

485 LR_LonLat = tuple([round(i, 10) for i in LR_LonLat]) 

486 return [UL_LonLat, UR_LonLat, LL_LonLat, LR_LonLat] 

487 

488 def calc_mask_data_nodata(self, array=None, custom_nodataVal=-9999): 

489 """Berechnet den Bildbereich, der von allen Kanälen abgedeckt wird. 

490 

491 :param array: input numpy array to be used for mask calculation (otherwise read from disk) 

492 :param custom_nodataVal: 

493 """ 

494 nodataVal = get_outFillZeroSaturated(np.int16)[0] if custom_nodataVal is None else custom_nodataVal 

495 in_arr = array if array is not None else np.swapaxes(np.swapaxes(gdalnumeric.LoadFile(self.desc), 0, 2), 0, 1) 

496 return np.all(np.where(in_arr == nodataVal, 0, 1), axis=2) 

497 

498 def pixelToWorldCoord_using_geotransform_and_projection(self, Pixel_row_col, targetProj): 

499 if targetProj not in ['UTM', 'LonLat']: 

500 self.logger.error("Failed to calculate world coordinates of true data corners. Target projection must be " 

501 "'UTM' or 'LonLat'. Got %s" % targetProj) 

502 row, col = Pixel_row_col 

503 XWorld = self.geotransform[0] + col * self.geotransform[1] 

504 YWorld = self.geotransform[3] + row * self.geotransform[5] 

505 src_srs = osr.SpatialReference() 

506 src_srs.ImportFromWkt(self.projection) 

507 if src_srs.IsGeographic() and targetProj == 'LonLat' or src_srs.IsProjected() and targetProj == 'UTM': 

508 return XWorld, YWorld 

509 elif src_srs.IsProjected() and targetProj == 'LonLat': 

510 return transform_utm_to_wgs84(XWorld, YWorld, src_srs.GetUTMZone()) # [Lon,Lat] 

511 else: # geographic to UTM 

512 return transform_wgs84_to_utm(XWorld, YWorld) # [HW,RW] 

513 

    def tondarray(self, direction=1, startpix=None, extent=None, UL=None, LR=None, v=0):
        """Convert gdalobject to 3dimensional ndarray stack ([x,y,z]).

        :param direction: 1: shape: [bands, rows, cols]
                             -> same as the read ENVI structure ([band1],[band2],..[bandn])
                          2: shape: [rows, bands, cols]
                          3: shape: [rows, cols, bands]
                             -> structure used in PIL and IDL for example
                                ([pixband1,pixband2,..,pixbandn],[pixband1,pixband2,..,pixbandn],...)
        :param startpix:  [x,y] pixels->UL:pixelcoordinates for subset generation
        :param extent:    [cols, rows] -> number of pixels in x and y-direction
        :param UL:        [x,y] map coordinates -> UL map coordinates for subset generation
        :param LR:        [x,y] map coordinates -> LR map coordinates for subset generation
        :param v:         verbose flag
        :return: numpy array in the band order requested via ``direction``
        """
        # -- test of given inputparameter combination
        test = [startpix is not None, extent is not None, UL is not None, LR is not None]

        # nothing given -> full image
        if test == [False, False, False, False]:
            startpix = [0, 0]
            extent = [self.cols, self.rows]

        # only startpixel coordinates given. extent set to startcoordinates to LR of the image
        elif test == [True, False, False, False]:
            assert len(startpix) == 2 and startpix[0] < self.cols and startpix[1] < self.rows, \
                "XXX-ERROR-XXX: Start coordinates x: %i, y: %i out of range" % (startpix[0], startpix[1])
            extent = [self.cols - startpix[0], self.rows - startpix[1]]

        # startpix and extent given
        elif test == [True, True, False, False]:
            assert len(startpix) == 2 and startpix[0] < self.cols and startpix[1] < self.rows and len(extent) == 2, \
                "XXX-ERROR-XXX: Start coordinates x: %i, y: %i out of range" % (startpix[0], startpix[1])
            # clip the requested extent to the image boundaries
            # NOTE(review): the 'elif' means only ONE axis is clipped per call — confirm
            # whether both axes should be clipped independently.
            if extent[0] > self.cols - startpix[0]:
                extent[0] = self.cols - startpix[0]
            elif extent[1] > self.rows - startpix[1]:
                extent[1] = self.rows - startpix[1]

        # extent and UL given
        # NOTE(review): this branch leaves startpix unset (None), which fails later at
        # ReadAsArray — presumably unimplemented; verify intended behavior.
        elif test == [False, True, True, False]:
            pass
        # only UL coordinates given. extent set to startcoordinates to LR of the image (use UL coordnates if they are
        # within the image otherwise UL=origin of the image)
        elif test == [False, False, True, False]:
            assert len(UL) == 2, "XXX-ERROR-XXX: A list of 2 arguments (numbers) for UL required"
            # convert coordinates to float
            UL = [float(UL[0]), float(UL[1])]
            # Test if x of UL is within the image
            if not self.originX <= UL[0] <= (self.originX + self.cols * self.pixelWidth):
                UL[0] = self.originX
            # Test if y of UL is within the image
            if not self.originY >= UL[1] >= (self.originY + self.rows * self.pixelHeight):
                UL[1] = self.originY
            # convert UL in pixelcoordinates
            startpix = [int(float((UL[0] - self.originX)) / float(self.pixelWidth)),
                        int(float((UL[1] - self.originY)) / float(self.pixelHeight))]
            extent = [self.cols - startpix[0], self.rows - startpix[1]]

        # UL and LR coordinates given (use UL and LR coordinates if they are within the image otherwise use the whole
        # image)
        elif test == [False, False, True, True]:
            assert len(UL) == 2 and len(LR) == 2, \
                "XXX-ERROR-XXX: A list of 2 arguments (numbers) for UL and for LR is required"
            # convert coordinates to float
            UL = [float(UL[0]), float(UL[1])]
            LR = [float(LR[0]), float(LR[1])]
            # Test if x of UL is within the image
            if not self.originX <= UL[0] <= (self.originX + self.cols * self.pixelWidth):
                UL[0] = self.originX
            # Test if y of UL is within the image
            if not self.originY >= UL[1] >= (self.originY + self.rows * self.pixelHeight):
                UL[1] = self.originY
            # Test if x of LR is within the image
            if not self.originX <= LR[0] <= (self.originX + self.cols * self.pixelWidth):
                LR[0] = self.originX + self.cols * self.pixelWidth
            # Test if y of LR is within the image
            if not self.originY >= LR[1] >= (self.originY + self.rows * self.pixelHeight):
                LR[1] = self.originY + self.rows * self.pixelHeight
            # convert UL in pixelcoordinates
            startpix = [int(float((UL[0] - self.originX)) / float(self.pixelWidth)),
                        int(float((UL[1] - self.originY)) / float(self.pixelHeight))]
            # get extent (+0.5 rounds to the nearest whole pixel)
            extent = [int(float((LR[0] - UL[0])) / self.pixelWidth + 0.5),
                      int(float((LR[1] - UL[1])) / self.pixelHeight + 0.5)]

            # if UL or LR are the same, only one pixel is selected
            if extent[0] == 0:
                extent[0] = 1
            if extent[1] == 0:
                extent[1] = 1
        if v:
            print("\t startpix:", startpix)
            print("\t extent:", extent)

        if self.subset is not None and self.subset != 'cube':  # rasObj instanced as subset
            if [self.inDs.RasterYSize, self.inDs.RasterXSize] != [self.rows,
                                                                  self.cols]:  # inDs does not represent subset
                startpix = [startpix[0] + self.colStart, startpix[1] + self.rowStart]
                extent = [self.cols, self.rows]

        if self.subset is not None and self.subset[0] == 'custom' and \
                self.target_radunit_optical == '' and self.target_radunit_thermal == '':
            # conversion to Rad or Ref overwrites self.inDs
            # => custom bandsList contains bands that are NOT in range(self.bands)
            bands2process = self.bandsList
        else:  # normal case
            bands2process = range(self.bands)

        # read the selected window band by band into a (rows, cols, bands) array
        np_dtype = convertGdalNumpyDataType(gdal.GetDataTypeName(self.inDs.GetRasterBand(1).DataType))
        im = np.empty((self.rows, self.cols, len(bands2process)), np_dtype)
        for out_idx, x in enumerate(bands2process):
            im[:, :, out_idx] = self.inDs.GetRasterBand(x + 1).ReadAsArray(startpix[0], startpix[1], extent[0],
                                                                           extent[1])
        # reorder axes according to the requested output layout
        if direction == 1:  # bands, rows, cols (ENVI-like band-sequential layout)
            im = np.rollaxis(im, 2)
        elif direction == 2:  # rows, bands, cols
            im = np.swapaxes(im, 1, 2)
        elif direction == 3:  # rows, cols, bands (as needed by PIL / IDL)
            pass
        return im

633 

    def Layerstacking(self, layers_pathlist, path_output=None):
        """Stack the given single-band raster files into one multi-band dataset.

        Three modes, selected via ``path_output``:
        - ``None``:     read all bands into a numpy array of shape (rows, cols, bands) and return it
        - ``'MEMORY'``: build an in-memory GDAL dataset and assign it to ``self.inDs``
        - a path:       write an ENVI stack to disk via ``gdal_merge.py`` and update this
                        object's dataset-related attributes

        :param layers_pathlist: list of paths of the raster files to be stacked (one band per file)
        :param path_output:     None, 'MEMORY' or the output path of the stack to be written
        :return: numpy array (if path_output is None), the decoded gdal_merge.py output
                 (if the subprocess printed anything) or None
        """
        rows, cols, bands = self.rows, self.cols, len(layers_pathlist)

        if path_output is None:
            # derive the output dtype from a 1x1 probe read of the first layer
            dtype = gdalnumeric.LoadFile(layers_pathlist[0], 0, 0, 1, 1).dtype
            stacked = np.empty((self.rows, self.cols, len(layers_pathlist)), dtype)

            for i, p in enumerate(layers_pathlist):
                self.logger.info('Adding band %s to Layerstack..' % os.path.basename(p))
                if self.subset is None or self.subset[0] == 'cube':
                    stacked[:, :, i] = gdalnumeric.LoadFile(p)
                else:
                    # read only the subset window from each layer
                    stacked[:, :, i] = gdalnumeric.LoadFile(p, self.colStart, self.rowStart, cols, rows)

            return stacked

        elif path_output == 'MEMORY':  # CFG.inmem_serialization is True
            stack_in_mem = gdal.GetDriverByName("MEM").Create("stack.mem", cols, rows, bands)

            for idx, layer in enumerate(layers_pathlist):
                current_band = gdal.Open(layer, GA_ReadOnly)
                stack_in_mem.AddBand()  # data type is dynamically assigned
                band_in_stack = stack_in_mem.GetRasterBand(idx + 1)

                # only add bands whose pixel grid matches this object's grid
                if [self.rows, self.cols] == [current_band.RasterYSize, current_band.RasterXSize]:
                    self.logger.info('Adding band %s to Layerstack..' % os.path.basename(layer))

                    if self.subset is None or self.subset[0] == 'cube':
                        band_in_stack.WriteArray(current_band.GetRasterBand(1).ReadAsArray(), 0, 0)
                    else:
                        band_in_stack.WriteArray(current_band.GetRasterBand(1)
                                                 .ReadAsArray(self.colStart, self.rowStart, self.cols, self.rows), 0, 0)
                else:
                    self.logger.info(
                        'Band %s skipped due to incompatible geometrical resolution.' % os.path.basename(layer))
                del current_band, band_in_stack

            self.bands = bands
            self.inDs = stack_in_mem

        else:  # CFG.inmem_serialization is False
            from ..misc.helper_functions import subcall_with_output

            self.logger.info('Adding the following bands to Layerstack:')
            [self.logger.info(os.path.basename(i)) for i in layers_pathlist]

            # ensure a clean output location (remove stale data and ENVI header)
            if not os.path.isdir(os.path.dirname(path_output)):
                os.makedirs(os.path.dirname(path_output))
            if os.path.exists(path_output):
                os.remove(path_output)
            if os.path.exists(os.path.splitext(path_output)[0] + '.hdr'):
                os.remove(os.path.splitext(path_output)[0] + '.hdr')

            str_layers_pathlist = ' '.join(layers_pathlist)

            # NOTE(review): '-seperate' is the historical gdal_merge.py spelling
            # (modern docs use '-separate') - confirm the installed GDAL accepts it
            if self.subset is None:
                cmd = "gdal_merge.py -q -o %s -of ENVI -seperate %s" % (path_output, str_layers_pathlist)
                output, exitcode, err = subcall_with_output(cmd)
                if exitcode:
                    raise RuntimeError(err)
                if output:
                    return output.decode('UTF-8')
                # FIXME this changes the format of the projection (maybe a GDAL bug?)
                # FIXME normalize by EPSG2WKT(WKT2EPSG(WKT))
            else:
                # convert Pixel Coords to Geo Coords
                orig_ds = gdal.Open(layers_pathlist[0], GA_ReadOnly)
                GT, PR = orig_ds.GetGeoTransform(), orig_ds.GetProjection()
                UL_Xgeo = GT[0] + self.colStart * GT[1] + self.rowStart * GT[2]
                UL_Ygeo = GT[3] + self.colStart * GT[4] + self.rowStart * GT[5]
                LR_Xgeo = GT[0] + self.colEnd * GT[1] + self.rowEnd * GT[2]
                LR_Ygeo = GT[3] + self.colEnd * GT[4] + self.rowEnd * GT[5]
                ullr = '%s %s %s %s' % (UL_Xgeo, UL_Ygeo, LR_Xgeo, LR_Ygeo)

                cmd = "gdal_merge.py -q -o %s -ul_lr %s -of ENVI -seperate %s" \
                      % (path_output, ullr, str_layers_pathlist)
                output, exitcode, err = subcall_with_output(cmd)
                if exitcode:
                    raise RuntimeError(err)
                if output:
                    return output.decode('UTF-8')

                if [GT, PR] == [(0.0, 1.0, 0.0, 0.0, 0.0, 1.0), '']:
                    # delete output map info in case of arbitrary coordinate system
                    with open(os.path.splitext(path_output)[0] + '.hdr', 'r') as inF:
                        lines = inF.readlines()

                    outContent = ''.join([i for i in lines if not re.search(r'map info', i, re.I)])
                    with open(os.path.splitext(path_output)[0] + '.hdr', 'w') as outF:
                        outF.write(outContent)

            assert os.path.exists(path_output) and os.path.exists(os.path.splitext(path_output)[0] + '.hdr'), \
                "Layerstacking failed because output cannot be found."

            # validate the written format of the projection and change it if needed
            # NOT NEEDED ANYMORE (GeoArray always validates projection when reading from disk)
            # prj_written = GeoArray(path_output).prj
            # if self.projection != prj_written and WKT2EPSG(prj_written) == WKT2EPSG(self.projection):
            #
            #     with open(os.path.splitext(path_output)[0] + '.hdr', 'r') as inF:
            #         lines = inF.readlines()
            #     outContent = ''.join([line if not re.search(r'coordinate system string', line, re.I) else
            #                           'coordinate system string = %s' % self.projection for line in lines])
            #
            #     with open(os.path.splitext(path_output)[0] + '.hdr', 'w') as outF:
            #         outF.write(outContent)
            #
            #     self.inDs = gdal.Open(path_output)

            count_outBands = GeoArray(path_output).bands
            assert count_outBands == len(layers_pathlist), "Layerstacking failed because its output has only %s " \
                                                           "instead of %s bands." % (
                count_outBands, len(layers_pathlist))

            self.update_dataset_related_attributes()

        # writing via envi memmap (tiling-capable)
        # FileObj = envi.create_image(os.path.splitext(path_output)[0]+'.hdr',shape=(self.rows,self.cols,
        # len(layers_pathlist)),dtype='int16',interleave='bsq',ext='.bsq', force=True) # 8bit for multiple masks
        # in one file
        # File_memmap = FileObj.open_memmap(writable=True)
        # File_memmap[:,:,idx] = current_band.GetRasterBand(1).ReadAsArray()

756 

757 

758"""********Help_functions**************************************************""" 

759 

760 

def ndarray2gdal(ndarray, outPath=None, importFile=None, direction=1, GDAL_Type="ENVI", geotransform=None,
                 projection=None, v=0):
    """Converts a numpy array to a Georasterdataset (default envi bsq).

    So far, georeferencing and projection assignment are only possible by importing them from another file.

    :param ndarray:      [bands, rows, cols]
    :param outPath:      Path were to store the outputFile
    :param importFile:   path of a file, where to import the projection and mapinfos
    :param direction:    axis order of the input array:
                         1: [bands, rows, cols], 2: [rows, bands, cols], 3: [rows, cols, bands]
    :param GDAL_Type:    GDAL driver name of the output format (e.g. 'ENVI' or 'MEM')
    :param geotransform: gdal list for geotransformation [originX, pixelWidth, rot1, originY, pixelHeight, rot2]
    :param projection:   a gdal compatible projection string
                         -> if importFile is not availabele geotransform and projection need to be set,
                         otherwise a gdaldata set without coordinate system will be created
    :param v:            verbosity flag (prints progress information if truthy)
    :return: GDAL data File
    """

    if v:
        print("\n--------GEOPROCESSING--------\n##Function##"
              "\n**ndarray2gdal**")
    assert outPath is not None, "\n >>> ERROR: Name of outputfile required"
    if importFile is not None:
        assert os.path.isfile(importFile), \
            "\n >>> ERROR: File for importing mapprojections does not exists: -> GEOPROCESSING.ndarray2gdal"
    if GDAL_Type != 'MEM':
        # ensure a clean output location on disk
        if not os.path.isdir(os.path.dirname(outPath)):
            os.makedirs(os.path.dirname(outPath))
        if os.path.exists(outPath):
            os.remove(outPath)

    # main
    if len(ndarray.shape) == 2:  # convert 2 dimensional array to 3 dimensional array
        ndarray = np.reshape(ndarray, (1, ndarray.shape[0], ndarray.shape[1]))

    # reorder input axes to the canonical [bands, rows, cols] layout
    if direction == 1:  # [bands, rows, cols]
        pass
    if direction == 2:  # [rows, bands, cols]
        ndarray = np.swapaxes(ndarray, 0, 1)
    if direction == 3:  # [rows, cols, bands]
        ndarray = np.rollaxis(ndarray, 2)

    bands, rows, cols = ndarray.shape

    # get gdal dataType depending on numpy datatype
    gdal_dtype_name = convertGdalNumpyDataType(ndarray.dtype.type)
    gdal_dtype = gdal.GetDataTypeByName(gdal_dtype_name)

    if v:
        print("\n\tnew dataset:\n\t name: %s\n\t bands: %s\n\t rows %s\n\t cols: %s\n\t GDALtype: %s" % (
            os.path.split(outPath)[1], str(bands), str(rows), str(cols), gdal_dtype_name))
    # register outputdriver
    driver = gdal.GetDriverByName(GDAL_Type)
    driver.Register()
    # Create new gdalfile: Create(<filename>, <xsize>, <ysize>, [<bands>], [<GDALDataType>]) bands is optional and
    # defaults to 1 GDALDataType is optional and defaults to GDT_Byte
    outDs = driver.Create(outPath, cols, rows, bands, gdal_dtype)
    assert outDs is not None, 'Could not create OutputData'

    for i in range(bands):
        # write outputband
        outBand = outDs.GetRasterBand(i + 1)
        outBand.WriteArray(ndarray[i, :, :], 0, 0)

        # flush data to disk, set the NoData value and calculate stats
        outBand.FlushCache()
        del outBand

    # import georeference and projection from other dataset
    if importFile is not None:
        gdal.AllRegister()
        inDs = gdal.Open(importFile, GA_ReadOnly)
        outDs.SetGeoTransform(inDs.GetGeoTransform())
        outDs.SetProjection(inDs.GetProjection())
    elif geotransform is not None and projection is not None:
        outDs.SetGeoTransform(geotransform)
        outDs.SetProjection(projection)
    # NOTE: condition was changed from 'GDAL_Type != MEM' (which made no sense)
    # -> check whether the L1A processor still runs with this
    if GDAL_Type == 'MEM':
        del outDs
    else:
        return outDs

843 

844 

def convertGdalNumpyDataType(dType):
    """Translate between GDAL data type names and numpy data types.

    :param dType: GDAL data type name (e.g. "Int16") or a numpy data type (e.g. np.int16)
    :return:      the corresponding numpy data type if a GDAL name was given,
                  otherwise the corresponding GDAL data type name
    :raises Exception: if the input data type is not supported
    """
    # dictionary to translate GDAL data types (strings) in corresponding numpy data types
    dTypeDic = {"Byte": np.uint8, "UInt16": np.uint16, "Int16": np.int16, "UInt32": np.uint32, "Int32": np.int32,
                "Float32": np.float32, "Float64": np.float64, "GDT_UInt32": np.uint32}
    outdType = None

    if dType in dTypeDic:
        outdType = dTypeDic[dType]
    elif dType in dTypeDic.values():
        # reverse lookup: numpy type -> GDAL type name
        for gdal_name, np_type in dTypeDic.items():
            if dType == np_type:
                outdType = gdal_name
    # BUGFIX: np.int, np.bool and np.float were removed in NumPy >= 1.24; the
    # Python builtins are used instead (the old numpy aliases pointed to them).
    elif dType in [np.int8, np.int64, int]:
        outdType = "Int32"
        print(">>> Warning: %s is converted to GDAL_Type 'Int_32'\n" % dType)
    elif dType in [bool, np.bool_]:
        outdType = "Byte"
        print(">>> Warning: %s is converted to GDAL_Type 'Byte'\n" % dType)
    elif dType in [float]:
        outdType = "Float32"
        print(">>> Warning: %s is converted to GDAL_Type 'Float32'\n" % dType)
    elif dType in [np.float16]:
        outdType = "Float32"
        print(">>> Warning: %s is converted to GDAL_Type 'Float32'\n" % dType)
    else:
        raise Exception('GEOP.convertGdalNumpyDataType: Unexpected input data type %s.' % dType)
    return outdType

878 

879 

def get_point_bounds(points):
    """Return the bounding box of the given points as (min_lon, min_lat, max_lon, max_lat)."""
    multipoint = MultiPoint(points)
    min_lon, min_lat, max_lon, max_lat = multipoint.bounds
    return min_lon, min_lat, max_lon, max_lat

884 

885 

def get_common_extent(list_extentcoords, alg='outer', return_box=True):
    """Returns the common extent coordinates of all given coordinate extents.
    NOTE: this function asserts that all input coordinates belong to the same projection!

    :param list_extentcoords: a list of coordinate sets, e.g. [ [ULxy1,URxy1,LRxy1,LLxy1], ULxy2,URxy2,LRxy2,LLxy2],]
    :param alg:               'outer': return the outer Polygon extent
                              'inner': return the inner Polygon extent
    :param return_box:        whether to return a coordinate box/envelope of the output Polygon
    :return: [ULxy,URxy,LRxy,LLxy]
    """
    if alg != 'outer':
        raise NotImplementedError  # TODO apply shapely intersection

    # merge the envelopes of all input extents into one geometry
    envelopes = [Polygon(coords).envelope for coords in list_extentcoords]
    merged = cascaded_union(envelopes)

    def _poly_coords_xy(poly):
        # extract the exterior ring as a list of [x, y] pairs
        return np.swapaxes(np.array(poly.exterior.coords.xy), 0, 1).tolist()

    return _poly_coords_xy(merged.envelope) if return_box else _poly_coords_xy(merged)

905 

906 

def zoom_2Darray_to_shapeFullArr(arr2D, shapeFullArr, meshwidth=1, subset=None, method='linear'):
    """Interpolate a 2D array onto the (optionally subsetted) pixel grid given by shapeFullArr."""
    from scipy.interpolate import RegularGridInterpolator

    assert method in ['linear', 'nearest']
    subset_props = get_subsetProps_from_subsetArg(shapeFullArr, subset)
    rS, rE, cS, cE = list(subset_props.values())[3:7]

    # sample positions of the input array within the full-array coordinate system
    row_positions = np.linspace(0, shapeFullArr[0] - 1, arr2D.shape[0])
    col_positions = np.linspace(0, shapeFullArr[1] - 1, arr2D.shape[1])
    interpolator = RegularGridInterpolator([row_positions, col_positions], arr2D, method=method)

    # target grid (respects meshwidth and the requested subset)
    target_rows, target_cols = np.meshgrid(range(rS, rE, meshwidth), range(cS, cE, meshwidth), indexing='ij')
    return interpolator(np.dstack([target_rows, target_cols]))

918 

919 

def adjust_acquisArrProv_to_shapeFullArr(arrProv, shapeFullArr, meshwidth=1, subset=None, bandwise=False):
    """Resample per-band acquisition-geometry value grids to the shape of the full array.

    :param arrProv:      dict with band names as keys and lists of values as values
    :param shapeFullArr: target shape
    :param meshwidth:    grid density passed through to zoom_2Darray_to_shapeFullArr
    :param subset:       subset argument passed through to zoom_2Darray_to_shapeFullArr
    :param bandwise:     if True, return a dict of per-band arrays; otherwise a single
                         float32 array interpolated from the band-mean
    """
    assert isinstance(arrProv, dict) and isinstance(arrProv[list(arrProv.keys())[0]], list), \
        'Expected a dictionary with band names (from LayerbandsAssignment) as keys and lists as ' \
        'values. Got %s.' % type(arrProv)

    arrProv = {band: np.array(vals) for band, vals in arrProv.items()}
    shapes = [a.shape for a in arrProv.values()]

    assert len(
        list(set(shapes))) == 1, 'The arrays contained in the values of arrProv have different shapes. Got %s.' % shapes

    if bandwise:
        return {band: zoom_2Darray_to_shapeFullArr(a, shapeFullArr, meshwidth, subset)
                for band, a in arrProv.items()}

    # average all bands first, then interpolate the mean grid
    mean_grid = np.mean(np.dstack(list(arrProv.values())), axis=2)
    return zoom_2Darray_to_shapeFullArr(mean_grid, shapeFullArr, meshwidth, subset).astype(np.float32)

938 

939 

def DN2Rad(ndarray, offsets, gains, inFill=None, inZero=None, inSaturated=None, cutNeg=True):
    # type: (np.ndarray,list,list,int,int,int,bool) -> np.ndarray
    """Convert DN to Radiance [W * m-2 * sr-1 * micrometer-1].

    NOTE: InputGains and Offsets should be in [W * m-2 * sr-1 * micrometer-1]!!!
    NOTE: the output radiance is scaled by a factor of 10 and returned as int16.

    :param ndarray:     <np.ndarray> array of DNs to be converted into radiance
    :param offsets:     [W * m-2 * sr-1 * micrometer-1]:
                        list that includes the offsets of the individual rasterbands [offset_band1, offset_band2,
                        ... ,offset_bandn] or optional input as number if Dataset has only 1 Band
    :param gains:       [W * m-2 * sr-1 * micrometer-1]:
                        list that includes the gains of the individual rasterbands [gain_band1, gain_band2, ... ,
                        gain_bandn] or optional input as number if Dataset has only 1 Band
    :param inFill:      pixelvalues allocated to background/dummy/fill pixels
    :param inZero:      pixelvalues allocated to zero radiance
    :param inSaturated: pixelvalues allocated to saturated pixels
    :param cutNeg:      cutNegvalues -> all negative values set to 0
    """
    # BUGFIX: the assert messages were previously built with a line continuation
    # inside the string literal, embedding long runs of indentation whitespace
    # into the user-facing error text
    assert isinstance(offsets, list) and isinstance(gains, list), \
        "Offset and Gain parameters have to be provided as two lists containing gains and offsets for " \
        "each band in ascending order. Got offsets as type '%s' and gains as type '%s'." \
        % (type(offsets), type(gains))

    bands = 1 if ndarray.ndim == 2 else ndarray.shape[2]
    for arg, argname in zip([offsets, gains], ['offsets', 'gains']):
        assert len(arg) == bands, \
            "Argument '%s' does not match the number of bands. Expected a list of %s %s values " \
            "for each band in ascending order. Got %s." % (argname, bands, argname, len(arg))
        for i in arg:
            assert isinstance(i, (float, int)), \
                "DN2Rad: Expected float or integer values from argument '%s'. Got %s" % (argname, type(i))

    if [inFill, inZero, inSaturated] == [None, None, None]:
        inFill, inZero, inSaturated = get_outFillZeroSaturated(ndarray.dtype)
    # --Define default for Special Values of reflectanceband
    outFill, outZero, outSaturated = get_outFillZeroSaturated('int16')

    off, gai = [np.array(param).reshape(1, 1, bands) for param in [offsets, gains]]
    rad = (10 * (off + ndarray * gai)).astype(np.int16)  # radiance scaled by factor 10
    rad = np.where(rad < 0, 0, rad) if cutNeg else rad
    # map the special input values onto their output counterparts
    rad = np.where(ndarray == inFill, outFill, rad) if inFill is not None else rad
    rad = np.where(ndarray == inSaturated, outSaturated, rad) if inSaturated is not None else rad
    rad = np.where(ndarray == inZero, outZero, rad) if inZero is not None else rad
    # rad = np.piecewise(rad,[ndarray==inFill,ndarray==inSaturated,ndarray==inZero],[outFill,outSaturated,outZero])

    return rad

984 

985 

def DN2TOARef(ndarray, offsets, gains, irradiances, zenith, earthSunDist,
              inFill=None, inZero=None, inSaturated=None, cutNeg=True):
    # type: (np.ndarray,list,list,list,float,float,int,int,int,bool) -> np.ndarray
    """Convert DN data to top-of-atmosphere reflectance.

    :param ndarray:      <np.ndarray> array of DNs to be converted into TOA reflectance
    :param offsets:      list: of offsets of each rasterband [W * m-2 * sr-1 * micrometer-1]
                         [offset_band1, offset_band2, ... ,offset_bandn] or optional as number if
                         Dataset has only 1 Band
    :param gains:        list: of gains of each rasterband [W * m-2 * sr-1 * micrometer-1]
                         [gain_band1, gain_band2, ... ,gain_bandn] or optional as number if Dataset
                         has only 1 Band
    :param irradiances:  list: of irradiance of each band [W * m-2 * micrometer-1]
                         [irradiance_band1, irradiance_band2, ... ,irradiance_bandn]
    :param zenith:       number: sun zenith angle
    :param earthSunDist: earth-sun- distance for a certain day
    :param inFill:       number: pixelvalues allocated to background/dummy/fill pixels
    :param inZero:       number: pixelvalues allocated to zero radiance
    :param inSaturated:  number: pixelvalues allocated to saturated pixles
    :param cutNeg:       bool: if true. all negative values turned to zero. default: True
    :return: Int16 TOA_Reflectance in [0-10000]
    """
    assert isinstance(offsets, list) and isinstance(gains, list) and isinstance(irradiances, list), \
        "Offset, Gain, Irradiance parameters have to be provided as three lists containing gains, offsets and " \
        "irradiance for each band in ascending order. Got offsets as type '%s', gains as type '%s' and irradiance as " \
        "type '%s'." % (type(offsets), type(gains), type(irradiances))

    bands = 1 if ndarray.ndim == 2 else ndarray.shape[2]
    for arg, argname in zip([offsets, gains, irradiances], ['offsets', 'gains', 'irradiance']):
        assert len(arg) == bands, "Argument '%s' does not match the number of bands. Expected a list of %s %s values " \
                                  "for each band in ascending order. Got %s." % (argname, bands, argname, len(arg))
        for value in arg:
            assert isinstance(value, (float, int)), \
                "DN2TOARef: Expected float or integer values from argument '%s'. Got %s" % (argname, type(value))

    if [inFill, inZero, inSaturated] == [None, None, None]:
        inFill, inZero, inSaturated = get_outFillZeroSaturated(ndarray.dtype)
    # --Define default for Special Values of reflectanceband
    outFill, outZero, outSaturated = get_outFillZeroSaturated('int16')

    # scale factor: 10000 * pi * d^2 / cos(zenith)
    scale = 10000 * math.pi * earthSunDist ** 2 / math.cos(math.radians(zenith))
    off_arr, gain_arr, irr_arr = (np.array(p).reshape(1, 1, bands) for p in [offsets, gains, irradiances])
    TOA_ref = (scale * (off_arr + ndarray * gain_arr) / irr_arr).astype(np.int16)

    if cutNeg:
        TOA_ref = np.where(TOA_ref < 0, 0, TOA_ref)
    # map the special input values onto their output counterparts
    if inFill is not None:
        TOA_ref = np.where(ndarray == inFill, outFill, TOA_ref)
    if inSaturated is not None:
        TOA_ref = np.where(ndarray == inSaturated, outSaturated, TOA_ref)
    if inZero is not None:
        TOA_ref = np.where(ndarray == inZero, outZero, TOA_ref)

    return TOA_ref

1037 

1038 

def TOARad2Kelvin_fastforward(ndarray, K1, K2, emissivity=0.95, inFill=None, inZero=None, inSaturated=None):
    # type: (np.ndarray,list,list,float,int,int,int) -> np.ndarray
    """Convert top-of-atmosphere radiances of thermal bands to temperatures in Kelvin
    by applying the inverse of the Planck function.

    NOTE: the result is truncated to int16 Kelvin values.

    :param ndarray:     <np.ndarray> array of TOA radiance values to be converted into Kelvin
    :param K1:          list of the K1 calibration constants, one per band
    :param K2:          list of the K2 calibration constants, one per band
    :param emissivity:  surface emissivity (validated but currently not used, see note below)
    :param inFill:      pixelvalues allocated to background/dummy/fill pixels
    :param inZero:      pixelvalues allocated to zero radiance
    :param inSaturated: pixelvalues allocated to saturated pixels
    """
    bands = 1 if len(ndarray.shape) == 2 else ndarray.shape[2]
    for arg, argname in zip([K1, K2], ['K1', 'K2']):
        # BUGFIX: the list/length check now runs before any element access; previously
        # arg[0] was indexed first, so a scalar input crashed with a TypeError
        # instead of raising an informative AssertionError
        assert isinstance(arg, list) and len(arg) == bands, \
            "Argument '%s' does not match the number of bands. Expected a list of %s %s values " \
            "for each band in ascending order. Got %s." % (argname, bands, argname, arg)
        for i in arg:
            assert isinstance(i, (float, int)), \
                "TOARad2Kelvin_fastforward: Expected float or integer values from argument '%s'. Got %s." \
                % (argname, type(i))
    assert type(emissivity) in [float, int], \
        "TOARad2Kelvin_fastforward: Expected float or integer values from argument emissivity. Got %s" \
        % type(emissivity)

    if [inFill, inZero, inSaturated] == [None, None, None]:
        inFill, inZero, inSaturated = get_outFillZeroSaturated(ndarray.dtype)
    # --Define default for Special Values of reflectanceband
    outFill, outZero, outSaturated = get_outFillZeroSaturated('int16')

    K1, K2 = [np.array(param).reshape(1, 1, bands) for param in [K1, K2]]
    # inverse Planck function
    # NOTE(review): 'emissivity' is validated above but not applied here, while
    # DN2DegreesCelsius_fastforward multiplies K1 by it - confirm whether intended.
    Kelvin = (K2 / np.log(K1 / ndarray + 1)).astype(np.int16)
    # map the special input values onto their output counterparts
    Kelvin = np.where(ndarray == inFill, outFill, Kelvin) if inFill is not None else Kelvin
    Kelvin = np.where(ndarray == inSaturated, outSaturated, Kelvin) if inSaturated is not None else Kelvin
    Kelvin = np.where(ndarray == inZero, outZero, Kelvin) if inZero is not None else Kelvin
    # Kelvin = \
    #     np.piecewise(Kelvin,[ndarray==inFill,ndarray==inSaturated,ndarray==inZero],[outFill,outSaturated,outZero])

    return Kelvin

1079 

1080 

def DN2DegreesCelsius_fastforward(ndarray, offsets, gains, K1, K2, emissivity=0.95,
                                  inFill=None, inZero=None, inSaturated=None):
    """Convert thermal DNs to temperatures in degrees Celsius
    by calculating TOARadiance and applying the inverse of the Planck function.

    NOTE: the result is int16, scaled by a factor of 100 (centi-degrees Celsius).

    :param ndarray:     <np.ndarray> array of DNs to be converted into Degrees Celsius
    :param offsets:     list of radiometric offsets, one per band
    :param gains:       list of radiometric gains, one per band
    :param K1:          list of the K1 calibration constants, one per band
    :param K2:          list of the K2 calibration constants, one per band
    :param emissivity:  surface emissivity applied to K1 in the inverse Planck function
    :param inFill:      pixelvalues allocated to background/dummy/fill pixels
    :param inZero:      pixelvalues allocated to zero radiance
    :param inSaturated: pixelvalues allocated to saturated pixels
    """
    bands = 1 if len(ndarray.shape) == 2 else ndarray.shape[2]
    for arg, argname in zip([offsets, gains, K1, K2], ['Offset', 'Gain', 'K1', 'K2']):
        # BUGFIX: this assertion previously checked isinstance(offsets, list) and
        # isinstance(gains, list) for EVERY argument, so non-list K1/K2 inputs were
        # never caught and the error message reported the wrong object
        assert isinstance(arg, list), \
            "%s parameters have to be provided as a list containing individual values for each band in " \
            "ascending order. Got type '%s'." % (argname, type(arg))
        assert len(arg) == bands, \
            "Argument '%s' does not match the number of bands. Expected a list of %s %s values " \
            "for each band in ascending order. Got %s." % (argname, bands, argname, len(arg))
        for i in arg:
            assert isinstance(i, (float, int)), \
                "DN2DegreesCelsius_fastforward: Expected float or integer values from argument '%s'. Got %s." \
                % (argname, type(i))
    assert type(emissivity) in [float, int], \
        "DN2DegreesCelsius_fastforward: Expected float or integer values from argument emissivity. Got %s" \
        % type(emissivity)

    if [inFill, inZero, inSaturated] == [None, None, None]:
        inFill, inZero, inSaturated = get_outFillZeroSaturated(ndarray.dtype)
    # --Define default for Special Values of reflectanceband
    outFill, outZero, outSaturated = get_outFillZeroSaturated('int16')

    off, gai, K1, K2 = [np.array(param).reshape(1, 1, bands) for param in [offsets, gains, K1, K2]]
    # inverse Planck function, converted to degrees Celsius and scaled by factor 100
    degCel = (100 * ((K2 / np.log(K1 * emissivity / (off + ndarray * gai) + 1)) - 273.15)).astype(np.int16)
    # map the special input values onto their output counterparts
    degCel = np.where(ndarray == inFill, outFill, degCel) if inFill is not None else degCel
    degCel = np.where(ndarray == inSaturated, outSaturated, degCel) if inSaturated is not None else degCel
    degCel = np.where(ndarray == inZero, outZero, degCel) if inZero is not None else degCel
    # degCel = \
    #     np.piecewise(degCel,[ndarray==inFill,ndarray==inSaturated,ndarray==inZero],[outFill,outSaturated,outZero])
    return degCel

1121 

1122 

def is_granule(trueCornerPos):  # TODO
    """Check whether the given corner positions belong to a granule or to a full scene.

    Idea: compute the angles of the scene edges relative to north or east.
    NOT YET IMPLEMENTED.
    """
    pass

1128 

1129 

1130# def get_corner_coordinates(gt, prj, rows, cols, targetProj=None): 

1131# """Returns corner coordinates of the entire GEOP object in lon/lat or UTM. 

1132# ATTENTION: coordinates represent PIXEL CORNERS: 

1133# UL=UL-coordinate of (0,0) 

1134# UR=UR-coordinate of (0,self.cols-1) 

1135# => lonlat_arr always contains UL-coordinate of each pixel 

1136# :param targetProj: 'LonLat' or 'UTM' """# 

1137# 

1138# assert targetProj in [None,'LonLat','UTM'], "Target projection %s currently not supported." %targetProj 

1139# ULxy,URxy,LLxy,LRxy = [(0,0),(cols,0),(0,rows),(cols,rows)] 

1140# LonLat = [tuple(reversed(pixelToLatLon(XY,geotransform=gt,projection=prj))) for XY in [ULxy,URxy,LLxy,LRxy]] 

1141# return LonLat if targetProj in [None,'LonLat'] else \ 

1142# [transform_wgs84_to_utm(LL[0],LL[1],get_UTMzone(prj=prj)) for LL in LonLat] 

1143 

1144 

def get_lonlat_coord_array(shape_fullArr, arr_pos, geotransform, projection, meshwidth=1, nodata_mask=None,
                           outFill=None):
    """Returns numpy array containing longitude pixel coordinates (band 0) and latitude pixel coordinates (band 1).

    :param shape_fullArr: shape of the full array (passed to get_subsetProps_from_shapeFullArr_arrPos)
    :param arr_pos:       subset position within the full array
    :param geotransform:  GDAL geotransform of the full array
    :param projection:    WKT projection string of the full array
    :param meshwidth:     <int> defines the density of the mesh used for generating the output
                          (1: full resolution; 10: one point each 10 pixels)
    :param nodata_mask:   <numpy array>, used for declaring nodata values in the output lonlat array
    :param outFill:       the value that is assigned to nodata area in the output lonlat array
    :return: tuple of (float32 lon/lat array, [UL_lonlat, UR_lonlat, LL_lonlat, LR_lonlat])
    """
    rows, cols, bands, rowStart, rowEnd, colStart, colEnd = get_subsetProps_from_shapeFullArr_arrPos(shape_fullArr,
                                                                                                     arr_pos)

    # pixel coordinate mesh of the requested subset (respects meshwidth)
    xcoord_arr, ycoord_arr = np.meshgrid(range(colStart, colStart + cols, meshwidth),
                                         range(rowStart, rowStart + rows, meshwidth))
    # pixel -> map coordinates
    # NOTE(review): the rotation terms (geotransform[2] and [4]) are ignored here,
    # which assumes a north-up, unrotated image - confirm for rotated datasets
    xcoord_arr = xcoord_arr * geotransform[1] + geotransform[0]
    ycoord_arr = ycoord_arr * geotransform[5] + geotransform[3]

    assert projection, 'Unable to calculate LonLat array. Invalid projection. Got %s.' % projection
    src_srs = osr.SpatialReference()
    src_srs.ImportFromWkt(projection)
    assert src_srs.IsProjected() or src_srs.IsGeographic(), \
        'Unable to calculate LonLat array. Unsupported projection (got %s)' % projection

    if src_srs.IsProjected():
        # inverse-project map coordinates to geographic lon/lat
        proj = pyproj.Proj(src_srs.ExportToProj4())
        xcoord_arr, ycoord_arr = proj(xcoord_arr, ycoord_arr, inverse=True)

    xcoord_ycoord_arr = np.dstack((xcoord_arr, ycoord_arr))

    if nodata_mask is not None:
        outFill = outFill if outFill else get_outFillZeroSaturated('float32')[0]
        xcoord_ycoord_arr[nodata_mask.astype(np.int8) == 0] = outFill

    # corner coordinates of the (possibly subsetted and mesh-thinned) output array
    UL_lonlat = (round(xcoord_ycoord_arr[0, 0, 0], 10), round(xcoord_ycoord_arr[0, 0, 1], 10))
    UR_lonlat = (round(xcoord_ycoord_arr[0, -1, 0], 10), round(xcoord_ycoord_arr[0, -1, 1], 10))
    # TODO validate that coordinate:
    LL_lonlat = (round(xcoord_ycoord_arr[-1, 0, 0], 10), round(xcoord_ycoord_arr[-1, 0, 1], 10))
    # TODO validate that coordinate:
    LR_lonlat = (round(xcoord_ycoord_arr[-1, -1, 0], 10), round(xcoord_ycoord_arr[-1, -1, 1], 10))
    # self.logger.info(self.filename)
    # self.logger.info('UL', UL_lat)
    # self.logger.info('UR', UR_lat)
    # self.logger.info('LL', LL_lat)
    # self.logger.info('LR', LR_lat)
    # self.logger.info('1000/1000', round(xcoord_ycoord_arr[999,999,0],10),round(xcoord_ycoord_arr[999,999,1],10))
    return xcoord_ycoord_arr.astype(np.float32), [UL_lonlat, UR_lonlat, LL_lonlat, LR_lonlat]

1195 

1196 

def calc_VAA_using_fullSceneCornerLonLat(fullSceneCornerLonLat, orbit_params):
    # type: (list, list) -> float
    """Calculate the viewing azimuth angle, defined as 90 degrees from the flight line.

    For example, a flight line 8 degrees from North yields a VAA of 98 degrees.

    :param fullSceneCornerLonLat: UL, UR, LL, LR
    :param orbit_params: list of [altitude, inclination, period] => inclination is used as fallback
    """
    assert len(fullSceneCornerLonLat) == 4, \
        'VAA can only be calculated with fullSceneCornerLonLat representing 4 coordinates (UL, UR, LL, LR).'

    ul, ur, ll, lr = fullSceneCornerLonLat
    geod = pyproj.Geod(ellps='WGS84')
    # forward azimuths along both scene edges (lower corner -> upper corner)
    azimuth_left = geod.inv(*ll, *ul)[0]
    azimuth_right = geod.inv(*lr, *ur)[0]
    VAA_mean = float(np.mean([azimuth_left, azimuth_right])) + 90

    # plausibility check of the derived angle
    if abs(VAA_mean - 90) < 1:
        # fullSceneCornerLonLat obviously don't belong to a full scene but a granule
        assert orbit_params
        warnings.warn('Derivation of mean VAA angle from flight line delivered a non reasonable value (%s degrees).'
                      'Using sensor inclination (%s degrees) as fallback.' % (VAA_mean, orbit_params[1]))
        VAA_mean = float(orbit_params[1])  # inclination # FIXME is this correct?

    return VAA_mean

1222 

1223 

def calc_VZA_array(shape_fullArr, arr_pos, fullSceneCornerPos, viewing_angle, FOV, logger, meshwidth=1,
                   nodata_mask=None, outFill=None):
    """Calculate viewing zenith angles for each pixel in the dataset.

    By solving an equation system with 4 variables for each image corner: VZA = a + b*col + c*row + d*col*row.

    :param shape_fullArr:      shape of the full array
    :param arr_pos:            subset position within the full array
    :param fullSceneCornerPos: UL, UR, LL, LR corner positions of the full scene (each as [row, col])
    :param viewing_angle:      center viewing angle from metadata
    :param FOV:                field of view of the sensor
    :param logger:             logger instance to use for warnings
    :param meshwidth:          <int> defines the density of the mesh used for generating the output
                               (1: full resolution; 10: one point each 10 pixels)
    :param nodata_mask:        <numpy array>, used for declaring nodata values in the output VZA array
    :param outFill:            the value that is assigned to nodata area in the output VZA array
    """
    # FIXME in case of Sentinel-2 the viewing_angle corresponds to the center point of the image footprint
    # FIXME (trueDataCornerPos)
    # FIXME => the algorithm must use the center viewing angle + orbit inclination and must calculate the FOV to be used
    # FIXME via the widths of the footprint at the center position
    # FIXME since S2 brings its own VZA array this is only relevant other scenes provided as granules (e.g. RapidEye)

    if nodata_mask is not None:
        assert isinstance(nodata_mask, (GeoArray, np.ndarray)), \
            "'nodata_mask' must be a numpy array or an instance of GeoArray. Got %s" % type(nodata_mask)
    # NOTE(review): shape_fullArr is unpacked as (cols, rows, bands) here, although
    # other helpers in this module suggest a (rows, cols, bands) order - confirm.
    colsFullArr, rowsFullArr, bandsFullArr = shape_fullArr
    rows, cols, bands, rowStart, rowEnd, colStart, colEnd = get_subsetProps_from_shapeFullArr_arrPos(shape_fullArr,
                                                                                                     arr_pos)
    ul, ur, ll, lr = fullSceneCornerPos

    # pixel coordinate mesh of the requested subset (respects meshwidth)
    cols_arr, rows_arr = np.meshgrid(range(colStart, colStart + cols, meshwidth),
                                     range(rowStart, rowStart + rows, meshwidth))

    if list(fullSceneCornerPos) == list(
            ([0, 0], [0, colsFullArr - 1], [rowsFullArr - 1, 0], [rowsFullArr - 1, colsFullArr - 1])):
        # corner positions equal the image corners -> orbit track cannot be reconstructed
        logger.warning('No precise calculation of VZA array possible because orbit track cannot be '
                       'reconstructed from dataset since full scene corner positions are unknown. VZA array is '
                       'filled with the value provided in metadata.')
        # NOTE(review): np.int64 truncates a float viewing_angle here, while the
        # else-branch produces float32 - confirm whether this is intended
        VZA_array = np.full(cols_arr.shape, viewing_angle, np.int64)  # respects meshwidth
    else:
        # solve VZA = a + b*col + c*row + d*col*row for the four scene corners
        coeff_matrix = np.array([[1, ul[1], ul[0], ul[1] * ul[0]],
                                 [1, ur[1], ur[0], ur[1] * ur[0]],
                                 [1, ll[1], ll[0], ll[1] * ll[0]],
                                 [1, lr[1], lr[0], lr[1] * lr[0]]])
        const_matrix = np.array(
            [viewing_angle + FOV / 2.,  # VZA@UL # +- aligning seems to match with Sentinel-2 # TODO correct?
             viewing_angle - FOV / 2.,  # VZA@UR
             viewing_angle + FOV / 2.,  # VZA@LL
             viewing_angle - FOV / 2.])  # VZA@LR
        factors = np.linalg.solve(coeff_matrix, const_matrix)

        # evaluate the bilinear model over the subset mesh
        VZA_array = (factors[0] + factors[1] * cols_arr + factors[2] * rows_arr + factors[3] *
                     cols_arr * rows_arr).astype(np.float32)

    if nodata_mask is not None:
        outFill = outFill if outFill else get_outFillZeroSaturated('float32')[0]
        VZA_array[nodata_mask.astype(np.int8) == 0] = outFill

    return VZA_array

1284 

1285 

def calc_AcqTime_array(shape_fullArr, arr_pos, AcqDate, CenterAcqTime, fullSceneCornerPos, overpassDurationSec,
                       meshwidth=1):
    """Calculate the acquisition time for each pixel by interpolating between the corner acquisition times.

    Solves time = a + b*col + c*row + d*col*row, where the upper corners (UL, UR) are assigned
    time_end and the lower corners (LL, LR) time_start.

    :param shape_fullArr:        shape of the full image array
    :param arr_pos:              subset position as ((rowStart, rowEnd), (colStart, colEnd)) or None
    :param AcqDate:              acquisition date as string 'YYYY-MM-DD'
    :param CenterAcqTime:        scene center acquisition time as string 'HH:MM:SS'
    :param fullSceneCornerPos:   image coordinates of the four full scene corners as (UL, UR, LL, LR)
    :param overpassDurationSec:  duration of the overpass in seconds
    :param meshwidth:            <int> defines the density of the mesh used for generating the output
                                 (1: full resolution; 10: one point each 10 pixels)
    :return:                     <numpy array> of timezone-naive datetime.datetime objects
    """
    ul, ur, ll, lr = fullSceneCornerPos

    rows, cols, bands, rowStart, rowEnd, colStart, colEnd = get_subsetProps_from_shapeFullArr_arrPos(shape_fullArr,
                                                                                                    arr_pos)

    cols_arr, rows_arr = np.meshgrid(range(colStart, colStart + cols, meshwidth),
                                     range(rowStart, rowStart + rows, meshwidth))

    time_center = datetime.datetime.strptime('%s %s' % (AcqDate, CenterAcqTime), '%Y-%m-%d %H:%M:%S')
    # NOTE(review): this spans 2 * overpassDurationSec around the center time, whereas
    # calc_SZA_SAA_array uses +/- overpassDurationSec / 2 -- verify which one is intended
    time_start = time_center - datetime.timedelta(seconds=overpassDurationSec)
    time_end = time_center + datetime.timedelta(seconds=overpassDurationSec)

    coeff_matrix = np.array([[1, ul[1], ul[0], ul[1] * ul[0]],
                             [1, ur[1], ur[0], ur[1] * ur[0]],
                             [1, ll[1], ll[0], ll[1] * ll[0]],
                             [1, lr[1], lr[0], lr[1] * lr[0]]])
    const_matrix = np.array([mdates.date2num(time_end),  # AcqTime@UL
                             mdates.date2num(time_end),  # AcqTime@UR
                             mdates.date2num(time_start),  # AcqTime@LL
                             mdates.date2num(time_start)])  # AcqTime@LR

    factors = np.linalg.solve(coeff_matrix, const_matrix)

    mdate_numarray = factors[0] + factors[1] * cols_arr + factors[2] * rows_arr + factors[3] * cols_arr * rows_arr
    flat_mdate_arr = np.array(mdates.num2date(mdate_numarray.flatten()))
    # reshape back to the mesh shape (was reshape(-1, cols)): with meshwidth > 1 the mesh has
    # fewer than 'cols' columns, so reshaping to (-1, cols) would raise a ValueError
    datetime_arr = np.array([flat_mdate_arr[i].replace(tzinfo=None)
                             for i in range(len(flat_mdate_arr))]).reshape(mdate_numarray.shape)
    return datetime_arr

1316 

1317 

def calc_SZA_SAA(date, lon, lat):  # not used anymore since pyorbital is more precise
    """Calculate solar zenith and azimuth angle using pyephem.

    :param date:  date/time of the observation (anything accepted by ephem.Observer.date)
    :param lon:   longitude of the observation position in degrees
    :param lat:   latitude of the observation position in degrees
    :return:      tuple (SZA, SAA) in degrees, each as np.float32
    """
    obsv = ephem.Observer()
    obsv.lon, obsv.lat = str(lon), str(lat)
    obsv.date = date
    sun = ephem.Sun()
    sun.compute(obsv)
    # Convert the ephem angles (radians) directly to degrees instead of parsing their
    # 'deg:min:sec' string representation. The previous string parsing was buggy:
    # due to missing parentheses, minutes and seconds were ADDED to (90 - degrees)
    # instead of being subtracted as part of the altitude, and for negative altitudes
    # the sign was lost on the minutes/seconds components.
    SZA = np.float32(90. - math.degrees(sun.alt))
    SAA = np.float32(math.degrees(sun.az))
    return SZA, SAA

1334 

1335 

def calc_SZA_SAA_array(shape_fullArr, arr_pos, AcqDate, CenterAcqTime, fullSceneCornerPos, fullSceneCornerLonLat,
                       overpassDurationSec, logger, meshwidth=1, nodata_mask=None, outFill=None, accurracy='coarse',
                       lonlat_arr=None):
    """Calculates solar zenith and azimuth angles for each pixel in the dataset using pyorbital.

    :param shape_fullArr:          shape of the full image array
    :param arr_pos:                subset position as ((rowStart, rowEnd), (colStart, colEnd)) or None
    :param AcqDate:                acquisition date as string 'YYYY-MM-DD'
    :param CenterAcqTime:          scene center acquisition time as string 'HH:MM:SS'
    :param fullSceneCornerPos:     image coordinates of the four full scene corners as (UL, UR, LL, LR)
    :param fullSceneCornerLonLat:  UL, UR, LL, LR
    :param overpassDurationSec:    duration of the overpass in seconds
    :param logger:                 logging.Logger instance for warnings
    :param meshwidth:              <int> defines the density of the mesh used for generating the output
                                   (1: full resolution; 10: one point each 10 pixels)
    :param nodata_mask:            <numpy array>, used for declaring nodata values in the output SZA/SAA array
    :param outFill:                the value that is assigned to nodata area in the output SZA/SAA array
    :param accurracy:              'fine' or 'coarse'
                                   - 'fine' : pixelwise acquisition time is used to calculate SZA/SAA
                                       - requires lonlat_arr to be specified
                                   - 'coarse: SZA/SAA is calculated for image corners then interpolated by solving
                                              an equation system with 4 variables for each image corner:
                                              SZA/SAA = a + b*col + c*row + d*col*row.
    :param lonlat_arr:             pixelwise lon/lat array; required if accurracy is 'fine'
    :return:                       tuple (SZA_array, SAA_array) in degrees
    """
    if nodata_mask is not None:
        assert isinstance(nodata_mask, (GeoArray, np.ndarray)), \
            "'nodata_mask' must be a numpy array or an instance of GeoArray. Got %s" % type(nodata_mask)
    if accurracy == 'fine':
        # NOTE(review): despite the message suggesting a fallback to 'coarse', a missing lonlat_arr raises here
        assert lonlat_arr is not None, "Calculating SZA/SAA with accurracy set to 'fine' requires lonlat_arr to be " \
                                       "specified. SZA/SAA is calculated with accurracy = 'coarse'."
    assert accurracy in ['fine', 'coarse'], "The keyword accuracy accepts only 'fine' or 'coarse'. Got %s" % accurracy

    ul, ur, ll, lr = fullSceneCornerPos
    colsFullArr, rowsFullArr, bandsFullArr = shape_fullArr

    if list(fullSceneCornerPos) == list(
            ([0, 0], [0, colsFullArr - 1], [rowsFullArr - 1, 0], [rowsFullArr - 1, colsFullArr - 1])):
        # corner positions equal the full array corners -> orbit track unknown; assume one
        # common acquisition time (the scene center time) for all pixels
        logger.warning('No precise calculation of SZA/SAA array possible because orbit track cannot '
                       'be reconstructed from dataset since full scene corner positions are unknown. Same '
                       'acquisition time for each pixel is assumed for SZA/SAA calculation.')
        time_center = datetime.datetime.strptime('%s %s' % (AcqDate, CenterAcqTime), '%Y-%m-%d %H:%M:%S')
        time_start = time_center
        time_end = time_center
    else:
        # overpassDurationSec = self.get_overpass_duration(fullSceneCornerLonLat, orbitParams)[0]
        time_center = datetime.datetime.strptime('%s %s' % (AcqDate, CenterAcqTime), '%Y-%m-%d %H:%M:%S')
        time_start = time_center - datetime.timedelta(seconds=float(overpassDurationSec) / 2)
        time_end = time_center + datetime.timedelta(seconds=float(overpassDurationSec) / 2)

    if accurracy == 'fine':
        # pixelwise acquisition time -> pixelwise sun position
        datetime_arr = calc_AcqTime_array(shape_fullArr, arr_pos, AcqDate, CenterAcqTime,
                                          fullSceneCornerPos, overpassDurationSec, meshwidth)
        sol_alt_rad, sol_az_rad = astronomy.get_alt_az(datetime_arr, lonlat_arr[:, :, 0], lonlat_arr[:, :, 1])
        SZA_array, SAA_array = 90 - (180 * sol_alt_rad / math.pi), 180 * sol_az_rad / math.pi

    else:  # accurracy == 'coarse'
        rows, cols, bands, rowStart, rowEnd, colStart, colEnd = \
            get_subsetProps_from_shapeFullArr_arrPos(shape_fullArr, arr_pos)

        cols_arr, rows_arr = np.meshgrid(range(colStart, colStart + cols, meshwidth),
                                         range(rowStart, rowStart + rows, meshwidth))

        # sun positions at the image corners: upper corners at time_end, lower corners at time_start
        alt_UL_rad, az_UL_rad = astronomy.get_alt_az(time_end, fullSceneCornerLonLat[0][0], fullSceneCornerLonLat[0][1])
        alt_UR_rad, az_UR_rad = astronomy.get_alt_az(time_end, fullSceneCornerLonLat[1][0], fullSceneCornerLonLat[1][1])
        alt_LL_rad, az_LL_rad = astronomy.get_alt_az(time_start, fullSceneCornerLonLat[2][0],
                                                     fullSceneCornerLonLat[2][1])
        alt_LR_rad, az_LR_rad = astronomy.get_alt_az(time_start, fullSceneCornerLonLat[3][0],
                                                     fullSceneCornerLonLat[3][1])

        SZA_UL, SAA_UL = 90 - (180 * alt_UL_rad / math.pi), 180 * az_UL_rad / math.pi
        SZA_UR, SAA_UR = 90 - (180 * alt_UR_rad / math.pi), 180 * az_UR_rad / math.pi
        SZA_LL, SAA_LL = 90 - (180 * alt_LL_rad / math.pi), 180 * az_LL_rad / math.pi
        SZA_LR, SAA_LR = 90 - (180 * alt_LR_rad / math.pi), 180 * az_LR_rad / math.pi
        # interpolate the corner values over the image: SZA/SAA = a + b*col + c*row + d*col*row
        SZA_SAA_coeff_matrix = np.array([[1, ul[1], ul[0], ul[1] * ul[0]],
                                         [1, ur[1], ur[0], ur[1] * ur[0]],
                                         [1, ll[1], ll[0], ll[1] * ll[0]],
                                         [1, lr[1], lr[0], lr[1] * lr[0]]])
        SZA_const_matrix = np.array([SZA_UL, SZA_UR, SZA_LL, SZA_LR])
        SZA_factors = np.linalg.solve(SZA_SAA_coeff_matrix, SZA_const_matrix)
        SZA_array = (SZA_factors[0] + SZA_factors[1] * cols_arr + SZA_factors[2] * rows_arr +
                     SZA_factors[3] * cols_arr * rows_arr).astype(np.float32)

        SAA_const_matrix = np.array([SAA_UL, SAA_UR, SAA_LL, SAA_LR])
        SAA_factors = np.linalg.solve(SZA_SAA_coeff_matrix, SAA_const_matrix)
        SAA_array = (SAA_factors[0] + SAA_factors[1] * cols_arr + SAA_factors[2] * rows_arr +
                     SAA_factors[3] * cols_arr * rows_arr).astype(np.float32)

    if nodata_mask is not None:
        # default to the standard float32 fill value if no outFill is given (consistent with
        # calc_VZA_array; assigning None to a float array would silently write NaN instead of
        # a well-defined nodata value)
        outFill = outFill if outFill is not None else get_outFillZeroSaturated('float32')[0]
        SZA_array[nodata_mask.astype(np.int8) == 0] = outFill
        SAA_array[nodata_mask.astype(np.int8) == 0] = outFill
    return SZA_array, SAA_array

1428 

1429 

def calc_RAA_array(SAA_array, VAA_array, nodata_mask=None, outFill=None):
    """Calculate relative azimuth angle between solar azimuth and viewing azimuth in degrees.

    The result is the plain difference SAA - VAA (no normalization to a fixed angle range
    is applied here).

    :param SAA_array:    <numpy array> of solar azimuth angles in degrees
    :param VAA_array:    <numpy array> of viewing azimuth angles in degrees
    :param nodata_mask:  mask declaring nodata pixels (0 = nodata); numpy array or GeoArray
    :param outFill:      the value to be used to fill areas outside the actual image bounds
    :return:             <numpy array> of relative azimuth angles
    """
    has_mask = nodata_mask is not None
    if has_mask:
        assert isinstance(nodata_mask, (GeoArray, np.ndarray)), \
            "'nodata_mask' must be a numpy array or an instance of GeoArray. Got %s" % type(nodata_mask)

    raa = SAA_array - VAA_array

    if has_mask:
        raa[nodata_mask.astype(np.int8) == 0] = outFill

    return raa

1448 

1449 

def get_subsetProps_from_shapeFullArr_arrPos(shape_fullArr, arr_pos):
    """Return the array dimensions of a possibly subsetted array.

    :param shape_fullArr:  (rows, cols, bands) of the full array
    :param arr_pos:        subset position as ((rowStart, rowEnd), (colStart, colEnd)) with
                           inclusive end indices, or None/empty for the full array
    :return:               tuple (rows, cols, bands, rowStart, rowEnd, colStart, colEnd),
                           where rowEnd/colEnd equal start + size (one past the last subset index)
    """
    n_rows, n_cols, n_bands = shape_fullArr

    if arr_pos:
        row_start, col_start = arr_pos[0][0], arr_pos[1][0]
        n_rows = arr_pos[0][1] - row_start + 1
        n_cols = arr_pos[1][1] - col_start + 1
    else:
        row_start, col_start = 0, 0

    return n_rows, n_cols, n_bands, row_start, row_start + n_rows, col_start, col_start + n_cols

1458 

1459 

def get_subsetProps_from_subsetArg(shape_fullArr, subset):
    """Derive subset properties (dimensions, start/end indices, band list) from a subset argument.

    :param shape_fullArr:  (rows, cols, bands) of the full array
    :param subset:         None/empty (treated as full cube) or a tuple (shape, params) where
                           shape is one of 'cube', 'row', 'col', 'band', 'block', 'pixel', 'custom'
    :return:               collections.OrderedDict with the keys rows, cols, bands, rowStart,
                           rowEnd, colStart, colEnd, bandStart, bandEnd, bandsList
                           (all end indices inclusive)
    """
    if subset:
        assert subset[0] in ['cube', 'row', 'col', 'band', 'block', 'pixel', 'custom'], \
            'Unexpected subset shape %s.' % subset[0]
    else:
        subset = ('cube', None)

    full_rows, full_cols, full_bands = shape_fullArr
    kind, params = subset[0], subset[1]

    if kind == 'cube':
        rows, cols, bands = full_rows, full_cols, full_bands
        rowStart, colStart, bandStart = 0, 0, 0
        rowEnd, colEnd, bandEnd = rows - 1, cols - 1, bands - 1
    elif kind == 'row':
        rows, cols, bands = 1, full_cols, full_bands
        rowStart, colStart, bandStart = params, 0, 0
        rowEnd, colEnd, bandEnd = params, cols - 1, bands - 1
    elif kind == 'col':
        rows, cols, bands = full_rows, 1, full_bands
        rowStart, colStart, bandStart = 0, params, 0
        rowEnd, colEnd, bandEnd = rows - 1, params, bands - 1
    elif kind == 'band':
        rows, cols, bands = full_rows, full_cols, 1
        rowStart, colStart, bandStart = 0, 0, params
        rowEnd, colEnd, bandEnd = rows - 1, cols - 1, params
    elif kind == 'block':
        rS, rE = params[0][0], params[0][1]
        cS, cE = params[1][0], params[1][1]
        rows, cols, bands = rE - rS + 1, cE - cS + 1, full_bands
        rowStart, colStart, bandStart = rS, cS, 0
        rowEnd, colEnd, bandEnd = rE, cE, bands - 1
    elif kind == 'pixel':
        rows, cols, bands = 1, 1, full_bands
        rowStart, colStart, bandStart = params[0], params[1], 0
        rowEnd, colEnd, bandEnd = params[0], params[1], bands - 1
    else:  # kind == 'custom'
        rS, rE = params[0][0], params[0][1]
        cS, cE = params[1][0], params[1][1]
        rows, cols, bands = rE - rS + 1, cE - cS + 1, len(params[2])
        rowStart, colStart, bandStart = rS, cS, min(params[2])
        rowEnd, colEnd, bandEnd = rE, cE, max(params[2])

    # only 'custom' carries an explicit band list; otherwise all bands of the subset are used
    bandsList = params[2] if kind == 'custom' else range(bands)

    return collections.OrderedDict(zip(
        ['rows', 'cols', 'bands', 'rowStart', 'rowEnd', 'colStart', 'colEnd', 'bandStart', 'bandEnd', 'bandsList'],
        [rows, cols, bands, rowStart, rowEnd, colStart, colEnd, bandStart, bandEnd, bandsList]))

1501 

1502 

def clip_array_using_mapBounds(array, bounds, im_prj, im_gt, fillVal=0):
    """Clip (or pad) an image array to the given map bounds, filling new pixels with fillVal.

    The target grid is snapped to the pixel grid of the input array, so the output stays
    aligned with the input geotransform.

    :param array:    2D or 3D <numpy array> to be clipped (rows, cols[, bands])
    :param bounds:   target map bounds; presumably (xmin, ymin, xmax, ymax) as consumed by
                     snap_bounds_to_pixGrid -- TODO confirm against that helper
    :param im_prj:   projection of the input array (returned unchanged)
    :param im_gt:    GDAL geotransform of the input array (im_gt[1]: x pixel size,
                     im_gt[5]: y pixel size, negative for north-up)
    :param fillVal:  value used for pixels outside the input array (default: 0)
    :return:         tuple (clipped/padded array, geotransform of the output array, im_prj)
    """
    # NOTE(review): the print() calls below look like leftover debug output -- consider removing
    print(bounds)
    # get target bounds on the same grid like the input array
    tgt_xmin, tgt_ymin, tgt_xmax, tgt_ymax = snap_bounds_to_pixGrid(bounds, im_gt)
    print(tgt_xmin, tgt_ymax, tgt_xmax, tgt_ymin)

    # get target dims (pixel sizes taken from the input geotransform)
    tgt_rows = int(abs((tgt_ymax - tgt_ymin) / im_gt[5])) + 1
    tgt_cols = int(abs((tgt_xmax - tgt_xmin) / im_gt[1])) + 1
    tgt_bands = array.shape[2] if len(array.shape) == 3 else None
    tgt_shape = (tgt_rows, tgt_cols, tgt_bands) if len(array.shape) == 3 else (tgt_rows, tgt_cols)

    # get array pos to fill: project the input array's corner pixels (0, 0) and
    # (C-1, R-1) into the target grid to find where the input data belongs
    R, C = array.shape[:2]
    tgt_gt = [tgt_xmin, im_gt[1], 0., tgt_ymax, 0., im_gt[5]]
    cS, rS = [int(i) for i in mapXY2imXY(imXY2mapXY((0, 0), im_gt), tgt_gt)]
    cE, rE = [int(i) for i in mapXY2imXY(imXY2mapXY((C - 1, R - 1), im_gt), tgt_gt)]
    print(rS, rE, cS, cE)

    # create target array and fill it with the given pixel values at the correct geo position
    tgt_arr = np.full(tgt_shape, fillVal, array.dtype)
    tgt_arr[rS:rE + 1, cS:cE + 1] = array

    return tgt_arr, tgt_gt, im_prj

1534 return tgt_arr, tgt_gt, im_prj