# -*- coding: utf-8 -*-

# gms_preprocessing, spatial and spectral homogenization of satellite remote sensing data
#
# Copyright (C) 2020 Daniel Scheffler (GFZ Potsdam, daniel.scheffler@gfz-potsdam.de)
#
# This software was developed within the context of the GeoMultiSens project funded
# by the German Federal Ministry of Education and Research
# (project grant code: 01 IS 14 010 A-C).
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later version.
# Please note the following exception: `gms_preprocessing` depends on tqdm, which
# is distributed under the Mozilla Public Licence (MPL) v2.0 except for the files
# "tqdm/_tqdm.py", "setup.py", "README.rst", "MANIFEST.in" and ".gitignore".
# Details can be found here: https://github.com/tqdm/tqdm/blob/master/LICENCE.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.

"""GeoMultiSens Input Reader: Universal reader for all kinds of GeoMultiSens intermediate results."""

28 

29import collections 

30import fnmatch 

31import json 

32import os 

33import re 

34import tarfile 

35import zipfile 

36from tempfile import NamedTemporaryFile as tempFile 

37from logging import Logger 

38from typing import List, Tuple, TYPE_CHECKING # noqa F401 # flake8 issue 

39from datetime import datetime 

40import logging 

41 

42import dill 

43import numpy as np 

44import spectral 

45from spectral.io import envi as envi 

46from pyproj import CRS 

47 

48from geoarray import GeoArray 

49from py_tools_ds.geo.coord_calc import corner_coord_to_minmax 

50from py_tools_ds.geo.vector.geometry import boxObj 

51from py_tools_ds.geo.coord_trafo import transform_any_prj 

52from py_tools_ds.geo.projection import isProjectedOrGeographic 

53from pyrsr.rsr import RSR_reader 

54 

55from ..options.config import GMS_config as CFG 

56from ..misc import path_generator as PG 

57from ..misc import helper_functions as HLP_F 

58from ..misc.logging import GMS_logger, close_logger 

59from ..misc.database_tools import get_overlapping_scenes_from_postgreSQLdb 

60from ..misc.path_generator import path_generator 

61from ..misc.spatial_index_mediator import SpatialIndexMediator 

62from ..misc.locks import DatabaseLock 

63 

64if TYPE_CHECKING: 

65 from ..model.gms_object import GMS_identifier # noqa F401 # flake8 issue 

66 

67 

def read_ENVIfile(path, arr_shape, arr_pos, logger=None, return_meta=False, q=0):
    """Read an ENVI image (convenience wrapper around read_ENVI_image_data_as_array)."""
    hdr_path = os.path.splitext(path)[0] + '.hdr' if not os.path.splitext(path)[1] == '.hdr' else path
    return read_ENVI_image_data_as_array(hdr_path, arr_shape, arr_pos, logger=logger, return_meta=return_meta, q=q)


def read_ENVIhdr_to_dict(hdr_path, logger=None):
    # type: (str, Logger) -> dict
    if not os.path.isfile(hdr_path):
        if logger:
            logger.critical('read_ENVIfile: Input data not found at %s.' % hdr_path)
        else:
            print('read_ENVIfile: Input data not found at %s.' % hdr_path)
        raise FileNotFoundError(hdr_path)
    else:
        SpyFileheader = envi.open(hdr_path)
        return SpyFileheader.metadata


def read_ENVI_image_data_as_array(path, arr_shape, arr_pos, logger=None, return_meta=False, q=0):
    """Read ENVI image data as array using a specified read pattern.

    :param path:        <str> path of the ENVI image or header file
    :param arr_shape:   <str> 'cube', 'row', 'col', 'band', 'block', 'pixel' or 'custom'
    :param arr_pos:     None, <int> or <list>. The content of the list depends on the chosen arr_shape as follows:
                        - 'cube':   No array position necessary. Set arr_pos to None.
                        - 'row':    single int. -> Row with this index will be read.
                        - 'col':    single int. -> Column with this index will be read.
                        - 'band':   single int. -> Band with this index will be read.
                        - 'block':  row_bounds (2-tuple of ints): (a, b) -> Rows a through b will be read.
                                    col_bounds (2-tuple of ints): (a, b) -> Columns a through b will be read.
                        - 'pixel':  row, col (int): Indices of the row & column for the pixel to be read.
                        - 'custom': row_bounds (2-tuple of ints): (a, b) -> Rows a through b will be read.
                                    col_bounds (2-tuple of ints): (a, b) -> Columns a through b will be read.
                                    bands (list of ints): Optional list of bands to read.
                                    If not specified, all bands are read.
    :param logger:      <instance> of logging.Logger (optional)
    :param return_meta: <bool> whether to return not only raster data but also metadata (optional, default=False)
    :param q:           <bool> quiet mode (suppresses all console or logging output) (optional, default=0)
    """
    hdr_path = os.path.splitext(path)[0] + '.hdr' if not os.path.splitext(path)[1] == '.hdr' else path
    if not os.path.isfile(hdr_path):
        if not q:
            if logger:
                logger.critical('read_ENVIfile: Input data not found at %s.' % hdr_path)
            else:
                print('read_ENVIfile: Input data not found at %s.' % hdr_path)
    else:
        assert arr_shape in ['cube', 'row', 'col', 'band', 'block', 'pixel', 'custom'], \
            "Array shape '%s' is not known. Known array shapes are cube, row, col, band, block, pixel, custom." \
            % arr_shape
        if logger and not q:
            logger.info('Reading %s ...' % (os.path.basename(hdr_path)))
        File_obj = spectral.open_image(hdr_path)
        SpyFileheader = envi.open(hdr_path)
        image_data = File_obj.read_bands(range(SpyFileheader.nbands)) if arr_shape == 'cube' else \
            File_obj.read_subimage([arr_pos], range(SpyFileheader.ncols)) if arr_shape == 'row' else \
            File_obj.read_subimage(range(SpyFileheader.nrows), [arr_pos]) if arr_shape == 'col' else \
            File_obj.read_band(arr_pos) if arr_shape == 'band' else \
            File_obj.read_subregion((arr_pos[0][0], arr_pos[0][1] + 1),
                                    (arr_pos[1][0], arr_pos[1][1] + 1)) if arr_shape == 'block' else \
            File_obj.read_pixel(arr_pos[0], arr_pos[1]) if arr_shape == 'pixel' else \
            File_obj.read_subregion((arr_pos[0][0], arr_pos[0][1] + 1),
                                    (arr_pos[1][0], arr_pos[1][1] + 1), arr_pos[2])  # custom
        return (image_data, SpyFileheader.metadata) if return_meta else image_data

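# Example calls for the read patterns described above (hypothetical file paths; a usage sketch, not executed here):
#   cube = read_ENVI_image_data_as_array('/path/to/image.hdr', 'cube', None)
#   band3 = read_ENVI_image_data_as_array('/path/to/image.hdr', 'band', 3)
#   block = read_ENVI_image_data_as_array('/path/to/image.hdr', 'block', ((0, 99), (0, 99)))
#   subset = read_ENVI_image_data_as_array('/path/to/image.hdr', 'custom', ((0, 99), (0, 99), [0, 1, 2]))
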

def read_mask_subset(path_masks, bandname, logger, subset=None):
    """Read a single mask band, or a subset of it, from an ENVI mask file containing multiple mask bands."""
    subset = subset if subset else ['cube', None]
    assert subset[0] in ['cube', 'block', 'MGRS_tile'], \
        "INP_R.read_mask_subset(): '%s' subset is not supported." % subset[0]
    path_masks_hdr = \
        os.path.splitext(path_masks)[0] + '.hdr' if not os.path.splitext(path_masks)[1] == '.hdr' else path_masks
    hdrDict = read_ENVIhdr_to_dict(path_masks_hdr, logger)
    (rS, rE), (cS, cE) = ((0, hdrDict['lines']), (0, hdrDict['samples'])) if subset[0] == 'cube' else subset[1]
    band_idx = hdrDict['band names'].index(bandname) if bandname in hdrDict['band names'] else None
    if band_idx is None:
        logger.warning("No band called '%s' in %s. Attribute set to <None>." % (bandname, path_masks))
        mask_sub = None
    else:
        if subset is None or subset[0] == 'cube':
            mask_sub = read_ENVI_image_data_as_array(path_masks_hdr, 'band', band_idx, logger=logger, q=1)
        else:
            mask_sub = read_ENVI_image_data_as_array(
                path_masks_hdr, 'custom', ((rS, rE), (cS, cE), [band_idx]), logger=logger, q=1)
        mask_sub = mask_sub[:, :, 0] if len(mask_sub.shape) == 3 and mask_sub.shape[2] == 1 else mask_sub
    return mask_sub

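# Example (hypothetical mask file with a 'CLD' band; a usage sketch reading a 1000 x 1000 px block):
#   cld_mask = read_mask_subset('/path/to/masks.bsq', 'CLD', logger, subset=['block', ((0, 999), (0, 999))])
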

def GMSfile2dict(path_GMSfile):
    """Converts a JSON file (like the GMS file) to a Python dictionary with keys and values.

    :param path_GMSfile: absolute path on disk
    :return:             the corresponding Python dictionary
    """
    with open(path_GMSfile) as inF:
        GMSdict = json.load(inF)
    return GMSdict


def unify_envi_header_keys(header_dict):
    """Ensures the compatibility of ENVI header keys written by Spectral-Python with the code-internal attribute names.

    (ENVI header keys are always lowercase, in contrast to the attribute names used in the code.)

    :param header_dict:
    """
    refkeys = ['AcqDate', 'AcqTime', 'Additional', 'FieldOfView', 'IncidenceAngle', 'Metafile', 'PhysUnit',
               'ProcLCode', 'Quality', 'Satellite', 'Sensor', 'SunAzimuth', 'SunElevation', 'ViewingAngle']
    unified_header_dict = dict(header_dict)  # copy to avoid mutating header_dict while iterating over its keys
    for key in header_dict.keys():
        for refkey in refkeys:
            # case-insensitive match of the lowercase ENVI header key against the reference key
            if re.match(key, refkey, re.I) and key != refkey:
                unified_header_dict[refkey] = header_dict[key]
                del unified_header_dict[key]
    return unified_header_dict

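# Example (hypothetical header dict; the lowercase ENVI key 'acqdate' is mapped to the internal key 'AcqDate',
# while unknown keys like 'lines' are passed through unchanged):
#   unify_envi_header_keys({'acqdate': '2015-08-23', 'lines': '1024'})
#   # -> {'lines': '1024', 'AcqDate': '2015-08-23'}
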

def get_list_GMSfiles(dataset_list, target):
    """Returns a list of absolute paths pointing to gms-files of truly written datasets that fulfill certain criteria.

    :param dataset_list: [dataset1_dictionary, dataset2_dictionary]
    :param target:       target GMS processing level
    :return:             [/path/to/gms_file1.gms, /path/to/gms_file2.gms]
    """
    dataset_list = [dataset_list] if not isinstance(dataset_list, list) else dataset_list

    def get_gmsP(ds, tgt):
        return PG.path_generator(ds, proc_level=tgt).get_path_gmsfile()

    GMS_list = [p for p in [get_gmsP(ds, target) for ds in dataset_list] if os.path.exists(p)]

    return GMS_list

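# Example (hypothetical dataset dictionaries; a usage sketch returning only GMS files that exist on disk):
#   gms_paths = get_list_GMSfiles([dataset1_dict, dataset2_dict], target='L1A')
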

def SRF_Reader(GMS_id, no_thermal=None, no_pan=None, v=False):
    # type: (GMS_identifier, bool, bool, bool) -> collections.OrderedDict
    """Read SRFs for any sensor and return a dictionary containing band names as keys and SRF numpy arrays as values.

    :param GMS_id:
    :param no_thermal:  whether to exclude thermal bands from the returned bands list
                        (default: CFG.skip_thermal)
    :param no_pan:      whether to exclude panchromatic bands from the returned bands list
                        (default: CFG.skip_pan)
    :param v:           verbose mode
    """
    # set defaults
    # NOTE: these values cannot be set in the function signature because CFG is not present at library import time
    no_thermal = no_thermal if no_thermal is not None else CFG.skip_thermal
    no_pan = no_pan if no_pan is not None else CFG.skip_pan

    logger = GMS_id.logger or Logger(__name__)

    SRF_dict = RSR_reader(satellite=GMS_id.satellite,
                          sensor=GMS_id.sensor,
                          subsystem=GMS_id.subsystem,
                          no_thermal=no_thermal,
                          no_pan=no_pan,
                          after_ac=False,
                          sort_by_cwl=True,
                          tolerate_missing=True,
                          logger=logger,
                          v=v)

    return SRF_dict

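# Example (hypothetical L1A object; a usage sketch iterating over the returned OrderedDict of band name -> SRF array):
#   srf_dict = SRF_Reader(L1A_obj.GMS_identifier)
#   for bandname, srf in srf_dict.items():
#       print(bandname, srf.shape)
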

def pickle_SRF_DB(L1A_Instances, dir_out):
    list_GMS_identifiers = [i.GMS_identifier for i in L1A_Instances]
    out_dict = collections.OrderedDict()
    logger = GMS_logger('log__SRF2PKL', path_logfile=os.path.join(dir_out, 'log__SRF2PKL.log'),
                        log_level=CFG.log_level, append=False)
    for Id, Inst in zip(list_GMS_identifiers, L1A_Instances):
        Id['logger'] = logger
        out_dict[
            Inst.satellite + '_' + Inst.sensor + (('_' + Inst.subsystem) if Inst.subsystem not in ['', None] else '')] \
            = SRF_Reader(Id)
    print(list(out_dict.keys()))
    outFilename = os.path.join(dir_out, 'SRF_DB.pkl')
    with open(outFilename, 'wb') as outFile:
        dill.dump(out_dict, outFile)
    print('Saved SRF dictionary to %s' % outFilename)
    # with open(outFilename, 'rb') as inFile:
    #     readFile = pickle.load(inFile)
    # [print(i) for i in readFile.keys()]
    logger.close()


def Solar_Irradiance_reader(resol_nm=None, wvl_min_nm=None, wvl_max_nm=None):
    """Read the solar irradiance file and return an array of irradiances.

    :param resol_nm:    spectral resolution for returned irradiances [nanometers]
    :param wvl_min_nm:  minimum wavelength of returned irradiances [nanometers]
    :param wvl_max_nm:  maximum wavelength of returned irradiances [nanometers]
    :return:            2-column array of wavelengths/irradiances, or a 1D array of resampled irradiances
                        if resol_nm is given
    """
    from scipy.interpolate import interp1d

    sol_irr = np.loadtxt(CFG.path_solar_irr, skiprows=1)
    if resol_nm is not None and isinstance(resol_nm, (float, int)):
        wvl_min = (np.min(sol_irr[:, 0]) if wvl_min_nm is None else wvl_min_nm)
        wvl_max = (np.max(sol_irr[:, 0]) if wvl_max_nm is None else wvl_max_nm)
        wvl_rsp = np.arange(wvl_min, wvl_max, resol_nm)
        sol_irr = interp1d(sol_irr[:, 0], sol_irr[:, 1], kind='linear')(wvl_rsp)
    return sol_irr

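# Example (assumes CFG.path_solar_irr points to the configured solar irradiance table; a usage sketch):
#   sol_irr = Solar_Irradiance_reader(resol_nm=1, wvl_min_nm=350, wvl_max_nm=2500)
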

def open_specific_file_within_archive(path_archive, matching_expression, read_mode='r'):
    # type: (str, str, str) -> (str, str)
    """Finds a specific file within an archive using a given matching expression and returns its content as string.

    :param path_archive:        the file path of the archive
    :param matching_expression: the matching expression to find the file within the archive
    :param read_mode:           the read mode used to open the archive (default: 'r')
    :return:                    tuple(content_file, filename_file)
    """
    file_suffix = os.path.splitext(path_archive)[1][1:]
    file_suffix = 'tar.gz' if path_archive.endswith('tar.gz') else file_suffix
    assert file_suffix in ['zip', 'tar', 'gz', 'tgz', 'tar.gz'], '*.%s files are not supported.' % file_suffix

    if file_suffix == 'zip':
        archive = zipfile.ZipFile(path_archive, 'r')
        # [print(i) for i in archive.namelist()]
        matching_files = fnmatch.filter(archive.namelist(), matching_expression)

        # NOTE: Flink deactivates assertions via the python -O flag, so a usual 'assert matching_files'
        #       does NOT work here.
        if not matching_files:
            raise RuntimeError('Matching expression matches no file. Please revise your expression!')
        if len(matching_files) > 1:
            raise RuntimeError('Matching expression matches more than 1 file. Please revise your expression!')

        content_file = archive.read(matching_files[0])
        filename_file = os.path.join(path_archive, matching_files[0])

    else:  # 'tar', 'gz', 'tgz', 'tar.gz'
        # open in stream mode, which is much faster than normal mode; 'r|*' handles compression transparently
        # (the previous mode 'r|gz' failed for uncompressed *.tar archives)
        archive = tarfile.open(path_archive, 'r|*')
        count_matching_files = 0
        for F in archive:
            if fnmatch.fnmatch(F.name, matching_expression):
                content_file = archive.extractfile(F)
                content_file = content_file.read()
                filename_file = os.path.join(path_archive, F.name)
                count_matching_files += 1

        # NOTE: Flink deactivates assertions via the python -O flag, so a usual 'assert matching_files'
        #       does NOT work here.
        if count_matching_files == 0:
            raise RuntimeError('Matching expression matches no file. Please revise your expression!')
        if count_matching_files > 1:
            raise RuntimeError('Matching expression matches more than 1 file. Please revise your expression!')

    archive.close()
    content_file = content_file.decode('latin-1') \
        if isinstance(content_file, bytes) and read_mode == 'r' else content_file  # Python 3

    return content_file, filename_file

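# Example (hypothetical Landsat archive; a usage sketch returning the MTL file content and its path within the archive):
#   mtl_content, mtl_path = open_specific_file_within_archive(
#       '/path/to/LC81930242015238LGN00.tar.gz', '*_MTL.txt')
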

class DEM_Creator(object):
    def __init__(self, dem_sensor='SRTM', db_conn='', logger=None):
        """Creator class for digital elevation models based on ASTER or SRTM.

        :param dem_sensor: 'SRTM' or 'ASTER'
        :param db_conn:    database connection string
        :param logger:     a logging.Logger instance (optional)
        """
        if dem_sensor not in ['SRTM', 'ASTER']:
            raise ValueError('%s is not a supported DEM sensor. Choose between SRTM and ASTER (both 30m native GSD).'
                             % dem_sensor)

        self.dem_sensor = dem_sensor
        self.db_conn = db_conn if db_conn else CFG.conn_database
        self.logger = logger or logging.getLogger('DEM_Creator')

        self.project_dir = os.path.abspath(os.path.curdir)
        self.rootpath_DEMs = ''
        self.imNames = []
        self.dsID_dic = dict(ASTER=2, SRTM=225)
        self.DEM = None

    def __getstate__(self):
        """Defines how the attributes of DEM_Creator are pickled."""
        if self.logger not in [None, 'not set']:
            close_logger(self.logger)
            self.logger = None
        return self.__dict__

    def __del__(self):
        close_logger(self.logger)
        self.logger = None

    @staticmethod
    def _get_corner_coords_lonlat(cornerCoords_tgt, prj):
        # transform to longitude/latitude coordinates
        tgt_corner_coord_lonlat = [transform_any_prj(prj, 4326, X, Y) for X, Y in cornerCoords_tgt]

        # handle coordinates crossing the 180 degrees meridian
        xvals = [x for x, y in tgt_corner_coord_lonlat]
        if max(xvals) - min(xvals) > 180:
            tgt_corner_coord_lonlat = [(x, y) if x > 0 else (x + 360, y) for x, y in tgt_corner_coord_lonlat]

        return tgt_corner_coord_lonlat

    def get_overlapping_DEM_tiles(self, tgt_corner_coord_lonlat, timeout_sec=30, use_index_mediator=True):
        # type: (List[Tuple], int, bool) -> list
        """Get the overlapping DEM tiles for the given extent.

        :param tgt_corner_coord_lonlat: list of longitude/latitude target coordinates [[X,Y], [X,Y], ...]
        :param timeout_sec:             database query timeout (seconds)
        :param use_index_mediator:      whether to use the SpatialIndexMediator (default: True)
        :return: list of matching DEM tile scene IDs
        """
        if use_index_mediator:
            SpIM = SpatialIndexMediator(host=CFG.spatial_index_server_host, port=CFG.spatial_index_server_port,
                                        timeout=timeout_sec, retries=10)
            with DatabaseLock(allowed_slots=1, logger=self.logger):
                scenes = SpIM.getFullSceneDataForDataset(envelope=corner_coord_to_minmax(tgt_corner_coord_lonlat),
                                                         timeStart=datetime(1970, 1, 1, 0, 0, 0),
                                                         timeEnd=datetime(2100, 12, 31, 0, 0, 0),
                                                         minCloudCover=0, maxCloudCover=100,
                                                         datasetid=self.dsID_dic[self.dem_sensor])
            sceneIDs_srtm = [scene.sceneid for scene in scenes]

        else:
            sceneIDs_srtm = get_overlapping_scenes_from_postgreSQLdb(self.db_conn,
                                                                     table='scenes',
                                                                     tgt_corners_lonlat=tgt_corner_coord_lonlat,
                                                                     conditions=['datasetid=%s'
                                                                                 % self.dsID_dic[self.dem_sensor]],
                                                                     timeout=timeout_sec * 1000)  # milliseconds
            sceneIDs_srtm = [i[0] for i in sceneIDs_srtm]

        return sceneIDs_srtm

    def _get_DEMPathes_to_include(self, tgt_corner_coord_lonlat, timeout_sec=30):
        # type: (List[Tuple], int) -> list
        """Return the paths of DEM files to merge in order to generate a DEM covering the given area of interest.

        :param tgt_corner_coord_lonlat: list of longitude/latitude target coordinates [(X,Y), (X,Y), ...]
        :param timeout_sec:             database query timeout (seconds)
        :return: list of GDAL readable paths
        """
        # get overlapping SRTM scene IDs from the GMS database
        try:
            # try to use the SpatialIndexMediator
            # noinspection PyBroadException
            try:
                sceneIDs_srtm = self.get_overlapping_DEM_tiles(tgt_corner_coord_lonlat, timeout_sec)
            except ConnectionRefusedError:
                # fall back to a plain pgSQL query
                self.logger.warning('SpatialIndexMediator refused connection. Falling back to plain postgreSQL query.')
                sceneIDs_srtm = self.get_overlapping_DEM_tiles(tgt_corner_coord_lonlat, use_index_mediator=False)
            except Exception as err:
                # fall back to a plain pgSQL query
                self.logger.warning('Error while running SpatialIndexMediator database query. '
                                    'Falling back to plain postgreSQL query. '
                                    'Error message was: %s' % str(repr(err)))
                sceneIDs_srtm = self.get_overlapping_DEM_tiles(tgt_corner_coord_lonlat, use_index_mediator=False)

            if not sceneIDs_srtm:
                # fall back to a plain pgSQL query
                self.logger.warning('SpatialIndexMediator did not return matching DEM tiles. '
                                    'Trying plain postgreSQL query...')
                sceneIDs_srtm = self.get_overlapping_DEM_tiles(tgt_corner_coord_lonlat, use_index_mediator=False)

        except TimeoutError:
            raise TimeoutError('Spatial database query for %s DEM generation timed out after %s seconds.'
                               % (self.dem_sensor, timeout_sec))

        if not sceneIDs_srtm:
            raise RuntimeError('No matching %s scene IDs for DEM generation found.' % self.dem_sensor)

        # get corresponding filenames on disk and generate GDAL /vsi paths pointing to raster files within archives
        archivePaths = [path_generator(scene_ID=ID).get_local_archive_path_baseN() for ID in sceneIDs_srtm]
        self.rootpath_DEMs = os.path.dirname(archivePaths[0])
        imNameMatchExp = '*.bil' if self.dem_sensor == 'SRTM' else '*dem.tif'
        self.imNames = [fnmatch.filter(HLP_F.get_zipfile_namelist(aP), imNameMatchExp)[0] for aP in archivePaths]
        gdalImPaths = [os.path.join(HLP_F.convert_absPathArchive_to_GDALvsiPath(aP), bN)
                       for aP, bN in zip(archivePaths, self.imNames)]

        return gdalImPaths

    def _run_cmd(self, cmd):
        output, exitcode, err = HLP_F.subcall_with_output(cmd)
        if exitcode:
            self.logger.error('\nAn error occurred during DEM creation.')
            self.logger.warning("Print traceback in case you care:")
            self.logger.warning(err.decode('latin-1'))
        if output:
            return output.decode('UTF-8')

    def from_extent(self, cornerCoords_tgt, prj, tgt_xgsd, tgt_ygsd):
        """Returns a GeoArray of a DEM according to the given target coordinates.

        :param cornerCoords_tgt: list of target coordinates [[X,Y], [X,Y], ...] (at least 2 coordinates)
        :param prj:              WKT string of the projection belonging to cornerCoords_tgt
        :param tgt_xgsd:         output X GSD
        :param tgt_ygsd:         output Y GSD
        :return: DEM GeoArray
        """
        # generate at least 4 coordinates in case fewer have been given, in order to avoid nodata triangles in the DEM
        if len(cornerCoords_tgt) < 4 and isProjectedOrGeographic(prj) == 'projected':
            co_yx = [(y, x) for x, y in cornerCoords_tgt]
            cornerCoords_tgt = boxObj(boxMapYX=co_yx).boxMapXY

        # handle coordinate info
        tgt_corner_coord_lonlat = self._get_corner_coords_lonlat(cornerCoords_tgt, prj)

        # get GDAL readable paths
        pathes = self._get_DEMPathes_to_include(tgt_corner_coord_lonlat)

        # build a GDAL VRT from the paths and create the output DEM
        if not os.path.exists(CFG.path_tempdir):
            os.makedirs(CFG.path_tempdir)  # the directory where the tempfile is created must exist (CentOS)
        with tempFile(dir=CFG.path_tempdir, prefix='GeoMultiSens_', suffix='_dem_merged.vrt') as tFm, \
                tempFile(dir=CFG.path_tempdir, prefix='GeoMultiSens_', suffix='_dem_out.vrt') as tFo:

            try:
                os.chdir(self.rootpath_DEMs)

                # create a merged VRT of all input DEMs
                t_xmin, t_xmax, t_ymin, t_ymax = corner_coord_to_minmax(tgt_corner_coord_lonlat)
                self._run_cmd('gdalbuildvrt %s %s -te %s %s %s %s -vrtnodata 0'
                              % (tFm.name, ' '.join(pathes), t_xmin, t_ymin, t_xmax, t_ymax))

                # run gdalwarp to match the target grid and extent
                merged_prj = GeoArray(tFm.name).prj
                t_xmin, t_xmax, t_ymin, t_ymax = corner_coord_to_minmax(cornerCoords_tgt)
                self._run_cmd('gdalwarp -r average -of VRT -srcnodata 0 -dstnodata 0 '
                              '-tr %s %s -s_srs EPSG:%s -t_srs EPSG:%s -te %s %s %s %s %s %s'
                              % (tgt_xgsd, tgt_ygsd, CRS(merged_prj).to_epsg(), CRS(prj).to_epsg(),
                                 t_xmin, t_ymin, t_xmax, t_ymax, tFm.name, tFo.name))
                assert os.path.exists(tFo.name)

                self.DEM = GeoArray(tFo.name).to_mem()

            finally:
                os.chdir(self.project_dir)

        return self.DEM
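
# Example (hypothetical UTM extent; a usage sketch assuming a configured GMS database and local DEM archives;
# 'utm_wkt_string' stands for the WKT string of the projection the corner coordinates refer to):
#   DC = DEM_Creator(dem_sensor='SRTM')
#   dem = DC.from_extent([(336000, 5826000), (336000, 5848000), (363000, 5848000), (363000, 5826000)],
#                        prj=utm_wkt_string, tgt_xgsd=30, tgt_ygsd=30)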