# -*- coding: utf-8 -*-

# gms_preprocessing, spatial and spectral homogenization of satellite remote sensing data
#
# Copyright (C) 2020 Daniel Scheffler (GFZ Potsdam, daniel.scheffler@gfz-potsdam.de)
#
# This software was developed within the context of the GeoMultiSens project funded
# by the German Federal Ministry of Education and Research
# (project grant code: 01 IS 14 010 A-C).
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later version.
# Please note the following exception: `gms_preprocessing` depends on tqdm, which
# is distributed under the Mozilla Public Licence (MPL) v2.0 except for the files
# "tqdm/_tqdm.py", "setup.py", "README.rst", "MANIFEST.in" and ".gitignore".
# Details can be found here: https://github.com/tqdm/tqdm/blob/master/LICENCE.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.

"""Output writer: Universal writer for all kinds of GeoMultiSens intermediate results."""


# from __future__ import (division, print_function, absolute_import)
# unicode literals cause writing errors
import collections
import datetime
import gzip
import os
from typing import TYPE_CHECKING
import pickle

import dill
import numpy as np
from osgeo import gdal_array
from spectral.io import envi
from spectral.io.envi import check_compatibility, check_new_filename, write_envi_header, _write_header_param

try:
    from osgeo import ogr
    from osgeo import osr
    from osgeo import gdal
    from osgeo import gdalnumeric
except ImportError:
    import ogr
    import osr
    import gdal
    import gdalnumeric
import builtins
from itertools import chain

from ..options.config import GMS_config as CFG
from ..misc import helper_functions as HLP_F
from ..misc.definition_dicts import \
    get_mask_classdefinition, get_mask_colormap, get_outFillZeroSaturated, dtype_lib_Python_IDL
from ..misc.logging import GMS_logger

if TYPE_CHECKING:
    from ..model.gms_object import GMS_object  # noqa F401  # flake8 issue


enviHdr_keyOrder = \
    ['ENVI', 'description', 'samples', 'lines', 'bands', 'header offset', 'file type', 'data type',
     'interleave', 'data ignore value', 'sensor type', 'byte order', 'file compression', 'version_gms_preprocessing',
     'versionalias_gms_preprocessing', 'reflectance scale factor', 'class lookup', 'classes', 'class names',
     'map info', 'coordinate system string', 'CS_TYPE', 'CS_EPSG', 'CS_DATUM', 'CS_UTM_ZONE',
     'corner coordinates lonlat', 'image_type', 'Satellite', 'Sensor', 'Subsystem', 'SceneID', 'EntityID', 'arr_pos',
     'arr_shape', 'Metafile', 'gResolution', 'AcqDate', 'AcqTime', 'wavelength', 'bandwidths', 'band names',
     'LayerBandsAssignment', 'data gain values', 'data offset values', 'reflectance gain values',
     'reflectance offset values', 'ThermalConstK1', 'ThermalConstK2', 'ProcLCode', 'PhysUnit', 'ScaleFactor',
     'wavelength units', 'SunElevation', 'SunAzimuth', 'SolIrradiance', 'EarthSunDist', 'ViewingAngle',
     'IncidenceAngle', 'FieldOfView', 'scene length', 'overpass duraction sec', 'Quality', 'Additional']


def silent_envi_write_image(hdr_file, data, header, **kwargs):
    """
    Monkeypatch for spectral.io.envi._write_image in order to silence the output stream.
    """
    from ..misc.helper_functions import safe_str
    # force unicode strings
    header = dict((k, v) if not isinstance(v, str) else (k, safe_str(v)) for k, v in header.items())

    check_compatibility(header)
    force = kwargs.get('force', False)
    img_ext = kwargs.get('ext', '.img')

    (hdr_file, img_file) = check_new_filename(hdr_file, img_ext, force)
    write_envi_header(hdr_file, header, is_library=False)

    # bufsize = data.shape[0] * data.shape[1] * np.dtype(dtype).itemsize
    bufsize = data.shape[0] * data.shape[1] * data.dtype.itemsize
    fout = builtins.open(img_file, 'wb', bufsize)
    fout.write(data.tobytes())  # ndarray.tobytes() replaces the deprecated tostring(), removed in recent numpy
    fout.close()


def write_ordered_envi_header(fileName, header_dict, is_library=False):
    """
    Monkeypatch for spectral.io.envi.write_envi_header in order to write ordered output headers.
    """
    fout = builtins.open(fileName, 'w')
    if isinstance(header_dict, collections.OrderedDict):
        d = header_dict
    else:
        d = {}
        d.update(header_dict)

    if is_library:
        d['file type'] = 'ENVI Spectral Library'
    elif 'file type' not in d:
        d['file type'] = 'ENVI Standard'
    fout.write('ENVI\n')
    # Write the standard parameters at the top of the file
    std_params = ['description', 'samples', 'lines', 'bands', 'header offset',
                  'file type', 'data type', 'interleave', 'sensor type',
                  'byte order', 'reflectance scale factor', 'map info']
    for k in std_params:
        if k in d.keys():
            _write_header_param(fout, k, d[k])
    for k in d.keys():
        if k not in std_params:
            _write_header_param(fout, k, d[k])
    fout.close()


# monkey-patch the header writer with a version that respects the item order of the metadata dict
envi.write_envi_header = write_ordered_envi_header

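# A minimal usage sketch of the ordered writer (the function name below and the output
# path are hypothetical, not part of the original module): keys of an OrderedDict end
# up in the header file in insertion order instead of arbitrary dict order.
def _example_write_ordered_envi_header():  # pragma: no cover
    meta = collections.OrderedDict([('description', 'ordered header example'),
                                    ('samples', 2), ('lines', 2), ('bands', 1),
                                    ('header offset', 0), ('data type', 1),
                                    ('interleave', 'bsq'), ('byte order', 0)])
    write_ordered_envi_header('/tmp/ordered_example.hdr', meta)  # hypothetical output path
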

def write_ENVI_compressed(outPath_hdr, ndarray, meta, interleave='bsq'):
    assert interleave in ['bsq', 'bil', 'bip']
    if len(ndarray.shape) == 3:  # 3D
        if interleave == 'bsq':
            arr2write = np.rollaxis(ndarray, 2)  # => bands, rows, cols
        elif interleave == 'bil':
            arr2write = np.swapaxes(ndarray, 1, 2)  # => rows, bands, cols
        else:  # 'bip'
            arr2write = ndarray  # => rows, cols, bands
    else:  # 2D
        arr2write = ndarray

    # write binary file
    outpathBinary = '%s.%s' % (os.path.splitext(outPath_hdr)[0], interleave)
    with gzip.open(outpathBinary, 'wb', compresslevel=1) as f:  # compresslevel can be increased up to 9
        f.write(arr2write.tobytes())  # ndarray.tobytes() replaces the deprecated tostring(), removed in recent numpy

    # write header
    meta['file compression'] = 1
    write_ordered_envi_header(outPath_hdr, meta)

    # check if the output is GDAL-readable
    if gdalnumeric.LoadFile(outpathBinary, 0, 0, 1, 1) is None:
        return 0
    else:
        return 1

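# Hedged usage sketch (the function name, path, array and metadata are hypothetical;
# not part of the original module): gzip-compress a small BSQ cube and write the
# matching header. The return value reports whether GDAL could read the result.
def _example_write_ENVI_compressed():  # pragma: no cover
    arr = np.ones((4, 5, 3), dtype=np.uint8)  # rows, cols, bands
    meta = {'samples': 5, 'lines': 4, 'bands': 3, 'header offset': 0,
            'data type': 1, 'interleave': 'bsq', 'byte order': 0}
    success = write_ENVI_compressed('/tmp/compressed_example.hdr', arr, meta, 'bsq')
    print('GDAL-readable: %s' % bool(success))
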

def HDR_writer(meta_dic, outpath_hdr, logger=None):
    if logger is not None:
        logger.info('Writing %s header ...' % os.path.basename(outpath_hdr))
    envi.write_envi_header(outpath_hdr, meta_dic)
    reorder_ENVI_header(outpath_hdr, enviHdr_keyOrder)


def Tiles_Writer(tileList_or_Array, out_path, out_shape, out_dtype, out_interleave, out_meta=None,
                 arr_pos=None, overwrite=True):
    """Write tiles to disk using numpy.memmap.

    :param tileList_or_Array:   <list of dicts> each dict has keys 'row_start', 'row_end', 'col_start', 'col_end'
                                and 'data'
                                <numpy array> representing a subset of a full array; requires arr_pos
    :param out_path:            <str> path to the ENVI header file *.hdr
    :param out_shape:           <tuple or list> (rows, cols, bands)
    :param out_dtype:           <object> numpy data type object
    :param out_interleave:      <str> 'bsq', 'bil' or 'bip'
    :param out_meta:            <dict> metadata dictionary to be written to the header
    :param arr_pos:             <tuple> ((row_start, row_end), (col_start, col_end))
    :param overwrite:           <bool>
    """
    assert isinstance(tileList_or_Array, (list, np.ndarray))
    if isinstance(tileList_or_Array, np.ndarray):
        assert arr_pos and isinstance(arr_pos, (list, tuple))

    oP_hdr = out_path
    oP_ext = os.path.splitext(oP_hdr)[0] + '.%s' % out_interleave
    rows, cols, bands = out_shape

    # write binary file
    if not os.path.exists(oP_ext):
        open(oP_ext, 'a').close()  # create an empty binary file
    elif overwrite:
        open(oP_ext, 'w').close()  # overwrite the binary file with an empty one

    if out_interleave == 'bsq':
        memmap = np.memmap(oP_ext, dtype=out_dtype, mode='r+', offset=0, shape=(bands, rows, cols))
        memmap = np.swapaxes(np.swapaxes(memmap, 0, 2), 0, 1)  # rows, cols, bands
    elif out_interleave == 'bil':
        memmap = np.memmap(oP_ext, dtype=out_dtype, mode='r+', offset=0, shape=(rows, bands, cols))
        memmap = np.swapaxes(memmap, 1, 2)
    else:  # bip
        memmap = np.memmap(oP_ext, dtype=out_dtype, mode='r+', offset=0, shape=(rows, cols, bands))

    if isinstance(tileList_or_Array, list):
        tileList_or_Array = [dict(i) for i in tileList_or_Array]
        is_3D = len(tileList_or_Array[0]['data'].shape) == 3
        for tile in tileList_or_Array:
            data = tile['data'] if is_3D else tile['data'][:, :, None]
            memmap[tile['row_start']:tile['row_end'] + 1, tile['col_start']:tile['col_end'] + 1, :] = data
    else:
        (rS, rE), (cS, cE) = arr_pos
        is_3D = len(tileList_or_Array.shape) == 3
        data = tileList_or_Array if is_3D else tileList_or_Array[:, :, None]
        memmap[rS:rE + 1, cS:cE + 1, :] = data

    # write header
    std_meta = {'lines': rows, 'samples': cols, 'bands': bands, 'header offset': 0, 'byte order': 0,
                'interleave': out_interleave, 'data type': dtype_lib_Python_IDL[out_dtype]}
    out_meta = out_meta if out_meta else {}
    out_meta.update(std_meta)
    from ..model.metadata import metaDict_to_metaODict
    out_meta = metaDict_to_metaODict(out_meta)

    if not os.path.exists(oP_hdr) or overwrite:
        write_envi_header(oP_hdr, out_meta)

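# A minimal sketch of both call variants (the function name, paths and sizes are
# hypothetical; it assumes dtype_lib_Python_IDL is keyed by dtype name strings such
# as 'int16' and that metaDict_to_metaODict accepts the minimal std_meta keys):
def _example_Tiles_Writer():  # pragma: no cover
    # variant 1: a list of tile dicts covering the full output array
    tiles = [dict(row_start=0, row_end=1, col_start=0, col_end=3, data=np.zeros((2, 4, 2), np.int16)),
             dict(row_start=2, row_end=3, col_start=0, col_end=3, data=np.ones((2, 4, 2), np.int16))]
    Tiles_Writer(tiles, '/tmp/tiles_example.hdr', (4, 4, 2), 'int16', 'bsq')

    # variant 2: a single array subset positioned via arr_pos (row/col bounds are inclusive)
    subset = np.full((4, 4, 2), 7, np.int16)
    Tiles_Writer(subset, '/tmp/subset_example.hdr', (4, 4, 2), 'int16', 'bsq', arr_pos=((0, 3), (0, 3)))
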

def reorder_ENVI_header(path_hdr, tgt_keyOrder):
    # type: (str, list) -> None
    """Reorder the keys of an ENVI header file according to the given target order. Keys included in the target
    order list but missing in the ENVI file are skipped.
    This function is a workaround for envi.write_envi_header of the Spectral Python library, which always writes
    the given metadata dictionary as an unordered dict.

    :param path_hdr:     <str> path of the target ENVI file
    :param tgt_keyOrder: <list> list of target keys in the correct order
    """
    with open(path_hdr, 'r') as inF:
        items = inF.read().split('\n')
    HLP_F.silentremove(path_hdr)

    with open(path_hdr, 'w') as outFile:
        for paramName in tgt_keyOrder:
            for item in items:
                if item.startswith(paramName) or item.startswith(paramName.lower()):
                    outFile.write(item + '\n')
                    items.remove(item)
                    break  # proceed with the next target key ('continue' would keep iterating the mutated list)
        # write the remaining header items
        for item in items:
            outFile.write(item + '\n')


def mask_to_ENVI_Classification(InObj, maskname):
    # type: (GMS_object, str) -> (np.ndarray, dict)
    cd = get_mask_classdefinition(maskname, InObj.satellite)
    if cd is None:
        InObj.logger.warning("No class definition available for mask '%s'." % maskname)
    else:
        temp = np.empty_like(getattr(InObj, maskname))
        temp[:] = getattr(InObj, maskname)
        # deep copy: converts the view to its own array in order to avoid overwriting InObj.<maskname>
        classif_array = temp
        rows, cols = classif_array.shape[:2]
        bands = 1 if classif_array.ndim == 2 else classif_array.shape[2]

        mapI, CSS = InObj.MetaObj.map_info, InObj.MetaObj.projection
        mask_md = {'file type': 'ENVI Classification', 'map info': mapI, 'coordinate system string': CSS,
                   'lines': rows, 'samples': cols, 'bands': bands, 'header offset': 0, 'byte order': 0,
                   'interleave': 'bsq', 'data type': 1,
                   'data ignore value': get_outFillZeroSaturated(classif_array.dtype)[0]}

        pixelVals_in_mask = list(np.unique(classif_array))
        fillVal = get_outFillZeroSaturated(classif_array.dtype)[0]
        pixelVals_expected = sorted(list(cd.values()) + ([fillVal] if fillVal is not None else []))

        pixelVals_unexpected = [i for i in pixelVals_in_mask if i not in pixelVals_expected]
        if pixelVals_unexpected:
            InObj.logger.warning("The mask '%s' contains unexpected pixel values: %s"
                                 % (maskname, ', '.join(str(i) for i in pixelVals_unexpected)))
        mask_md['classes'] = len(pixelVals_in_mask) + 1  # one additional class for no-data pixels

        class_names = ['No data' if i == fillVal else
                       'Unknown Class' if i in pixelVals_unexpected else
                       list(cd.keys())[list(cd.values()).index(i)] for i in pixelVals_in_mask]

        clr_mp_allClasses = get_mask_colormap(maskname)
        class_colors = collections.OrderedDict(zip(class_names, [clr_mp_allClasses[cN] for cN in class_names]))

        # pixel values for the classes must be numbered like 0, 1, 2, 3, 4, ...
        if fillVal is not None:
            classif_array[classif_array == fillVal] = 0
            if fillVal in pixelVals_in_mask:
                pixelVals_in_mask.remove(fillVal)
            remaining_pixelVals = range(1, len(pixelVals_in_mask) + 1)
        else:
            remaining_pixelVals = range(len(pixelVals_in_mask))
            del mask_md['data ignore value']
        for in_val, out_val in zip(pixelVals_in_mask, remaining_pixelVals):
            classif_array[classif_array == in_val] = out_val

        classif_array = classif_array.astype(np.uint8)  # contains only 0, 1, 2, 3, 4, ...
        classif_meta = add_ENVIclassificationMeta_to_meta(mask_md, class_names, class_colors, classif_array)
        return classif_array, classif_meta

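# Worked mini-example (standalone; the function name and values are hypothetical) of
# the renumbering rule applied above: fill pixels become class 0, the remaining pixel
# values are compressed to 1..n.
def _example_class_renumbering():  # pragma: no cover
    arr = np.array([[-9999, 10], [20, 10]], dtype=np.int16)  # -9999 = assumed fill value
    arr[arr == -9999] = 0
    for in_val, out_val in zip([10, 20], [1, 2]):
        arr[arr == in_val] = out_val
    assert arr.tolist() == [[0, 1], [2, 1]]
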

def add_ENVIclassificationMeta_to_meta(meta, class_names, class_colors=None, data=None):
    # type: (dict, list, dict, np.ndarray) -> dict
    """Prepare an ENVI metadata dict to be written as ENVI classification file by adding custom class names and colors.

    :param meta:         <dict> ENVI metadata dictionary
    :param class_names:  <list> of strings with the class names
    :param class_colors: <dict> with keys representing class names and values representing 3-tuples with RGB codes
    :param data:         <numpy array> only used to estimate the number of classes if class_names is None
    """
    from spectral import spy_colors
    if class_names is None:
        assert data is not None, "'data' must be given if 'class_names' is None."
    meta['file type'] = "ENVI Classification"
    n_classes = len(class_names) if class_names else int(np.max(data) + 1)
    meta['classes'] = str(n_classes)
    meta['class names'] = class_names if class_names else \
        (['Unclassified'] + ['Class %s' % i for i in range(1, n_classes)])
    colors = list(chain.from_iterable([class_colors[class_name] for class_name in class_names])) \
        if class_colors else []
    meta['class lookup'] = colors if len(colors) == n_classes * 3 else \
        [list(spy_colors[i % len(spy_colors)]) for i in range(n_classes)]
    return meta


def check_header_not_empty(hdr):
    with open(hdr, 'r') as inF:
        content = inF.read()
    return bool(content)


def set_output_nodataVal(arr_path, nodataVal=None):
    """Set the no-data value of an already written file.

    :param arr_path:  <str> path of the raster file
    :param nodataVal: no-data value to be set; if None, it is derived from the data type of the file
                      via get_outFillZeroSaturated
    """
    ds = gdal.Open(arr_path)

    if nodataVal is None:
        dtype = gdal_array.GDALTypeCodeToNumericTypeCode(ds.GetRasterBand(1).DataType)
        nodataVal = get_outFillZeroSaturated(dtype)[0]

    for bandIdx in range(ds.RasterCount):
        band = ds.GetRasterBand(bandIdx + 1)
        band.SetNoDataValue(nodataVal)
        band.FlushCache()
        del band
    del ds


def add_attributes_to_ENVIhdr(attr2add_dict, hdr_path):
    Spyfileheader = envi.open(hdr_path)
    attr_dict = Spyfileheader.metadata
    attr_dict.update(attr2add_dict)
    HDR_writer(attr_dict, hdr_path)


def export_VZA_SZA_SAA_RAA_stats(L1A_object):
    outdict = collections.OrderedDict()
    for i in ['VZA', 'SZA', 'SAA', 'RAA']:
        if hasattr(L1A_object, i + '_arr'):
            arr = getattr(L1A_object, i + '_arr')
            outdict[i + '_mean'] = np.mean(arr[arr != -9999])
            outdict[i + '_std'] = np.std(arr[arr != -9999])
    with open(os.path.join(L1A_object.path_procdata, L1A_object.baseN + '_stats__VZA_SZA_SAA_RAA.dill'), 'wb') as outF:
        # json.dump(outdict, outF, skipkeys=True, sort_keys=True, separators=(',', ': '), indent=4)
        dill.dump(outdict, outF)
    with open(os.path.join(L1A_object.path_procdata, L1A_object.baseN + '_stats__VZA_SZA_SAA_RAA.txt'), 'w') as outF:
        for k, v in outdict.items():
            outF.write(k + ' = ' + str(v) + '\n')


def write_global_benchmark_output(list__processing_time__all_runs, list__IO_time__all_runs, data_list):
    dict_sensorcodes2write = {}
    for i in data_list:
        sensorcode = '%s_%s_%s' % (i['satellite'], i['sensor'], i['subsystem']) if i['subsystem'] is not None \
            else '%s_%s' % (i['satellite'], i['sensor'])
        if sensorcode not in dict_sensorcodes2write:
            dict_sensorcodes2write.update({sensorcode: 1})
        else:
            dict_sensorcodes2write[sensorcode] += 1
    str2write_sensors = ', '.join(['%s x %s' % (v, k) for k, v in dict_sensorcodes2write.items()])
    count_processed_data = len(data_list)
    str2write_totaltimes = '\t'.join([str(i) for i in list__processing_time__all_runs])
    str2write_IOtimes = '\t'.join([str(i) for i in list__IO_time__all_runs])
    list_mean_time_per_ds = [str(processing_time / count_processed_data) for processing_time in
                             list__processing_time__all_runs]
    str2write_times_per_ds = '\t'.join(list_mean_time_per_ds)
    str2write = '\t'.join([str(CFG.CPUs), str(count_processed_data), str2write_sensors, str2write_totaltimes,
                           str2write_IOtimes, str2write_times_per_ds, '\n'])
    if not os.path.isdir(CFG.path_benchmarks):
        os.makedirs(CFG.path_benchmarks)
    with open(os.path.join(CFG.path_benchmarks, CFG.ID), 'a') as outF:
        outF.write(str2write)


def write_shp_OLD(shapely_poly, path_out, prj=None):
    assert os.path.exists(os.path.dirname(path_out)), 'Directory %s does not exist.' % os.path.dirname(path_out)

    print('Writing %s ...' % path_out)
    HLP_F.silentremove(path_out)
    ds = ogr.GetDriverByName("Esri Shapefile").CreateDataSource(path_out)
    if prj is not None:
        srs = osr.SpatialReference()
        srs.ImportFromWkt(prj)
        layer = ds.CreateLayer('', srs, ogr.wkbPolygon)
    else:
        layer = ds.CreateLayer('', None, ogr.wkbPolygon)
    layer.CreateField(ogr.FieldDefn('id', ogr.OFTInteger))  # add one attribute
    defn = layer.GetLayerDefn()

    # create a new feature (attribute and geometry)
    feat = ogr.Feature(defn)
    feat.SetField('id', 123)

    # make a geometry from the Shapely object
    geom = ogr.CreateGeometryFromWkb(shapely_poly.wkb)
    feat.SetGeometry(geom)
    layer.CreateFeature(feat)

    # save and close everything
    del ds, layer, feat, geom


def get_dtypeStr(val):
    is_numpy = 'numpy' in str(type(val))
    DType = \
        str(np.dtype(val)) if is_numpy else \
        'int' if isinstance(val, int) else \
        'float' if isinstance(val, float) else \
        'str' if isinstance(val, str) else \
        'complex' if isinstance(val, complex) else \
        'date' if isinstance(val, datetime.datetime) else None
    assert DType, 'data type not understood'
    return DType

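# Quick standalone self-check (the function name is hypothetical, added for
# illustration) of the type mapping that write_shp below relies on:
def _example_get_dtypeStr():  # pragma: no cover
    assert get_dtypeStr(3) == 'int'
    assert get_dtypeStr(3.0) == 'float'
    assert get_dtypeStr('a') == 'str'
    assert get_dtypeStr(datetime.datetime(2020, 1, 1)) == 'date'
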

def write_shp(path_out, shapely_geom, prj=None, attrDict=None):
    shapely_geom = [shapely_geom] if not isinstance(shapely_geom, list) else shapely_geom
    attrDict = [attrDict] if not isinstance(attrDict, list) else attrDict
    # print(len(shapely_geom))
    # print(len(attrDict))
    assert len(shapely_geom) == len(attrDict), "'shapely_geom' and 'attrDict' must have the same length."
    assert os.path.exists(os.path.dirname(path_out)), 'Directory %s does not exist.' % os.path.dirname(path_out)

    print('Writing %s ...' % path_out)
    if os.path.exists(path_out):
        os.remove(path_out)
    ds = ogr.GetDriverByName("Esri Shapefile").CreateDataSource(path_out)

    if prj is not None:
        srs = osr.SpatialReference()
        srs.ImportFromWkt(prj)
    else:
        srs = None

    geom_type = list(set([gm.geom_type for gm in shapely_geom]))  # geom_type works in shapely 1.x and 2.x
    assert len(geom_type) == 1, 'All shapely geometries must belong to the same type. Got %s.' % geom_type

    layer = \
        ds.CreateLayer('', srs, ogr.wkbPoint) if geom_type[0] == 'Point' else \
        ds.CreateLayer('', srs, ogr.wkbLineString) if geom_type[0] == 'LineString' else \
        ds.CreateLayer('', srs, ogr.wkbPolygon) if geom_type[0] == 'Polygon' else \
        None  # FIXME

    if isinstance(attrDict[0], dict):
        for attr in attrDict[0].keys():
            assert len(attr) <= 10, "ogr does not support field names longer than 10 characters. '%s' is too long." \
                                    % attr
            DTypeStr = get_dtypeStr(attrDict[0][attr])
            FieldType = \
                ogr.OFTInteger if DTypeStr.startswith('int') else \
                ogr.OFTReal if DTypeStr.startswith('float') else \
                ogr.OFTString if DTypeStr.startswith('str') else \
                ogr.OFTDateTime if DTypeStr.startswith('date') else None
            FieldDefn = ogr.FieldDefn(attr, FieldType)
            if DTypeStr.startswith('float'):
                FieldDefn.SetPrecision(6)
            layer.CreateField(FieldDefn)  # add one attribute

    for i in range(len(shapely_geom)):
        # create a new feature (attribute and geometry)
        feat = ogr.Feature(layer.GetLayerDefn())
        feat.SetGeometry(ogr.CreateGeometryFromWkb(shapely_geom[i].wkb))  # make a geometry from the Shapely object

        list_attr2set = attrDict[0].keys() if isinstance(attrDict[0], dict) else []

        for attr in list_attr2set:
            val = attrDict[i][attr]
            DTypeStr = get_dtypeStr(val)
            val = int(val) if DTypeStr.startswith('int') else float(val) if DTypeStr.startswith('float') else \
                str(val) if DTypeStr.startswith('str') else val
            feat.SetField(attr, val)

        layer.CreateFeature(feat)
        feat.Destroy()

    # save and close everything
    del ds, layer

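# Hedged usage sketch (the function name, output path and attribute values are
# hypothetical; shapely is assumed importable, as write_shp expects shapely
# geometries anyway): writing two point features with an integer attribute.
def _example_write_shp():  # pragma: no cover
    from shapely.geometry import Point
    write_shp('/tmp/points_example.shp',
              [Point(13.06, 52.38), Point(13.07, 52.39)],
              attrDict=[{'id': 1}, {'id': 2}])
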

def dump_all_SRFs(outpath_dump=os.path.abspath(os.path.join(os.path.dirname(__file__), '../sandbox/out/SRF_DB.pkl')),
                  outpath_log=os.path.abspath(os.path.join(os.path.dirname(__file__), '../sandbox/out/SRF_DB.log'))):
    from .input_reader import SRF_Reader
    out_dict = {}
    logger = GMS_logger('log__SRF_DB', path_logfile=outpath_log, append=True)
    for sensorcode, out_sensorcode in zip(
            ['AST_V1', 'AST_V2', 'AST_S', 'AST_T', 'TM5', 'TM7', 'LDCM', 'RE5', 'S1', 'S4', 'S5'],
            ['ASTER_VNIR1', 'ASTER_VNIR2', 'ASTER_SWIR', 'ASTER_TIR', 'LANDSAT_TM5', 'LANDSAT_TM7', 'LANDSAT_LDCM',
             'RapidEye_5', 'Spot_1', 'Spot_4', 'Spot_5']):

        out_dict[out_sensorcode] = SRF_Reader(sensorcode, logger)

    with open(outpath_dump, 'wb') as outFile:
        pickle.dump(out_dict, outFile)

    print('Saved SRF dictionary to %s' % outpath_dump)

    with open(outpath_dump, 'rb') as inFile:
        readFile = pickle.load(inFile)

    print(readFile == out_dict)

    for i in readFile.items():
        print(i)