# -*- coding: utf-8 -*-

# gms_preprocessing, spatial and spectral homogenization of satellite remote sensing data
#
# Copyright (C) 2020  Daniel Scheffler (GFZ Potsdam, daniel.scheffler@gfz-potsdam.de)
#
# This software was developed within the context of the GeoMultiSens project funded
# by the German Federal Ministry of Education and Research
# (project grant code: 01 IS 14 010 A-C).
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later version.
# Please note the following exception: `gms_preprocessing` depends on tqdm, which
# is distributed under the Mozilla Public Licence (MPL) v2.0 except for the files
# "tqdm/_tqdm.py", "setup.py", "README.rst", "MANIFEST.in" and ".gitignore".
# Details can be found here: https://github.com/tqdm/tqdm/blob/master/LICENCE.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with this program.  If not, see <http://www.gnu.org/licenses/>.

"""Output writer: Universal writer for all kinds of GeoMultiSens intermediate results."""

# from __future__ import (division, print_function, absolute_import)
# unicode literals cause writing errors
import collections
import datetime
import gzip
import os
from typing import TYPE_CHECKING
import pickle

import dill
import numpy as np
from osgeo import gdal_array
from spectral.io import envi
from spectral.io.envi import check_compatibility, check_new_filename, write_envi_header, _write_header_param

try:
    from osgeo import ogr
    from osgeo import osr
    from osgeo import gdal
    from osgeo import gdalnumeric
except ImportError:
    import ogr
    import osr
    import gdal
    import gdalnumeric
import builtins
from itertools import chain

from ..options.config import GMS_config as CFG
from ..misc import helper_functions as HLP_F
from ..misc.definition_dicts import \
    get_mask_classdefinition, get_mask_colormap, get_outFillZeroSaturated, dtype_lib_Python_IDL
from ..misc.logging import GMS_logger

if TYPE_CHECKING:
    from ..model.gms_object import GMS_object  # noqa F401  # flake8 issue

enviHdr_keyOrder = \
    ['ENVI', 'description', 'samples', 'lines', 'bands', 'header offset', 'file type', 'data type',
     'interleave', 'data ignore value', 'sensor type', 'byte order', 'file compression', 'version_gms_preprocessing',
     'versionalias_gms_preprocessing', 'reflectance scale factor', 'class lookup', 'classes', 'class names', 'map info',
     'coordinate system string', 'CS_TYPE', 'CS_EPSG', 'CS_DATUM', 'CS_UTM_ZONE', 'corner coordinates lonlat',
     'image_type', 'Satellite', 'Sensor', 'Subsystem', 'SceneID', 'EntityID', 'arr_pos', 'arr_shape', 'Metafile',
     'gResolution', 'AcqDate', 'AcqTime', 'wavelength', 'bandwidths', 'band names', 'LayerBandsAssignment',
     'data gain values', 'data offset values', 'reflectance gain values', 'reflectance offset values', 'ThermalConstK1',
     'ThermalConstK2', 'ProcLCode', 'PhysUnit', 'ScaleFactor', 'wavelength units', 'SunElevation', 'SunAzimuth',
     'SolIrradiance', 'EarthSunDist', 'ViewingAngle', 'IncidenceAngle', 'FieldOfView', 'scene length',
     'overpass duraction sec', 'Quality', 'Additional']


def silent_envi_write_image(hdr_file, data, header, **kwargs):
    """
    Monkeypatch for spectral.io.envi._write_image in order to silence the output stream.
    """
    from ..misc.helper_functions import safe_str
    # force unicode strings
    header = dict((k, v) if not isinstance(v, str) else (k, safe_str(v)) for k, v in header.items())

    check_compatibility(header)
    force = kwargs.get('force', False)
    img_ext = kwargs.get('ext', '.img')

    (hdr_file, img_file) = check_new_filename(hdr_file, img_ext, force)
    write_envi_header(hdr_file, header, is_library=False)

    bufsize = data.shape[0] * data.shape[1] * data.dtype.itemsize
    with builtins.open(img_file, 'wb', bufsize) as fout:
        fout.write(data.tobytes())


def write_ordered_envi_header(fileName, header_dict, is_library=False):
    """
    Monkeypatch for spectral.io.envi.write_envi_header in order to write ordered output headers
    """
    fout = builtins.open(fileName, 'w')
    if isinstance(header_dict, collections.OrderedDict):
        d = header_dict
    else:
        d = {}
        d.update(header_dict)

    if is_library:
        d['file type'] = 'ENVI Spectral Library'
    elif 'file type' not in d:
        d['file type'] = 'ENVI Standard'
    fout.write('ENVI\n')
    # Write the standard parameters at the top of the file
    std_params = ['description', 'samples', 'lines', 'bands', 'header offset',
                  'file type', 'data type', 'interleave', 'sensor type',
                  'byte order', 'reflectance scale factor', 'map info']
    for k in std_params:
        if k in d.keys():
            _write_header_param(fout, k, d[k])
    for k in d.keys():
        if k not in std_params:
            _write_header_param(fout, k, d[k])
    fout.close()


# monkey-patch the header writer function with a version that respects the item order in the meta dict
envi.write_envi_header = write_ordered_envi_header
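
# NOTE: spectral.io.envi._write_image could be patched in the same way
# (envi._write_image = silent_envi_write_image) in order to silence the console output
# of the image writer; whether that patch is applied is left to the importing code.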


def write_ENVI_compressed(outPath_hdr, ndarray, meta, interleave='bsq'):
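    """Write an array as gzip-compressed flat binary file including an ENVI header (*.hdr).

    :param outPath_hdr:     <str> output path of the ENVI header file (*.hdr)
    :param ndarray:         <numpy array> 2D (rows, cols) or 3D (rows, cols, bands) array to be written
    :param meta:            <dict> metadata to be written to the ENVI header
    :param interleave:      <str> 'bsq', 'bil' or 'bip'
    :return:                1 if the written binary file is GDAL-readable, otherwise 0
    """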
    assert interleave in ['bsq', 'bil', 'bip']
    if len(ndarray.shape) == 3:  # 3D
        if interleave == 'bsq':
            arr2write = np.rollaxis(ndarray, 2)  # => bands, rows, cols
        elif interleave == 'bil':
            arr2write = np.swapaxes(ndarray, 1, 2)  # => rows, bands, cols
        else:  # 'bip'
            arr2write = ndarray  # => rows, cols, bands
    else:  # 2D
        arr2write = ndarray

    # write the binary file
    outpathBinary = '%s.%s' % (os.path.splitext(outPath_hdr)[0], interleave)
    with gzip.open(outpathBinary, 'wb', compresslevel=1) as f:  # compresslevel can be increased up to 9
        f.write(arr2write.tobytes())

    # write the header
    meta['file compression'] = 1
    write_ordered_envi_header(outPath_hdr, meta)

    # check if the output is GDAL-readable
    return 0 if gdalnumeric.LoadFile(outpathBinary, 0, 0, 1, 1) is None else 1
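
# Example usage of write_ENVI_compressed (an illustrative sketch; array, metadata and
# output path are hypothetical and not part of the original module):
#
#     data = np.random.randint(0, 100, (50, 50, 3)).astype(np.int16)
#     meta = {'description': 'compressed test cube'}
#     success = write_ENVI_compressed('/tmp/test_cube.hdr', data, meta, interleave='bsq')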


def HDR_writer(meta_dic, outpath_hdr, logger=None):
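    """Write the given metadata dictionary to an ENVI header file and reorder its keys afterwards.

    :param meta_dic:        <dict> metadata to be written
    :param outpath_hdr:     <str> output path of the ENVI header file (*.hdr)
    :param logger:          <logging.Logger> optional logger instance
    """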
    if logger is not None:
        logger.info('Writing %s header ...' % os.path.basename(outpath_hdr))
    envi.write_envi_header(outpath_hdr, meta_dic)
    reorder_ENVI_header(outpath_hdr, enviHdr_keyOrder)


def Tiles_Writer(tileList_or_Array, out_path, out_shape, out_dtype, out_interleave, out_meta=None,
                 arr_pos=None, overwrite=True):
    """Write tiles to disk using numpy.memmap.

    :param tileList_or_Array:   <list of dicts> each dict has keys 'row_start','row_end','col_start','col_end','data'
                                <numpy array> representing a subset of a full array. requires arr_pos
    :param out_path:            <str> path to ENVI header file *.hdr
    :param out_shape:           <tuple or list> (rows,cols,bands)
    :param out_dtype:           <object> numpy data type object
    :param out_interleave:      <str> 'bsq','bil' or 'bip'
    :param out_meta:            <dict> metadata dictionary to be written to header
    :param arr_pos:             <tuple> ((row_start,row_end),(col_start,col_end))
    :param overwrite:           <bool> whether to overwrite the output binary file if it already exists
    """

    assert isinstance(tileList_or_Array, (list, np.ndarray))
    if isinstance(tileList_or_Array, np.ndarray):
        assert arr_pos and isinstance(arr_pos, (list, tuple))

    oP_hdr = out_path
    oP_ext = os.path.splitext(oP_hdr)[0] + '.%s' % out_interleave
    rows, cols, bands = out_shape

    # write binary file
    if not os.path.exists(oP_ext):
        open(oP_ext, 'a').close()  # create empty binary file
    elif overwrite:
        open(oP_ext, 'w').close()  # overwrite binary file with empty one

    if out_interleave == 'bsq':
        memmap = np.memmap(oP_ext, dtype=out_dtype, mode='r+', offset=0, shape=(bands, rows, cols))
        memmap = np.swapaxes(np.swapaxes(memmap, 0, 2), 0, 1)  # rows,cols,bands
    elif out_interleave == 'bil':
        memmap = np.memmap(oP_ext, dtype=out_dtype, mode='r+', offset=0, shape=(rows, bands, cols))
        memmap = np.swapaxes(memmap, 1, 2)
    else:  # bip
        memmap = np.memmap(oP_ext, dtype=out_dtype, mode='r+', offset=0, shape=(rows, cols, bands))

    if isinstance(tileList_or_Array, list):
        tileList_or_Array = [dict(i) for i in tileList_or_Array]
        is_3D = tileList_or_Array[0]['data'].ndim == 3
        for tile in tileList_or_Array:
            data = tile['data'] if is_3D else tile['data'][:, :, None]
            memmap[tile['row_start']:tile['row_end'] + 1, tile['col_start']:tile['col_end'] + 1, :] = data
    else:
        (rS, rE), (cS, cE) = arr_pos
        is_3D = tileList_or_Array.ndim == 3
        data = tileList_or_Array if is_3D else tileList_or_Array[:, :, None]
        memmap[rS:rE + 1, cS:cE + 1, :] = data

    # write header
    std_meta = {'lines': rows, 'samples': cols, 'bands': bands, 'header offset': 0, 'byte order': 0,
                'interleave': out_interleave, 'data type': dtype_lib_Python_IDL[out_dtype]}
    out_meta = out_meta if out_meta else {}
    out_meta.update(std_meta)
    from ..model.metadata import metaDict_to_metaODict
    out_meta = metaDict_to_metaODict(out_meta)

    if not os.path.exists(oP_hdr) or overwrite:
        write_envi_header(oP_hdr, out_meta)
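
# Example usage of Tiles_Writer with a single array tile (an illustrative sketch; shapes,
# dtype and output path are hypothetical and not part of the original module):
#
#     full_shape = (1000, 1000, 4)
#     tile = np.zeros((100, 100, 4), dtype=np.int16)
#     Tiles_Writer(tile, '/tmp/out.hdr', full_shape, np.int16, 'bsq',
#                  arr_pos=((0, 99), (0, 99)))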


def reorder_ENVI_header(path_hdr, tgt_keyOrder):
    # type: (str,list) -> None
    """Reorders the keys of an ENVI header file according to the implemented order. Keys given in the target order list
    but missing the ENVI fileare skipped.
    This function is a workaround for envi.write_envi_header of Spectral Python Library that always writes the given
    metadata dictinary as an unordered dict.
237

238
239
240
    :param path_hdr:       <str> path of the target ENVI file
    :param tgt_keyOrder:    <list> list of target keys in the correct order
    """
    with open(path_hdr, 'r') as inF:
        items = inF.read().split('\n')
    HLP_F.silentremove(path_hdr)

    with open(path_hdr, 'w') as outFile:
        for paramName in tgt_keyOrder:
            for item in items:
                if item.startswith(paramName) or item.startswith(paramName.lower()):
                    outFile.write(item + '\n')
                    items.remove(item)
                    break  # stop after the first match; removing while iterating would otherwise skip lines

        # write the remaining header items
        for item in items:
            outFile.write(item + '\n')


def mask_to_ENVI_Classification(InObj, maskname):
    # type: (GMS_object, str) -> (np.ndarray, dict)
    cd = get_mask_classdefinition(maskname, InObj.satellite)
    if cd is None:
        InObj.logger.warning("No class definition available for mask '%s'." % maskname)
    else:
        temp = np.empty_like(getattr(InObj, maskname))
        temp[:] = getattr(InObj, maskname)
        # deep copy: converts the view to its own array in order to avoid overwriting InObj.<maskname>
        classif_array = temp
        rows, cols = classif_array.shape[:2]
        bands = 1 if classif_array.ndim == 2 else classif_array.shape[2]

        mapI, CSS = InObj.MetaObj.map_info, InObj.MetaObj.projection
        mask_md = {'file type': 'ENVI Classification', 'map info': mapI, 'coordinate system string': CSS, 'lines': rows,
                   'samples': cols, 'bands': bands, 'header offset': 0, 'byte order': 0, 'interleave': 'bsq',
                   'data type': 1, 'data ignore value': get_outFillZeroSaturated(classif_array.dtype)[0]}

        pixelVals_in_mask = list(np.unique(classif_array))
        fillVal = get_outFillZeroSaturated(classif_array.dtype)[0]
        pixelVals_expected = sorted(list(cd.values()) + ([fillVal] if fillVal is not None else []))

        pixelVals_unexpected = [i for i in pixelVals_in_mask if i not in pixelVals_expected]
        if pixelVals_unexpected:
            InObj.logger.warning("The mask '%s' contains unexpected pixel values: %s"
                                 % (maskname, ', '.join(str(i) for i in pixelVals_unexpected)))
        mask_md['classes'] = len(pixelVals_in_mask) + 1  # 1 class for the no data pixels

        class_names = ['No data' if i == fillVal else
                       'Unknown Class' if i in pixelVals_unexpected else
                       list(cd.keys())[list(cd.values()).index(i)] for i in pixelVals_in_mask]

        clr_mp_allClasses = get_mask_colormap(maskname)
        class_colors = collections.OrderedDict(zip(class_names, [clr_mp_allClasses[cN] for cN in class_names]))

        # pixel values for object classes must be numbered like 0,1,2,3,4,...
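        # (e.g., a mask with fill value -9999 and a class definition {'Clear': 10, 'Cloud': 20}
        # - illustrative values only - becomes 0 ('No data'), 1 ('Clear'), 2 ('Cloud'))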
        if fillVal is not None:
            classif_array[classif_array == fillVal] = 0
        if fillVal in pixelVals_in_mask:
            pixelVals_in_mask.remove(fillVal)
            remaining_pixelVals = range(1, len(pixelVals_in_mask) + 1)
        else:
            remaining_pixelVals = range(len(pixelVals_in_mask))
            del mask_md['data ignore value']
        for in_val, out_val in zip(pixelVals_in_mask, remaining_pixelVals):
            classif_array[classif_array == in_val] = out_val

        classif_array = classif_array.astype(np.uint8)  # contains only 0,1,2,3,4,...
        classif_meta = add_ENVIclassificationMeta_to_meta(mask_md, class_names, class_colors, classif_array)
        return classif_array, classif_meta


def add_ENVIclassificationMeta_to_meta(meta, class_names, class_colors=None, data=None):
    # type: (dict,list,dict,np.ndarray) -> dict
    """Prepare ENVI metadata dict. to be written as ENVI classification file by adding custom class names and colors.

    :param meta:            <dict> ENVI metadata dictionary
    :param class_names:     <list> of strings with the class names
    :param class_colors:    <dict> with keys representing class names and values representing 3-tuples with RGB codes
    :param data:            <numpy array> only used to estimate number of classes if class_names is None"""

    from spectral import spy_colors
    if class_names is None:
        assert data is not None, "'data' must be given if 'class_names' is None."
    meta['file type'] = "ENVI Classification"
    n_classes = len(class_names) if class_names else int(np.max(data) + 1)
    meta['classes'] = str(n_classes)
    meta['class names'] = class_names if class_names else \
        (['Unclassified'] + ['Class %s' % i for i in range(1, n_classes)])
    colors = list(chain.from_iterable([class_colors[class_name] for class_name in class_names])) \
        if class_colors else []
    meta['class lookup'] = colors if len(colors) == n_classes * 3 else \
        [list(spy_colors[i % len(spy_colors)]) for i in range(n_classes)]
    return meta
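
# Example usage of add_ENVIclassificationMeta_to_meta (an illustrative sketch; class
# names and colors are hypothetical):
#
#     meta = {'lines': 100, 'samples': 100, 'bands': 1}
#     meta = add_ENVIclassificationMeta_to_meta(meta, ['No data', 'Water'],
#                                               class_colors={'No data': (0, 0, 0),
#                                                             'Water': (0, 0, 255)})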


def check_header_not_empty(hdr):
    with open(hdr, 'r') as inF:
        content = inF.read()
    return bool(content)


def set_output_nodataVal(arr_path, nodataVal=None):
    """Set the no-data value of an already written raster file.

    :param arr_path:    <str> path of the raster file
    :param nodataVal:   no-data value to be set; if None, it is derived from the data type of the file
                        via get_outFillZeroSaturated()
    """
    ds = gdal.Open(arr_path, gdal.GA_Update)  # update mode is required to persist the no-data value

    if nodataVal is None:
        dtype = gdal_array.GDALTypeCodeToNumericTypeCode(ds.GetRasterBand(1).DataType)
        nodataVal = get_outFillZeroSaturated(dtype)[0]

    for bandIdx in range(ds.RasterCount):
        band = ds.GetRasterBand(bandIdx + 1)
        band.SetNoDataValue(nodataVal)
        band.FlushCache()
        del band
    del ds
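
# Example usage of set_output_nodataVal (an illustrative sketch; the path is hypothetical;
# with nodataVal=None the value is derived from the data type of the file):
#
#     set_output_nodataVal('/tmp/test_cube.bsq', nodataVal=-9999)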


def add_attributes_to_ENVIhdr(attr2add_dict, hdr_path):
    Spyfileheader = envi.open(hdr_path)
    attr_dict = Spyfileheader.metadata
    attr_dict.update(attr2add_dict)
    HDR_writer(attr_dict, hdr_path)


def export_VZA_SZA_SAA_RAA_stats(L1A_object):
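    """Export mean and standard deviation of the VZA, SZA, SAA and RAA arrays of the given L1A object.

    The statistics are written to a *.dill file and a human-readable *.txt file within the
    processed-data directory of the L1A object. The no-data value -9999 is excluded.
    """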
    outdict = collections.OrderedDict()
    for i in ['VZA', 'SZA', 'SAA', 'RAA']:
        if hasattr(L1A_object, i + '_arr'):
            arr = getattr(L1A_object, i + '_arr')
            outdict[i + '_mean'] = np.mean(arr[arr != -9999])
            outdict[i + '_std'] = np.std(arr[arr != -9999])
    with open(os.path.join(L1A_object.path_procdata, L1A_object.baseN + '_stats__VZA_SZA_SAA_RAA.dill'), 'wb') as outF:
        dill.dump(outdict, outF)
    with open(os.path.join(L1A_object.path_procdata, L1A_object.baseN + '_stats__VZA_SZA_SAA_RAA.txt'), 'w') as outF:
        for k, v in outdict.items():
            outF.write(k + ' = ' + str(v) + '\n')


def write_global_benchmark_output(list__processing_time__all_runs, list__IO_time__all_runs, data_list):
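    """Append a tab-separated benchmark record (number of CPUs, number of processed datasets, processed sensors,
    total runtimes, I/O times and mean runtime per dataset) to the benchmark file of the current job.
    """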
    dict_sensorcodes2write = {}
    for i in data_list:
        sensorcode = '%s_%s_%s' % (i['satellite'], i['sensor'], i['subsystem']) if i['subsystem'] is not None \
            else '%s_%s' % (i['satellite'], i['sensor'])
        if sensorcode not in dict_sensorcodes2write:
            dict_sensorcodes2write[sensorcode] = 1
        else:
            dict_sensorcodes2write[sensorcode] += 1
    str2write_sensors = ', '.join(['%s x %s' % (v, k) for k, v in dict_sensorcodes2write.items()])
    count_processed_data = len(data_list)
    str2write_totaltimes = '\t'.join([str(i) for i in list__processing_time__all_runs])
    str2write_IOtimes = '\t'.join([str(i) for i in list__IO_time__all_runs])
    list_mean_time_per_ds = [str(processing_time / count_processed_data) for processing_time in
                             list__processing_time__all_runs]
    str2write_times_per_ds = '\t'.join(list_mean_time_per_ds)
    str2write = '\t'.join([str(CFG.CPUs), str(count_processed_data), str2write_sensors, str2write_totaltimes,
                           str2write_IOtimes, str2write_times_per_ds, '\n'])
    if not os.path.isdir(CFG.path_benchmarks):
        os.makedirs(CFG.path_benchmarks)
    with open(os.path.join(CFG.path_benchmarks, CFG.ID), 'a') as outF:
        outF.write(str2write)


def write_shp_OLD(shapely_poly, path_out, prj=None):
    assert os.path.exists(os.path.dirname(path_out)), 'Directory %s does not exist.' % os.path.dirname(path_out)

    print('Writing %s ...' % path_out)
    HLP_F.silentremove(path_out)
    ds = ogr.GetDriverByName("Esri Shapefile").CreateDataSource(path_out)
    if prj is not None:
        srs = osr.SpatialReference()
        srs.ImportFromWkt(prj)
        layer = ds.CreateLayer('', srs, ogr.wkbPolygon)
    else:
        layer = ds.CreateLayer('', None, ogr.wkbPolygon)
    layer.CreateField(ogr.FieldDefn('id', ogr.OFTInteger))  # Add one attribute
    defn = layer.GetLayerDefn()

    # Create a new feature (attribute and geometry)
    feat = ogr.Feature(defn)
    feat.SetField('id', 123)

    # Make a geometry, from Shapely object
    geom = ogr.CreateGeometryFromWkb(shapely_poly.wkb)
    feat.SetGeometry(geom)
    layer.CreateFeature(feat)

    # Save and close everything
    del ds, layer, feat, geom


def get_dtypeStr(val):
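    """Return a string denoting the data type of the given value, e.g. 'int', 'float', 'str', 'complex', 'date' or
    a numpy dtype name."""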
    is_numpy = 'numpy' in str(type(val))
    DType = \
        str(np.dtype(val)) if is_numpy else \
        'int' if isinstance(val, int) else \
        'float' if isinstance(val, float) else \
        'str' if isinstance(val, str) else \
        'complex' if isinstance(val, complex) else \
        'date' if isinstance(val, datetime.datetime) else None
    assert DType, 'data type not understood'
    return DType


def write_shp(path_out, shapely_geom, prj=None, attrDict=None):
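    """Write one or multiple shapely geometries to an ESRI shapefile.

    :param path_out:        <str> output path of the shapefile
    :param shapely_geom:    <shapely geometry or list> geometries to write (must all share the same geometry type)
    :param prj:             <str> projection as WKT string
    :param attrDict:        <dict or list of dicts> attribute names and values per geometry
                            (field names must not exceed 10 characters)
    """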
    shapely_geom = [shapely_geom] if not isinstance(shapely_geom, list) else shapely_geom
    attrDict = [attrDict] if not isinstance(attrDict, list) else attrDict
    assert len(shapely_geom) == len(attrDict), "'shapely_geom' and 'attrDict' must have the same length."
    assert os.path.exists(os.path.dirname(path_out)), 'Directory %s does not exist.' % os.path.dirname(path_out)

    print('Writing %s ...' % path_out)
    if os.path.exists(path_out):
        os.remove(path_out)
    ds = ogr.GetDriverByName("Esri Shapefile").CreateDataSource(path_out)

    if prj is not None:
        srs = osr.SpatialReference()
        srs.ImportFromWkt(prj)
    else:
        srs = None

    geom_type = list(set([gm.type for gm in shapely_geom]))
    assert len(geom_type) == 1, 'All shapely geometries must belong to the same type. Got %s.' % geom_type

    layer = \
        ds.CreateLayer('', srs, ogr.wkbPoint) if geom_type[0] == 'Point' else \
        ds.CreateLayer('', srs, ogr.wkbLineString) if geom_type[0] == 'LineString' else \
        ds.CreateLayer('', srs, ogr.wkbPolygon) if geom_type[0] == 'Polygon' else \
        None  # FIXME

    if isinstance(attrDict[0], dict):
        for attr in attrDict[0].keys():
            assert len(attr) <= 10, \
                "ogr does not support field names longer than 10 characters. '%s' is too long." % attr
            DTypeStr = get_dtypeStr(attrDict[0][attr])
            FieldType = \
                ogr.OFTInteger if DTypeStr.startswith('int') else \
                ogr.OFTReal if DTypeStr.startswith('float') else \
                ogr.OFTString if DTypeStr.startswith('str') else \
                ogr.OFTDateTime if DTypeStr.startswith('date') else None
            FieldDefn = ogr.FieldDefn(attr, FieldType)
            if DTypeStr.startswith('float'):
                FieldDefn.SetPrecision(6)
            layer.CreateField(FieldDefn)  # Add one attribute

    for i in range(len(shapely_geom)):
        # Create a new feature (attribute and geometry)
        feat = ogr.Feature(layer.GetLayerDefn())
        feat.SetGeometry(ogr.CreateGeometryFromWkb(shapely_geom[i].wkb))  # Make a geometry, from Shapely object

        list_attr2set = attrDict[0].keys() if isinstance(attrDict[0], dict) else []

        for attr in list_attr2set:
            val = attrDict[i][attr]
            DTypeStr = get_dtypeStr(val)
            val = int(val) if DTypeStr.startswith('int') else float(val) if DTypeStr.startswith('float') else \
                str(val) if DTypeStr.startswith('str') else val
            feat.SetField(attr, val)

        layer.CreateFeature(feat)
        feat.Destroy()

    # Save and close everything
    del ds, layer
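
# Example usage of write_shp (an illustrative sketch; geometry, output path and
# attributes are hypothetical):
#
#     from shapely.geometry import Polygon
#     poly = Polygon([(0, 0), (1, 0), (1, 1), (0, 1)])
#     write_shp('/tmp/test_poly.shp', poly, attrDict=dict(id=1))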


def dump_all_SRFs(outpath_dump=os.path.abspath(os.path.join(os.path.dirname(__file__), '../sandbox/out/SRF_DB.pkl')),
                  outpath_log=os.path.abspath(os.path.join(os.path.dirname(__file__), '../sandbox/out/SRF_DB.log'))):
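    """Read the spectral response functions (SRFs) of all supported sensors and dump them to a single pickle file.

    :param outpath_dump:    <str> output path of the pickled SRF dictionary
    :param outpath_log:     <str> output path of the log file
    """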
    from .input_reader import SRF_Reader
    out_dict = {}
    logger = GMS_logger('log__SRF_DB', path_logfile=outpath_log, append=True)
    for sensorcode, out_sensorcode in zip(
        ['AST_V1', 'AST_V2', 'AST_S', 'AST_T', 'TM5', 'TM7', 'LDCM', 'RE5', 'S1', 'S4', 'S5'],
        ['ASTER_VNIR1', 'ASTER_VNIR2', 'ASTER_SWIR', 'ASTER_TIR', 'LANDSAT_TM5', 'LANDSAT_TM7', 'LANDSAT_LDCM',
         'RapidEye_5', 'Spot_1', 'Spot_4', 'Spot_5']):

        out_dict[out_sensorcode] = SRF_Reader(sensorcode, logger)

    with open(outpath_dump, 'wb') as outFile:
        pickle.dump(out_dict, outFile)

    print('Saved SRF dictionary to %s' % outpath_dump)

    with open(outpath_dump, 'rb') as inFile:
        readFile = pickle.load(inFile)

    print(readFile == out_dict)

    for i in readFile.items():
        print(i)