Commit 4de2c5c8 authored by Daniel Scheffler's avatar Daniel Scheffler
Browse files

Merge branch 'bugfix/adapt_to_newer_env' into 'master'

Bugfix/adapt to newer env

See merge request !10
parents d48b5e78 2522b6c2
Pipeline #12658 passed with stages
in 13 minutes and 34 seconds
......@@ -32,6 +32,10 @@ test_gms_preprocessing:
# - cd ../../
# make tests
- pip install -U sicor
- python -c 'from sicor.tables import get_tables; get_tables(sensor="s2"); get_tables(sensor="l8"); get_tables(sensor="enmap", optional_downloads=("ch4",))'
- pip install 'scipy==1.1.0' # TODO remove as soon as pinning is included in SICOR requirements or SICOR issue #63 is fixed
- pip install "geoarray>=0.8.33" # TODO remove as soon as docker container is rebuilt
# run tests
- make nosetests
......
......@@ -2,6 +2,26 @@
History
=======
0.18.4 (coming soon)
--------------------
* Replaced calls of sorted_nicely with new dependency 'natsort'. Fixed circular imports.
* Fixed AssertionError caused by new version of pyproj.
* Fixed UnicodeEncodeError while writing ENVI headers.
* Moved scipy imports from module level to function level to avoid 'static TLS' ImportError.
* Fixed gdal_warp call within DEM_Creator class.
* Fixed issues while overwriting projections.
* Fixed logging issue.
* Pinned Python version to 3.7+.
* Fixed DeprecationWarnings.
* Replaced GMS_object._numba_array_merger() with GMS_object._merge_arrays which does not use numba (and is much faster).
* Removed numba dependency.
* Recreated test dataset for Test_Landsat8_PreCollectionData.
* Replaced two os.system calls with subcall_with_out.
* Created a new job for Test_Landsat8_PreCollectionData.
* Updated minimal version of geoarray.
0.18.3 (2020-04-06)
-------------------
......
......@@ -209,7 +209,7 @@ The development of the gms_preprocessing package was funded by the German Federa
The package was created with Cookiecutter_ and the `audreyr/cookiecutter-pypackage`_ project template.
Landsat-5/7/8 satellite data and SRTM/ASTER digital elevation models have been have been provided by the US Geological
Landsat-5/7/8 satellite data and SRTM/ASTER digital elevation models have been provided by the US Geological
Survey. Sentinel-2 data have been provided by ESA.
.. _Cookiecutter: https://github.com/audreyr/cookiecutter
......
......@@ -32,7 +32,7 @@ import os
import matplotlib
matplotlib.use('Agg', warn=False) # switch matplotlib backend to 'Agg' and disable warning in case its already 'Agg'
matplotlib.use('Agg') # switch matplotlib backend to 'Agg' and disable warning in case its already 'Agg'
from gms_preprocessing import ProcessController, __version__ # noqa: E402
from gms_preprocessing.misc.database_tools import GMS_JOB # noqa: E402
......
......@@ -32,6 +32,7 @@ import warnings
import gdal
import gdalnumeric
import numpy as np
from natsort import natsorted
try:
from pyhdf import SD
......@@ -46,8 +47,6 @@ from py_tools_ds.geo.projection import EPSG2WKT, isProjectedOrGeographic
from ..options.config import GMS_config as CFG
from . import geoprocessing as GEOP
from ..io import output_writer as OUT_W
from ..misc import helper_functions as HLP_F
from ..misc.definition_dicts import get_outFillZeroSaturated, is_dataset_provided_as_fullScene
from ..misc.locks import IOLock
from ..model.gms_object import GMS_object
......@@ -115,6 +114,7 @@ class L1A_object(GMS_object):
self.shape_fullArr = self.arr.shape
def archive_to_rasObj(self, path_archive, path_output=None, subset=None):
from ..misc.helper_functions import convert_absPathArchive_to_GDALvsiPath
assert subset is None or isinstance(subset, list) and len(subset) == 2, \
"Subset argument has be a list with 2 elements."
......@@ -124,7 +124,7 @@ class L1A_object(GMS_object):
"subsetting." % subset[0]
self.logger.info('Reading %s %s %s image data...' % (self.satellite, self.sensor, self.subsystem))
gdal_path_archive = HLP_F.convert_absPathArchive_to_GDALvsiPath(path_archive)
gdal_path_archive = convert_absPathArchive_to_GDALvsiPath(path_archive)
project_dir = os.path.abspath(os.curdir)
os.chdir(os.path.dirname(path_archive))
files_in_archive = gdal.ReadDirRecursive(gdal_path_archive) # needs ~12sek for Landsat-8
......@@ -142,7 +142,7 @@ class L1A_object(GMS_object):
re.search(r'Sentinel-2', self.satellite)
n_files2search = len(full_LayerBandsAssignment) if is_ALOS_Landsat_S2 else 1
for File in HLP_F.sorted_nicely(files_in_archive):
for File in natsorted(files_in_archive):
search_res = \
re.search(r"IMG-0[0-9]-[\s\S]*", File) if re.search(r'ALOS', self.satellite) else \
re.search(r"[\S]*_B[1-9][0-9]?[\S]*.TIF", File) if re.search(r'Landsat', self.satellite) else \
......@@ -182,7 +182,7 @@ class L1A_object(GMS_object):
# NOTE: image_files is a SORTED list of image filenames; self.LayerBandsAssignment may be sorted by CWL
filtered_files = []
for bN in self.LayerBandsAssignment: # unsorted, e.g., ['1', '2', '3', '4', '5', '9', '6', '7']
for fN, b in zip(image_files, HLP_F.sorted_nicely(full_LayerBandsAssignment)): # both sorted nicely
for fN, b in zip(image_files, natsorted(full_LayerBandsAssignment)): # both sorted nicely
if b == bN:
filtered_files.append(fN)
......@@ -492,6 +492,8 @@ class L1A_object(GMS_object):
Method is skipped if self.GeoAlign_ok and self.GeoTransProj_ok evaluate to 'True'. All attributes connected
with the georeference of self.arr are automatically updated."""
from ..io.output_writer import add_attributes_to_ENVIhdr
if False in [self.GeoAlign_ok, self.GeoTransProj_ok]:
previous_dataname = self.MetaObj.Dataname
if hasattr(self, 'arr') and isinstance(self.arr, (GeoArray, np.ndarray)) and \
......@@ -516,7 +518,7 @@ class L1A_object(GMS_object):
gResolution=self.MetaObj.gResolution,
shape_fullArr=self.shape_fullArr)
self.add_rasterInfo_to_MetaObj(rasObj)
OUT_W.add_attributes_to_ENVIhdr(
add_attributes_to_ENVIhdr(
{'map info': self.MetaObj.map_info, 'coordinate system string': self.MetaObj.projection},
os.path.splitext(self.MetaObj.Dataname)[0] + '.hdr')
self.arr = self.MetaObj.Dataname
......@@ -664,7 +666,9 @@ class L1A_object(GMS_object):
self.MetaObj.bands = 1 if len(self.arr.shape) == 2 else self.arr.shape[2]
self.arr.gt = mapinfo2geotransform(self.MetaObj.map_info)
self.arr.prj = self.MetaObj.projection
if not self.arr.prj:
self.arr.prj = self.MetaObj.projection
# must be set here because nodata mask has been computed from self.arr without geoinfos:
self.mask_nodata.gt = self.arr.gt
self.mask_nodata.prj = self.arr.prj
......
......@@ -739,8 +739,9 @@ class L1B_object(L1A_object):
# NOTE: mask_nodata and mask_clouds are updated later by L2A_map mapper function (module pipeline)
# update the GeoArray instance without losing its inherent metadata (nodata, ...)
geoArr.arr, geoArr.gt, geoArr.prj = \
DS.GeoArray_shifted.arr, DS.GeoArray_shifted.gt, DS.GeoArray_shifted.prj
geoArr.arr, geoArr.gt = DS.GeoArray_shifted.arr, DS.GeoArray_shifted.gt
if not geoArr.prj:
geoArr.prj = DS.GeoArray_shifted.prj
# setattr(self,attrname, DS.GeoArray_shifted) # NOTE: don't set array earlier because setter will also
# # update arr.gt/.prj/.nodata from MetaObj
......
......@@ -995,7 +995,7 @@ class AtmCorr(object):
##################################################################################
oF_refl, oZ_refl, oS_refl = get_outFillZeroSaturated(inObj.arr.dtype)
surf_refl = np.dstack((self.results.data_ac[bandN] for bandN in ac_bandNs))
surf_refl = np.dstack([self.results.data_ac[bandN] for bandN in ac_bandNs])
surf_refl *= CFG.scale_factor_BOARef # scale using scale factor (output is float16)
# set AC nodata values to GMS outFill
......@@ -1044,7 +1044,7 @@ class AtmCorr(object):
out_nodata_val = get_outFillZeroSaturated(out_dtype)[0]
# generate raw ac_errors array
ac_errors = np.dstack((self.results.data_errors[bandN] for bandN in ac_bandNs))
ac_errors = np.dstack([self.results.data_errors[bandN] for bandN in ac_bandNs])
# apply scale factor from config to data pixels and overwrite nodata area with nodata value
ac_errors *= CFG.ac_scale_factor_errors # scale using scale factor (output is float16)
......
......@@ -29,7 +29,6 @@ Level 2B Processor: Spectral homogenization
"""
import numpy as np
from scipy.interpolate import interp1d
from typing import Union # noqa F401 # flake8 issue
from geoarray import GeoArray # noqa F401 # flake8 issue
from spechomo.prediction import SpectralHomogenizer
......@@ -164,6 +163,8 @@ class L2B_object(L2A_object):
#########################################################################################
if self.ac_errors and self.ac_errors.ndim == 3:
from scipy.interpolate import interp1d
self.logger.info("Performing linear interpolation for 'AC errors' array to match target sensor bands "
"number..")
outarr = interp1d(np.array(src_cwls), self.ac_errors,
......
......@@ -42,7 +42,6 @@ import warnings
import numpy as np
from matplotlib import dates as mdates
from scipy.interpolate import RegularGridInterpolator
# custom
try:
......@@ -673,6 +672,8 @@ class GEOPROCESSING(object):
self.inDs = stack_in_mem
else: # CFG.inmem_serialization is False
from ..misc.helper_functions import subcall_with_output
self.logger.info('Adding the following bands to Layerstack:')
[self.logger.info(os.path.basename(i)) for i in layers_pathlist]
......@@ -686,7 +687,12 @@ class GEOPROCESSING(object):
str_layers_pathlist = ' '.join(layers_pathlist)
if self.subset is None:
os.system("gdal_merge.py -q -o %s -of ENVI -seperate %s" % (path_output, str_layers_pathlist))
cmd = "gdal_merge.py -q -o %s -of ENVI -seperate %s" % (path_output, str_layers_pathlist)
output, exitcode, err = subcall_with_output(cmd)
if exitcode:
raise RuntimeError(err)
if output:
return output.decode('UTF-8')
# FIXME this changes the format of the projection (maybe a GDAL bug?)
# FIXME normalize by EPSG2WKT(WKT2EPSG(WKT))
else:
......@@ -699,8 +705,13 @@ class GEOPROCESSING(object):
LR_Ygeo = GT[3] + self.colEnd * GT[4] + self.rowEnd * GT[5]
ullr = '%s %s %s %s' % (UL_Xgeo, UL_Ygeo, LR_Xgeo, LR_Ygeo)
os.system(
"gdal_merge.py -q -o %s -ul_lr %s -of ENVI -seperate %s" % (path_output, ullr, str_layers_pathlist))
cmd = "gdal_merge.py -q -o %s -ul_lr %s -of ENVI -seperate %s" \
% (path_output, ullr, str_layers_pathlist)
output, exitcode, err = subcall_with_output(cmd)
if exitcode:
raise RuntimeError(err)
if output:
return output.decode('UTF-8')
if [GT, PR] == [(0.0, 1.0, 0.0, 0.0, 0.0, 1.0), '']:
# delete output map info in case of arbitrary coordinate system
......@@ -894,6 +905,8 @@ def get_common_extent(list_extentcoords, alg='outer', return_box=True):
def zoom_2Darray_to_shapeFullArr(arr2D, shapeFullArr, meshwidth=1, subset=None, method='linear'):
from scipy.interpolate import RegularGridInterpolator
assert method in ['linear', 'nearest']
rS, rE, cS, cE = list(get_subsetProps_from_subsetArg(shapeFullArr, subset).values())[3:7]
rowpos = np.linspace(0, shapeFullArr[0] - 1, arr2D.shape[0])
......@@ -919,7 +932,7 @@ def adjust_acquisArrProv_to_shapeFullArr(arrProv, shapeFullArr, meshwidth=1, sub
outDict = {k: zoom_2Darray_to_shapeFullArr(arr, shapeFullArr, meshwidth, subset) for k, arr in arrProv.items()}
return outDict
else:
arr2interp = np.mean(np.dstack(arrProv.values()), axis=2)
arr2interp = np.mean(np.dstack(list(arrProv.values())), axis=2)
interpolated = zoom_2Darray_to_shapeFullArr(arr2interp, shapeFullArr, meshwidth, subset).astype(np.float32)
return interpolated
......
......@@ -41,9 +41,9 @@ import logging
import dill
import numpy as np
import scipy.interpolate
import spectral
from spectral.io import envi as envi
from pyproj import CRS
from geoarray import GeoArray
from py_tools_ds.geo.coord_calc import corner_coord_to_minmax
......@@ -258,12 +258,14 @@ def Solar_Irradiance_reader(resol_nm=None, wvl_min_nm=None, wvl_max_nm=None):
:param wvl_max_nm: maximum wavelength of returned irradiances [nanometers]
:return:
"""
from scipy.interpolate import interp1d
sol_irr = np.loadtxt(CFG.path_solar_irr, skiprows=1)
if resol_nm is not None and isinstance(resol_nm, (float, int)):
wvl_min = (np.min(sol_irr[:, 0]) if wvl_min_nm is None else wvl_min_nm)
wvl_max = (np.max(sol_irr[:, 0]) if wvl_max_nm is None else wvl_max_nm)
wvl_rsp = np.arange(wvl_min, wvl_max, resol_nm)
sol_irr = scipy.interpolate.interp1d(sol_irr[:, 0], sol_irr[:, 1], kind='linear')(wvl_rsp)
sol_irr = interp1d(sol_irr[:, 0], sol_irr[:, 1], kind='linear')(wvl_rsp)
return sol_irr
......@@ -482,8 +484,8 @@ class DEM_Creator(object):
merged_prj = GeoArray(tFm.name).prj
t_xmin, t_xmax, t_ymin, t_ymax = corner_coord_to_minmax(cornerCoords_tgt)
self._run_cmd('gdalwarp -r average -of VRT -srcnodata 0 -dstnodata 0 '
'-tr %s %s -s_srs %s -t_srs %s -te %s %s %s %s %s %s'
% (tgt_xgsd, tgt_ygsd, merged_prj, prj,
'-tr %s %s -s_srs EPSG:%s -t_srs EPSG:%s -te %s %s %s %s %s %s'
% (tgt_xgsd, tgt_ygsd, CRS(merged_prj).to_epsg(), CRS(prj).to_epsg(),
t_xmin, t_ymin, t_xmax, t_ymax, tFm.name, tFo.name))
assert os.path.exists(tFo.name)
......
......@@ -80,6 +80,10 @@ def silent_envi_write_image(hdr_file, data, header, **kwargs):
"""
Monkeypatch for spectral.io.envi._write_image in order to silence output stream.
"""
from ..misc.helper_functions import safe_str
# force unicode strings
header = dict((k, v) if not isinstance(v, str) else (k, safe_str(v)) for k, v in header.items())
check_compatibility(header)
force = kwargs.get('force', False)
img_ext = kwargs.get('ext', '.img')
......
......@@ -32,15 +32,17 @@ import sys
import traceback
import warnings
from logging import getLogger
from typing import Union, List # noqa F401 # flake8 issue
from typing import Union, List, TYPE_CHECKING # noqa F401 # flake8 issue
from ..model.gms_object import GMS_object # noqa F401 # flake8 issue
from ..model.gms_object import failed_GMS_object
from ..options.config import GMS_config as CFG
from ..misc import database_tools as DB_T
from ..misc.helper_functions import is_proc_level_lower
from .definition_dicts import db_jobs_statistics_def, proc_chain
if TYPE_CHECKING:
from ..model.gms_object import GMS_object # noqa F401 # flake8 issue
from ..model.gms_object import failed_GMS_object
__author__ = 'Daniel Scheffler'
......@@ -163,6 +165,7 @@ class ExceptionHandler(object):
@staticmethod
def is_failed(GMS_objs):
    """Return True if the given input represents (a list of) failed GMS object(s).

    :param GMS_objs: a single GMS object or a list of GMS objects
    :return: True if GMS_objs is a failed_GMS_object instance, or a non-empty list
             whose first element is a failed_GMS_object; False otherwise
    """
    from ..model.gms_object import failed_GMS_object  # local import, presumably to avoid circular imports

    # NOTE(review): the previous implementation raised IndexError for an empty
    # list; an empty list now safely evaluates to False
    return isinstance(GMS_objs, failed_GMS_object) or \
        (isinstance(GMS_objs, list) and len(GMS_objs) > 0 and
         isinstance(GMS_objs[0], failed_GMS_object))
......@@ -244,6 +247,8 @@ class ExceptionHandler(object):
timeout=30000)
def handle_failed(self):
from ..model.gms_object import failed_GMS_object
try:
_, exc_val, exc_tb = self.exc_details
......@@ -281,6 +286,8 @@ class ExceptionHandler(object):
# been updated by the first subsystem (that earlier reached L1A)
# FIXME proc_status_all_GMSobjs is not available if other subsystems are processed by another
# FIXME multiprocessing worker or on another machine (cluster node)
from ..model.gms_object import GMS_object
procstatus_other_ss = {k: v for k, v in GMS_object.proc_status_all_GMSobjs[failed_Obj.scene_ID].items()
if k != failed_Obj.subsystem}
for ss, statusentry in procstatus_other_ss.items():
......
......@@ -48,7 +48,6 @@ try:
from osgeo import ogr
except ImportError:
import ogr
from numba import jit
from multiprocessing import sharedctypes
from matplotlib import pyplot as plt
from subprocess import Popen, PIPE
......@@ -56,12 +55,6 @@ from xml.etree.ElementTree import QName
from ..options.config import GMS_config as CFG
from . import database_tools as DB_T
from ..algorithms.L1A_P import L1A_object
from ..algorithms.L1B_P import L1B_object
from ..algorithms.L1C_P import L1C_object
from ..algorithms.L2A_P import L2A_object
from ..algorithms.L2B_P import L2B_object
from ..algorithms.L2C_P import L2C_object
from ..misc.definition_dicts import proc_chain
from py_tools_ds.geo.coord_trafo import mapXY2imXY, reproject_shapelyGeometry
......@@ -69,8 +62,23 @@ from py_tools_ds.geo.coord_calc import corner_coord_to_minmax
__author__ = 'Daniel Scheffler'
parentObjDict = {'L1A': L1A_object, 'L1B': L1B_object, 'L1C': L1C_object,
'L2A': L2A_object, 'L2B': L2B_object, 'L2C': L2C_object}
def get_parentObjDict():
    """Return a mapping from processing level name ('L1A'...'L2C') to the GMS object class.

    The algorithm modules are imported at function level to avoid circular
    imports at module load time.
    """
    from ..algorithms.L1A_P import L1A_object
    from ..algorithms.L1B_P import L1B_object
    from ..algorithms.L1C_P import L1C_object
    from ..algorithms.L2A_P import L2A_object
    from ..algorithms.L2B_P import L2B_object
    from ..algorithms.L2C_P import L2C_object

    return {'L1A': L1A_object,
            'L1B': L1B_object,
            'L1C': L1C_object,
            'L2A': L2A_object,
            'L2B': L2B_object,
            'L2C': L2C_object}
initArgsDict = {'L1A': (None,), 'L1B': (None,), 'L1C': (None,),
'L2A': (None,), 'L2B': (None,), 'L2C': (None,)}
......@@ -152,6 +160,11 @@ def sorted_nicely(iterable):
return sorted(iterable, key=alphanum_key)
def safe_str(obj):
    """Return an ASCII-only string that will not cause any UnicodeEncodeError issues.

    Non-ASCII characters are silently dropped.
    """
    ascii_bytes = obj.encode('ascii', 'ignore')
    return ascii_bytes.decode('ascii')
def is_proc_level_lower(current_lvl, target_lvl):
# type: (str, str) -> bool
"""Return True if current_lvl is lower than target_lvl.
......@@ -173,26 +186,6 @@ def convert_absPathArchive_to_GDALvsiPath(path_archive):
return os.path.join(gdal_prefix_dict[file_suffix], os.path.basename(path_archive))
@jit
def _numba_array_merger(GMS_obj, list_arraynames, list_GMS_tiles):
    """private function, e.g. called by merge_GMS_tiles_to_GMS_obj() in order to fasten array merging"""
    # For each named array attribute, allocate a full-size target array on
    # GMS_obj and paste every tile into its position within that array.
    for arrname in list_arraynames:
        # use the first tile to derive shape and dtype of the merged array
        samplearray = getattr(list_GMS_tiles[0], arrname)
        assert isinstance(samplearray, np.ndarray), \
            'Received a %s object for attribute %s. Expected a numpy array.' % (type(samplearray), arrname)
        is_3d = len(samplearray.shape) == 3
        bands = (samplearray.shape[2],) if is_3d else ()  # dynamic -> works for arr, cld_arr,...
        target_shape = tuple(GMS_obj.shape_fullArr[:2]) + bands
        target_dtype = samplearray.dtype
        setattr(GMS_obj, arrname, np.empty(target_shape, dtype=target_dtype))

        for idx, tile in enumerate(list_GMS_tiles):
            # tile.arr_pos holds ((rowStart, rowEnd), (colStart, colEnd)); end indices
            # are inclusive, hence the '+ 1' in the slice below
            rowStart, rowEnd = tile.arr_pos[0]
            colStart, colEnd = tile.arr_pos[1]
            getattr(GMS_obj, arrname)[rowStart:rowEnd + 1, colStart:colEnd + 1] = getattr(tile, arrname)

    return GMS_obj
class mp_SharedNdarray(object):
"""
wrapper class, which collect all neccessary instances to make a numpy ndarray
......
......@@ -45,7 +45,6 @@ from pkg_resources import parse_version
import numpy as np
import spectral
from spectral.io import envi
from numba import jit
from pandas import DataFrame, read_csv
from nested_dict import nested_dict
......@@ -444,7 +443,7 @@ class GMS_object(object):
if hasattr(self, 'MetaObj') and self.MetaObj:
self._arr.nodata = self.MetaObj.spec_vals['fill']
self._arr.gt = mapinfo2geotransform(self.MetaObj.map_info) if self.MetaObj.map_info else [0, 1, 0, 0, 0, -1]
self._arr.prj = self.MetaObj.projection if self.MetaObj.projection else self._arr.projection
self._arr.prj = self.MetaObj.projection
else:
self._arr.nodata = DEF_D.get_outFillZeroSaturated(self._arr.dtype)[0]
......@@ -1104,7 +1103,7 @@ class GMS_object(object):
# merge arrays
def get_band(bandN):
return [gA[bandN] for gA in geoArrs_same_extent if gA and bandN in gA.bandnames][0]
full_geoArr = GeoArray(np.dstack((get_band(bandN) for bandN in bandnames)),
full_geoArr = GeoArray(np.dstack([get_band(bandN) for bandN in bandnames]),
geoArrs_same_extent[0].gt, geoArrs_same_extent[0].prj,
bandnames=bandnames,
nodata=geoArrs_same_extent[0].nodata)
......@@ -1242,7 +1241,7 @@ class GMS_object(object):
bands = (samplearray.shape[2],) if is_3d else () # dynamic -> works for arr, cld_arr,...
target_shape = tuple(GMS_obj.shape_fullArr[:2]) + bands
target_dtype = samplearray.dtype
merged_array = GMS_obj._numba_array_merger(list_GMS_tiles, arrname, target_shape, target_dtype)
merged_array = GMS_obj._merge_arrays(list_GMS_tiles, arrname, target_shape, target_dtype)
setattr(GMS_obj, arrname if not arrname.startswith('_') else arrname[1:],
merged_array) # use setters if possible
......@@ -1298,7 +1297,9 @@ class GMS_object(object):
self.logger.info(logmsg)
# copy object
sub_GMS_obj = HLP_F.parentObjDict[self.proc_level](*HLP_F.initArgsDict[self.proc_level]) # init
from ..misc.helper_functions import get_parentObjDict
parentObjDict = get_parentObjDict()
sub_GMS_obj = parentObjDict[self.proc_level](*HLP_F.initArgsDict[self.proc_level]) # init
sub_GMS_obj.__dict__.update(
{k: getattr(self, k) for k in self.__dict__.keys()
if not isinstance(getattr(self, k), (GeoArray, np.ndarray))}.copy())
......@@ -1418,11 +1419,9 @@ class GMS_object(object):
return sub_GMS_obj
@staticmethod
@jit
def _numba_array_merger(list_GMS_tiles, arrname2merge, target_shape, target_dtype):
def _merge_arrays(list_GMS_tiles, arrname2merge, target_shape, target_dtype):
# type: (list, str, tuple, np.dtype) -> np.ndarray
"""
private function, e.g. called by merge_GMS_tiles_to_GMS_obj() in order to fasten array merging
"""Merge multiple arrays into a single one.
:param list_GMS_tiles:
:param arrname2merge:
......@@ -1430,11 +1429,9 @@ class GMS_object(object):
:param target_dtype:
:return:
"""
out_arr = np.empty(target_shape, dtype=target_dtype)
for idx, tile in enumerate(list_GMS_tiles):
rowStart, rowEnd = tile.arr_pos[0]
colStart, colEnd = tile.arr_pos[1]
for tile in list_GMS_tiles:
(rowStart, rowEnd), (colStart, colEnd) = tile.arr_pos
out_arr[rowStart:rowEnd + 1, colStart:colEnd + 1] = getattr(tile, arrname2merge)
return out_arr
......@@ -1686,6 +1683,9 @@ class GMS_object(object):
GDF_MGRS_tiles = DB_T.get_overlapping_MGRS_tiles(CFG.conn_database,
tgt_corners_lonlat=self.trueDataCornerLonLat)
if GDF_MGRS_tiles.empty:
raise RuntimeError('Could not find an overlapping MGRS tile in the database for the current dataset.')
# calculate image coordinate bounds of the full GMS object for each MGRS tile within the GeoDataFrame
gt = mapinfo2geotransform(self.MetaObj.map_info)
......@@ -2159,17 +2159,19 @@ class GMS_object(object):
time.sleep(5)
def close_loggers(self):
if self._logger not in [None, 'not set']:
self.logger.close() # this runs misc.logging.GMS_logger.close()
self.logger = None # also adds current captured stream to self.log
# self.GMS_identifier and self.logger are getters - since self.GMS_identifier gets its logger from self.logger,
# self.logger has to be closed AFTER closing self.GMS_identifier.logger
if self.GMS_identifier and self.GMS_identifier.logger not in [None, 'not set']:
self.GMS_identifier.logger.close()
self.GMS_identifier.logger = None
if self.MetaObj and self.MetaObj.logger not in [None, 'not set']:
self.MetaObj.logger.close()
self.MetaObj.logger = None
if self.GMS_identifier and self.GMS_identifier.logger not in [None, 'not set']:
self.GMS_identifier.logger.close()
self.GMS_identifier.logger = None
if self._logger not in [None, 'not set']:
self._logger.close() # this runs misc.logging.GMS_logger.close()
self._logger = None # also adds current captured stream to self.log
def delete_previous_proc_level_results(self):
"""Deletes results of the previous processing level if the respective flag CFG.exec_L**P[2]) is set to True.
......
......@@ -44,6 +44,7 @@ import numpy as np
import pyproj
from matplotlib import dates as mdates
from pyorbital import astronomy
from natsort import natsorted
from py_tools_ds.geo.map_info import geotransform2mapinfo
from py_tools_ds.geo.projection import WKT2EPSG
......@@ -333,7 +334,7 @@ class METADATA(object):
# band specific metadata #
##########################
LBA_full_sorted = HLP_F.sorted_nicely(self.LayerBandsAssignment_full)
LBA_full_sorted = natsorted(self.LayerBandsAssignment_full)
# Gains and Offsets
h9 = re.search(r"<Image_Interpretation>[\s\S]*</Image_Interpretation>", dim_, re.I)
......@@ -606,7 +607,7 @@ class METADATA(object):
##########################
# band specific metadata #
##########################
LBA_full_sorted = HLP_F.sorted_nicely(self.LayerBandsAssignment_full)
LBA_full_sorted = natsorted(self.LayerBandsAssignment_full)
# Gains and Offsets
h4 = re.search(r"GROUP = MIN_MAX_RADIANCE[\s\S]*END_GROUP = MIN_MAX_PIXEL_VALUE", mtl_, re.I)
......@@ -967,7 +968,7 @@ class METADATA(object):
# band specific metadata #
##########################
LBA_full_sorted = HLP_F.sorted_nicely(self.LayerBandsAssignment_full)
LBA_full_sorted = natsorted(self.LayerBandsAssignment_full)
# Gains + Offsets
h9 = re.findall(r"<re:radiometricScaleFactor>([^<]*)</re:radiometricScaleFactor>",
......@@ -1300,7 +1301,7 @@ class METADATA(object):
# band specific metadata #
##########################
LBA_full_sorted = HLP_F.sorted_nicely(self.LayerBandsAssignment_full)
LBA_full_sorted = natsorted(self.LayerBandsAssignment_full)
# Gains/Offsets
h4 = re.findall(r"GROUP[\s]*=[\s]*"
......@@ -1482,7 +1483,7 @@ class METADATA(object):
# band specific metadata #
##########################
LBA_full_sorted = HLP_F.sorted_nicely(self.LayerBandsAssignment_full)
LBA_full_sorted = natsorted(self.LayerBandsAssignment_full)
# GainMode with corresponding coefficients + Offsets
gains_AVNIR = {'1': ['N/A', 'N/A', 'N/A', 'N/A'], '2': [0.5880, 0.5730, 0.5020, 0.5570],
......@@ -1548,7 +1549,7 @@ class METADATA(object):
open_specific_file_within_archive(self.FolderOrArchive, '*/*data*/LED-*', read_mode='rb')
# Gains & offsets
LBA_full_sorted = HLP_F.sorted_nicely(self.LayerBandsAssignment_full)
LBA_full_sorted = natsorted(self.LayerBandsAssignment_full)