Commit b5b31181 authored by Daniel Scheffler

Replaced deprecated gdalnumeric imports.
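
For reference: osgeo.gdalnumeric has essentially been a thin wrapper around osgeo.gdal_array, so this is a pure rename and the LoadFile calls keep their signatures. A minimal sketch of the replacement pattern used throughout this commit (the file path below is hypothetical):

    from osgeo import gdal_array

    # read a whole raster into a numpy array:
    # (bands, rows, cols) for multiband files, (rows, cols) for single-band files
    arr = gdal_array.LoadFile('/path/to/raster.bsq')  # hypothetical path

    # read only a subset: xoff, yoff, xsize, ysize
    # (a 1x1 read like this is also used below to probe the on-disk dtype)
    subset = gdal_array.LoadFile('/path/to/raster.bsq', 0, 0, 1, 1)
    dtype = subset.dtype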



Signed-off-by: Daniel Scheffler <danschef@gfz-potsdam.de>
parent 1d85a8d4
@@ -10,6 +10,7 @@ History
* Replaced wrong conda command against 'source'.
* Moved sicor cache file download from build_testsuite_image.sh to new before_script.sh
and adjusted 'make gitlab_CI_docker' accordingly.
* Replaced deprecated gdalnumeric imports.
0.19.4 (2020-12-10)
@@ -29,8 +29,7 @@ import os
import re
import warnings
from osgeo import gdal
from osgeo import gdalnumeric
from osgeo import gdal, gdal_array
import numpy as np
from natsort import natsorted
@@ -226,8 +225,8 @@ class L1A_object(GMS_object):
# read a single file
with IOLock(allowed_slots=CFG.max_parallel_reads_writes, logger=self.logger):
if CFG.inmem_serialization and path_output is None: # numpy array output
self.arr = gdalnumeric.LoadFile(path_file2load) if subset is None else \
gdalnumeric.LoadFile(path_file2load, rasObj.colStart, rasObj.rowStart, rasObj.cols, rasObj.rows)
self.arr = gdal_array.LoadFile(path_file2load) if subset is None else \
gdal_array.LoadFile(path_file2load, rasObj.colStart, rasObj.rowStart, rasObj.cols, rasObj.rows)
self.path_InFilePreprocessor = path_file2load
else: # 'MEMORY' or physical output
GEOP.ndarray2gdal(rasObj.tondarray(), path_output,
@@ -248,7 +247,7 @@ class L1A_object(GMS_object):
for bidx, b in enumerate(self.LayerBandsAssignment):
sds_name = [i for i in sds_md.values() if '%s_Band%s:ImageData' % (subsystem_identifier, b) in str(i) or
'%s_Swath:ImageData%s' % (subsystem_identifier, b) in str(i)][0]
data = gdalnumeric.LoadFile(sds_name)
data = gdal_array.LoadFile(sds_name)
if bidx == 0:
data_arr = np.empty(data.shape + (len(self.LayerBandsAssignment),), data.dtype)
data_arr[:, :, bidx] = data
@@ -685,9 +684,9 @@ class L1A_object(GMS_object):
if hasattr(self, 'arr') and isinstance(self.arr, np.ndarray):
dtype = self.arr.dtype
else:
arr = gdalnumeric.LoadFile(self.arr, 0, 0, 1, 1) if hasattr(self, 'arr') and isinstance(self.arr,
str) else \
gdalnumeric.LoadFile(self.MetaObj.Dataname, 0, 0, 1, 1)
arr = gdal_array.LoadFile(
self.arr, 0, 0, 1, 1) if hasattr(self, 'arr') and isinstance(self.arr, str) else \
gdal_array.LoadFile(self.MetaObj.Dataname, 0, 0, 1, 1)
assert arr is not None
dtype = arr.dtype
@@ -44,9 +44,7 @@ import numpy as np
from matplotlib import dates as mdates
# custom
from osgeo import osr
from osgeo import gdal
from osgeo import gdalnumeric
from osgeo import gdal, osr, gdal_array
from osgeo.gdal import GA_ReadOnly
import pyproj
@@ -353,7 +351,7 @@ class GEOPROCESSING(object):
# [transform_wgs84_to_utm(LonLat[0], LonLat[1], get_UTMzone(prj=self.projection)) for LonLat in TieP]
# xmin, xmax, ymin, ymax = HLP_F.corner_coord_to_minmax(TieP)
# t0 = time.time()
# in_arr = np.swapaxes(np.swapaxes(gdalnumeric.LoadFile(self.desc), 0, 2), 0, 1)
# in_arr = np.swapaxes(np.swapaxes(gdal_array.LoadFile(self.desc), 0, 2), 0, 1)
# print('reading time', time.time() - t0)
# if inFill is None:
# inFill = get_outFillZeroSaturated(in_arr.dtype)[0]
@@ -379,7 +377,7 @@ class GEOPROCESSING(object):
else: # mode == 'GDAL'
"""needs temporary files but does support multiprocessing and configuring cache size"""
t0 = time.time()
in_dtype = gdalnumeric.LoadFile(self.desc, 0, 0, 1, 1).dtype
in_dtype = gdal_array.LoadFile(self.desc, 0, 0, 1, 1).dtype
if inFill is None:
inFill = get_outFillZeroSaturated(in_dtype)[0]
out_nodataVal = get_outFillZeroSaturated(in_dtype)[0]
@@ -487,7 +485,7 @@ class GEOPROCESSING(object):
:param custom_nodataVal:
"""
nodataVal = get_outFillZeroSaturated(np.int16)[0] if custom_nodataVal is None else custom_nodataVal
in_arr = array if array is not None else np.swapaxes(np.swapaxes(gdalnumeric.LoadFile(self.desc), 0, 2), 0, 1)
in_arr = array if array is not None else np.swapaxes(np.swapaxes(gdal_array.LoadFile(self.desc), 0, 2), 0, 1)
return np.all(np.where(in_arr == nodataVal, 0, 1), axis=2)
def pixelToWorldCoord_using_geotransform_and_projection(self, Pixel_row_col, targetProj):
@@ -630,15 +628,15 @@ class GEOPROCESSING(object):
rows, cols, bands = self.rows, self.cols, len(layers_pathlist)
if path_output is None:
dtype = gdalnumeric.LoadFile(layers_pathlist[0], 0, 0, 1, 1).dtype
dtype = gdal_array.LoadFile(layers_pathlist[0], 0, 0, 1, 1).dtype
stacked = np.empty((self.rows, self.cols, len(layers_pathlist)), dtype)
for i, p in enumerate(layers_pathlist):
self.logger.info('Adding band %s to Layerstack..' % os.path.basename(p))
if self.subset is None or self.subset[0] == 'cube':
stacked[:, :, i] = gdalnumeric.LoadFile(p)
stacked[:, :, i] = gdal_array.LoadFile(p)
else:
stacked[:, :, i] = gdalnumeric.LoadFile(p, self.colStart, self.rowStart, cols, rows)
stacked[:, :, i] = gdal_array.LoadFile(p, self.colStart, self.rowStart, cols, rows)
return stacked
@@ -34,19 +34,14 @@ import gzip
import os
from typing import TYPE_CHECKING
import pickle
import builtins
from itertools import chain
import dill
import numpy as np
from osgeo import gdal_array
from spectral.io import envi
from spectral.io.envi import check_compatibility, check_new_filename, write_envi_header, _write_header_param
from osgeo import ogr
from osgeo import osr
from osgeo import gdal
from osgeo import gdalnumeric
import builtins
from itertools import chain
from osgeo import gdal, gdal_array, ogr, osr
from ..options.config import GMS_config as CFG
from ..misc import helper_functions as HLP_F
@@ -147,7 +142,7 @@ def write_ENVI_compressed(outPath_hdr, ndarray, meta, interleave='bsq'):
write_ordered_envi_header(outPath_hdr, meta)
# check if output is GDAL readable
if gdalnumeric.LoadFile(outpathBinary, 0, 0, 1, 1) is None:
if gdal_array.LoadFile(outpathBinary, 0, 0, 1, 1) is None:
return 0
else:
return 1
@@ -47,8 +47,7 @@ import spectral
from spectral.io import envi
from pandas import DataFrame, read_csv
from nested_dict import nested_dict
from osgeo import gdalnumeric
from osgeo import gdal_array
from geoarray import GeoArray, NoDataMask, CloudMask
from py_tools_ds.geo.coord_grid import is_coord_grid_equal
@@ -1984,8 +1983,8 @@ class GMS_object(object):
if arrayname not in ['mask_clouds', 'mask_nodata']:
# read image data in subset
tempArr = gdalnumeric.LoadFile(path_to_array, cS, rS, cols,
rows) # bands, rows, columns OR rows, columns
tempArr = gdal_array.LoadFile(path_to_array, cS, rS, cols,
rows) # bands, rows, columns OR rows, columns
arr2write = tempArr if len(tempArr.shape) == 2 else \
np.swapaxes(np.swapaxes(tempArr, 0, 2), 0, 1) # rows, columns, (bands)