Commit 3e0bbfe8 authored by Daniel Scheffler

Added spectral to dependencies. Added many functions from arosics:

- dtypes.convertGdalNumpyDataType()
- geo.get_coord_grid()
- io.raster.gdal.wait_if_used()
- io.raster.reader.init_SharedArray_in_globals()
- io.raster.reader.fill_arr()
- io.raster.reader.gdal_read_subset()
- io.raster.reader.gdal_ReadAsArray_mp()
- io.raster.writer.write_numpy_to_image()
- io.raster.writer.write_envi()
- io.raster.writer.init_SharedArray_on_disk()
- io.raster.writer.fill_arr_on_disk()
- io.raster.writer.convert_gdal_to_bsq__mp()
- io.vector.writer.write_shp()
parent 68113d92
Pipeline #1535 failed with stages in 1 minute and 22 seconds
@@ -54,3 +54,36 @@ def get_dtypeStr(val):
'date' if isinstance(val, datetime.datetime) else None
assert DType, 'data type not understood'
return DType
def convertGdalNumpyDataType(dType):
    """Convert a GDAL data type name to the corresponding numpy data type and vice versa.
    :param dType:   GDAL data type name (string) or numpy data type
    :return:        corresponding data type
    """
    # dictionary to translate GDAL data types (strings) into corresponding numpy data types
    dTypeDic = {"Byte": np.uint8, "UInt16": np.uint16, "Int16": np.int16, "UInt32": np.uint32, "Int32": np.int32,
                "Float32": np.float32, "Float64": np.float64, "GDT_UInt32": np.uint32}
    outdType = None
    if dType in dTypeDic:
        outdType = dTypeDic[dType]
    elif dType in dTypeDic.values():
        for i in dTypeDic.items():
            if dType == i[1]:
                outdType = i[0]
    elif dType in [np.int8, np.int64, int]:  # NOTE: builtin int replaces the removed numpy alias np.int
        outdType = "Int32"
        print(">>> Warning: %s is converted to GDAL_Type 'Int32'\n" % dType)
    elif dType in [bool, np.bool_]:  # NOTE: builtin bool replaces the removed numpy alias np.bool
        outdType = "Byte"
        print(">>> Warning: %s is converted to GDAL_Type 'Byte'\n" % dType)
    elif dType in [float, np.float16]:  # NOTE: builtin float replaces the removed numpy alias np.float
        outdType = "Float32"
        print(">>> Warning: %s is converted to GDAL_Type 'Float32'\n" % dType)
    else:
        raise Exception('GEOP.convertGdalNumpyDataType: Unexpected input data type %s.' % dType)
    return outdType
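A minimal usage sketch (editorial note, not part of the commit; the absolute import path assumes the py_tools_ds package layout): the mapping works in both directions, returning a numpy type for a GDAL type name and a GDAL type name for a numpy type.

import numpy as np
from py_tools_ds.dtypes.conversion import convertGdalNumpyDataType  # assumed import path

convertGdalNumpyDataType("UInt16")    # -> <class 'numpy.uint16'>
convertGdalNumpyDataType(np.float64)  # -> 'Float64'
convertGdalNumpyDataType(np.int64)    # -> 'Int32' (prints a conversion warning)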
@@ -10,6 +10,12 @@ from .coord_calc import get_corner_coordinates
__author__ = "Daniel Scheffler"
def get_coord_grid(ULxy, LRxy, out_resXY):
    """Return the X/Y coordinate meshgrid between the given upper-left and lower-right corner at the given resolution."""
    X_vec = np.arange(ULxy[0], LRxy[0], out_resXY[0])
    Y_vec = np.arange(ULxy[1], LRxy[1], out_resXY[1])
    return np.meshgrid(X_vec, Y_vec)
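For illustration (editorial, not part of the commit; coordinates and resolution are made up), get_coord_grid() spans a regular coordinate grid between the given corners:

from py_tools_ds.geo.coord_grid import get_coord_grid  # assumed import path

# 1 km grid between upper-left (300000, 5900000) and lower-right (305000, 5895000);
# the Y resolution is negative because map coordinates decrease downwards
xx, yy = get_coord_grid(ULxy=(300000, 5900000), LRxy=(305000, 5895000), out_resXY=(1000, -1000))
print(xx.shape, yy.shape)  # (5, 5) (5, 5)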
def snap_bounds_to_pixGrid(bounds, gt, roundAlg='auto'):
    """Snap the given bounds to the pixel grid defined by the given geotransform, using the given rounding algorithm.
    NOTE: asserts equal projections of source and target grid
......
# -*- coding: utf-8 -*-
__author__ = "Daniel Scheffler"
import time
import os
import numpy as np
from pandas import DataFrame
from osgeo import gdal_array
@@ -71,3 +74,29 @@ def get_GDAL_driverList():
extensions[2] if len(extensions) > 2 else np.nan]
df = df.dropna(how='all')
return df
def wait_if_used(path_file, lockfile, timeout=100, try_kill=0):
    """Wait until the given file is no longer referenced by a global GDAL dataset and no lock file exists.
    :param path_file:   path of the file to wait for
    :param lockfile:    path of the lock file protecting the file
    :param timeout:     timeout in seconds
    :param try_kill:    if 1, release blocking GDAL dataset references after the timeout instead of raising
    """
    globs = globals()
    same_gdalRefs = [k for k, v in globs.items() if
                     isinstance(v, gdal.Dataset) and v.GetDescription() == path_file]
    t0 = time.time()

    def update_same_gdalRefs(sRs): return [sR for sR in sRs if sR in globals() and globals()[sR] is not None]

    while same_gdalRefs != [] or os.path.exists(lockfile):
        if time.time() - t0 > timeout:
            if try_kill:
                for sR in same_gdalRefs:
                    globals()[sR] = None
                    print('had to kill %s' % sR)
            else:
                if os.path.exists(lockfile):
                    os.remove(lockfile)
                raise TimeoutError('The file %s is permanently used by another variable.' % path_file)
        if os.path.exists(lockfile):
            time.sleep(0.1)  # wait for the lock file to disappear instead of busy-waiting
            continue
        same_gdalRefs = update_same_gdalRefs(same_gdalRefs)
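A hedged usage sketch (editorial; the file paths are hypothetical): block until the given raster is no longer referenced by a GDAL dataset held in globals() and no lock file exists, releasing stale references after 30 seconds:

from py_tools_ds.io.raster.gdal import wait_if_used  # assumed import path

wait_if_used('/tmp/image.bsq', '/tmp/image.bsq.lock', timeout=30, try_kill=1)
# the file can now be safely overwritten or deleted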
# -*- coding: utf-8 -*-
import multiprocessing
import ctypes
import numpy as np
try:
import gdal
except ImportError:
from osgeo import gdal
from ...numeric.array import get_array_tilebounds
__author__ = "Daniel Scheffler"
shared_array = None
def init_SharedArray_in_globals(dims):
    """Create a module-level shared array (a multiprocessing.Array wrapped as a numpy view) with the given 2D shape."""
    rows, cols = dims
    global shared_array
    shared_array_base = multiprocessing.Array(ctypes.c_double, rows * cols)
    shared_array = np.ctypeslib.as_array(shared_array_base.get_obj())
    shared_array = shared_array.reshape(rows, cols)


def fill_arr(argDict, def_param=shared_array):
    # NOTE: the sharing relies on fork-based multiprocessing: the workers inherit the module-level
    # 'shared_array'; the unused default argument 'def_param' is only bound at definition time.
    pos = argDict.get('pos')
    func = argDict.get('func2call')
    args = argDict.get('func_args', [])
    kwargs = argDict.get('func_kwargs', {})

    (rS, rE), (cS, cE) = pos
    shared_array[rS:rE + 1, cS:cE + 1] = func(*args, **kwargs)
def gdal_read_subset(fPath, pos, bandNr):
(rS, rE), (cS, cE) = pos
ds = gdal.Open(fPath)
data = ds.GetRasterBand(bandNr).ReadAsArray(cS, rS, cE - cS + 1, rE - rS + 1)
del ds
return data
def gdal_ReadAsArray_mp(fPath, bandNr, tilesize=1500):
    """Read the given band of the given raster file tile-wise in parallel and return it as a numpy array."""
    ds = gdal.Open(fPath)
    rows, cols = ds.RasterYSize, ds.RasterXSize
    del ds

    init_SharedArray_in_globals((rows, cols))

    tilepos = get_array_tilebounds(array_shape=(rows, cols), tile_shape=[tilesize, tilesize])
    fill_arr_argDicts = [{'pos': pos, 'func2call': gdal_read_subset, 'func_args': (fPath, pos, bandNr)} for pos in
                         tilepos]

    with multiprocessing.Pool() as pool:
        pool.map(fill_arr, fill_arr_argDicts)

    return shared_array
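A usage sketch (editorial; the raster path is hypothetical). gdal_ReadAsArray_mp() reads one band tile-wise in a process pool and assembles it in the module-level shared array; this relies on fork-based multiprocessing, so the workers inherit shared_array from the parent process:

from py_tools_ds.io.raster.reader import gdal_ReadAsArray_mp  # assumed import path

band1 = gdal_ReadAsArray_mp('/path/to/raster.tif', bandNr=1, tilesize=1000)
print(band1.shape)  # (RasterYSize, RasterXSize) of the input file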
# -*- coding: utf-8 -*-
import os
import multiprocessing
from spectral.io import envi
try:
import gdal
except ImportError:
from osgeo import gdal
from ...dtypes.conversion import convertGdalNumpyDataType
from ...geo.map_info import geotransform2mapinfo
from ...numeric.array import get_array_tilebounds
__author__ = "Daniel Scheffler"
def write_numpy_to_image(array, path_out, outFmt='GTIFF', gt=None, prj=None):
    """Write the given 2D or 3D numpy array to a raster file using the given GDAL output format."""
    rows, cols, bands = (list(array.shape) + [1]) if array.ndim == 2 else array.shape
    gdal_dtype = gdal.GetDataTypeByName(convertGdalNumpyDataType(array.dtype))
    outDs = gdal.GetDriverByName(outFmt).Create(path_out, cols, rows, bands, gdal_dtype)
    for b in range(bands):
        band = outDs.GetRasterBand(b + 1)
        arr2write = array if array.ndim == 2 else array[:, :, b]
        band.WriteArray(arr2write)
        del band
    if gt:
        outDs.SetGeoTransform(gt)
    if prj:
        outDs.SetProjection(prj)
    del outDs
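For illustration (editorial; path and geotransform are made up), a 2D uint8 array is written as a single-band GeoTIFF:

import numpy as np
from py_tools_ds.io.raster.writer import write_numpy_to_image  # assumed import path

arr = np.random.randint(0, 255, (100, 100)).astype(np.uint8)
gt = (300000.0, 30.0, 0.0, 5900000.0, 0.0, -30.0)  # example GDAL geotransform
write_numpy_to_image(arr, '/tmp/test.tif', outFmt='GTIFF', gt=gt)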
def write_envi(arr, outpath, gt=None, prj=None):
    """Write the given 2D numpy array to an ENVI BSQ file (outpath is the path of the ENVI header file)."""
    if gt or prj:
        assert gt and prj, 'gt and prj must be provided together or left out.'
    meta = {'map info': geotransform2mapinfo(gt, prj), 'coordinate system string': prj} if gt else None
    shape = (arr.shape[0], arr.shape[1], 1) if arr.ndim == 2 else arr.shape  # append a band dimension to 2D arrays
    out = envi.create_image(outpath, metadata=meta, shape=shape, dtype=arr.dtype, interleave='bsq', ext='.bsq',
                            force=True)
    out_mm = out.open_memmap(writable=True)
    out_mm[:, :, 0] = arr
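A usage sketch (editorial; the output path is made up). The output path is passed to spectral as the ENVI header path, so the interleaved data file ends up next to it with a '.bsq' extension:

import numpy as np
from py_tools_ds.io.raster.writer import write_envi  # assumed import path

mask = np.zeros((50, 50), dtype=np.uint8)
write_envi(mask, '/tmp/mask.hdr')  # creates /tmp/mask.hdr and /tmp/mask.bsq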
shared_array_on_disk__memmap = None
def init_SharedArray_on_disk(out_path, dims, gt=None, prj=None):
    global shared_array_on_disk__memmap
    global shared_array_on_disk__path
    # spectral expects the path of the ENVI header file, so a '.bsq' output path is mapped to the '.hdr' path
    path = out_path if not os.path.splitext(out_path)[1] == '.bsq' else \
        os.path.splitext(out_path)[0] + '.hdr'
    Meta = {}
    if gt and prj:
        Meta['map info'] = geotransform2mapinfo(gt, prj)
        Meta['coordinate system string'] = prj
    shared_array_on_disk__obj = envi.create_image(path, metadata=Meta, shape=dims, dtype='uint16',
                                                  interleave='bsq', ext='.bsq', force=True)
    shared_array_on_disk__memmap = shared_array_on_disk__obj.open_memmap(writable=True)
def fill_arr_on_disk(argDict):
pos = argDict.get('pos')
in_path = argDict.get('in_path')
band = argDict.get('band')
(rS, rE), (cS, cE) = pos
ds = gdal.Open(in_path)
band = ds.GetRasterBand(band)
data = band.ReadAsArray(cS, rS, cE - cS + 1, rE - rS + 1)
shared_array_on_disk__memmap[rS:rE + 1, cS:cE + 1, 0] = data
del ds, band
def convert_gdal_to_bsq__mp(in_path, out_path, band=1):
    """Convert the given band of a GDAL-readable raster file to an ENVI BSQ file using tile-wise multiprocessing.

    Usage:
        ref_ds,tgt_ds = gdal.Open(self.path_imref),gdal.Open(self.path_im2shift)
        ref_pathTmp, tgt_pathTmp = None,None
        if ref_ds.GetDriver().ShortName!='ENVI':
            ref_pathTmp = IO.get_tempfile(ext='.bsq')
            IO.convert_gdal_to_bsq__mp(self.path_imref,ref_pathTmp)
            self.path_imref = ref_pathTmp
        if tgt_ds.GetDriver().ShortName!='ENVI':
            tgt_pathTmp = IO.get_tempfile(ext='.bsq')
            IO.convert_gdal_to_bsq__mp(self.path_im2shift,tgt_pathTmp)
            self.path_im2shift = tgt_pathTmp
        ref_ds=tgt_ds=None

    :param in_path:   path of the input raster file (any GDAL-readable format)
    :param out_path:  output path of the ENVI BSQ file
    :param band:      band number to be converted (1-based)
    :return:
    """
ds = gdal.Open(in_path)
dims = (ds.RasterYSize, ds.RasterXSize)
gt, prj = ds.GetGeoTransform(), ds.GetProjection()
del ds
init_SharedArray_on_disk(out_path, dims, gt, prj)
positions = get_array_tilebounds(array_shape=dims, tile_shape=[512, 512])
argDicts = [{'pos': pos, 'in_path': in_path, 'band': band} for pos in positions]
with multiprocessing.Pool() as pool:
pool.map(fill_arr_on_disk, argDicts)
# -*- coding: utf-8 -*-
import os
try:
    import ogr
    import osr
except ImportError:
    from osgeo import ogr
    from osgeo import osr
from ...dtypes.conversion import get_dtypeStr
from ...geo.projection import EPSG2WKT
__author__ = "Daniel Scheffler"
def write_shp(path_out, shapely_geom, prj=None, attrDict=None):
    """Write the given shapely geometry or list of geometries to an ESRI Shapefile.
    :param path_out:       output path of the shapefile
    :param shapely_geom:   shapely geometry or list of shapely geometries (all of the same geometry type)
    :param prj:            projection as EPSG code (int) or WKT string
    :param attrDict:       attribute dictionary or list of attribute dictionaries (one per geometry)
    """
    shapely_geom = [shapely_geom] if not isinstance(shapely_geom, list) else shapely_geom
    attrDict = [attrDict] if not isinstance(attrDict, list) else attrDict
assert len(shapely_geom) == len(attrDict), "'shapely_geom' and 'attrDict' must have the same length."
assert os.path.exists(os.path.dirname(path_out)), 'Directory %s does not exist.' % os.path.dirname(path_out)
print('Writing %s ...' % path_out)
if os.path.exists(path_out):
os.remove(path_out)
ds = ogr.GetDriverByName("Esri Shapefile").CreateDataSource(path_out)
if prj is not None:
prj = prj if not isinstance(prj, int) else EPSG2WKT(prj)
srs = osr.SpatialReference()
srs.ImportFromWkt(prj)
else:
srs = None
    geom_type = list(set([gm.geom_type for gm in shapely_geom]))  # geom_type instead of the deprecated shapely '.type'
assert len(geom_type) == 1, 'All shapely geometries must belong to the same type. Got %s.' % geom_type
layer = \
ds.CreateLayer('', srs, ogr.wkbPoint) if geom_type[0] == 'Point' else\
ds.CreateLayer('', srs, ogr.wkbLineString) if geom_type[0] == 'LineString' else \
ds.CreateLayer('', srs, ogr.wkbPolygon) if geom_type[0] == 'Polygon' else None # FIXME
if isinstance(attrDict[0], dict):
for attr in attrDict[0].keys():
            assert len(attr) <= 10, "ogr does not support field names longer than 10 characters. '%s' is too long." % attr
DTypeStr = get_dtypeStr(attrDict[0][attr])
FieldType = \
ogr.OFTInteger if DTypeStr.startswith('int') else \
ogr.OFTReal if DTypeStr.startswith('float') else \
ogr.OFTString if DTypeStr.startswith('str') else \
ogr.OFTDateTime if DTypeStr.startswith('date') else None
FieldDefn = ogr.FieldDefn(attr, FieldType)
if DTypeStr.startswith('float'):
FieldDefn.SetPrecision(6)
layer.CreateField(FieldDefn) # Add one attribute
for i in range(len(shapely_geom)):
# Create a new feature (attribute and geometry)
feat = ogr.Feature(layer.GetLayerDefn())
feat.SetGeometry(ogr.CreateGeometryFromWkb(shapely_geom[i].wkb)) # Make a geometry, from Shapely object
list_attr2set = attrDict[0].keys() if isinstance(attrDict[0], dict) else []
for attr in list_attr2set:
val = attrDict[i][attr]
DTypeStr = get_dtypeStr(val)
val = int(val) if DTypeStr.startswith('int') else float(val) if DTypeStr.startswith('float') else \
str(val) if DTypeStr.startswith('str') else val
feat.SetField(attr, val)
layer.CreateFeature(feat)
feat.Destroy()
# Save and close everything
del ds, layer
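A usage sketch (editorial; coordinates, EPSG code, and attribute names are made up): write two shapely points with an integer attribute to a new shapefile:

from shapely.geometry import Point
from py_tools_ds.io.vector.writer import write_shp  # assumed import path

points = [Point(300000, 5900000), Point(301000, 5901000)]
attrs = [{'id': 1}, {'id': 2}]
write_shp('/tmp/points.shp', points, prj=32633, attrDict=attrs)  # prj may be an EPSG code or a WKT string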
@@ -12,7 +12,7 @@ with open('HISTORY.rst') as history_file:
history = history_file.read()
requirements = ['gdal', 'numpy', 'shapely', 'six', 'rasterio', 'pandas', 'geopandas',
-               'scikit-image', 'pyproj']
+               'scikit-image', 'pyproj', 'spectral']
setup_requirements = [] # TODO(danschef): put setup requirements (distutils extensions, etc.) here
test_requirements = requirements + ["coverage", "nose", "nose2", "nose-htmloutput", "rednose"]
......
@@ -12,7 +12,7 @@ RUN /bin/bash -i -c "source /root/anaconda3/bin/activate ; \
conda install --yes pyqt ; \
conda install --yes -c conda-forge numpy gdal scikit-image rasterio pyproj geopandas; \
conda install --yes -c conda-forge 'icu=58.*' lxml ; \
-   pip install dicttoxml jsmin cerberus pyprind pint iso8601 tqdm mpld3 sphinx-argparse six \
+   pip install dicttoxml jsmin cerberus pyprind pint iso8601 tqdm mpld3 sphinx-argparse six spectral \
flake8 pycodestyle pylint pydocstyle nose nose2 nose-htmloutput coverage rednose"
# copy some needed stuff to /root
......