Commit 60385ce8 authored by Daniel Scheffler's avatar Daniel Scheffler
Browse files

Merge branch 'master' into dev

Conflicts:
	tests/test_COREG.py
parents d6c8e5bb c7fda6b8
Pipeline #2914 passed with stages
in 9 minutes and 51 seconds
......@@ -12,18 +12,16 @@ variables:
stages:
- test
- deploy_pages
- deploy_to_pypi
- deploy
- cleanup
test_arosics:
stage: test
script:
- source /root/anaconda3/bin/activate
- export GDAL_DATA=/root/anaconda3/share/gdal
- source /root/miniconda3/bin/activate
- export GDAL_DATA=/root/miniconda3/share/gdal
- export PYTHONPATH=$PYTHONPATH:/root # /root <- directory needed later
- pip install rednose # remove as soon as it is included in test runner
- make nosetests
- make docs
artifacts:
......@@ -32,35 +30,69 @@ test_arosics:
- docs/_build/html/
- nosetests.html
- nosetests.xml
when: always
test_styles:
stage: test
script:
- source /root/miniconda3/bin/activate
- export GDAL_DATA=/root/miniconda3/share/gdal
- export PYTHONPATH=$PYTHONPATH:/root # /root <- directory needed later
- make lint
artifacts:
paths:
- tests/linting/flake8.log
- tests/linting/pycodestyle.log
- tests/linting/pydocstyle.log
when: always
test_arosics_install:
stage: test
script:
- source /root/anaconda3/bin/activate
- conda create -y -q --name arosics python=3.5
- source /root/miniconda3/bin/activate
- export GDAL_DATA=/root/miniconda3/share/gdal
- conda create -y -q --name arosics python=3
- source activate arosics
- conda install --yes -q -c conda-forge numpy gdal scikit-image matplotlib # resolve these requirements with conda
# resolve some requirements with conda
- conda install --yes -q -c conda-forge numpy gdal scikit-image matplotlib pyproj rasterio shapely geopandas
# run installer
- python setup.py install
# test if its importable
- cd ..
- pwd
- ls
- python -c "import arosics; print(arosics)"
- python -c "from arosics import COREG, COREG_LOCAL"
only:
- master
pages:
stage: deploy_pages
pages: # this job must be called 'pages' to advise GitLab to upload content to GitLab Pages
stage: deploy
dependencies:
- test_arosics
script:
# Create the public directory
- rm -rf public
- mkdir public
- mkdir -p public/doc
- mkdir -p public/images/
- mkdir -p public/coverage
- cp -r htmlcov/* public/coverage/
- mkdir -p public/nosetests_reports
- cp nosetests.* public/nosetests_reports/
- mkdir -p public/doc
# Copy over the docs
- cp -r docs/_build/html/* public/doc/
- cp -r docs/images/* public/images/
# Copy over the coverage reports
- cp -r htmlcov/* public/coverage/
# Copy over the nosetests reports
- cp nosetests.* public/nosetests_reports/
# Check if everything is working great
- ls -al public
- ls -al public/doc
- ls -al public/coverage
- ls -al public/nosetests_reports
artifacts:
paths:
- public
......@@ -70,11 +102,11 @@ pages:
deploy_pypi:
stage: deploy_to_pypi
stage: deploy
dependencies:
- test_arosics
script: # Configure the PyPI credentials, then push the package, and cleanup the creds.
- source /root/anaconda3/bin/activate
- source /root/miniconda3/bin/activate
- mkdir -p public/images/
- cp -r docs/images/* public/images/
- printf "[distutils]\nindex-servers =\n pypi\n\n" >> ~/.pypirc
......
......@@ -28,11 +28,49 @@ Fixes and improvements:
* fixed warping issues in case only very few tie points could be identified
0.5.0 (coming soon)
-------------------
0.5.0 (2017-09-19)
------------------
New features:
* Added two test cases for local co-registration and the respective test data.
* Added test cases for global co-registration
* Added test of output writer and tie point grid visualization.
* Added nosetests. Resolved some setup requirements by conda during test_arosics_install.
* PEP8 code style now checked with automatic style checkers
Fixes and improvements:
* Coverage now also working in multiprocessing.
* Replaced test data of test case INTER1 with LZW compressed GeoTIFFs to speed up testing.
* Revised docker container builder.
* Bugfix for unexpected FFTW return value that caused the matching to fail
* Added some docstrings.
* Refactored command line interface 'arosics.py' to 'arosics_cli.py' to fix import issues.
* Added usage documentation for command line interface.
* Removed pykrige from automatically installed libraries during setup. It is now optional (Fixes issue #12)
* Bugfix in connection with optional library pyfftw.
* Revised installation guidelines within README.rst, README.md and installation.rst. Added link for nosetests HTML report.
* Fixed exception in case no arguments are provided to command line interface.
* Revised error handling and added additional check for projection.
* GDAL_DATA environment variable is now handled within py_tools_ds. Updated minimal version of py_tools_ds in setup.py.
* Fixed pickling error when running COREG_LOCAL in multiprocessing under a Windows environment.
* Replaced all occurrences of "quality grid" with "tie point grid". Updated version info.
......@@ -52,7 +52,9 @@ clean-test: ## remove test and coverage artifacts
rm -fr nosetests.xml
lint: ## check style with flake8
flake8 arosics tests
flake8 --max-line-length=120 arosics tests > ./tests/linting/flake8.log
pycodestyle arosics --exclude="*.ipynb,*.ipynb*" --max-line-length=120 > ./tests/linting/pycodestyle.log
-pydocstyle arosics > ./tests/linting/pydocstyle.log
test: ## run tests quickly with the default Python
python setup.py test
......
......@@ -16,6 +16,9 @@
[![build status](https://gitext.gfz-potsdam.de/danschef/arosics/badges/master/build.svg)](https://gitext.gfz-potsdam.de/danschef/arosics/commits/master)
[![coverage report](https://gitext.gfz-potsdam.de/danschef/arosics/badges/master/coverage.svg)](http://danschef.gitext.gfz-potsdam.de/arosics/coverage/)
[![pypi_status](https://img.shields.io/pypi/v/arosics.svg)](https://pypi.python.org/pypi/arosics)
[![license](https://img.shields.io/pypi/l/arosics.svg)](https://gitext.gfz-potsdam.de/danschef/arosics/blob/master/LICENSE)
[![python versions](https://img.shields.io/pypi/pyversions/arosics.svg)](https://img.shields.io/pypi/pyversions/arosics.svg)
See also the latest [coverage](http://danschef.gitext.gfz-potsdam.de/arosics/coverage/) and the [nosetests](http://danschef.gitext.gfz-potsdam.de/arosics/nosetests_reports/nosetests.html) HTML report.
......@@ -58,8 +61,12 @@ Using [conda](https://conda.io/docs/), the recommended approach is:
# create virtual environment for arosics, this is optional
conda create -y -q --name arosics python=3
source activate arosics
conda install -y -q -c conda-forge numpy gdal scikit-image matplotlib pyproj rasterio
conda install -y -q -c conda-forge pyfftw basemap pykrige # these libraries are optional
conda install -y -q -c conda-forge numpy gdal scikit-image matplotlib pyproj rasterio shapely geopandas
# optional libraries:
conda install -y -q -c conda-forge basemap pykrige
conda install -y -q -c conda-forge pyfftw # Linux and MacOS
conda install -y -q -c jesserobertson pyfftw # Windows
```
To install AROSICS, use the pip installer:
......@@ -83,9 +90,6 @@ PATH=$PATH:/path/to/your/installation/folder/arosics:/path/to/your/installation/
AROSICS has been tested with Python 3.4+ and Python 2.7. It should be fully compatible to all Python versions above 2.7.
# Modules
## CoReg
......@@ -330,7 +334,7 @@ CRL.correct_shifts()
Corner coordinates of image to be shifted:
[[319460.0, 5790510.0], [352270.0, 5900040.0], [409790.0, 5900040.0], [409790.0, 5790250.0], [319460.0, 5790250.0]]
Matching window position (X,Y): 372220.10753674706/5841066.947109019
Calculating geometric quality grid (1977 points) in mode 'multiprocessing'...
Calculating tie point grid (1977 points) in mode 'multiprocessing'...
progress: |==================================================| 100.0% [1977/1977] Complete 9.75 sek
Found 1144 valid GCPs.
Correcting geometric shifts...
......@@ -382,9 +386,9 @@ CRL = COREG_LOCAL(GeoArray(ref_ndarray, ref_gt, ref_prj),GeoArray(tgt_ndarray, t
CRL.correct_shifts()
```
#### visualize geometric quality grid with INITIAL shifts present in your input target image
#### visualize tie point grid with INITIAL shifts present in your input target image
Use the function COREG_LOCAL.view_CoRegPoints() to visualize the geometric quality grid with the calculated absolute lenghts of the shift vectors (the unit corresponds to the input projection - UTM in the shown example, thus the unit is 'meters'.).
Use the function COREG_LOCAL.view_CoRegPoints() to visualize the tie point grid with the calculated absolute lengths of the shift vectors (the unit corresponds to the input projection - UTM in the shown example, thus the unit is 'meters').
NOTE: a calculation of reliable shifts above cloud covered areas is not possible. In the current version of AROSICS these areas are not masked. A proper masking is planned.
......@@ -404,7 +408,7 @@ CRL.view_CoRegPoints(figsize=(15,15),backgroundIm='ref')
The output figure shows the calculated absolute lengths of the shift vectors - in this case with shifts up to ~25 meters.
#### visualize geometric quality grid with shifts present AFTER shift correction
#### visualize tie point grid with shifts present AFTER shift correction
The remaining shifts after local correction can be calculated and visualized by instantiating COREG_LOCAL with the output path of the above instance of COREG_LOCAL.
......@@ -422,7 +426,7 @@ CRL_after_corr.view_CoRegPoints(figsize=(15,15),backgroundIm='ref')
[[319460.0, 5790540.0], [352270.0, 5900030.0], [409780.0, 5900030.0], [409780.0, 5790260.0], [322970.0, 5790250.0], [319460.0, 5790280.0]]
Matching window position (X,Y): 372216.38593955856/5841068.390957352
Note: array has been downsampled to 1000 x 1000 for faster visualization.
Calculating geometric quality grid (1977 points) in mode 'multiprocessing'...
Calculating tie point grid (1977 points) in mode 'multiprocessing'...
progress: |==================================================| 100.0% [1977/1977] Complete 10.78 sek
......@@ -432,7 +436,7 @@ CRL_after_corr.view_CoRegPoints(figsize=(15,15),backgroundIm='ref')
The output figure shows a significant reduction of geometric shifts.
#### show the points table of the calculated geometric quality grid
#### show the points table of the calculated tie point grid
NOTE: Point records where no valid match has been found are filled with -9999.
......@@ -1510,11 +1514,11 @@ CRL.CoRegPoints_table
#### export geometric quality grid to an ESRI point shapefile
#### export tie point grid to an ESRI point shapefile
```python
CRL.quality_grid.to_PointShapefile(path_out='/path/to/your/output_shapefile.shp')
CRL.tiepoint_grid.to_PointShapefile(path_out='/path/to/your/output_shapefile.shp')
```
......
......@@ -29,6 +29,10 @@ Status
:target: http://danschef.gitext.gfz-potsdam.de/arosics/coverage/
.. image:: https://img.shields.io/pypi/v/arosics.svg
:target: https://pypi.python.org/pypi/arosics
.. image:: https://img.shields.io/pypi/l/arosics.svg
:target: https://gitext.gfz-potsdam.de/danschef/arosics/blob/master/LICENSE
.. image:: https://img.shields.io/pypi/pyversions/arosics.svg
:target: https://img.shields.io/pypi/pyversions/arosics.svg
See also the latest coverage_ report and the nosetests_ HTML report.
......@@ -52,8 +56,12 @@ Using conda_, the recommended approach is:
# create virtual environment for arosics, this is optional
conda create -y -q --name arosics python=3
source activate arosics
conda install -y -q -c conda-forge numpy gdal scikit-image matplotlib pyproj rasterio
conda install -y -q -c conda-forge pyfftw basemap pykrige # these libraries are optional
conda install -y -q -c conda-forge numpy gdal scikit-image matplotlib pyproj rasterio shapely geopandas
# optional libraries:
conda install -y -q -c conda-forge basemap pykrige
conda install -y -q -c conda-forge pyfftw # Linux and MacOS
conda install -y -q -c jesserobertson pyfftw # Windows
To install AROSICS, use the pip installer:
......@@ -73,7 +81,6 @@ Or clone the repository via GIT and update the PATH environment variable:
git clone https://gitext.gfz-potsdam.de/danschef/py_tools_ds.git
PATH=$PATH:/path/to/your/installation/folder/arosics:/path/to/your/installation/folder/geoarray:/path/to/your/installation/folder/py_tools_ds
Credits
-------
......
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
......@@ -2,24 +2,26 @@
"""Top-level package for arosics."""
__author__ = """Daniel Scheffler"""
__email__ = 'daniel.scheffler@gfz-potsdam.de'
__version__ = '0.4.22'
__versionalias__ = '2017-09-05_02'
import warnings
from importlib import util
from arosics.CoReg import COREG
from arosics.CoReg_local import COREG_LOCAL
from arosics.DeShifter import DESHIFTER
from arosics.Tie_Point_Grid import Tie_Point_Grid
__author__ = """Daniel Scheffler"""
__email__ = 'daniel.scheffler@gfz-potsdam.de'
__version__ = '0.8.4'
__versionalias__ = '2018-03-08_01'
__all__ = ['COREG',
'COREG_LOCAL',
'DESHIFTER',
'Tie_Point_Grid']
# check optional dependencies
try:
import pyfftw
except ImportError:
pyfftw = None
if not util.find_spec('pyfftw'):
warnings.warn('PYFFTW library is missing. However, coregistration works. But in some cases it can be much slower.')
del warnings, pyfftw
del util, warnings
# -*- coding: utf-8 -*-
__author__='Daniel Scheffler'
import warnings
import sys
......@@ -8,22 +6,12 @@ import sys
import numpy as np
from geopandas import GeoDataFrame
try:
import gdal
import osr
import ogr
except ImportError:
from osgeo import gdal
from osgeo import osr
from osgeo import ogr
# internal modules
from py_tools_ds.geo.coord_calc import calc_FullDataset_corner_positions
from py_tools_ds.geo.coord_trafo import pixelToMapYX, imYX2mapYX
from geoarray import GeoArray
__author__ = 'Daniel Scheffler'
def angle_to_north(XY):
......@@ -31,11 +19,11 @@ def angle_to_north(XY):
[origin:[0,0],pointXY:[X,Y]] in clockwise direction. Returns values between 0 and 360 degrees.
"""
XY = np.array(XY)
XYarr = XY if len(XY.shape)==2 else XY.reshape((1,2))
return np.abs(np.degrees(np.array(np.arctan2(XYarr[:,1],XYarr[:,0])-np.pi/2))%360)
XYarr = XY if len(XY.shape) == 2 else XY.reshape((1, 2))
return np.abs(np.degrees(np.array(np.arctan2(XYarr[:, 1], XYarr[:, 0]) - np.pi / 2)) % 360)
def get_true_corner_mapXY(fPath_or_geoarray, band=0, noDataVal=None, mp=1, v=0, q=0):
def get_true_corner_mapXY(fPath_or_geoarray, band=0, noDataVal=None, mp=1, v=0, q=0): # pragma: no cover
# FIXME this function is not used anymore
"""
......@@ -48,71 +36,77 @@ def get_true_corner_mapXY(fPath_or_geoarray, band=0, noDataVal=None, mp=1, v=0,
:return:
"""
warnings.warn('This function is not in use anymore. Use it on your own risk!', DeprecationWarning)
geoArr = GeoArray(fPath_or_geoarray) if not isinstance(fPath_or_geoarray,GeoArray) else fPath_or_geoarray
geoArr = GeoArray(fPath_or_geoarray) if not isinstance(fPath_or_geoarray, GeoArray) else fPath_or_geoarray
rows,cols = geoArr.shape[:2]
rows, cols = geoArr.shape[:2]
gt, prj = geoArr.geotransform, geoArr.projection
assert gt and prj, 'GeoTransform an projection must be given for calculation of LonLat corner coordinates.'
mask_1bit = np.zeros((rows,cols),dtype='uint8') # zeros -> image area later overwritten by ones
mask_1bit = np.zeros((rows, cols), dtype='uint8') # zeros -> image area later overwritten by ones
if noDataVal is None:
mask_1bit[:,:] = 1
elif noDataVal=='ambiguous':
mask_1bit[:, :] = 1
elif noDataVal == 'ambiguous':
warnings.warn("No data value could not be automatically detected. Thus the matching window used for shift "
"calculation had to be centered in the middle of the overlap area without respecting no data values. "
"To avoid this provide the correct no data values for reference and shift image via '-nodata'")
mask_1bit[:,:] = 1
"calculation had to be centered in the middle of the overlap area without respecting no data "
"values. To avoid this provide the correct no data values for reference and shift image via "
"'-nodata'")
mask_1bit[:, :] = 1
else:
band_data = geoArr[band] # TODO implement gdal_ReadAsArray_mp (reading in multiprocessing)
mask_1bit[band_data!=noDataVal] = 1
mask_1bit[band_data != noDataVal] = 1
if v: print('detected no data value',noDataVal)
if v:
print('detected no data value', noDataVal)
try:
corner_coords_YX = calc_FullDataset_corner_positions(mask_1bit, assert_four_corners=False, algorithm='shapely')
except Exception:
if v:
warnings.warn("\nCalculation of corner coordinates failed within algorithm 'shapely' (Exception: %s)."
" Using algorithm 'numpy' instead." %sys.exc_info()[1])
corner_coords_YX = calc_FullDataset_corner_positions(mask_1bit, assert_four_corners=False, algorithm='numpy') # FIXME numpy algorithm returns wrong values for S2A_OPER_MSI_L1C_TL_SGS__20160608T153121_A005024_T33UUU_B03.jp2 (Hannes)
" Using algorithm 'numpy' instead." % sys.exc_info()[1])
# FIXME numpy algorithm returns wrong values for S2A_OPER_MSI_L1C_TL_SGS__20160608T153121_A005024_T33UUU_B03.jp2
# FIXME (Hannes)
corner_coords_YX = \
calc_FullDataset_corner_positions(mask_1bit, assert_four_corners=False, algorithm='numpy')
if len(corner_coords_YX)==4: # this avoids shapely self intersection
corner_coords_YX = list(np.array(corner_coords_YX)[[0,1,3,2]]) # UL, UR, LL, LR => UL, UR, LR, LL
if len(corner_coords_YX) == 4: # this avoids shapely self intersection
corner_coords_YX = list(np.array(corner_coords_YX)[[0, 1, 3, 2]]) # UL, UR, LL, LR => UL, UR, LR, LL
# check if enough unique coordinates have been found
if not len(GeoDataFrame(corner_coords_YX).drop_duplicates().values)>=3:
if not len(GeoDataFrame(corner_coords_YX).drop_duplicates().values) >= 3:
if not q:
warnings.warn('\nThe algorithm for automatically detecting the actual image coordinates did not find '
'enough unique corners. Using outer image corner coordinates instead.')
corner_coords_YX = ((0, 0), (0, cols-1), (rows-1, 0), (rows-1, cols-1))
corner_coords_YX = ((0, 0), (0, cols - 1), (rows - 1, 0), (rows - 1, cols - 1))
# check if all points are unique
#all_coords_are_unique = len([UL, UR, LL, LR]) == len(GeoDataFrame([UL, UR, LL, LR]).drop_duplicates().values)
#UL, UR, LL, LR = (UL, UR, LL, LR) if all_coords_are_unique else ((0, 0), (0, cols-1), (rows-1, 0), (rows-1, cols-1))
# all_coords_are_unique = len([UL, UR, LL, LR]) == len(GeoDataFrame([UL, UR, LL, LR]).drop_duplicates().values)
# UL, UR, LL, LR = \
# (UL, UR, LL, LR) if all_coords_are_unique else ((0, 0), (0, cols-1), (rows-1, 0), (rows-1, cols-1))
get_mapYX = lambda YX: pixelToMapYX(list(reversed(YX)),geotransform=gt,projection=prj)[0]
def get_mapYX(YX): return pixelToMapYX(list(reversed(YX)), geotransform=gt, projection=prj)[0]
corner_pos_XY = [list(reversed(i)) for i in [get_mapYX(YX) for YX in corner_coords_YX]]
return corner_pos_XY
def get_subset_GeoTransform(gt_fullArr,subset_box_imYX):
def get_subset_GeoTransform(gt_fullArr, subset_box_imYX):
gt_subset = list(gt_fullArr[:]) # copy
gt_subset[3],gt_subset[0] = imYX2mapYX(subset_box_imYX[0],gt_fullArr)
gt_subset[3], gt_subset[0] = imYX2mapYX(subset_box_imYX[0], gt_fullArr)
return gt_subset
def get_gdalReadInputs_from_boxImYX(boxImYX):
"""Returns row_start,col_start,rows_count,cols_count and assumes boxImYX as [UL_YX,UR_YX,LR_YX,LL_YX)"""
rS, cS = boxImYX[0]
clip_sz_x = abs(boxImYX[1][1]-boxImYX[0][1]) # URx-ULx
clip_sz_y = abs(boxImYX[0][0]-boxImYX[3][0]) # ULy-LLy
return cS, rS, clip_sz_x,clip_sz_y
clip_sz_x = abs(boxImYX[1][1] - boxImYX[0][1]) # URx-ULx
clip_sz_y = abs(boxImYX[0][0] - boxImYX[3][0]) # ULy-LLy
return cS, rS, clip_sz_x, clip_sz_y
def get_GeoArrayPosition_from_boxImYX(boxImYX):
"""Returns row_start,row_end,col_start,col_end and assumes boxImYX as [UL_YX,UR_YX,LR_YX,LL_YX)"""
rS, cS = boxImYX[0] # UL
rE, cE = boxImYX[2] # LR
return rS, rE-1, cS, cE-1 # -1 because boxImYX represents outer box and includes the LR corner of LR pixel
\ No newline at end of file
return rS, rE - 1, cS, cE - 1 # -1 because boxImYX represents outer box and includes the LR corner of LR pixel
import ctypes
import multiprocessing
import os
import time
import numpy as np
import ogr
import osr
try:
import gdal
except ImportError:
from osgeo import gdal
from spectral.io import envi
# internal modules
from .utilities import get_image_tileborders, convertGdalNumpyDataType
from py_tools_ds.geo.map_info import geotransform2mapinfo
from py_tools_ds.geo.projection import EPSG2WKT
from py_tools_ds.dtypes.conversion import get_dtypeStr
def wait_if_used(path_file, lockfile, timeout=100, try_kill=0):
    """Block until *path_file* is no longer referenced by an open gdal.Dataset and *lockfile* is gone.

    Scans this module's globals for gdal.Dataset objects whose description equals
    *path_file* and waits for them (and the lockfile) to disappear.

    :param path_file:  path of the raster file that must become free
    :param lockfile:   path of a lockfile whose existence also blocks
    :param timeout:    seconds to wait before giving up
    :param try_kill:   if truthy, on timeout the blocking gdal references are set to
                       None instead of raising
    :raises TimeoutError:  if the timeout expires and try_kill is falsy
    """
    same_gdalRefs = [k for k in globals()
                     if isinstance(globals()[k], gdal.Dataset) and
                     globals()[k].GetDescription() == path_file]
    t0 = time.time()

    def update_same_gdalRefs(sRs):
        # keep only the names that still exist and are still bound to something
        return [sR for sR in sRs if sR in globals() and globals()[sR] is not None]

    while same_gdalRefs != [] or os.path.exists(lockfile):
        # BUGFIX: the timeout is now checked even while the lockfile exists; the
        # original 'continue' skipped it, turning an existing lockfile into an
        # endless busy loop.
        if time.time() - t0 > timeout:
            if try_kill:
                for sR in same_gdalRefs:
                    globals()[sR] = None
                    print('had to kill %s' % sR)
            else:
                if os.path.exists(lockfile):
                    os.remove(lockfile)
                raise TimeoutError('The file %s is permanently used by another variable.' % path_file)

        time.sleep(0.1)  # avoid a pure busy-wait while polling
        same_gdalRefs = update_same_gdalRefs(same_gdalRefs)
def write_envi(arr, outpath, gt=None, prj=None):
    """Write a numpy array to an ENVI BSQ file via spectral.io.envi.

    :param arr:      array to write (2D or 3D); only band 0 of the output is filled
    :param outpath:  target path of the ENVI image
    :param gt:       GDAL geotransform; must be given together with prj or not at all
    :param prj:      projection (WKT); must be given together with gt or not at all
    """
    # gt and prj are only meaningful as a pair
    if gt or prj:
        assert gt and prj, 'gt and prj must be provided together or left out.'
    # geocoding metadata is attached only when a geotransform is available
    meta = {'map info': geotransform2mapinfo(gt, prj), 'coordinate system string': prj} if gt else None
    # NOTE(review): this forces a single band when arr is 3-D and passes the raw 2-tuple
    # shape when arr is 2-D -- the condition looks inverted (a 2-D input would be expected
    # to receive the extra band axis); TODO confirm against spectral.io.envi.create_image.
    shape = (arr.shape[0], arr.shape[1], 1) if len(arr.shape) == 3 else arr.shape
    out = envi.create_image(outpath, metadata=meta, shape=shape, dtype=arr.dtype, interleave='bsq', ext='.bsq',
                            force=True)  # 8bit for multiple masks in one file
    out_mm = out.open_memmap(writable=True)
    # only the first band is written, regardless of the input dimensionality
    out_mm[:, :, 0] = arr
def wfa(p, c):
    """Append text *c* to file *p* ("write file append") as a best-effort operation.

    I/O failures (missing directory, permissions, ...) are silently ignored, since
    this helper is only used for opportunistic logging.

    :param p:  path of the file to append to
    :param c:  text content to append
    """
    try:
        with open(p, 'a') as of:
            of.write(c)
    except OSError:
        # BUGFIX: only swallow I/O-related errors; the original bare 'except:' also
        # hid KeyboardInterrupt/SystemExit and programming errors.
        pass
# module-level buffer shared between multiprocessing workers; allocated lazily
shared_array = None


def init_SharedArray_in_globals(dims):
    """Allocate a float64 shared-memory buffer and publish it as the module-global
    ``shared_array``, reshaped to (rows, cols).

    :param dims:  (rows, cols) of the array to allocate
    """
    global shared_array
    n_rows, n_cols = dims
    raw = multiprocessing.Array(ctypes.c_double, n_rows * n_cols)
    shared_array = np.ctypeslib.as_array(raw.get_obj()).reshape(n_rows, n_cols)
def gdal_read_subset(fPath, pos, bandNr):
    """Read a rectangular subset of one band of a raster file via GDAL.

    :param fPath:   path of the raster file
    :param pos:     ((row_start, row_end), (col_start, col_end)); end indices inclusive
    :param bandNr:  1-based GDAL band number
    :return:        numpy array with the requested window
    """
    (rS, rE), (cS, cE) = pos
    ds = gdal.Open(fPath)
    # ReadAsArray takes (xoff, yoff, xsize, ysize); +1 because rE/cE are inclusive
    data = ds.GetRasterBand(bandNr).ReadAsArray(cS, rS, cE - cS + 1, rE - rS + 1)
    ds = None  # release the GDAL dataset handle
    return data
def fill_arr(argDict, def_param=shared_array):
    # Worker callback: call 'func2call' and write its result into the tile window of the
    # module-global shared array.
    # NOTE(review): 'def_param' captures the value of 'shared_array' at function
    # DEFINITION time and is never used in the body -- presumably a leftover trick to
    # keep a reference alive in worker processes; confirm before removing.
    pos = argDict.get('pos')  # ((row_start, row_end), (col_start, col_end)), ends inclusive
    func = argDict.get('func2call')
    args = argDict.get('func_args', [])
    kwargs = argDict.get('func_kwargs', {})
    (rS, rE), (cS, cE) = pos
    # +1 because the end indices are inclusive
    shared_array[rS:rE + 1, cS:cE + 1] = func(*args, **kwargs)
def gdal_ReadAsArray_mp(fPath, bandNr, tilesize=1500):
    """Read one band of a raster file tile-wise in multiprocessing into a shared array.

    :param fPath:     path of the raster file
    :param bandNr:    1-based GDAL band number
    :param tilesize:  edge length (pixels) of the square tiles read per worker
    :return:          the filled module-global shared array (rows x cols)
    """
    # open only to query the raster dimensions, then release the handle
    ds = gdal.Open(fPath)
    rows, cols = ds.RasterYSize, ds.RasterXSize
    ds = None
    init_SharedArray_in_globals((rows, cols))
    tilepos = get_image_tileborders([tilesize, tilesize], (rows, cols))
    # one task per tile; each worker reads its window and writes into the shared array
    fill_arr_argDicts = [{'pos': pos, 'func2call': gdal_read_subset, 'func_args': (fPath, pos, bandNr)} for pos in tilepos]
    with multiprocessing.Pool() as pool:
        pool.map(fill_arr, fill_arr_argDicts)
    return shared_array
def write_shp(path_out, shapely_geom, prj=None,attrDict=None):
shapely_geom = [shapely_geom] if not isinstance(shapely_geom,list) else shapely_geom
attrDict = [attrDict] if not isinstance(attrDict,list) else attrDict
# print(len(shapely_geom))
# print(len(attrDict))
assert len(shapely_geom) == len(attrDict), "'shapely_geom' and 'attrDict' must have the same length."
assert os.path.exists(os.path.dirname(path_out)), 'Directory %s does not exist.' % os.path.dirname(path_out)
print('Writing %s ...' %path_out)
if os.path.exists(path_out): os.remove(path_out)
ds = ogr.GetDriverByName("Esri Shapefile").CreateDataSource(path_out)
if prj is not None:
prj = prj if not isinstance(prj, int) else EPSG2WKT(prj)
srs = osr.SpatialReference()
srs.ImportFromWkt(prj)
else:
srs = None
geom_type = list(set([gm.type for gm in shapely_geom]))
assert len(geom_type)==1, 'All shapely geometries must belong to the same type. Got %s.' %geom_type
layer = ds.CreateLayer('', srs, ogr.wkbPoint) if geom_type[0] == 'Point' else \
ds.CreateLayer('', srs, ogr.wkbLineString) if geom_type[0] == 'LineString' else \
ds.CreateLayer('', srs, ogr.wkbPolygon) if geom_type[0] == 'Polygon' else \