Commit ba16de00
authored Sep 19, 2017 by Daniel Scheffler

PEP8 editing. Added linting.

parent 82629ef7
Changes 11
Makefile (View file @ ba16de00)

@@ -52,7 +52,9 @@ clean-test: ## remove test and coverage artifacts
 	rm -fr nosetests.xml

 lint: ## check style with flake8
-	flake8 arosics tests
+	flake8 --max-line-length=120 arosics tests > ./tests/linting/flake8.log
+	pycodestyle arosics --exclude="*.ipynb,*.ipynb*" --max-line-length=120 > ./tests/linting/pycodestyle.log
+	pydocstyle arosics > ./tests/linting/pydocstyle.log

 test: ## run tests quickly with the default Python
 	python setup.py test
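The new lint target simply pipes the three checkers into log files under tests/linting/. A rough Python equivalent of its first line (assuming flake8 is installed and tests/linting/ exists; invoked here via subprocess instead of make):

import subprocess

# mirror the first line of "make lint": flake8 with a 120-character limit, report written to a log file
with open('./tests/linting/flake8.log', 'w') as log:
    subprocess.run(['flake8', '--max-line-length=120', 'arosics', 'tests'], stdout=log)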
arosics/CoReg.py (View file @ ba16de00)

@@ -336,7 +336,7 @@ class COREG(object):
             'COREG._set_outpathes() expects two file pathes (string) or two instances of the ' \
             'GeoArray class. Received %s and %s.' % (type(im_ref), type(im_tgt))

-        get_baseN = lambda path: os.path.splitext(os.path.basename(path))[0]
+        def get_baseN(path): return os.path.splitext(os.path.basename(path))[0]

         # get input pathes
         path_im_ref = im_ref.filePath if isinstance(im_ref, GeoArray) else im_ref
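For context: assigning a lambda to a name is what flake8/pycodestyle report as E731 ("do not assign a lambda expression, use a def"), which is presumably why the helpers throughout this commit are converted to small def functions. A minimal, self-contained illustration of the pattern (the file path is made up):

import os.path

# discouraged: flagged as E731 by the linters enabled in the Makefile above
get_baseN = lambda path: os.path.splitext(os.path.basename(path))[0]

# preferred: an equivalent named function
def get_baseN(path):
    return os.path.splitext(os.path.basename(path))[0]

print(get_baseN('/data/scenes/LC81930242017_B4.tif'))  # -> 'LC81930242017_B4'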
@@ -450,7 +450,8 @@ class COREG(object):
         assert self.overlap_poly, 'Please calculate the overlap polygon first.'

         try:
-            import folium, geojson
+            import folium
+            import geojson
         except ImportError:
             folium, geojson = None, None

         if not folium or not geojson:
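The hunk above also shows the optional-dependency pattern rewritten with one import per line (multiple imports on one line are flagged as E401). A small sketch of the same guard; the error message here is illustrative and not taken from the actual code:

try:
    import folium
    import geojson
except ImportError:
    folium, geojson = None, None

if not folium or not geojson:
    # hypothetical message; the real method handles the missing libraries itself
    raise ImportError('This feature requires the optional packages folium and geojson.')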
@@ -498,16 +499,21 @@ class COREG(object):
         otherWin_corr = self._get_deshifted_otherWin()
         xmin, xmax, ymin, ymax = self.matchBox.boundsMap

-        get_vmin = lambda arr: np.percentile(arr, 2)
-        get_vmax = lambda arr: np.percentile(arr, 98)
-        rescale = lambda arr: rescale_intensity(arr, in_range=(get_vmin(arr), get_vmax(arr)))
-        get_arr = lambda geoArr: rescale(np.ma.masked_equal(geoArr[:], geoArr.nodata))
-        get_hv_image = lambda geoArr: hv.Image(get_arr(geoArr), bounds=(xmin, ymin, xmax, ymax))(
-            style={'cmap': 'gray', 'vmin': get_vmin(geoArr[:]), 'vmax': get_vmax(geoArr[:]),  # does not work
-                   'interpolation': 'none'},
-            plot={'fig_inches': figsize, 'show_grid': True})
-            # plot={'fig_size':100, 'show_grid':True})
+        def get_vmin(arr): return np.percentile(arr, 2)
+
+        def get_vmax(arr): return np.percentile(arr, 98)
+
+        def rescale(arr): return rescale_intensity(arr, in_range=(get_vmin(arr), get_vmax(arr)))
+
+        def get_arr(geoArr): return rescale(np.ma.masked_equal(geoArr[:], geoArr.nodata))
+
+        def get_hv_image(geoArr):
+            return hv.Image(get_arr(geoArr), bounds=(xmin, ymin, xmax, ymax))(
+                style={'cmap': 'gray', 'vmin': get_vmin(geoArr[:]), 'vmax': get_vmax(geoArr[:]),  # does not work
+                       'interpolation': 'none'},
+                plot={'fig_inches': figsize, 'show_grid': True})
+                # plot={'fig_size':100, 'show_grid':True})

         imgs_orig = {1: get_hv_image(self.matchWin), 2: get_hv_image(self.otherWin)}
         imgs_corr = {1: get_hv_image(self.matchWin), 2: get_hv_image(otherWin_corr)}
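get_vmin/get_vmax/rescale implement a 2%/98% percentile contrast stretch before the arrays are handed to holoviews. A small stand-alone sketch of that stretch (assuming numpy and scikit-image are installed; out_range is added here for clarity and is not part of the original call):

import numpy as np
from skimage.exposure import rescale_intensity

arr = np.random.randint(0, 4000, (100, 100)).astype(float)  # stand-in for a matching window
vmin, vmax = np.percentile(arr, 2), np.percentile(arr, 98)
stretched = rescale_intensity(arr, in_range=(vmin, vmax), out_range=(0, 1))
print(stretched.min(), stretched.max())  # values below/above the 2nd/98th percentile are clipped, result scaled to 0..1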
@@ -521,7 +527,7 @@ class COREG(object):
         hmap_orig = hv.HoloMap(imgs_orig, kdims=['image'])
         hmap_corr = hv.HoloMap(imgs_corr, kdims=['image'])
-        hmap = hv.HoloMap(imgs, kdims=['image']).collate().cols(1)  # displaying this results in a too small figure
+        hv.HoloMap(imgs, kdims=['image']).collate().cols(1)  # display this results in a too small figure
         # hmap = hv.HoloMap(imgs_corr, kdims=['image']) + hv.HoloMap(imgs_corr, kdims=['image'])

         # Construct a HoloMap by defining the sampling on the Dimension
@@ -709,7 +715,7 @@ class COREG(object):
         if self.success is not False:
             # check result -> ProgrammingError if not fulfilled
-            within_equal = lambda inner, outer: inner.within(outer) or inner.equals(outer)
+            def within_equal(inner, outer): return inner.within(outer) or inner.equals(outer)

             assert within_equal(matchBox.mapPoly, otherBox.mapPoly)
             assert within_equal(otherBox.mapPoly, overlapWin.mapPoly)
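within_equal combines the shapely predicates within() and equals(). A quick check with simple boxes (assuming shapely is installed, as the surrounding code already requires):

from shapely.geometry import box

def within_equal(inner, outer):
    return inner.within(outer) or inner.equals(outer)

print(within_equal(box(1, 1, 2, 2), box(0, 0, 3, 3)))  # True: strictly inside
print(within_equal(box(0, 0, 3, 3), box(0, 0, 3, 3)))  # True: identical polygons
print(within_equal(box(0, 0, 3, 3), box(1, 1, 2, 2)))  # False: the 'inner' polygon is larger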
@@ -853,8 +859,8 @@ class COREG(object):
         # FIXME CoRegPoints_grid.WIN_SZ is taken from self.matchBox.imDimsYX but this is not updated
         center_YX = np.array(im0.shape) / 2
-        xmin, xmax, ymin, ymax = int(center_YX[1] - wsYX[1] / 2), int(center_YX[1] + wsYX[1] / 2), \
-                                 int(center_YX[0] - wsYX[0] / 2), int(center_YX[0] + wsYX[0] / 2)
+        xmin, xmax = int(center_YX[1] - wsYX[1] / 2), int(center_YX[1] + wsYX[1] / 2)
+        ymin, ymax = int(center_YX[0] - wsYX[0] / 2), int(center_YX[0] + wsYX[0] / 2)

         in_arr0 = im0[ymin:ymax, xmin:xmax].astype(precision)
         in_arr1 = im1[ymin:ymax, xmin:xmax].astype(precision)
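Worked example of the centred-window arithmetic above, with made-up sizes: for a 100 x 80 pixel image and a 32 x 32 matching window the subset becomes im0[34:66, 24:56].

import numpy as np

im0 = np.zeros((100, 80))
wsYX = (32, 32)
center_YX = np.array(im0.shape) / 2                                            # [50., 40.]
xmin, xmax = int(center_YX[1] - wsYX[1] / 2), int(center_YX[1] + wsYX[1] / 2)
ymin, ymax = int(center_YX[0] - wsYX[0] / 2), int(center_YX[0] + wsYX[0] / 2)
print(ymin, ymax, xmin, xmax)                # 34 66 24 56
print(im0[ymin:ymax, xmin:xmax].shape)       # (32, 32)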
@@ -899,7 +905,8 @@ class COREG(object):
             ifft_arr = pyfftw.FFTW(temp, np.empty_like(temp), axes=(0, 1), direction='FFTW_BACKWARD')()
         else:
             ifft_arr = np.fft.ifft2(temp)
-        if self.v: print('backward FFTW: %.2fs' % (time.time() - time0))
+        if self.v:
+            print('backward FFTW: %.2fs' % (time.time() - time0))

         cps = np.abs(ifft_arr)
         # scps = shifted cps => shift the zero-frequency component to the center of the spectrum
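The else branch is the plain numpy fallback used when pyfftw is unavailable. A minimal sketch of that path, including the fftshift step described by the comment above (temp is a random stand-in for the real cross-power spectrum):

import numpy as np

temp = np.random.rand(64, 64) + 1j * np.random.rand(64, 64)  # fake cross-power spectrum
ifft_arr = np.fft.ifft2(temp)
cps = np.abs(ifft_arr)
scps = np.fft.fftshift(cps)  # move the zero-frequency component to the center of the spectrum
print(scps.shape)            # (64, 64)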
@@ -936,8 +943,10 @@ class COREG(object):
     @staticmethod
     def _clip_image(im, center_YX, winSzYX):  # TODO this is also implemented in GeoArray
-        get_bounds = lambda YX, wsY, wsX: (int(YX[1]-(wsX/2)), int(YX[1]+(wsX/2)), int(YX[0]-(wsY/2)), int(YX[0]+(wsY/2)))
+        def get_bounds(YX, wsY, wsX):
+            return int(YX[1] - (wsX / 2)), int(YX[1] + (wsX / 2)), int(YX[0] - (wsY / 2)), int(YX[0] + (wsY / 2))
+
         wsY, wsX = winSzYX
         xmin, xmax, ymin, ymax = get_bounds(center_YX, wsY, wsX)
         return im[ymin:ymax, xmin:xmax]
@@ -1451,7 +1460,10 @@ class COREG(object):
             if not self.q:
                 print('Resampling shift image in order to align coordinate grid of shift image to reference image. ')

-            is_alignable = lambda gsd1, gsd2: max(gsd1, gsd2) % min(gsd1, gsd2) == 0  # checks if pixel sizes are divisible
+            def is_alignable(gsd1, gsd2):
+                """Check if pixel sizes are divisible"""
+                return max(gsd1, gsd2) % min(gsd1, gsd2) == 0
+
             if not is_alignable(self.ref.xgsd, xgsd) or not is_alignable(self.ref.ygsd, ygsd) and not self.q:
                 print("\nWARNING: The coordinate grids of the reference image and the image to be shifted cannot be "
                       "aligned because their pixel sizes are not exact multiples of each other (ref [X/Y]: "
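The is_alignable check is plain modulo arithmetic on the pixel sizes. Worked example with made-up ground sampling distances:

def is_alignable(gsd1, gsd2):
    """Check if pixel sizes are divisible."""
    return max(gsd1, gsd2) % min(gsd1, gsd2) == 0

print(is_alignable(10, 30))  # True:  30 is an exact multiple of 10 -> grids can be aligned
print(is_alignable(10, 25))  # False: 25 % 10 != 0 -> the warning above is printed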
arosics/CoReg_local.py (View file @ ba16de00)

@@ -379,7 +379,7 @@ class COREG_LOCAL(object):
             if exclude_fillVals else self.CoRegPoints_table.loc[:, attr2include]

         # get LonLat coordinates for all points
-        get_LonLat = lambda X, Y: transform_any_prj(self.im2shift.projection, 4326, X, Y)
+        def get_LonLat(X, Y): return transform_any_prj(self.im2shift.projection, 4326, X, Y)

         GDF['LonLat'] = list(GDF['geometry'].map(lambda geom: get_LonLat(*tuple(np.array(geom.coords.xy)[:, 0]))))

         # get colors for all points
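The lambda passed to GDF['geometry'].map() unpacks the first coordinate pair of each geometry via np.array(geom.coords.xy)[:, 0]. A tiny demonstration of that indexing with a single shapely Point (coordinates are made up):

import numpy as np
from shapely.geometry import Point

geom = Point(370000.0, 5800000.0)
X, Y = tuple(np.array(geom.coords.xy)[:, 0])
print(X, Y)  # 370000.0 5800000.0 -> what get_LonLat() receives before reprojection to EPSG:4326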
@@ -499,7 +499,7 @@ class COREG_LOCAL(object):
         # create map
         map_osm = folium.Map(location=[center_lat, center_lon])  # ,zoom_start=3)

-        import matplotlib
+        # import matplotlib

         plugins.ImageOverlay(colormap=lambda x: (1, 0, 0, x),  # TODO a colormap must be given
                              # colormap=matplotlib.cm.gray,  # does not work
arosics/DeShifter.py (View file @ ba16de00)
(This diff is collapsed.)
arosics/Tie_Point_Grid.py (View file @ ba16de00)

@@ -244,6 +244,27 @@ class Tie_Point_Grid(object):
         return [pointID] + CR_res

+    def _get_coreg_kwargs(self, pID, wp):
+        return dict(
+            pointID=pID,
+            fftw_works=self.COREG_obj.fftw_works,
+            wp=wp,
+            ws=self.COREG_obj.win_size_XY,
+            resamp_alg_calc=self.rspAlg_calc,
+            footprint_poly_ref=self.COREG_obj.ref.poly,
+            footprint_poly_tgt=self.COREG_obj.shift.poly,
+            r_b4match=self.ref.band4match + 1,  # band4match is internally saved as index, starting from 0
+            s_b4match=self.shift.band4match + 1,  # band4match is internally saved as index, starting from 0
+            max_iter=self.COREG_obj.max_iter,
+            max_shift=self.COREG_obj.max_shift,
+            nodata=(self.COREG_obj.ref.nodata, self.COREG_obj.shift.nodata),
+            force_quadratic_win=self.COREG_obj.force_quadratic_win,
+            binary_ws=self.COREG_obj.bin_ws,
+            v=False,  # otherwise this would lead to massive console output
+            q=True,  # otherwise this would lead to massive console output
+            ignore_errors=True
+        )
+
     def get_CoRegPoints_table(self):
         assert self.XY_points is not None and self.XY_mapPoints is not None
@@ -297,26 +318,7 @@ class Tie_Point_Grid(object):
         self.shift.cache_array_subset([self.COREG_obj.shift.band4match])

-        # get all variations of kwargs for coregistration
-        get_coreg_kwargs = lambda pID, wp: dict(
-            pointID=pID,
-            fftw_works=self.COREG_obj.fftw_works,
-            wp=wp,
-            ws=self.COREG_obj.win_size_XY,
-            resamp_alg_calc=self.rspAlg_calc,
-            footprint_poly_ref=self.COREG_obj.ref.poly,
-            footprint_poly_tgt=self.COREG_obj.shift.poly,
-            r_b4match=self.ref.band4match + 1,  # band4match is internally saved as index, starting from 0
-            s_b4match=self.shift.band4match + 1,  # band4match is internally saved as index, starting from 0
-            max_iter=self.COREG_obj.max_iter,
-            max_shift=self.COREG_obj.max_shift,
-            nodata=(self.COREG_obj.ref.nodata, self.COREG_obj.shift.nodata),
-            force_quadratic_win=self.COREG_obj.force_quadratic_win,
-            binary_ws=self.COREG_obj.bin_ws,
-            v=False,  # otherwise this would lead to massive console output
-            q=True,  # otherwise this would lead to massive console output
-            ignore_errors=True)
-        list_coreg_kwargs = (get_coreg_kwargs(i, self.XY_mapPoints[i]) for i in GDF.index)  # generator
+        list_coreg_kwargs = (self._get_coreg_kwargs(i, self.XY_mapPoints[i]) for i in GDF.index)  # generator

         # run co-registration for whole grid
         if self.CPUs is None or self.CPUs > 1:
@@ -861,7 +863,7 @@ class Tie_Point_Refiner(object):
         if level > 2:
             # exclude previous outliers
             ransacInGDF = self.GDF[~self.GDF[self.new_cols].any(axis=1)].copy() \
-                            if self.rs_exclude_previous_outliers else self.GDF
+                if self.rs_exclude_previous_outliers else self.GDF

             if len(ransacInGDF) > 4:
                 # running RANSAC with less than four tie points makes no sense
arosics/__init__.py (View file @ ba16de00)

@@ -13,6 +13,10 @@ __author__ = """Daniel Scheffler"""
 __email__ = 'daniel.scheffler@gfz-potsdam.de'
 __version__ = '0.4.33'
 __versionalias__ = '2017-09-13_02'

+__all__ = ['COREG',
+           'COREG_LOCAL',
+           'DESHIFTER',
+           'Tie_Point_Grid']

 # check optional dependencies
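With __all__ defined, a wildcard import of the package exposes exactly the four public classes listed above (sketch assuming arosics 0.4.33 or later is installed):

from arosics import COREG, COREG_LOCAL, DESHIFTER, Tie_Point_Grid   # explicit form
from arosics import *                                               # now limited to the names in __all__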
arosics/geometry.py (View file @ ba16de00)

 # -*- coding: utf-8 -*-
-__author__ = 'Daniel Scheffler'

 import warnings
 import sys

 # custom
 import numpy as np
 from geopandas import GeoDataFrame

 try:
     import gdal
     import osr
     import ogr
 except ImportError:
     from osgeo import gdal
     from osgeo import osr
     from osgeo import ogr

 # internal modules
 from py_tools_ds.geo.coord_calc import calc_FullDataset_corner_positions
 from py_tools_ds.geo.coord_trafo import pixelToMapYX, imYX2mapYX
 from geoarray import GeoArray

+__author__ = 'Daniel Scheffler'


 def angle_to_north(XY):
     """Calculates the angle between the lines [origin:[0,0],north:[0,1]] and
     [origin:[0,0],pointXY:[X,Y]] in clockwise direction. Returns values between 0 and 360 degrees.
     """
     XY = np.array(XY)
     XYarr = XY if len(XY.shape) == 2 else XY.reshape((1, 2))
     return np.abs(np.degrees(np.array(np.arctan2(XYarr[:, 1], XYarr[:, 0]) - np.pi / 2)) % 360)
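A quick numerical check of angle_to_north() as defined above (values follow numpy's arctan2 convention): the north vector (0, 1) gives 0 degrees, while (1, 1) evaluates to 315 degrees with this formula.

print(angle_to_north([0, 1]))  # [0.]
print(angle_to_north([1, 1]))  # [315.]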
 def get_true_corner_mapXY(fPath_or_geoarray, band=0, noDataVal=None, mp=1, v=0, q=0):

@@ -48,71 +36,77 @@ def get_true_corner_mapXY(fPath_or_geoarray, band=0, noDataVal=None, mp=1, v=0, q=0):
     :return:
     """
     warnings.warn('This function is not in use anymore. Use it on your own risk!', DeprecationWarning)

     geoArr = GeoArray(fPath_or_geoarray) if not isinstance(fPath_or_geoarray, GeoArray) else fPath_or_geoarray

     rows, cols = geoArr.shape[:2]
     gt, prj = geoArr.geotransform, geoArr.projection
     assert gt and prj, 'GeoTransform an projection must be given for calculation of LonLat corner coordinates.'

     mask_1bit = np.zeros((rows, cols), dtype='uint8')  # zeros -> image area later overwritten by ones

     if noDataVal is None:
-        mask_1bit[:,:] = 1
-    elif noDataVal == 'ambiguous':
-        warnings.warn("No data value could not be automatically detected. Thus the matching window used for shift "
-                      "calculation had to be centered in the middle of the overlap area without respecting no data values. "
-                      "To avoid this provide the correct no data values for reference and shift image via '-nodata'")
-        mask_1bit[:,:] = 1
+        mask_1bit[:, :] = 1
+    elif noDataVal == 'ambiguous':
+        warnings.warn("No data value could not be automatically detected. Thus the matching window used for shift "
+                      "calculation had to be centered in the middle of the overlap area without respecting no data "
+                      "values. To avoid this provide the correct no data values for reference and shift image via "
+                      "'-nodata'")
+        mask_1bit[:, :] = 1
     else:
         band_data = geoArr[band]  # TODO implement gdal_ReadAsArray_mp (reading in multiprocessing)
         mask_1bit[band_data != noDataVal] = 1

     if v:
         print('detected no data value', noDataVal)

     try:
         corner_coords_YX = calc_FullDataset_corner_positions(mask_1bit, assert_four_corners=False, algorithm='shapely')
     except Exception:
         if v:
             warnings.warn("\nCalculation of corner coordinates failed within algorithm 'shapely' (Exception: %s)."
                           " Using algorithm 'numpy' instead." % sys.exc_info()[1])
-        corner_coords_YX = calc_FullDataset_corner_positions(mask_1bit, assert_four_corners=False, algorithm='numpy')  # FIXME numpy algorithm returns wrong values for S2A_OPER_MSI_L1C_TL_SGS__20160608T153121_A005024_T33UUU_B03.jp2 (Hannes)
+        # FIXME numpy algorithm returns wrong values for S2A_OPER_MSI_L1C_TL_SGS__20160608T153121_A005024_T33UUU_B03.jp2
+        # FIXME (Hannes)
+        corner_coords_YX = \
+            calc_FullDataset_corner_positions(mask_1bit, assert_four_corners=False, algorithm='numpy')

     if len(corner_coords_YX) == 4:  # this avoids shapely self intersection
         corner_coords_YX = list(np.array(corner_coords_YX)[[0, 1, 3, 2]])  # UL, UR, LL, LR => UL, UR, LR, LL

     # check if enough unique coordinates have been found
     if not len(GeoDataFrame(corner_coords_YX).drop_duplicates().values) >= 3:
         if not q:
             warnings.warn('\nThe algorithm for automatically detecting the actual image coordinates did not find '
                           'enough unique corners. Using outer image corner coordinates instead.')
         corner_coords_YX = ((0, 0), (0, cols - 1), (rows - 1, 0), (rows - 1, cols - 1))

     # check if all points are unique
-    #all_coords_are_unique = len([UL, UR, LL, LR]) == len(GeoDataFrame([UL, UR, LL, LR]).drop_duplicates().values)
-    #UL, UR, LL, LR = (UL, UR, LL, LR) if all_coords_are_unique else ((0, 0), (0, cols-1), (rows-1, 0), (rows-1, cols-1))
+    # all_coords_are_unique = len([UL, UR, LL, LR]) == len(GeoDataFrame([UL, UR, LL, LR]).drop_duplicates().values)
+    # UL, UR, LL, LR = \
+    #     (UL, UR, LL, LR) if all_coords_are_unique else ((0, 0), (0, cols-1), (rows-1, 0), (rows-1, cols-1))

-    get_mapYX = lambda YX: pixelToMapYX(list(reversed(YX)), geotransform=gt, projection=prj)[0]
+    def get_mapYX(YX):
+        return pixelToMapYX(list(reversed(YX)), geotransform=gt, projection=prj)[0]

     corner_pos_XY = [list(reversed(i)) for i in [get_mapYX(YX) for YX in corner_coords_YX]]
     return corner_pos_XY
 def get_subset_GeoTransform(gt_fullArr, subset_box_imYX):
     gt_subset = list(gt_fullArr[:])  # copy
     gt_subset[3], gt_subset[0] = imYX2mapYX(subset_box_imYX[0], gt_fullArr)
     return gt_subset
 def get_gdalReadInputs_from_boxImYX(boxImYX):
     """Returns row_start,col_start,rows_count,cols_count and assumes boxImYX as [UL_YX,UR_YX,LR_YX,LL_YX)"""
     rS, cS = boxImYX[0]
     clip_sz_x = abs(boxImYX[1][1] - boxImYX[0][1])  # URx-ULx
     clip_sz_y = abs(boxImYX[0][0] - boxImYX[3][0])  # ULy-LLy
     return cS, rS, clip_sz_x, clip_sz_y
 def get_GeoArrayPosition_from_boxImYX(boxImYX):
     """Returns row_start,row_end,col_start,col_end and assumes boxImYX as [UL_YX,UR_YX,LR_YX,LL_YX)"""
     rS, cS = boxImYX[0]  # UL
     rE, cE = boxImYX[2]  # LR
     return rS, rE - 1, cS, cE - 1  # -1 because boxImYX represents outer box and includes the LR corner of LR pixel
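Worked example (made-up pixel box) for the two helpers above, assuming they are in scope, with a window whose outer corners are UL=(10, 20), UR=(10, 52), LR=(42, 52), LL=(42, 20):

boxImYX = [(10, 20), (10, 52), (42, 52), (42, 20)]

print(get_gdalReadInputs_from_boxImYX(boxImYX))    # (20, 10, 32, 32) -> col_start, row_start, cols_count, rows_count
print(get_GeoArrayPosition_from_boxImYX(boxImYX))  # (10, 41, 20, 51) -> row_start, row_end, col_start, col_end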
arosics/io.py (View file @ ba16de00)

@@ -21,94 +21,109 @@ from py_tools_ds.geo.projection import EPSG2WKT
 from py_tools_ds.dtypes.conversion import get_dtypeStr


 def wait_if_used(path_file, lockfile, timeout=100, try_kill=0):
     globs = globals()
     same_gdalRefs = [k for k, v in globs.items() if isinstance(globs[k], gdal.Dataset) and
                      globs[k].GetDescription() == path_file]
     t0 = time.time()

-    update_same_gdalRefs = lambda sRs: [sR for sR in sRs if sR in globals() and globals()[sR] is not None]
+    def update_same_gdalRefs(sRs):
+        return [sR for sR in sRs if sR in globals() and globals()[sR] is not None]

     while same_gdalRefs != [] or os.path.exists(lockfile):
         if os.path.exists(lockfile):
             continue

         if time.time() - t0 > timeout:
             if try_kill:
                 for sR in same_gdalRefs:
                     globals()[sR] = None
                     print('had to kill %s' % sR)
             else:
                 if os.path.exists(lockfile):
                     os.remove(lockfile)
                 raise TimeoutError('The file %s is permanently used by another variable.' % path_file)

         same_gdalRefs = update_same_gdalRefs(same_gdalRefs)
 def write_envi(arr, outpath, gt=None, prj=None):
     if gt or prj:
         assert gt and prj, 'gt and prj must be provided together or left out.'
     meta = {'map info': geotransform2mapinfo(gt, prj), 'coordinate system string': prj} if gt else None
     shape = (arr.shape[0], arr.shape[1], 1) if len(arr.shape) == 3 else arr.shape
     out = envi.create_image(outpath, metadata=meta, shape=shape, dtype=arr.dtype, interleave='bsq', ext='.bsq',
                             force=True)  # 8bit for multiple masks in one file
     out_mm = out.open_memmap(writable=True)
-    out_mm[:,:,0] = arr
+    out_mm[:, :, 0] = arr
 def wfa(p, c):
     try:
         with open(p, 'a') as of:
             of.write(c)
     except:
         pass
 shared_array = None


 def init_SharedArray_in_globals(dims):
     rows, cols = dims
     global shared_array
     shared_array_base = multiprocessing.Array(ctypes.c_double, rows * cols)
     shared_array = np.ctypeslib.as_array(shared_array_base.get_obj())
     shared_array = shared_array.reshape(rows, cols)
 def gdal_read_subset(fPath, pos, bandNr):
-    (rS, rE),(cS, cE) = pos
+    (rS, rE), (cS, cE) = pos
     ds = gdal.Open(fPath)
     data = ds.GetRasterBand(bandNr).ReadAsArray(cS, rS, cE - cS + 1, rE - rS + 1)
-    ds = None
+    del ds
     return data
fill_arr
(
argDict
,
def_param
=
shared_array
):
pos
=
argDict
.
get
(
'pos'
)
func
=
argDict
.
get
(
'func2call'
)
args
=
argDict
.
get
(
'func_args'
,[])
kwargs
=
argDict
.
get
(
'func_kwargs'
,{})
pos
=
argDict
.
get
(
'pos'
)
func
=
argDict
.
get
(
'func2call'
)
args
=
argDict
.
get
(
'func_args'
,
[])
kwargs
=
argDict
.
get
(
'func_kwargs'
,
{})
(
rS
,
rE
),(
cS
,
cE
)
=
pos
shared_array
[
rS
:
rE
+
1
,
cS
:
cE
+
1
]
=
func
(
*
args
,
**
kwargs
)
(
rS
,
rE
),
(
cS
,
cE
)
=
pos
shared_array
[
rS
:
rE
+
1
,
cS
:
cE
+
1
]
=
func
(
*
args
,
**
kwargs
)
 def gdal_ReadAsArray_mp(fPath, bandNr, tilesize=1500):
     ds = gdal.Open(fPath)
     rows, cols = ds.RasterYSize, ds.RasterXSize
-    ds = None
+    del ds

     init_SharedArray_in_globals((rows, cols))

-    tilepos = get_image_tileborders([tilesize, tilesize],(rows, cols))
-    fill_arr_argDicts = [{'pos': pos, 'func2call': gdal_read_subset, 'func_args':(fPath, pos, bandNr)} for pos in tilepos]
+    tilepos = get_image_tileborders([tilesize, tilesize], (rows, cols))
+    fill_arr_argDicts = [{'pos': pos, 'func2call': gdal_read_subset, 'func_args': (fPath, pos, bandNr)} for pos in tilepos]

     with multiprocessing.Pool() as pool:
         pool.map(fill_arr, fill_arr_argDicts)

     return shared_array
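gdal_ReadAsArray_mp() reads one raster band tile-wise into the module-level shared array using a multiprocessing pool. Hypothetical usage (the file path and band number are placeholders; requires GDAL and an actual raster on disk):

band_arr = gdal_ReadAsArray_mp('/path/to/image.bsq', bandNr=1, tilesize=1500)
print(band_arr.shape)  # (RasterYSize, RasterXSize) of the opened dataset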
-def write_shp(path_out, shapely_geom, prj=None,attrDict=None):
+def write_shp(path_out, shapely_geom, prj=None, attrDict=None):
     shapely_geom = [shapely_geom] if not isinstance(shapely_geom, list) else shapely_geom
     attrDict = [attrDict] if not isinstance(attrDict, list) else attrDict
     # print(len(shapely_geom))
     # print(len(attrDict))
     assert len(shapely_geom) == len(attrDict), "'shapely_geom' and 'attrDict' must have the same length."
     assert os.path.exists(os.path.dirname(path_out)), 'Directory %s does not exist.' % os.path.dirname(path_out)

     print('Writing %s ...' % path_out)
     if os.path.exists(path_out):
         os.remove(path_out)

     ds = ogr.GetDriverByName("Esri Shapefile").CreateDataSource(path_out)

     if prj is not None:
@@ -119,19 +134,22 @@ def write_shp(path_out, shapely_geom, prj=None,attrDict=None):
         srs = None

     geom_type = list(set([gm.type for gm in shapely_geom]))
     assert len(geom_type) == 1, 'All shapely geometries must belong to the same type. Got %s.' % geom_type

-    layer = ds.CreateLayer('', srs, ogr.wkbPoint) if geom_type[0] == 'Point' else \
-            ds.CreateLayer('', srs, ogr.wkbLineString) if geom_type[0] == 'LineString' else \
-            ds.CreateLayer('', srs, ogr.wkbPolygon) if geom_type[0] == 'Polygon' else None  # FIXME
+    layer = \
+        ds.CreateLayer('', srs, ogr.wkbPoint) if geom_type[0] == 'Point' else \
+        ds.CreateLayer('', srs, ogr.wkbLineString) if geom_type[0] == 'LineString' else \
+        ds.CreateLayer('', srs, ogr.wkbPolygon) if geom_type[0] == 'Polygon' else None  # FIXME

     if isinstance(attrDict[0], dict):
         for attr in attrDict[0].keys():
             assert len(attr) <= 10, "ogr does not support fieldnames longer than 10 digits. '%s' is too long" % attr
             DTypeStr = get_dtypeStr(attrDict[0][attr])
-            FieldType = ogr.OFTInteger if DTypeStr.startswith('int') else ogr.OFTReal if DTypeStr.startswith('float') else \
-                        ogr.OFTString if DTypeStr.startswith('str') else ogr.OFTDateTime if DTypeStr.startswith('date') else None
+            FieldType = \
+                ogr.OFTInteger if DTypeStr.startswith('int') else \
+                ogr.OFTReal if DTypeStr.startswith('float') else \
+                ogr.OFTString if DTypeStr.startswith('str') else \
+                ogr.OFTDateTime if DTypeStr.startswith('date') else None
             FieldDefn = ogr.FieldDefn(attr, FieldType)
             if DTypeStr.startswith('float'):
                 FieldDefn.SetPrecision(6)
@@ -140,39 +158,41 @@ def write_shp(path_out, shapely_geom, prj=None,attrDict=None):
     for i in range(len(shapely_geom)):
         # Create a new feature (attribute and geometry)
         feat = ogr.Feature(layer.GetLayerDefn())
         feat.SetGeometry(ogr.CreateGeometryFromWkb(shapely_geom[i].wkb))  # Make a geometry, from Shapely object

         list_attr2set = attrDict[0].keys() if isinstance(attrDict[0], dict) else []

         for attr in list_attr2set:
             val = attrDict[i][attr]
             DTypeStr = get_dtypeStr(val)
             val = int(val) if DTypeStr.startswith('int') else float(val) if DTypeStr.startswith('float') else \
                 str(val) if DTypeStr.startswith('str') else val
             feat.SetField(attr, val)

         layer.CreateFeature(feat)
         feat.Destroy()  # Save
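Hypothetical call of write_shp() as defined above, writing a single point with one attribute (the output path, coordinates and attribute value are placeholders; requires GDAL/OGR and shapely):

from shapely.geometry import Point

write_shp('/tmp/tie_point.shp', Point(370000.0, 5800000.0), prj=None, attrDict={'id': 1})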