Commit 3fdbebcd authored by Daniel Scheffler

Merge branch 'master' into enhancement/rasterization

# Conflicts:
#	arosics/Tie_Point_Grid.py
parents 47f79c9d 4094e1ac
Pipeline #36343 passed in 4 minutes and 25 seconds
@@ -3,8 +3,11 @@
branch = False
concurrency = multiprocessing
parallel = True
omit = */site-packages/*,*/tests/*,*/.eggs/*
[report]
show_missing = True
# Regexes for lines to exclude from consideration
exclude_lines =
# Have to re-enable the standard pragma
......
@@ -20,16 +20,21 @@ test_arosics:
- pip install scipy # TODO remove as soon as CI runner is rebuilt
# run tests
- make nosetests
- make pytest
# create the docs
- make docs
artifacts:
expose_as: 'Test and coverage report'
paths:
- htmlcov/
- report.html
- docs/_build/html/
- nosetests.html
- nosetests.xml
reports:
cobertura: coverage.xml
junit: report.xml
expire_in: 30 days
when: always
@@ -89,7 +94,7 @@ pages: # this job must be called 'pages' to advise GitLab to upload content to
- mkdir -p public/doc
- mkdir -p public/images/
- mkdir -p public/coverage
- mkdir -p public/nosetests_reports
- mkdir -p public/test_reports
# Copy over the docs
- cp -r docs/_build/html/* public/doc/
@@ -98,14 +103,14 @@ pages: # this job must be called 'pages' to advise GitLab to upload content to
# Copy over the coverage reports
- cp -r htmlcov/* public/coverage/
# Copy over the nosetests reports
- cp nosetests.* public/nosetests_reports/
# Copy over the test reports
- cp report.html public/test_reports/
# Check if everything is working great
- ls -al public
- ls -al public/doc
- ls -al public/coverage
- ls -al public/nosetests_reports
- ls -al public/test_reports
artifacts:
paths:
- public
......
@@ -2,10 +2,26 @@
History
=======
1.7.2 (coming soon)
-------------------
1.7.4 (2021-12-15)
------------------
* Migrated test calls from nosetests to pytest and implemented new test report (!24).
* Removed folium 0.12.1 exclusion due to fix on PyPI and conda-forge.
* Fixed dead link.
1.7.3 (2021-12-02)
------------------
* Avoid folium 0.12.1 as requirement due to https://github.com/python-visualization/folium/issues/1513.
1.7.2 (2021-11-09)
------------------
* Listed dill in package requirements.
* Resolved inconsistency in documenting boolean default values.
* Improved error message when trying to compute statistics and all tie points are flagged as false-positives.
1.7.1 (2021-10-13)
......
.PHONY: clean clean-test clean-pyc clean-build docs help nosetests
.PHONY: clean clean-test clean-pyc clean-build docs help pytest
.DEFAULT_GOAL := help
define BROWSER_PYSCRIPT
import os, webbrowser, sys
@@ -49,8 +49,11 @@ clean-test: ## remove test and coverage artifacts
rm -f .coverage
rm -fr .coverage.*
rm -fr htmlcov/
rm -fr nosetests.html
rm -fr nosetests.xml
rm -fr report.html
rm -fr report.xml
rm -fr coverage.xml
rm -fr .pytest_cache
lint: ## check style with flake8
flake8 --max-line-length=120 . tests > ./tests/linting/flake8.log || \
@@ -77,12 +80,22 @@ coverage: ## check code coverage quickly with the default Python
coverage html
#$(BROWSER) htmlcov/index.html
nosetests: clean-test ## Runs nosetests with coverage, xUnit and nose-html-output
pytest: clean-test ## Runs pytest with coverage and creates coverage and test report
## - puts the coverage results in the folder 'htmlcov'
## - generates 'nosetests.html' (--with-html)
## - generates 'nosetests.xml' (--with-xunit) which is currently not visualizable by GitLab
nosetests -vv --with-coverage --cover-package=arosics --cover-erase --cover-html --cover-html-dir=htmlcov \
--with-html --with-xunit --rednose --force-color
## - generates cobertura 'coverage.xml' (needed to show coverage in GitLab MR changes)
## - generates 'report.html' based on pytest-reporter-html1
## - generates JUnit 'report.xml' to show the test report as a new tab in a GitLab MR
## NOTE: additional options for pytest and coverage (pytest-cov plugin) are defined in .pytest.ini and .coveragerc
pytest tests \
--verbosity=3 \
--color=yes \
--tb=short \
--cov=arosics \
--cov-report html:htmlcov \
--cov-report term-missing \
--cov-report xml:coverage.xml \
--template=html1/index.html --report=report.html \
--junitxml report.xml
docs: ## generate Sphinx HTML documentation, including API docs
rm -f docs/arosics.rst
......
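For reference, the same run can also be started programmatically (e.g. from an IDE) instead of via `make pytest`; a rough sketch using the same flags as the target above, assuming pytest, pytest-cov and pytest-reporter-html1 are installed:

```python
import pytest

# Mirrors the options of the `make pytest` target; pytest.main() accepts the
# CLI arguments as a list and returns the exit code of the run.
exit_code = pytest.main([
    'tests',
    '--verbosity=3',
    '--color=yes',
    '--tb=short',
    '--cov=arosics',
    '--cov-report', 'html:htmlcov',
    '--cov-report', 'term-missing',
    '--cov-report', 'xml:coverage.xml',
    '--template=html1/index.html', '--report=report.html',
    '--junitxml', 'report.xml',
])
```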
@@ -41,7 +41,7 @@ Status
.. image:: https://zenodo.org/badge/253474603.svg
:target: https://zenodo.org/badge/latestdoi/253474603
See also the latest coverage_ report and the nosetests_ HTML report.
See also the latest coverage_ report and the pytest_ HTML report.
Feature overview
----------------
@@ -100,7 +100,7 @@ Credits
-------
AROSICS was developed by Daniel Scheffler (German Research Centre of Geosciences) within the context of the
`GeoMultiSens <http://www.geomultisens.de/>`__ project funded by the German Federal Ministry of Education and Research
`GeoMultiSens <http://www.geomultisens.gfz-potsdam.de/>`__ project funded by the German Federal Ministry of Education and Research
(project grant code: 01 IS 14 010 A-C).
This package was created with Cookiecutter_ and the `audreyr/cookiecutter-pypackage`_ project template.
@@ -110,6 +110,6 @@ documentation have been provided by NASA (Landsat-8) and ESA (Sentinel-2).
.. _Cookiecutter: https://github.com/audreyr/cookiecutter
.. _`audreyr/cookiecutter-pypackage`: https://github.com/audreyr/cookiecutter-pypackage
.. _coverage: https://danschef.git-pages.gfz-potsdam.de/arosics/coverage/
.. _nosetests: https://danschef.git-pages.gfz-potsdam.de/arosics/nosetests_reports/nosetests.html
.. _pytest: https://danschef.git-pages.gfz-potsdam.de/arosics/test_reports/report.html
.. _conda: https://conda.io/docs/
@@ -276,10 +276,10 @@ class COREG(object):
:param calc_corners:
calculate true positions of the dataset corners in order to get a useful matching window position within
the actual image overlap (default: 1; deactivated if '-cor0' and '-cor1' are given)
the actual image overlap (default: True; deactivated if '-cor0' and '-cor1' are given)
:param binary_ws:
use binary X/Y dimensions for the matching window (default: 1)
use binary X/Y dimensions for the matching window (default: True)
:param mask_baddata_ref:
path to a 2D boolean mask file (or an instance of GeoArray) for the reference image where all bad data
@@ -297,7 +297,7 @@ class COREG(object):
number of CPUs to use during pixel grid equalization (default: None, which means 'all CPUs available')
:param force_quadratic_win:
force a quadratic matching window (default: 1)
force a quadratic matching window (default: True)
:param progress:
show progress bars (default: True)
......
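For context, a minimal sketch of how these boolean parameters are used, roughly following the usage example from the project documentation; the file paths are placeholders and the window position/size values are illustrative:

```python
from arosics import COREG

im_reference = '/path/to/your/ref_image.bsq'   # placeholder paths
im_target    = '/path/to/your/tgt_image.bsq'

# calc_corners, binary_ws and force_quadratic_win are booleans defaulting to
# True, as the corrected docstrings now state.
CR = COREG(im_reference, im_target,
           wp=(354223, 5805559),   # custom matching window position (X/Y map coordinates)
           ws=(256, 256),          # matching window size in pixels
           calc_corners=True,
           binary_ws=True,
           force_quadratic_win=True)
CR.calculate_spatial_shifts()
```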
@@ -233,7 +233,7 @@ class COREG_LOCAL(object):
use binary X/Y dimensions for the matching window (default: True)
:param force_quadratic_win:
force a quadratic matching window (default: 1)
force a quadratic matching window (default: True)
:param mask_baddata_ref:
path to a 2D boolean mask file (or an instance of BadDataMask) for the reference image where all bad data
......
@@ -404,6 +404,8 @@ class Tie_Point_Grid(object):
# COREG and is not raised
results = results.get()
break
pool.close() # needed to make coverage work in multiprocessing
pool.join()
else:
# declare global variables needed for self._get_spatial_shifts()
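The added close()/join() pair matters because coverage.py, configured with concurrency = multiprocessing and parallel = True (see the .coveragerc hunk above), only writes a worker's .coverage.* data file when that worker exits normally; Pool.terminate(), which a with-block triggers on exit, kills the workers before they can flush. A generic minimal sketch of the pattern (not the project's actual code):

```python
import multiprocessing


def square(x):
    return x * x


if __name__ == '__main__':
    with multiprocessing.Pool(processes=4) as pool:
        results = pool.map_async(square, range(100)).get()
        # Let the workers finish and exit normally so that coverage.py can
        # write its per-process data files before the pool is terminated.
        pool.close()
        pool.join()
    print(len(results))  # -> 100
```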
@@ -468,6 +470,9 @@ class Tie_Point_Grid(object):
tbl = self.CoRegPoints_table
tbl = tbl if include_outliers else tbl[tbl['OUTLIER'] == 0].copy() if 'OUTLIER' in tbl.columns else tbl
if not include_outliers and tbl.empty:
raise RuntimeError('Cannot compute the RMSE because all tie points are flagged as false-positives.')
shifts = np.array(tbl['ABS_SHIFT'])
shifts_sq = [i * i for i in shifts if i != self.outFillVal]
@@ -488,6 +493,9 @@ class Tie_Point_Grid(object):
tbl = self.CoRegPoints_table
tbl = tbl if include_outliers else tbl[tbl['OUTLIER'] == 0].copy()
if not include_outliers and tbl.empty:
raise RuntimeError('Cannot compute the overall SSIM because all tie points are flagged as false-positives.')
ssim_col = np.array(tbl['SSIM_AFTER' if after_correction else 'SSIM_BEFORE'])
ssim_col = [i * i for i in ssim_col if i != self.outFillVal]
@@ -578,6 +586,10 @@ class Tie_Point_Grid(object):
tbl = tbl if include_outliers else tbl[tbl['OUTLIER'] == 0].copy() if 'OUTLIER' in tbl.columns else tbl
tbl = tbl.copy().replace(self.outFillVal, np.nan)
if not include_outliers and tbl.empty:
raise RuntimeError('Cannot compute overall statistics '
'because all tie points are flagged as false-positives.')
def RMSE(shifts):
shifts_sq = shifts ** 2
return np.sqrt(sum(shifts_sq) / len(shifts_sq))
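A worked sketch of the pattern these hunks introduce: drop tie points flagged as outliers, raise if nothing usable remains, then compute the RMSE as the square root of the mean squared shift. The column names mirror the CoRegPoints_table used above; the fill value and the sample numbers are assumptions for illustration:

```python
import numpy as np
import pandas as pd

outFillVal = -9999  # assumed fill value marking invalid tie points

tbl = pd.DataFrame({'ABS_SHIFT': [0.4, 1.2, outFillVal, 0.8],
                    'OUTLIER':   [0,   0,   0,          1]})

include_outliers = False
tbl = tbl if include_outliers else tbl[tbl['OUTLIER'] == 0].copy()
if not include_outliers and tbl.empty:
    raise RuntimeError('Cannot compute the RMSE because all tie points are flagged as false-positives.')

shifts = np.array(tbl['ABS_SHIFT'])
shifts_sq = [i * i for i in shifts if i != outFillVal]
rmse = np.sqrt(sum(shifts_sq) / len(shifts_sq))  # sqrt of the mean squared shift
print(rmse)  # ~0.894 for the two valid, non-outlier shifts above
```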
@@ -1321,6 +1333,8 @@ class Tie_Point_Grid_Interpolator(object):
#
# with multiprocessing.Pool() as pool:
# data_full = pool.map(self._Kriging_mp, args_kwargs_dicts)
# pool.close() # needed to make coverage work in multiprocessing
# pool.join()
return data_full
......
@@ -24,5 +24,5 @@
# limitations under the License.
__version__ = '1.7.1'
__versionalias__ = '2021-10-13_01'
__version__ = '1.7.4'
__versionalias__ = '2021-12-15_01'
@@ -68,7 +68,7 @@ req_setup = [
req_intplot = ['holoviews', 'ipython']
req_test = ['coverage', 'nose', 'nose2', 'nose-htmloutput', 'rednose', 'urlchecker'] + req_intplot
req_test = ['pytest', 'pytest-cov', 'pytest-reporter-html1', 'urlchecker'] + req_intplot
req_doc = ['sphinx-argparse', 'sphinx_rtd_theme', 'sphinx-autodoc-typehints']
......
@@ -32,15 +32,13 @@ dependencies:
- scipy
# doc requirements
- coverage
- flake8
- nose
- nose2
- nose-htmloutput
- pycodestyle
- pydocstyle
- pylint
- rednose
- pytest
- pytest-cov
- pytest-reporter-html1
- sphinx-argparse
- sphinx-autodoc-typehints
- sphinx_rtd_theme
......
@@ -359,5 +359,5 @@ class CompleteWorkflow_INTER1_S2A_S2A(unittest.TestCase):
if __name__ == '__main__':
import nose2
nose2.main()
import pytest
pytest.main()
@@ -201,5 +201,5 @@ class CompleteWorkflow_INTER1_S2A_S2A(unittest.TestCase):
if __name__ == '__main__':
import nose2
nose2.main()
import pytest
pytest.main()
@@ -141,5 +141,5 @@ class Test_Tie_Point_Grid(unittest.TestCase):
if __name__ == '__main__':
import nose2
nose2.main()
import pytest
pytest.main()
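Note that pytest.main() without arguments falls back to the process command line, so executing such a module directly starts a normal collection run from the current directory using the project's pytest configuration. If only the executed module should run, the file can be passed explicitly; an optional variant (not what the commit does):

```python
if __name__ == '__main__':
    import sys
    import pytest
    # Restrict the run to this module and propagate pytest's exit code.
    sys.exit(pytest.main([__file__, '-v']))
```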