diff --git a/.coveragerc b/.coveragerc
index 8b4f3f55373bd54131b9bf1d7df73f55d974d522..40105a615c7b3c030f6db777603836d46bcf1c35 100644
--- a/.coveragerc
+++ b/.coveragerc
@@ -3,8 +3,11 @@
 branch = False
 concurrency = multiprocessing
 parallel = True
+omit = */site-packages/*,*/tests/*,*/.eggs/*
 
 [report]
+show_missing = True
+
 # Regexes for lines to exclude from consideration
 exclude_lines =
     # Have to re-enable the standard pragma
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index a61f1c03095c3987bf60988647c5f43f4883b7c8..466dfad26224a5484762b729183666a7041e6f8c 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -36,19 +36,25 @@ test_gms_preprocessing:
 
     - pip check
 
-    # run nosetests
-    - make nosetests
+    # run tests
+    - make pytest
 
     # create the docs
-    - pip install -U sphinx_rtd_theme # Read-the-docs theme for SPHINX documentation
-    - pip install -U sphinx-autodoc-typehints
     - make docs
+
   artifacts:
+    expose_as: 'Test and coverage report'
     paths:
     - htmlcov/
+    - report.html
     - docs/_build/html/
-    - nosetests.html
-    - nosetests.xml
+    reports:
+      coverage_report:
+        coverage_format: cobertura
+        path: coverage.xml
+      junit: report.xml
+
+    expire_in: 30 days
     when: always
 
 
@@ -115,7 +121,7 @@ pages: # this job must be called 'pages' to advise GitLab to upload content to
     - mkdir public
     - mkdir -p public/doc
     - mkdir -p public/coverage
-    - mkdir -p public/nosetests_reports
+    - mkdir -p public/test_reports
 
     # Copy over the docs
     - cp -r docs/_build/html/* public/doc/
@@ -123,14 +129,14 @@ pages: # this job must be called 'pages' to advise GitLab to upload content to
     # Copy over the coverage reports
     - cp -r htmlcov/* public/coverage/
 
-    # Copy over the nosetests reports
-    - cp nosetests.* public/nosetests_reports/
+    # Copy over the test reports
+    - cp report.html public/test_reports/
 
     # Check if everything is working great
     - ls -al public
     - ls -al public/doc
     - ls -al public/coverage
-    - ls -al public/nosetests_reports
+    - ls -al public/test_reports
   artifacts:
     paths:
     - public
diff --git a/HISTORY.rst b/HISTORY.rst
index 70dddc40b6b605a847087fa9ac962019a6af3fc8..491d306f48b57e9178c26c8a9b9f27cff46fecf5 100644
--- a/HISTORY.rst
+++ b/HISTORY.rst
@@ -2,6 +2,12 @@
 History
 =======
 
+0.19.6 (coming soon)
+--------------------
+
+* Migrated test calls from nosetests to pytest and implemented new test report (!13).
+
+
 0.19.5 (2021-11-19)
 -------------------
 
diff --git a/Makefile b/Makefile
index 0e799ec5ac2a2ed4d2f6b044c03020f58736717b..f03c3ed84a4ef8e8dec42ae3d1a6abd50b17787a 100644
--- a/Makefile
+++ b/Makefile
@@ -1,4 +1,4 @@
-.PHONY: clean clean-test clean-pyc clean-build docs help nosetests
+.PHONY: clean clean-test clean-pyc clean-build docs help pytest
 .DEFAULT_GOAL := help
 define BROWSER_PYSCRIPT
 import os, webbrowser, sys
@@ -48,8 +48,10 @@ clean-test: ## remove test and coverage artifacts
 	rm -f .coverage
 	rm -fr .coverage.*
 	rm -fr htmlcov/
-	rm -fr nosetests.html
-	rm -fr nosetests.xml
+	rm -fr report.html
+	rm -fr report.xml
+	rm -fr coverage.xml
+	rm -fr .pytest_cache
 
 lint: ## check style with flake8
 	flake8 --max-line-length=120 gms_preprocessing tests > ./tests/linting/flake8.log || \
@@ -78,12 +80,22 @@ coverage: clean-test ## check code coverage quickly with the default Python
 	coverage html
 	# $(BROWSER) htmlcov/index.html
 
-nosetests: clean-test ## Runs nosetests with coverage, xUnit and nose-html-output
+pytest: clean-test ## Runs pytest with coverage and creates coverage and test report
 	## - puts the coverage results in the folder 'htmlcov'
-	## - generates 'nosetests.html' (--with-html)
-	## - generates 'nosetests.xml' (--with-xunit) which is currently not visualizable by GitLab
-	nosetests -vv --with-coverage --cover-package=gms_preprocessing --cover-package=bin --cover-erase --cover-html \
-	    --cover-html-dir=htmlcov --with-html --with-xunit --rednose --force-color
+	## - generates cobertura 'coverage.xml' (needed to show coverage in GitLab MR changes)
+	## - generates 'report.html' based on pytest-reporter-html1
+	## - generates JUnit 'report.xml' to show the test report as a new tab in a GitLab MR
+	## NOTE: additional options pytest and coverage (plugin pytest-cov) are defined in .pytest.ini and .coveragerc
+	pytest tests \
+	    --verbosity=3 \
+	    --color=yes \
+	    --tb=short \
+	    --cov=gms_preprocessing \
+	    --cov-report html:htmlcov \
+	    --cov-report term-missing \
+	    --cov-report xml:coverage.xml \
+	    --template=html1/index.html --report=report.html \
+	    --junitxml report.xml
 
 docs: ## generate Sphinx HTML documentation, including API docs
 	rm -f docs/gms_preprocessing.rst
diff --git a/gms_preprocessing/algorithms/L1C_P.py b/gms_preprocessing/algorithms/L1C_P.py
index b6ce1a897608d432805cdeade730c64bf41bf67a..2e192b3fd5013631988bcd3813aa1507a0a47f1b 100644
--- a/gms_preprocessing/algorithms/L1C_P.py
+++ b/gms_preprocessing/algorithms/L1C_P.py
@@ -819,7 +819,7 @@ class AtmCorr(object):
                     max_step=120,  # default
                     ecmwf_variables=default_products,
                     processes=0,  # singleprocessing
-                    force=False)  # dont force download if files already exist
+                    force=False)  # don't force download if files already exist
             t1 = time()
             self.logger.info("Runtime: %.2f" % (t1 - t0))
             for result in results:
diff --git a/gms_preprocessing/algorithms/geoprocessing.py b/gms_preprocessing/algorithms/geoprocessing.py
index 8d2b33ae824a6c82ef314ab5a490415662af67cd..905213a0129d09d92991e651885eb8b8c5d16f5a 100644
--- a/gms_preprocessing/algorithms/geoprocessing.py
+++ b/gms_preprocessing/algorithms/geoprocessing.py
@@ -368,6 +368,8 @@ class GEOPROCESSING(object):
         # import multiprocessing
         # with multiprocessing.Pool() as pool:
         #     results = pool.map(warp_mp,args)
+        #     pool.close()
+        #     pool.join()
         # print('warping time', time.time() - t0)
 
         # from spectral.io import envi
diff --git a/gms_preprocessing/misc/helper_functions.py b/gms_preprocessing/misc/helper_functions.py
index 65377c82ec5ae052509f79146db395ff7d545d7f..29fb0051d71efec5ceca88a7646a68a94d74fa6b 100644
--- a/gms_preprocessing/misc/helper_functions.py
+++ b/gms_preprocessing/misc/helper_functions.py
@@ -214,7 +214,7 @@ class mp_SharedNdarray(object):
 def mp_initializer(globals, globs):
     """
     globs shall be dict with name:value pairs, when executed value will be added to
-    globals under the name name, if value provides a _init attribute this one is
+    globals under the same name, if value provides a _init attribute this one is
     called instead. This makes most sense when called as initializer in a
     multiprocessing pool, e.g.:
 
diff --git a/gms_preprocessing/processing/multiproc.py b/gms_preprocessing/processing/multiproc.py
index 6b5490b6001d89fd00e6f8e96ce3e6f4bae6ed12..175b8f07a8e0537dc31a7bfa31771af9685618e3 100644
--- a/gms_preprocessing/processing/multiproc.py
+++ b/gms_preprocessing/processing/multiproc.py
@@ -52,6 +52,8 @@ def MAP(func, args, CPUs=None, flatten_output=False):
     if CPUs and CPUs > 1 and len(args) > 1:
         with Pool(CPUs) as pool:
             results = pool.map(func, args)  # always returns a list
+            pool.close()
+            pool.join()
     else:
         results = [func(argset) for argset in args]  # generator does not always work properly here
 
@@ -85,6 +87,8 @@ def imap_unordered(func, args, CPUs=None, flatten_output=False):
     if CPUs and CPUs > 1 and len(args) > 1:
         with Pool(CPUs) as pool:
             results = list(pool.imap_unordered(func, args))  # returns an iterator
+            pool.close()
+            pool.join()
     else:
         results = [func(argset) for argset in args]  # generator does not always work properly here
 
diff --git a/setup.py b/setup.py
index 338022f14870e53ffdae7bc1265d0753918989c5..d5cecce30b3abd435ab79c62f641278e6b09c10e 100644
--- a/setup.py
+++ b/setup.py
@@ -82,7 +82,7 @@ req = [
 
 req_setup = ['setuptools-git']  # needed for package_data version controlled by GIT
 
-req_test = ['coverage', 'nose', 'nose2', 'nose-htmloutput', 'rednose', 'urlchecker']
+req_test = ['pytest', 'pytest-cov', 'pytest-reporter-html1', 'urlchecker']
 
 req_doc = ['sphinx-autodoc-typehint', 'sphinx-argparse', 'sphinx_rtd_theme']
 
diff --git a/tests/CI_docker/context/environment_gms_preprocessing.yml b/tests/CI_docker/context/environment_gms_preprocessing.yml
index 06e23712e7b57a4a30394f9a93d99fe27e465559..87b4b160312bfe11d3d5daa508536001d4023ed9 100644
--- a/tests/CI_docker/context/environment_gms_preprocessing.yml
+++ b/tests/CI_docker/context/environment_gms_preprocessing.yml
@@ -51,15 +51,13 @@ dependencies:
   - tqdm
 
   # test and docs requirements
-  - coverage
   - flake8
   - pydocstyle
   - pylint
-  - nose
-  - nose2
-  - nose-htmloutput
-  - rednose
+  - pytest
+  - pytest-cov
+  - pytest-reporter-html1
   - sphinx-autodoc-typehints
   - sphinx-argparse
   - sphinx_rtd_theme
-  - urlchecker
+  - urlchecker!=0.0.33 # https://github.com/urlstechie/urlchecker-python/issues/84
diff --git a/tests/test_cli.py b/tests/test_cli.py
index 30de90ef3072e739e35296e4ed2607c711c2bb64..bd56f2dffd1284a5842005f0de1a3b66bbaa5728 100644
--- a/tests/test_cli.py
+++ b/tests/test_cli.py
@@ -129,3 +129,8 @@ class Test_run_filenames(Base_CLITester.Base_CLITestCase):
     def setUp(self):
         super().setUp()
         self.baseargs = ['filenames', 'LC08_L1TP_193029_20170821_20170911_01_T1.tar.gz']
+
+
+if __name__ == '__main__':
+    import pytest
+    pytest.main()
diff --git a/tests/test_config.py b/tests/test_config.py
index 0d38bad8044fe2e542f089608895bba3c93513fa..6bc0a95b2c8e853f7a869f4cfc247a7d6bdca6fc 100644
--- a/tests/test_config.py
+++ b/tests/test_config.py
@@ -129,3 +129,8 @@ class Test_JobConfig(TestCase):
 
         # check validity
         GMSValidator(allow_unknown=True, schema=gms_schema_config_output).validate(params)
+
+
+if __name__ == '__main__':
+    import pytest
+    pytest.main()
diff --git a/tests/test_exception_handler.py b/tests/test_exception_handler.py
index 196cd4761058f64bcc568ebb35b2dd4fdcfc6f55..c3e1488c7f594407f3f3cc80079c4704d69802ab 100644
--- a/tests/test_exception_handler.py
+++ b/tests/test_exception_handler.py
@@ -213,3 +213,8 @@ class Test_ExceptionHandler_Subsystems(BaseTest_ExceptionHandler.Test_ExceptionH
         # check progress stats: must remain unchanged
         prog_stats_after = self.get_current_progress_stats()
         self.assertEqual(prog_stats_before, prog_stats_after)
+
+
+if __name__ == '__main__':
+    import pytest
+    pytest.main()
diff --git a/tests/test_fmask_runner.py b/tests/test_fmask_runner.py
index 06f2cdde3639de0d822cc4424fd9d6e3b7ac657e..0f62112f9595b606062376203c442aa5d6bb074e 100644
--- a/tests/test_fmask_runner.py
+++ b/tests/test_fmask_runner.py
@@ -122,3 +122,8 @@ class Test_FMASK_Runner_Sentinel2(unittest.TestCase):
         # os.environ['RIOS_DFLT_DRIVER'] = 'VRT'
         FMR = FMASK_Runner_Sentinel2(testdata['Sentinel2A_new_style_data'], 'Sentinel-2A')
         self.assertIsInstance(FMR.calc_cloudMask(), GeoArray)
+
+
+if __name__ == '__main__':
+    import pytest
+    pytest.main()
diff --git a/tests/test_gms_preprocessing.py b/tests/test_gms_preprocessing.py
index 34784e1e680eed3173f1f48543a2c4f059395c93..7c7d55281dfe33920df2d0b8fb0f544a102bd9cb 100644
--- a/tests/test_gms_preprocessing.py
+++ b/tests/test_gms_preprocessing.py
@@ -736,3 +736,8 @@ if __name__ == '__main__':
         # Delete the handlers added to the "log_Test"-logger to ensure that no message is output twice in a row, when
         # the logger is used again.
         logger.handlers = []
+
+
+if __name__ == '__main__':
+    import pytest
+    pytest.main()
diff --git a/tests/test_input_reader.py b/tests/test_input_reader.py
index 577472c8fb9572cb71c32711a2eb5355b43fa71b..c0856b6d33184a3cf09b89da1ae23cb2fa3af1e0 100644
--- a/tests/test_input_reader.py
+++ b/tests/test_input_reader.py
@@ -93,3 +93,8 @@ class Test_DEM_Creator(unittest.TestCase):
         except ConnectionRefusedError:
             warnings.warn("test_index_mediator_query_equals_pgSQL_query() could not been run because "
                           "SpatialIndexMediator refused the connection.")
+
+
+if __name__ == '__main__':
+    import pytest
+    pytest.main()
diff --git a/tests/test_locks.py b/tests/test_locks.py
index 12cb2430273218b5dc6fe84088d72b43ec07e902..5525a83a0e08ef7c54cf92598e890a37d1e93e7f 100644
--- a/tests/test_locks.py
+++ b/tests/test_locks.py
@@ -101,3 +101,8 @@ class Test_MemoryReserver(unittest.TestCase):
     def test_with_statement(self):
         with MemoryReserver(mem2lock_gb=20) as lock:
             self.assertNotEqual(lock, None)
+
+
+if __name__ == '__main__':
+    import pytest
+    pytest.main()