before_script:
  - git lfs pull

# These variables are placeholders; the actual (secret) values are provided via the
# project's CI/CD Variables configuration in GitLab.
variables:
  ANACONDA_USER: SECURE
  ANACONDA_PASSWORD: SECURE
  ANACONDA_TOKEN: SECURE

stages:
  - test
  - deploy_pages
  - deploy_pypi
  - deploy_conda
  - cleanup

test_py_tools_ds:
  stage: test
  script:
    - source /root/miniconda3/bin/activate ci_env
    - make nosetests
    - make docs
  artifacts:
    paths:
      - htmlcov/
      - docs/_build/html/
      - nosetests.html
      - nosetests.xml
    when: always

test_styles:
  stage: test
  script:
    - source /root/miniconda3/bin/activate ci_env
    - make lint
  artifacts:
    paths:
      - tests/linting/flake8.log
      - tests/linting/pycodestyle.log
      - tests/linting/pydocstyle.log
    when: always

test_py_tools_ds_install:
  stage: test
  script:
    - source /root/miniconda3/bin/activate
    - conda create -y -q -c conda-forge --name py_tools_ds_testinstall python=3
    - source activate py_tools_ds_testinstall
    # update conda and python
    - conda update -n base -c conda-forge conda
#    - conda update -c conda-forge python
    # avoid package incompatibilities caused by using the wrong channels
#    - conda config --set channel_priority strict  # otherwise gdal or libgdal may be installed from the defaults channel
    # resolve some requirements with conda
    - conda install --yes -q -c conda-forge numpy gdal 'pyproj>=2.1.0' shapely geopandas
    # run the installer
    - python setup.py install
    # test if it is importable
    - cd ..
    - pwd
    - ls
    - python -c "import py_tools_ds; print(py_tools_ds)"
    - python -c "from py_tools_ds.geo.vector.geometry import boxObj"
  only:
    - master

pages:  # this job must be called 'pages' so that GitLab publishes its content to GitLab Pages
  stage: deploy_pages
  dependencies:
    - test_py_tools_ds
  script:
    # Create the public directory
    - rm -rf public
    - mkdir public
    - mkdir -p public/doc
    - mkdir -p public/coverage
    - mkdir -p public/nosetests_reports
    # Copy over the docs
    - cp -r docs/_build/html/* public/doc/
    # Copy over the coverage reports
    - cp -r htmlcov/* public/coverage/
    # Copy over the nosetests reports
    - cp nosetests.* public/nosetests_reports/
    # Check that everything was copied correctly
    - ls -al public
    - ls -al public/doc
    - ls -al public/coverage
    - ls -al public/nosetests_reports
  artifacts:
    paths:
      - public
    expire_in: 30 days
  only:
    - master

deploy_pypi:
  stage: deploy_pypi
  dependencies:
    - test_py_tools_ds
  script:
    - source /root/miniconda3/bin/activate ci_env
    - pip install -U twine
    - python setup.py sdist
    - twine upload dist/*  # requires credentials provided as environment variables
  only:
    - /^v\d+\.\d+\.\d+([abc]\d*)?$/  # PEP-440 compliant version tags
  except:
    - dev

deploy_anaconda:
  stage: deploy_conda
  dependencies:
    - test_py_tools_ds
  script:
    - source /root/miniconda3/bin/activate ci_env
    - conda install -y -q conda-build conda-build-all anaconda-client
    - yum install -y patch  # https://stackoverflow.com/questions/40392161/building-conda-skeleton-from-pypi-package-throws-error
#    - conda skeleton pypi py_tools_ds --output-dir public/conda.recipe/py_tools_ds  # create recipe
    - conda config --set anaconda_upload yes  # enable automatic upload to Anaconda.org
#    - anaconda login --username ${ANACONDA_USER} --password ${ANACONDA_PASSWORD}
    - conda build -c conda-forge -c defaults --token ${ANACONDA_TOKEN} conda_recipe/meta.yaml  # --output returns the package name
#    - conda convert --platform all /home/jsmith/miniconda/conda-bld/linux-64/pyinstrument-0.13.1-py27_0.tar.bz2 -o outputdir/
    - anaconda logout
  only:
#    - feature/conda_deployment
    - /^v\d+\.\d+\.\d+([abc]\d*)?$/  # PEP-440 compliant version tags
  except:
    - dev
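
# Illustrative sketch only (assumption, not part of this CI configuration): the deploy_anaconda
# job above builds from conda_recipe/meta.yaml, which is kept elsewhere in the repository.
# Assuming the standard conda-build recipe format, a minimal recipe consumed by that
# 'conda build' call could look roughly like the commented example below; version handling
# and the exact dependency pins in the real recipe may differ.
#
#   package:
#     name: py_tools_ds
#     version: "0.0.0"           # placeholder; the real recipe presumably injects the release version
#
#   source:
#     path: ..                   # build from the local repository checkout
#
#   build:
#     script: python setup.py install --single-version-externally-managed --record=record.txt
#
#   requirements:
#     host:
#       - python
#       - setuptools
#     run:                       # mirrors the conda packages installed in test_py_tools_ds_install
#       - python
#       - numpy
#       - gdal
#       - pyproj >=2.1.0
#       - shapely
#       - geopandas
#
#   test:
#     imports:
#       - py_tools_ds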