Commit d2a87c9a authored by Romulo Pereira Goncalves's avatar Romulo Pereira Goncalves
Browse files

Merge branch 'issue#3' into 'master'

Issue#3

See merge request !14
parents fa1238c4 f20db00b
Pipeline #24231 passed with stages
in 1 minute and 59 seconds
......@@ -11,9 +11,9 @@ Description: Calculates samples and related classifiers for mapping gradual prob
License: GPL-3
Imports:
BH (<= 1.69.0-1),
sf (<= 0.9-0),
sp (<= 1.4-4),
rgdal (<= 1.5-12),
sf,
sp,
rgdal,
raster,
geojsonio,
maptools,
......
......@@ -20,7 +20,7 @@
"source": [
"### **0.0** - Pin the version of certain dependencies\n",
"\n",
"The `velox` library is not anymore developed. Its latest version does not compile with the latest version of `Boost`. To avoid warnings and issues with the migration of `rgdal` to `gdal 3 and proj 6` we decided to pin the versions of `sf`, `sp` and `rgdal`."
"The `velox` library is not anymore developed. Its latest version does not compile with the latest version of `Boost`."
]
},
{
......@@ -40,10 +40,7 @@
"source": [
"if (install_dependencies == TRUE) {\n",
" install.packages(\"remotes\")\n",
" install.packages(\"https://cran.r-project.org/src/contrib/Archive/BH/BH_1.69.0-1.tar.gz\", repos=NULL, type=\"source\")\n",
" install.packages(\"https://cran.r-project.org/src/contrib/Archive/sf/sf_0.9-0.tar.gz\", repos=NULL, type=\"source\")\n",
" install.packages(\"https://cran.r-project.org/src/contrib/Archive/sp/sp_1.4-4.tar.gz\", repos=NULL, type=\"source\")\n",
" install.packages(\"https://cran.r-project.org/src/contrib/Archive/rgdal/rgdal_1.5-12.tar.gz\", repos=NULL, type=\"source\") \n",
" install.packages(\"https://cran.r-project.org/src/contrib/Archive/BH/BH_1.69.0-1.tar.gz\", repos=NULL, type=\"source\") \n",
"}"
]
},
......
......@@ -28,9 +28,6 @@
##0.0##
install.packages("remotes")
install.packages("https://cran.r-project.org/src/contrib/Archive/BH/BH_1.69.0-1.tar.gz", repos=NULL, type="source")
install.packages("https://cran.r-project.org/src/contrib/Archive/sf/sf_0.9-0.tar.gz", repos=NULL, type="source")
install.packages("https://cran.r-project.org/src/contrib/Archive/sp/sp_1.4-4.tar.gz", repos=NULL, type="source")
install.packages("https://cran.r-project.org/src/contrib/Archive/rgdal/rgdal_1.5-12.tar.gz", repos=NULL, type="source")
##0.1##
library(remotes)
......
......@@ -10,10 +10,10 @@ container_name="hasa"
input_data_folder="/tmp/hasa"
#Check if image exists
if [ $(sudo docker images | grep ${runner_iname} | wc -l) == 0 ]
if [ $(docker images | grep ${runner_iname} | wc -l) == 0 ]
then
# build docker image
sudo docker build --network=host -f ${context_dir}/${dockerfile} -m 20G -t ${runner_tag} ${context_dir}
docker build --network=host -f ${context_dir}/${dockerfile} -m 20G -t ${runner_tag} ${context_dir}
else
echo "It already exists a Docker image with the name ${runner_tag}!!!"
fi
......@@ -9,9 +9,6 @@ install.packages("rmarkdown")
install.packages("knitr")
install.packages("remotes")
install.packages("https://cran.r-project.org/src/contrib/Archive/BH/BH_1.69.0-1.tar.gz", repos=NULL, type="source")
install.packages("https://cran.r-project.org/src/contrib/Archive/sf/sf_0.9-0.tar.gz", repos=NULL, type="source")
install.packages("https://cran.r-project.org/src/contrib/Archive/sp/sp_1.4-4.tar.gz", repos=NULL, type="source", dependencies=TRUE)
install.packages("https://cran.r-project.org/src/contrib/Archive/rgdal/rgdal_1.5-12.tar.gz", repos=NULL, type="source")
# Install HaSa
library(remotes)
......
......@@ -3,9 +3,6 @@ install.packages("rmarkdown")
install.packages("knitr")
install.packages("remotes")
install.packages("https://cran.r-project.org/src/contrib/Archive/BH/BH_1.69.0-1.tar.gz", repos=NULL, type="source")
install.packages("https://cran.r-project.org/src/contrib/Archive/sf/sf_0.9-0.tar.gz", repos=NULL, type="source")
install.packages("https://cran.r-project.org/src/contrib/Archive/sp/sp_1.4-4.tar.gz", repos=NULL, type="source", dependencies=TRUE)
install.packages("https://cran.r-project.org/src/contrib/Archive/rgdal/rgdal_1.5-12.tar.gz", repos=NULL, type="source")
# Install HaSa
library(remotes)
......
......@@ -50,12 +50,12 @@ demo_data_foler="$(realpath ../demo/)"
jupyter_lab_port=8888
docker_jupyterlab_port=8888
sudo docker rm -f ${container_name}
docker rm -f ${container_name}
echo "Starting HaSa container, please add the input files to ${input_dir_} and read the results from the ${output_dir_}"
if [ "${jupyter_notebook,,}" = "true" ]
then
sudo docker run -it --name ${container_name} --privileged \
docker run -it --name ${container_name} --privileged \
--oom-kill-disable --memory=12g --memory-swap=14g --net=host \
-p $jupyter_lab_port:$docker_jupyterlab_port \
-v ${input_dir_}:${in_data_folder} \
......@@ -64,7 +64,7 @@ then
-v ${demo_dir}:${demo_data_folder} ${runner_tag} \
bash -i -c "cd /home/hasa; jupyter-lab --ip 0.0.0.0 --no-browser --allow-root demo/"
else
sudo docker run -it --name ${container_name} -u 1000:1000 --privileged \
docker run -it --name ${container_name} -u 1000:1000 --privileged \
--oom-kill-disable --memory=12g --memory-swap=14g --net=host \
-v ${input_dir_}:${in_data_folder} \
-v ${tmp_dir_}:${tmp_data_folder} \
......
......@@ -3,14 +3,6 @@ title: "An Introduction to Habitat Sampler"
author: "Carsten Neumann, Alison Beamish, Romulo Goncalves"
date: "01/06/2021"
output:
md_document:
pandoc_args: ["--output", "README.md"]
toc: true
toc_depth: 2
variant: gfm
pdf_document:
toc: true
toc_depth: 2
html_document:
theme: united
highlight: tango
......@@ -20,7 +12,14 @@ output:
collapsed: false
smooth_scroll: false
df_print: paged
always_allow_html: yes
md_document:
pandoc_args: ["--output", "README.md"]
toc: true
toc_depth: 2
variant: gfm
pdf_document:
toc: true
toc_depth: 2
header-includes:
- \usepackage{caption}
- \captionsetup[figure]{labelformat=empty}
......@@ -36,7 +35,7 @@ knitr::opts_chunk$set(tidy.opts = list(width.cutoff = 75), tidy = TRUE, fig.pos
# 1 Introduction
This manual introduces the Habitat Sampler (HaSa), an innovative tool that autonomously generates representative reference samples for predictive modelling of surface class probabilities. The tool can be applied to any image data that displays surface structures and dynamics of any kind at multiple spatial and temporal scales. HaSa was initially developed to classify habitat dynamics in semi-natural ecosystems but the procedure can theoretically be applied to any surface. The main innovation of the tool is that it reduces reliance on comprehensive in situ ground truth data or comprehensive training datasets which constrain accurate image classification particularly in complex scenes.
Though development of HaSa has prioritized ease of use, this documentation assume a familiarity with the R software. The document is built successively and is intended to lead you step-by-step through the HaSa procedure of generating probability and classification maps. HaSa is still in development and any suggestions or improvements are welcomed and encouraged in our [GitLab Community Version](https://git.gfz-potsdam.de/habitat-sampler/HabitatSampler.git). If questions remain please don't hesitate to contact the authors of the package. For a detailed description of the Habitat Sampler and its applications, see [Neumann et al., (2020)](https://doi.org/10.1111/ddi.13165).
Though development of HaSa has prioritized ease of use, this documentation assumes a familiarity with the R software. The document is built successively and is intended to lead you step-by-step through the HaSa procedure of generating probability and classification maps. HaSa is still in development and any suggestions or improvements are welcomed and encouraged in our [GitHub Community Version](https://git.gfz-potsdam.de/habitat-sampler/HabitatSampler.git). If questions remain please don't hesitate to contact the authors of the package. For a detailed description of the Habitat Sampler and its applications, see [Neumann et al., (2020)](https://doi.org/10.1111/ddi.13165).
## 1.1 Usage
The tool is implemented in R and uses Leaflet [(Cheng et al., 2019)](https://rdrr.io/cran/leaflet/) to generate interactive maps in a web browser. There are no assumptions about the input image data and there are no constraints for the spectral-temporal-spatial domain in which the image is sampled. The tool requires the input of a priori expert user knowledge to generate reference data about expected surface classes which are delineated in the imagery or extracted from an external spectral library. The user has the choice between image classifiers [random forest](https://doi.org/10.1023/A:1010933404324) (RF) and [support vector](https://doi.org/10.1145/130385.130401) (SV).
......@@ -66,19 +65,13 @@ The point shapefile contains a point location per class and is used to extract t
The following procedure will lead you through the preliminary steps required to setup the HaSa tool.
## 2.1 HaSa dependencies
HaSa uses the latest version of the `velox` library (`v0.2.0`) which does not compile with the latest version the interface to the C++ Boost library `BH`. Hence, it is necessary to pin the `BH` version. HaSa does not yet support the latest developments in `rgdal` and `sp` related with projections. It is also necessary to pin the versions for `sf`, `sp` and `rgdal`.
HaSa uses the latest version of the `velox` library (`v0.2.0`) which does not compile with the latest version of the interface to the C++ Boost library `BH`. Hence, it is necessary to pin the `BH` version.
The installation of `BH`, `sf`, `sp` and `rgdal` is possible with the following commands:
The installation of `BH` is possible with the following commands:
```{r install dependencies, eval = FALSE}
install.packages("remotes")
install.packages("https://cran.r-project.org/src/contrib/Archive/BH/BH_1.69.0-1.tar.
gz", repos = NULL, type = "source")
install.packages("https://cran.r-project.org/src/contrib/Archive/sf/sf_0.9-0.tar.
gz", repos = NULL, type = "source")
install.packages("https://cran.r-project.org/src/contrib/Archive/sp/sp_1.4-4.tar.
gz", repos = NULL, type = "source")
install.packages("https://cran.r-project.org/src/contrib/Archive/rgdal/rgdal_1.5-12.
tar.gz", repos = NULL, type = "source")
```
## 2.2 Install HaSa
......
This diff is collapsed.
No preview for this file type
......@@ -2,18 +2,18 @@
- [1 Introduction](#introduction)
- [1 Introduction](#introduction)
- [1.1 Usage](#usage)
- [1.2 Sample datasets](#sample-datasets)
- [2 HaSa installation](#hasa-installation)
- [2 HaSa installation](#hasa-installation)
- [2.1 HaSa dependencies](#hasa-dependencies)
- [2.2 Install HaSa](#install-hasa)
- [2.3 Load HaSa](#load-hasa)
- [3 Load demo data](#load-demo-data)
- [3 Load demo data](#load-demo-data)
- [3.1 Data directories](#data-directories)
- [3.2 Satellite timeseries stack](#satellite-timeseries-stack)
- [3.3 Selecting reference samples](#selecting-reference-samples)
- [4 Generating outputs](#generating-outputs)
- [4 Generating outputs](#generating-outputs)
- [4.1 Calculating class
probability](#calculating-class-probability)
- [4.2 Generating classification map and summary
......@@ -37,7 +37,7 @@ documentation assumes a familiarity with the R software. The document is
built successively and is intended to lead you step-by-step through the
HaSa procedure of generating probability and classification maps. HaSa
is still in development and any suggestions or improvements are welcomed
and encouraged in our [GitLab Community
and encouraged in our [GitHub Community
Version](https://git.gfz-potsdam.de/habitat-sampler/HabitatSampler.git).
If questions remain please don’t hesitate to contact the authors of the
package. For a detailed description of the Habitat Sampler and its
......@@ -46,10 +46,10 @@ applications, see [Neumann et al.,
## 1.1 Usage
The tool is implemented in R and uses Leaflet [(Cheng et al.,
2019)](https://rdrr.io/cran/leaflet/) to generate interactive maps in a
web browser. There are no assumptions about the input image data and
there are no constraints for the spectral-temporal-spatial domain in
The tool is implemented in R and uses Leaflet [(Cheng et
al., 2019)](https://rdrr.io/cran/leaflet/) to generate interactive maps
in a web browser. There are no assumptions about the input image data
and there are no constraints for the spectral-temporal-spatial domain in
which the image is sampled. The tool requires the input of a priori
expert user knowledge to generate reference data about expected surface
classes which are delineated in the imagery or extracted from an
......@@ -78,7 +78,7 @@ data including the band ID in the timeseries stack are provided below
(Table 1).
| | Band 2 | Band 3 | Band 4 | Band 5 | Band 6 | Band 7 | Band 8 | Band 11 | Band 12 |
|------------|:-------|:-------|:-------|:-----------|:-----------|:-----------|:-------|:--------|:--------|
| ---------- | :----- | :----- | :----- | :--------- | :--------- | :--------- | :----- | :------ | :------ |
| Date | Blue | Green | Red | Red Edge 1 | Red Edge 2 | Red Edge 3 | NIR | SWIR 1 | SWIR 2 |
| 2018-03-03 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 |
| 2018-05-07 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 |
......@@ -112,28 +112,15 @@ required to setup the HaSa tool.
HaSa uses the latest version of the `velox` library (`v0.2.0`) which
does not compile with the latest version of the interface to the C++ Boost
library `BH`. Hence, it is necessary to pin the `BH` version. HaSa does
not yet support the latest developments in `rgdal` and `sp` related with
projections. It is also necessary to pin the versions for `sf`, `sp` and
`rgdal`.
library `BH`. Hence, it is necessary to pin the `BH` version.
The installation of `BH`, `sf`, `sp` and `rgdal` is possible with the
following commands:
The installation of `BH` is possible with the following commands:
``` r
install.packages("remotes")
install.packages("https://cran.r-project.org/src/contrib/Archive/BH/BH_1.69.0-1.tar.
gz",
repos = NULL, type = "source")
install.packages("https://cran.r-project.org/src/contrib/Archive/sf/sf_0.9-0.tar.
gz",
repos = NULL, type = "source")
install.packages("https://cran.r-project.org/src/contrib/Archive/sp/sp_1.4-4.tar.
gz",
repos = NULL, type = "source")
install.packages("https://cran.r-project.org/src/contrib/Archive/rgdal/rgdal_1.5-12.
tar.gz",
repos = NULL, type = "source")
```
## 2.2 Install HaSa
......@@ -201,10 +188,10 @@ raster::rasterOptions(tmpdir = "./RasterTmp/")
The satellite time series is either passed as a **3.2.1** stack of
images already clipped or **3.2.2** a stack of image to be clipped. In
both cases, the input Satellite images needs to either have a valid
projection or the projection be passed as parameter, i.e.,
`sat_crs_str = '+proj=utm +zone=32 +datum=WGS84 +units=m +no_defs'`,
otherwise, the function will report error. Satellite time series data
are available in `dataPath`.
projection or the projection be passed as parameter, i.e., `sat_crs_str
= '+proj=utm +zone=32 +datum=WGS84 +units=m +no_defs'`, otherwise, the
function will report error. Satellite time series data are available in
`dataPath`.
``` r
satellite_series_path <- paste(dataPath, "SentinelStack_2018.tif", sep = "")
......@@ -219,7 +206,7 @@ b = 21
raster::plotRGB(timeseries_stack, r = r, g = g, b = b, stretch = "lin", axes = T)
```
<img src="HabitatSampler_files/figure-markdown_github/raster preview clipped-1.png" />
<img src="HabitatSampler_files/figure-gfm/raster preview clipped-1.png" />
## 3.3 Selecting reference samples
......@@ -247,7 +234,7 @@ ref <- HaSa::load_reference_as_table(table_data_path)
```
| | SentinelStack\_2018.1 | SentinelStack\_2018.2 | SentinelStack\_2018.3 | … | SentinelStack\_2018.54 |
|----------------|----------------------:|----------------------:|----------------------:|:----|-----------------------:|
| -------------- | --------------------: | --------------------: | --------------------: | :- | ---------------------: |
| deciduous | 1066 | 1069 | 915 | … | 1725 |
| coniferous | 656 | 687 | 444 | … | 1671 |
| heather\_young | 2071 | 2303 | 2227 | … | 2726 |
......@@ -320,7 +307,7 @@ HaSa::plot_configuration(
)
```
<img src="HabitatSampler_files/figure-markdown_github/plot configuration-1.png" />
<img src="HabitatSampler_files/figure-gfm/plot configuration-1.png" />
### 4.1.2 Define color palette for probability plot
......@@ -373,15 +360,15 @@ HaSa::multi_Class_Sampling(
)
```
- **Note 1**: Regular sampling is faster than random
- **Note 2**: The argument `last = T` can be set when only one class
- **Note 1**: Regular sampling is faster than random
- **Note 2**: The argument `last = T` can be set when only one class
should be separated from the background pixels
- **Note 3**: The results from previous steps are reproducible when
- **Note 3**: The results from previous steps are reproducible when
using the same seed value and `int.seed=Run@seeds`
(e.g. <Run02@seeds>) in consequence, `init.sample` for regular
sampling determines an invariant sample distribution, use `random`
sampling or vary `init.sample` to get varying sample distributions.
- **Note 4**: If `multiTest > 1` the user will get multiple maps and
- **Note 4**: If `multiTest > 1` the user will get multiple maps and
will be asked to enter the number of the probability distribution
that is appropriate.
......@@ -404,7 +391,7 @@ An interactive map is plotted in a web browser (e.g., Firefox for Linux)
containing a selected habitat type. The number of models predicting this
habitat type can be viewed by hovering the mouse over the map.
<img src="./images/inter_map_ex_new.png" style="width:65.0%" />
![](./images/inter_map_ex_new.png)
From this interactive map, the user has two choices:
......@@ -415,14 +402,14 @@ From this interactive map, the user has two choices:
If the user chooses to extract the class, a user defined threshold is
entered into the R console and the following files are saved:
- HabitatSampler object (Run) - R Binary: The R object is used when
- HabitatSampler object (Run) - R Binary: The R object is used when
the user wants to restart the computation at a specific step or
reuse the seeds for sampling.
- probability map - *.kml, *.png, geocoded \*.tif: Tiff contains all
- probability map - *.kml, *.png, geocoded \*.tif: Tiff contains all
classes plotted, one class, one color. See example in the
demo/Data/Results/HabitatMap\_final.pdf
- threshold list - R Binary
- leaflet interactive web interface - \*.html: LeafLet Map with the 3
- threshold list - R Binary
- leaflet interactive web interface - \*.html: LeafLet Map with the 3
RGB channels and the raster containing the probabilities. The file
is re-written for each run
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment