Commit d2a87c9a authored by Romulo Pereira Goncalves's avatar Romulo Pereira Goncalves
Browse files

Merge branch 'issue#3' into 'master'

Issue#3

See merge request !14
parents fa1238c4 f20db00b
Pipeline #24231 passed with stages
in 1 minute and 59 seconds
...@@ -11,9 +11,9 @@ Description: Calculates samples and related classifiers for mapping gradual prob ...@@ -11,9 +11,9 @@ Description: Calculates samples and related classifiers for mapping gradual prob
License: GPL-3 License: GPL-3
Imports: Imports:
BH (<= 1.69.0-1), BH (<= 1.69.0-1),
sf (<= 0.9-0), sf,
sp (<= 1.4-4), sp,
rgdal (<= 1.5-12), rgdal,
raster, raster,
geojsonio, geojsonio,
maptools, maptools,
......
...@@ -20,7 +20,7 @@ ...@@ -20,7 +20,7 @@
"source": [ "source": [
"### **0.0** - Pin the version of certain dependencies\n", "### **0.0** - Pin the version of certain dependencies\n",
"\n", "\n",
"The `velox` library is no longer developed. Its latest version does not compile with the latest version of `Boost`. To avoid warnings and issues with the migration of `rgdal` to `gdal 3 and proj 6` we decided to pin the versions of `sf`, `sp` and `rgdal`." "The `velox` library is no longer developed. Its latest version does not compile with the latest version of `Boost`."
] ]
}, },
{ {
...@@ -40,10 +40,7 @@ ...@@ -40,10 +40,7 @@
"source": [ "source": [
"if (install_dependencies == TRUE) {\n", "if (install_dependencies == TRUE) {\n",
" install.packages(\"remotes\")\n", " install.packages(\"remotes\")\n",
" install.packages(\"https://cran.r-project.org/src/contrib/Archive/BH/BH_1.69.0-1.tar.gz\", repos=NULL, type=\"source\")\n", " install.packages(\"https://cran.r-project.org/src/contrib/Archive/BH/BH_1.69.0-1.tar.gz\", repos=NULL, type=\"source\") \n",
" install.packages(\"https://cran.r-project.org/src/contrib/Archive/sf/sf_0.9-0.tar.gz\", repos=NULL, type=\"source\")\n",
" install.packages(\"https://cran.r-project.org/src/contrib/Archive/sp/sp_1.4-4.tar.gz\", repos=NULL, type=\"source\")\n",
" install.packages(\"https://cran.r-project.org/src/contrib/Archive/rgdal/rgdal_1.5-12.tar.gz\", repos=NULL, type=\"source\") \n",
"}" "}"
] ]
}, },
......
...@@ -28,9 +28,6 @@ ...@@ -28,9 +28,6 @@
##0.0## ##0.0##
install.packages("remotes") install.packages("remotes")
install.packages("https://cran.r-project.org/src/contrib/Archive/BH/BH_1.69.0-1.tar.gz", repos=NULL, type="source") install.packages("https://cran.r-project.org/src/contrib/Archive/BH/BH_1.69.0-1.tar.gz", repos=NULL, type="source")
install.packages("https://cran.r-project.org/src/contrib/Archive/sf/sf_0.9-0.tar.gz", repos=NULL, type="source")
install.packages("https://cran.r-project.org/src/contrib/Archive/sp/sp_1.4-4.tar.gz", repos=NULL, type="source")
install.packages("https://cran.r-project.org/src/contrib/Archive/rgdal/rgdal_1.5-12.tar.gz", repos=NULL, type="source")
##0.1## ##0.1##
library(remotes) library(remotes)
......
...@@ -10,10 +10,10 @@ container_name="hasa" ...@@ -10,10 +10,10 @@ container_name="hasa"
input_data_folder="/tmp/hasa" input_data_folder="/tmp/hasa"
#Check if image exists #Check if image exists
if [ $(sudo docker images | grep ${runner_iname} | wc -l) == 0 ] if [ $(docker images | grep ${runner_iname} | wc -l) == 0 ]
then then
# build docker image # build docker image
sudo docker build --network=host -f ${context_dir}/${dockerfile} -m 20G -t ${runner_tag} ${context_dir} docker build --network=host -f ${context_dir}/${dockerfile} -m 20G -t ${runner_tag} ${context_dir}
else else
echo "It already exists a Docker image with the name ${runner_tag}!!!" echo "It already exists a Docker image with the name ${runner_tag}!!!"
fi fi
...@@ -9,9 +9,6 @@ install.packages("rmarkdown") ...@@ -9,9 +9,6 @@ install.packages("rmarkdown")
install.packages("knitr") install.packages("knitr")
install.packages("remotes") install.packages("remotes")
install.packages("https://cran.r-project.org/src/contrib/Archive/BH/BH_1.69.0-1.tar.gz", repos=NULL, type="source") install.packages("https://cran.r-project.org/src/contrib/Archive/BH/BH_1.69.0-1.tar.gz", repos=NULL, type="source")
install.packages("https://cran.r-project.org/src/contrib/Archive/sf/sf_0.9-0.tar.gz", repos=NULL, type="source")
install.packages("https://cran.r-project.org/src/contrib/Archive/sp/sp_1.4-4.tar.gz", repos=NULL, type="source", dependencies=TRUE)
install.packages("https://cran.r-project.org/src/contrib/Archive/rgdal/rgdal_1.5-12.tar.gz", repos=NULL, type="source")
# Install HaSa # Install HaSa
library(remotes) library(remotes)
......
...@@ -3,9 +3,6 @@ install.packages("rmarkdown") ...@@ -3,9 +3,6 @@ install.packages("rmarkdown")
install.packages("knitr") install.packages("knitr")
install.packages("remotes") install.packages("remotes")
install.packages("https://cran.r-project.org/src/contrib/Archive/BH/BH_1.69.0-1.tar.gz", repos=NULL, type="source") install.packages("https://cran.r-project.org/src/contrib/Archive/BH/BH_1.69.0-1.tar.gz", repos=NULL, type="source")
install.packages("https://cran.r-project.org/src/contrib/Archive/sf/sf_0.9-0.tar.gz", repos=NULL, type="source")
install.packages("https://cran.r-project.org/src/contrib/Archive/sp/sp_1.4-4.tar.gz", repos=NULL, type="source", dependencies=TRUE)
install.packages("https://cran.r-project.org/src/contrib/Archive/rgdal/rgdal_1.5-12.tar.gz", repos=NULL, type="source")
# Install HaSa # Install HaSa
library(remotes) library(remotes)
......
...@@ -50,12 +50,12 @@ demo_data_foler="$(realpath ../demo/)" ...@@ -50,12 +50,12 @@ demo_data_foler="$(realpath ../demo/)"
jupyter_lab_port=8888 jupyter_lab_port=8888
docker_jupyterlab_port=8888 docker_jupyterlab_port=8888
sudo docker rm -f ${container_name} docker rm -f ${container_name}
echo "Starting HaSa container, please add the input files to ${input_dir_} and read the results from the ${output_dir_}" echo "Starting HaSa container, please add the input files to ${input_dir_} and read the results from the ${output_dir_}"
if [ "${jupyter_notebook,,}" = "true" ] if [ "${jupyter_notebook,,}" = "true" ]
then then
sudo docker run -it --name ${container_name} --privileged \ docker run -it --name ${container_name} --privileged \
--oom-kill-disable --memory=12g --memory-swap=14g --net=host \ --oom-kill-disable --memory=12g --memory-swap=14g --net=host \
-p $jupyter_lab_port:$docker_jupyterlab_port \ -p $jupyter_lab_port:$docker_jupyterlab_port \
-v ${input_dir_}:${in_data_folder} \ -v ${input_dir_}:${in_data_folder} \
...@@ -64,7 +64,7 @@ then ...@@ -64,7 +64,7 @@ then
-v ${demo_dir}:${demo_data_folder} ${runner_tag} \ -v ${demo_dir}:${demo_data_folder} ${runner_tag} \
bash -i -c "cd /home/hasa; jupyter-lab --ip 0.0.0.0 --no-browser --allow-root demo/" bash -i -c "cd /home/hasa; jupyter-lab --ip 0.0.0.0 --no-browser --allow-root demo/"
else else
sudo docker run -it --name ${container_name} -u 1000:1000 --privileged \ docker run -it --name ${container_name} -u 1000:1000 --privileged \
--oom-kill-disable --memory=12g --memory-swap=14g --net=host \ --oom-kill-disable --memory=12g --memory-swap=14g --net=host \
-v ${input_dir_}:${in_data_folder} \ -v ${input_dir_}:${in_data_folder} \
-v ${tmp_dir_}:${tmp_data_folder} \ -v ${tmp_dir_}:${tmp_data_folder} \
......
...@@ -3,14 +3,6 @@ title: "An Introduction to Habitat Sampler" ...@@ -3,14 +3,6 @@ title: "An Introduction to Habitat Sampler"
author: "Carsten Neumann, Alison Beamish, Romulo Goncalves" author: "Carsten Neumann, Alison Beamish, Romulo Goncalves"
date: "01/06/2021" date: "01/06/2021"
output: output:
md_document:
pandoc_args: ["--output", "README.md"]
toc: true
toc_depth: 2
variant: gfm
pdf_document:
toc: true
toc_depth: 2
html_document: html_document:
theme: united theme: united
highlight: tango highlight: tango
...@@ -20,7 +12,14 @@ output: ...@@ -20,7 +12,14 @@ output:
collapsed: false collapsed: false
smooth_scroll: false smooth_scroll: false
df_print: paged df_print: paged
always_allow_html: yes md_document:
pandoc_args: ["--output", "README.md"]
toc: true
toc_depth: 2
variant: gfm
pdf_document:
toc: true
toc_depth: 2
header-includes: header-includes:
- \usepackage{caption} - \usepackage{caption}
- \captionsetup[figure]{labelformat=empty} - \captionsetup[figure]{labelformat=empty}
...@@ -36,7 +35,7 @@ knitr::opts_chunk$set(tidy.opts = list(width.cutoff = 75), tidy = TRUE, fig.pos ...@@ -36,7 +35,7 @@ knitr::opts_chunk$set(tidy.opts = list(width.cutoff = 75), tidy = TRUE, fig.pos
# 1 Introduction # 1 Introduction
This manual introduces the Habitat Sampler (HaSa), an innovative tool that autonomously generates representative reference samples for predictive modelling of surface class probabilities. The tool can be applied to any image data that displays surface structures and dynamics of any kind at multiple spatial and temporal scales. HaSa was initially developed to classify habitat dynamics in semi-natural ecosystems but the procedure can theoretically be applied to any surface. The main innovation of the tool is that it reduces reliance on comprehensive in situ ground truth data or comprehensive training datasets which constrain accurate image classification particularly in complex scenes. This manual introduces the Habitat Sampler (HaSa), an innovative tool that autonomously generates representative reference samples for predictive modelling of surface class probabilities. The tool can be applied to any image data that displays surface structures and dynamics of any kind at multiple spatial and temporal scales. HaSa was initially developed to classify habitat dynamics in semi-natural ecosystems but the procedure can theoretically be applied to any surface. The main innovation of the tool is that it reduces reliance on comprehensive in situ ground truth data or comprehensive training datasets which constrain accurate image classification particularly in complex scenes.
Though development of HaSa has prioritized ease of use, this documentation assumes a familiarity with the R software. The document is built successively and is intended to lead you step-by-step through the HaSa procedure of generating probability and classification maps. HaSa is still in development and any suggestions or improvements are welcomed and encouraged in our [GitLab Community Version](https://git.gfz-potsdam.de/habitat-sampler/HabitatSampler.git). If questions remain please don't hesitate to contact the authors of the package. For a detailed description of the Habitat Sampler and its applications, see [Neumann et al., (2020)](https://doi.org/10.1111/ddi.13165). Though development of HaSa has prioritized ease of use, this documentation assumes a familiarity with the R software. The document is built successively and is intended to lead you step-by-step through the HaSa procedure of generating probability and classification maps. HaSa is still in development and any suggestions or improvements are welcomed and encouraged in our [GitHub Community Version](https://git.gfz-potsdam.de/habitat-sampler/HabitatSampler.git). If questions remain please don't hesitate to contact the authors of the package. For a detailed description of the Habitat Sampler and its applications, see [Neumann et al., (2020)](https://doi.org/10.1111/ddi.13165).
## 1.1 Usage ## 1.1 Usage
The tool is implemented in R and uses Leaflet [(Cheng et al., 2019)](https://rdrr.io/cran/leaflet/) to generate interactive maps in a web browser. There are no assumptions about the input image data and there are no constraints for the spectral-temporal-spatial domain in which the image is sampled. The tool requires the input of a priori expert user knowledge to generate reference data about expected surface classes which are delineated in the imagery or extracted from an external spectral library. The user has the choice between image classifiers [random forest](https://doi.org/10.1023/A:1010933404324) (RF) and [support vector](https://doi.org/10.1145/130385.130401) (SV). The tool is implemented in R and uses Leaflet [(Cheng et al., 2019)](https://rdrr.io/cran/leaflet/) to generate interactive maps in a web browser. There are no assumptions about the input image data and there are no constraints for the spectral-temporal-spatial domain in which the image is sampled. The tool requires the input of a priori expert user knowledge to generate reference data about expected surface classes which are delineated in the imagery or extracted from an external spectral library. The user has the choice between image classifiers [random forest](https://doi.org/10.1023/A:1010933404324) (RF) and [support vector](https://doi.org/10.1145/130385.130401) (SV).
...@@ -66,19 +65,13 @@ The point shapefile contains a point location per class and is used to extract t ...@@ -66,19 +65,13 @@ The point shapefile contains a point location per class and is used to extract t
The following procedure will lead you through the preliminary steps required to setup the HaSa tool. The following procedure will lead you through the preliminary steps required to setup the HaSa tool.
## 2.1 HaSa dependencies ## 2.1 HaSa dependencies
HaSa uses the latest version of the `velox` library (`v0.2.0`) which does not compile with the latest version of the interface to the C++ Boost library `BH`. Hence, it is necessary to pin the `BH` version. HaSa does not yet support the latest developments in `rgdal` and `sp` related to projections. It is also necessary to pin the versions for `sf`, `sp` and `rgdal`. HaSa uses the latest version of the `velox` library (`v0.2.0`) which does not compile with the latest version of the interface to the C++ Boost library `BH`. Hence, it is necessary to pin the `BH` version.
The installation of `BH`, `sf`, `sp` and `rgdal` is possible with the following commands: The installation of `BH` is possible with the following commands:
```{r install dependencies, eval = FALSE} ```{r install dependencies, eval = FALSE}
install.packages("remotes") install.packages("remotes")
install.packages("https://cran.r-project.org/src/contrib/Archive/BH/BH_1.69.0-1.tar. install.packages("https://cran.r-project.org/src/contrib/Archive/BH/BH_1.69.0-1.tar.
gz", repos = NULL, type = "source") gz", repos = NULL, type = "source")
install.packages("https://cran.r-project.org/src/contrib/Archive/sf/sf_0.9-0.tar.
gz", repos = NULL, type = "source")
install.packages("https://cran.r-project.org/src/contrib/Archive/sp/sp_1.4-4.tar.
gz", repos = NULL, type = "source")
install.packages("https://cran.r-project.org/src/contrib/Archive/rgdal/rgdal_1.5-12.
tar.gz", repos = NULL, type = "source")
``` ```
## 2.2 Install HaSa ## 2.2 Install HaSa
......
This diff is collapsed.
No preview for this file type
...@@ -2,21 +2,21 @@ ...@@ -2,21 +2,21 @@
- [1 Introduction](#introduction) - [1 Introduction](#introduction)
- [1.1 Usage](#usage) - [1.1 Usage](#usage)
- [1.2 Sample datasets](#sample-datasets) - [1.2 Sample datasets](#sample-datasets)
- [2 HaSa installation](#hasa-installation) - [2 HaSa installation](#hasa-installation)
- [2.1 HaSa dependencies](#hasa-dependencies) - [2.1 HaSa dependencies](#hasa-dependencies)
- [2.2 Install HaSa](#install-hasa) - [2.2 Install HaSa](#install-hasa)
- [2.3 Load HaSa](#load-hasa) - [2.3 Load HaSa](#load-hasa)
- [3 Load demo data](#load-demo-data) - [3 Load demo data](#load-demo-data)
- [3.1 Data directories](#data-directories) - [3.1 Data directories](#data-directories)
- [3.2 Satellite timeseries stack](#satellite-timeseries-stack) - [3.2 Satellite timeseries stack](#satellite-timeseries-stack)
- [3.3 Selecting reference samples](#selecting-reference-samples) - [3.3 Selecting reference samples](#selecting-reference-samples)
- [4 Generating outputs](#generating-outputs) - [4 Generating outputs](#generating-outputs)
- [4.1 Calculating class - [4.1 Calculating class
probability](#calculating-class-probability) probability](#calculating-class-probability)
- [4.2 Generating classification map and summary - [4.2 Generating classification map and summary
statistics](#generating-classification-map-and-summary-statistics) statistics](#generating-classification-map-and-summary-statistics)
# 1 Introduction # 1 Introduction
...@@ -37,7 +37,7 @@ documentation assumes a familiarity with the R software. The document is ...@@ -37,7 +37,7 @@ documentation assumes a familiarity with the R software. The document is
built successively and is intended to lead you step-by-step through the built successively and is intended to lead you step-by-step through the
HaSa procedure of generating probability and classification maps. HaSa HaSa procedure of generating probability and classification maps. HaSa
is still in development and any suggestions or improvements are welcomed is still in development and any suggestions or improvements are welcomed
and encouraged in our [GitLab Community and encouraged in our [GitHub Community
Version](https://git.gfz-potsdam.de/habitat-sampler/HabitatSampler.git). Version](https://git.gfz-potsdam.de/habitat-sampler/HabitatSampler.git).
If questions remain please don’t hesitate to contact the authors of the If questions remain please don’t hesitate to contact the authors of the
package. For a detailed description of the Habitat Sampler and its package. For a detailed description of the Habitat Sampler and its
...@@ -46,10 +46,10 @@ applications, see [Neumann et al., ...@@ -46,10 +46,10 @@ applications, see [Neumann et al.,
## 1.1 Usage ## 1.1 Usage
The tool is implemented in R and uses Leaflet [(Cheng et al., The tool is implemented in R and uses Leaflet [(Cheng et
2019)](https://rdrr.io/cran/leaflet/) to generate interactive maps in a al., 2019)](https://rdrr.io/cran/leaflet/) to generate interactive maps
web browser. There are no assumptions about the input image data and in a web browser. There are no assumptions about the input image data
there are no constraints for the spectral-temporal-spatial domain in and there are no constraints for the spectral-temporal-spatial domain in
which the image is sampled. The tool requires the input of a priori which the image is sampled. The tool requires the input of a priori
expert user knowledge to generate reference data about expected surface expert user knowledge to generate reference data about expected surface
classes which are delineated in the imagery or extracted from an classes which are delineated in the imagery or extracted from an
...@@ -78,7 +78,7 @@ data including the band ID in the timeseries stack are provided below ...@@ -78,7 +78,7 @@ data including the band ID in the timeseries stack are provided below
(Table 1). (Table 1).
| | Band 2 | Band 3 | Band 4 | Band 5 | Band 6 | Band 7 | Band 8 | Band 11 | Band 12 | | | Band 2 | Band 3 | Band 4 | Band 5 | Band 6 | Band 7 | Band 8 | Band 11 | Band 12 |
|------------|:-------|:-------|:-------|:-----------|:-----------|:-----------|:-------|:--------|:--------| | ---------- | :----- | :----- | :----- | :--------- | :--------- | :--------- | :----- | :------ | :------ |
| Date | Blue | Green | Red | Red Edge 1 | Red Edge 2 | Red Edge 3 | NIR | SWIR 1 | SWIR 2 | | Date | Blue | Green | Red | Red Edge 1 | Red Edge 2 | Red Edge 3 | NIR | SWIR 1 | SWIR 2 |
| 2018-03-03 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | | 2018-03-03 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 |
| 2018-05-07 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | | 2018-05-07 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 |
...@@ -112,27 +112,14 @@ required to setup the HaSa tool. ...@@ -112,27 +112,14 @@ required to setup the HaSa tool.
HaSa uses the latest version of the `velox` library (`v0.2.0`) which HaSa uses the latest version of the `velox` library (`v0.2.0`) which
does not compile with the latest version of the interface to the C++ Boost does not compile with the latest version of the interface to the C++ Boost
library `BH`. Hence, it is necessary to pin the `BH` version. HaSa does library `BH`. Hence, it is necessary to pin the `BH` version.
not yet support the latest developments in `rgdal` and `sp` related with
projections. It is also necessary to pin the versions for `sf`, `sp` and
`rgdal`.
The installation of `BH`, `sf`, `sp` and `rgdal` is possible with the The installation of `BH` is possible with the following commands:
following commands:
``` r ``` r
install.packages("remotes") install.packages("remotes")
install.packages("https://cran.r-project.org/src/contrib/Archive/BH/BH_1.69.0-1.tar. install.packages("https://cran.r-project.org/src/contrib/Archive/BH/BH_1.69.0-1.tar.
gz", gz",
repos = NULL, type = "source")
install.packages("https://cran.r-project.org/src/contrib/Archive/sf/sf_0.9-0.tar.
gz",
repos = NULL, type = "source")
install.packages("https://cran.r-project.org/src/contrib/Archive/sp/sp_1.4-4.tar.
gz",
repos = NULL, type = "source")
install.packages("https://cran.r-project.org/src/contrib/Archive/rgdal/rgdal_1.5-12.
tar.gz",
repos = NULL, type = "source") repos = NULL, type = "source")
``` ```
...@@ -146,8 +133,8 @@ install the `HaSa` R package, build its manual and its vignettes. ...@@ -146,8 +133,8 @@ install the `HaSa` R package, build its manual and its vignettes.
``` r ``` r
library(remotes) library(remotes)
remotes::install_git("https://git.gfz-potsdam.de/habitat-sampler/HabitatSampler.git", remotes::install_git("https://git.gfz-potsdam.de/habitat-sampler/HabitatSampler.git",
ref = "master", subdir = "R-package", dependencies = NA, upgrade = FALSE, ref = "master", subdir = "R-package", dependencies = NA, upgrade = FALSE,
build = TRUE, build_manual = TRUE, build_vignettes = TRUE) build = TRUE, build_manual = TRUE, build_vignettes = TRUE)
``` ```
...@@ -161,8 +148,8 @@ warning messages related with the latest changes in `gdal` and `PROJ6`. ...@@ -161,8 +148,8 @@ warning messages related with the latest changes in `gdal` and `PROJ6`.
``` r ``` r
options(rgdal_show_exportToProj4_warnings = "none") options(rgdal_show_exportToProj4_warnings = "none")
libraries <- c("rgdal", "raster", "maptools", "spatialEco", "randomForest", libraries <- c("rgdal", "raster", "maptools", "spatialEco", "randomForest",
"e1071", "devtools", "velox", "rgeos", "leaflet", "htmlwidgets", "IRdisplay", "e1071", "devtools", "velox", "rgeos", "leaflet", "htmlwidgets", "IRdisplay",
"HaSa") "HaSa")
lapply(libraries, library, character.only = TRUE) lapply(libraries, library, character.only = TRUE)
``` ```
...@@ -201,10 +188,10 @@ raster::rasterOptions(tmpdir = "./RasterTmp/") ...@@ -201,10 +188,10 @@ raster::rasterOptions(tmpdir = "./RasterTmp/")
The satellite time series is either passed as a **3.2.1** stack of The satellite time series is either passed as a **3.2.1** stack of
images already clipped or **3.2.2** a stack of image to be clipped. In images already clipped or **3.2.2** a stack of image to be clipped. In
both cases, the input Satellite images needs to either have a valid both cases, the input Satellite images needs to either have a valid
projection or the projection be passed as parameter, i.e., projection or the projection be passed as parameter, i.e., `sat_crs_str
`sat_crs_str = '+proj=utm +zone=32 +datum=WGS84 +units=m +no_defs'`, = '+proj=utm +zone=32 +datum=WGS84 +units=m +no_defs'`, otherwise, the
otherwise, the function will report error. Satellite time series data function will report error. Satellite time series data are available in
are available in `dataPath`. `dataPath`.
``` r ``` r
satellite_series_path <- paste(dataPath, "SentinelStack_2018.tif", sep = "") satellite_series_path <- paste(dataPath, "SentinelStack_2018.tif", sep = "")
...@@ -219,7 +206,7 @@ b = 21 ...@@ -219,7 +206,7 @@ b = 21
raster::plotRGB(timeseries_stack, r = r, g = g, b = b, stretch = "lin", axes = T) raster::plotRGB(timeseries_stack, r = r, g = g, b = b, stretch = "lin", axes = T)
``` ```
<img src="HabitatSampler_files/figure-markdown_github/raster preview clipped-1.png" /> <img src="HabitatSampler_files/figure-gfm/raster preview clipped-1.png" />
## 3.3 Selecting reference samples ## 3.3 Selecting reference samples
...@@ -246,15 +233,15 @@ table_data_path <- paste(dataPath, "Example_Reference_table.txt", sep = "") ...@@ -246,15 +233,15 @@ table_data_path <- paste(dataPath, "Example_Reference_table.txt", sep = "")
ref <- HaSa::load_reference_as_table(table_data_path) ref <- HaSa::load_reference_as_table(table_data_path)
``` ```
| | SentinelStack\_2018.1 | SentinelStack\_2018.2 | SentinelStack\_2018.3 | … | SentinelStack\_2018.54 | | | SentinelStack\_2018.1 | SentinelStack\_2018.2 | SentinelStack\_2018.3 | … | SentinelStack\_2018.54 |
|----------------|----------------------:|----------------------:|----------------------:|:----|-----------------------:| | -------------- | --------------------: | --------------------: | --------------------: | :- | ---------------------: |
| deciduous | 1066 | 1069 | 915 | … | 1725 | | deciduous | 1066 | 1069 | 915 | … | 1725 |
| coniferous | 656 | 687 | 444 | … | 1671 | | coniferous | 656 | 687 | 444 | … | 1671 |
| heather\_young | 2071 | 2303 | 2227 | … | 2726 | | heather\_young | 2071 | 2303 | 2227 | … | 2726 |
| heather\_old | 895 | 910 | 728 | … | 1413 | | heather\_old | 895 | 910 | 728 | … | 1413 |
| heather\_shrub | 889 | 927 | 792 | … | 1718 | | heather\_shrub | 889 | 927 | 792 | … | 1718 |
| bare\_ground | 2176 | 2335 | 2277 | … | 2139 | | bare\_ground | 2176 | 2335 | 2277 | … | 2139 |
| xeric\_grass | 3952 | 4566 | 4757 | … | 4893 | | xeric\_grass | 3952 | 4566 | 4757 | … | 4893 |
Reference data Reference data
...@@ -286,7 +273,7 @@ reference spectra (rows = class). ...@@ -286,7 +273,7 @@ reference spectra (rows = class).
``` r ``` r
# create vector with class names. The order of classes must follow the # create vector with class names. The order of classes must follow the
# same order of reference spectra (row = class) # same order of reference spectra (row = class)
classNames <- c("deciduous", "coniferous", "heather_young", "heather_old", "heather_shrub", classNames <- c("deciduous", "coniferous", "heather_young", "heather_old", "heather_shrub",
"bare_ground", "xeric_grass") "bare_ground", "xeric_grass")
``` ```
...@@ -320,7 +307,7 @@ HaSa::plot_configuration( ...@@ -320,7 +307,7 @@ HaSa::plot_configuration(
) )
``` ```
<img src="HabitatSampler_files/figure-markdown_github/plot configuration-1.png" /> <img src="HabitatSampler_files/figure-gfm/plot configuration-1.png" />
### 4.1.2 Define color palette for probability plot ### 4.1.2 Define color palette for probability plot
...@@ -373,15 +360,15 @@ HaSa::multi_Class_Sampling( ...@@ -373,15 +360,15 @@ HaSa::multi_Class_Sampling(
) )
``` ```
- **Note 1**: Regular sampling is faster than random - **Note 1**: Regular sampling is faster than random
- **Note 2**: The argument `last = T` can be set when only one class - **Note 2**: The argument `last = T` can be set when only one class
should be separated from the background pixels should be separated from the background pixels
- **Note 3**: The results from previous steps are reproducible when - **Note 3**: The results from previous steps are reproducible when
using the same seed value and `int.seed=Run@seeds` using the same seed value and `int.seed=Run@seeds`
(e.g. <Run02@seeds>) in consequence, `init.sample` for regular (e.g. <Run02@seeds>) in consequence, `init.sample` for regular
sampling determines an invariant sample distribution, use `random` sampling determines an invariant sample distribution, use `random`
sampling or vary `init.sample` to get varying sample distributions. sampling or vary `init.sample` to get varying sample distributions.
- **Note 4**: If `multiTest > 1` the user will get multiple maps and - **Note 4**: If `multiTest > 1` the user will get multiple maps and
will be asked to enter the number of the probability distribution will be asked to enter the number of the probability distribution
that is appropriate. that is appropriate.
...@@ -404,7 +391,7 @@ An interactive map is plotted in a web browser (e.g., Firefox for Linux) ...@@ -404,7 +391,7 @@ An interactive map is plotted in a web browser (e.g., Firefox for Linux)
containing a selected habitat type. The number of models predicting this containing a selected habitat type. The number of models predicting this
habitat type can be viewed by hovering the mouse over the map. habitat type can be viewed by hovering the mouse over the map.
<img src="./images/inter_map_ex_new.png" style="width:65.0%" /> ![](./images/inter_map_ex_new.png)
From this interactive map, the user has two choices: From this interactive map, the user has two choices:
...@@ -415,14 +402,14 @@ From this interactive map, the user has two choices: ...@@ -415,14 +402,14 @@ From this interactive map, the user has two choices:
If the user chooses to extract the class, a user defined threshold is If the user chooses to extract the class, a user defined threshold is
entered into the R console and the following files are saved: entered into the R console and the following files are saved:
- HabitatSampler object (Run) - R Binary: The R object is used when - HabitatSampler object (Run) - R Binary: The R object is used when
the user wants to restart the computation at a specific step or the user wants to restart the computation at a specific step or
reuse the seeds for sampling. reuse the seeds for sampling.
- probability map - *.kml, *.png, geocoded \*.tif: Tiff contains all - probability map - *.kml, *.png, geocoded \*.tif: Tiff contains all
classes plotted, one class, one color. See example in the classes plotted, one class, one color. See example in the
demo/Data/Results/HabitatMap\_final.pdf demo/Data/Results/HabitatMap\_final.pdf
- threshold list - R Binary - threshold list - R Binary
- leaflet interactive web interface - \*.html: LeafLet Map with the 3 - leaflet interactive web interface - \*.html: LeafLet Map with the 3
RGB channels and the raster containing the probabilities. The file RGB channels and the raster containing the probabilities. The file
is re-written for each run is re-written for each run
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment