## Commit 0c7a060b authored by Brage.Forland
##
## Initial commit
## 1. ------ FUNCTION TO DOWNLOAD SPECIES OCCUPANCY DATA ------
#' @title Download and Process Data from GBIF
#'
#' @description
#' This function connects to the GBIF server and downloads the relevant
#' occurrence data and processes them into two formats: one
#' \code{\link[sp:SpatialPointsDataFrame]{SpatialPointsDataFrame}}
#' object that contains the raw occurrence data but with the projection
#' information added, and one \code{\link[sp::SpatialGridDataFrame]{SpatialGridDataFrame}}
#' containing the observation density for each downloaded species.
#'
#' @param speciesIDs An integer vector containing the taxonomic ID codes of the species you wish to
#' download from GBIF.
#' @param templateGrid A \code{\link[sp::SpatialGridDataFrame]{SpatialGridDataFrame}} object to use as
#' a spatial template for the downloaded points. Only records that fall on cells with non-\code{NA}
#' values in the first layer of \code{templateGrid} will be included.
#' @param outFolder Scalar character giving the location to store the temporary outputs (warning: contents
#' will be deleted after processing).
#' @param yearLimits Integer vector where \code{range(yearLimits, na.rm = TRUE)} specifies the lower and
#' upper ranges of the years to use when selecting GBIF data.
#' @param GBIFUsername Scalar character containing the username of the GBIF user.
#' @param GBIFPassword Scalar character containing the password of the GBIF user.
#' @param GBIFEmail Scalar character containing the email address of the GBIF user.
#' @param GBIFAttemptTime Scalar integer denoting the number of seconds to wait between each
#' connection to GBIF to query the status of the processed download.
#' @param maxGBIFTime Scalar integer denoting the maximum number of seconds to wait before the connection
#' to GBIF fails.
#' @return A list containing two elements:
#' \describe{
#' \item{\code{rawOccurrences}}{A \code{\link[sp::SpatialPointsDataFrame]{SpatialPointsDataFrame}}
#' object containing the occurrence data downloaded with GBIF.}
#' \item{\code{griddedOccurrences}}{A \code{\link[sp::SpatialGridDataFrame]{SpatialGridDataFrame}}
#' object containing the gridded versions of the occurrence data with the density of observations
#' for each species in \code{speciesIDs}.}
#' }
#'
#' @examples
#' @author Joseph D. Chipperfield, \email{joseph.chipperfield@@nmbu.no}
#' @seealso \code{\link[sp::SpatialGridDataFrame]{SpatialGridDataFrame}}
#' \code{\link[sp::SpatialPointsDataFrame]{SpatialPointsDataFrame}}
#' @export
#'
downloadOccupancyData <- function(speciesIDs, outFolder = paste(tempdir(), "occData", sep = "/"),
  yearLimits = c(1960, Inf), GBIFUsername = Sys.getenv("GBIFUser", unset = NA),
  GBIFPassword = Sys.getenv("GBIFPass", unset = NA), GBIFEmail = Sys.getenv("GBIFEmail", unset = NA),
  GBIFAttemptTime = 1800, maxGBIFTime = 14400, templateGrid = NULL) {
  ### 1.1 ==== Sanity check the function inputs ====
  # GBIF serves occurrence coordinates in WGS84 (EPSG:4326)
  GBIFcrsString <- "+init=epsg:4326"
  GBIFcrs <- CRS(GBIFcrsString)
  # Species IDs must coerce to a non-empty character vector without NAs
  inSpeciesIDs <- tryCatch(as.character(speciesIDs), error = function(err) {
    stop(paste("invalid entry for species IDs:", err, sep = " "))
  })
  if(length(inSpeciesIDs) <= 0) {
    stop("invalid entry for species IDs: zero-length specification vector")
  }
  if(anyNA(inSpeciesIDs)) {
    stop("invalid entry for species IDs: NAs present in specification vector")
  }
  # Sanity check the template grid: it is required below to derive the
  # spatial extent of the GBIF query, so fail early and clearly on NULL
  if(is.null(templateGrid)) {
    stop("invalid entry for the template grid: a template grid must be provided")
  }
  inTemplateGrid <- templateGrid
  # BUG FIX: was 'class(...) != "SpatialGrid"'; inherits() also accepts
  # classes derived from SpatialGrid (e.g. SpatialGridDataFrame) without
  # re-coercing them
  if(!inherits(inTemplateGrid, "SpatialGrid")) {
    inTemplateGrid <- tryCatch(as(templateGrid, "SpatialGridDataFrame"), error = function(err) {
      stop(paste("invalid entry for the template grid:", err, sep = " "))
    })
  }
  # Sanity check the inputs for the output folder
  inOutFolder <- tryCatch(as.character(outFolder), error = function(err) {
    stop(paste("invalid entry for the output folder:", err, sep = " "))
  })
  if(length(inOutFolder) <= 0) {
    stop("invalid entry for the output folder: zero-length specification vector")
  } else if(length(inOutFolder) > 1) {
    warning("output folder specification vector length greater than one: only the first element will be used")
    inOutFolder <- inOutFolder[1]
  }
  if(is.na(inOutFolder)) {
    stop("invalid entry for the output folder: NAs present in specification vector")
  }
  # Create the output folder if it does not already exist
  if(!dir.exists(inOutFolder)) {
    if(!dir.create(inOutFolder)) {
      stop("unable to create output folder")
    }
  }
  # Sanity check the year limits: collapsed to a lower/upper range, where a
  # non-finite bound means that side of the range is left open
  inYearLimits <- tryCatch(range(as.double(yearLimits), na.rm = TRUE), error = function(err) {
    stop(paste("invalid entry for the year limits:", err, sep = " "))
  })
  if(anyNA(inYearLimits)) {
    stop("invalid entry for the year limits: NAs present in the specification vector")
  }
  # Sanity check the GBIF download attempt interval (seconds between polls)
  inAttemptGBIFTime <- tryCatch(as.integer(GBIFAttemptTime), error = function(err) {
    stop(paste("invalid entry for the GBIF download attempt interval:", err, sep = " "))
  })
  if(length(inAttemptGBIFTime) <= 0) {
    stop("invalid entry for the GBIF download attempt interval: zero-length specification vector")
  } else if(length(inAttemptGBIFTime) > 1) {
    warning("GBIF download attempt interval specification vector length greater than one: only the first element will be used")
    inAttemptGBIFTime <- inAttemptGBIFTime[1]
  }
  if(is.na(inAttemptGBIFTime)) {
    stop("invalid entry for the GBIF download attempt interval: NAs present in specification vector")
  } else if(inAttemptGBIFTime < 1) {
    stop("invalid entry for the GBIF download attempt interval: interval cannot be less than one")
  }
  # Sanity check the GBIF maximum download attempt time (overall timeout)
  inMaxGBIFTime <- tryCatch(as.integer(maxGBIFTime), error = function(err) {
    stop(paste("invalid entry for the GBIF maximum download attempt time:", err, sep = " "))
  })
  if(length(inMaxGBIFTime) <= 0) {
    stop("invalid entry for the GBIF maximum download attempt time: zero-length specification vector")
  } else if(length(inMaxGBIFTime) > 1) {
    warning("GBIF maximum download attempt time specification vector length greater than one: only the first element will be used")
    inMaxGBIFTime <- inMaxGBIFTime[1]
  }
  if(is.na(inMaxGBIFTime)) {
    stop("invalid entry for the GBIF maximum download attempt time: NAs present in specification vector")
  } else if(inMaxGBIFTime < 1) {
    stop("invalid entry for the GBIF maximum download attempt time: attempt time cannot be less than one")
  }
  ### 1.2 ==== Create GBIF credentials ====
  # Username, password, and email must each be a length-one, non-NA string
  gbifCredentials <- tryCatch(c(
    as.character(GBIFUsername),
    as.character(GBIFPassword),
    as.character(GBIFEmail)), error = function(err) {
      stop(paste("invalid entry for the GBIF credentials:", err, sep = " "))
    })
  if(length(gbifCredentials) != 3) {
    stop("invalid entry for the GBIF credentials: username, password, and email must have a length of 1")
  }
  if(anyNA(gbifCredentials)) {
    stop("invalid entry for the GBIF credentials: NAs present in the credentials")
  }
  ### 1.3 ==== Create GBIF query ====
  # Build the four corner points of the template grid's bounding box
  latLongCoords <- cbind(
    attr(inTemplateGrid, "bbox")[1, ][c(1, 1, 2, 2)],
    attr(inTemplateGrid, "bbox")[2, ][c(1, 2, 1, 2)])
  rownames(latLongCoords) <- NULL
  latLongLimits <- SpatialPoints(latLongCoords, proj4string = CRS(proj4string(inTemplateGrid)))
  # Reproject the corner points to WGS84 if necessary
  if(proj4string(latLongLimits) != GBIFcrsString) {
    latLongLimits <- spTransform(latLongLimits, GBIFcrs)
  }
  latLimits <- range(coordinates(latLongLimits)[, 2], na.rm = TRUE)
  longLimits <- range(coordinates(latLongLimits)[, 1], na.rm = TRUE)
  # Build the optional JSON predicates for the year limits; non-finite
  # bounds are skipped so the query stays open-ended on that side
  yearQuery <- c()
  if(is.finite(inYearLimits[1])) {
    yearQuery <- c(yearQuery, paste("\t\t\t{\"type\":\"greaterThanOrEquals\", \"key\":\"YEAR\", \"value\":\"", as.character(as.integer(inYearLimits[1])), "\"}", sep = ""))
  }
  if(is.finite(inYearLimits[2])) {
    yearQuery <- c(yearQuery, paste("\t\t\t{\"type\":\"lessThanOrEquals\", \"key\":\"YEAR\", \"value\":\"", as.character(as.integer(inYearLimits[2])), "\"}", sep = ""))
  }
  # Comma-separate the year predicates and prefix the first with a comma so
  # they splice correctly into the predicate list below
  if(length(yearQuery) > 1) {
    yearQuery[1:(length(yearQuery) - 1)] <- paste(yearQuery[1:(length(yearQuery) - 1)], ",", sep = "")
  }
  if(length(yearQuery) > 0) {
    yearQuery[1] <- paste(",", yearQuery[1], sep = "")
  }
  # Assemble the full JSON download request for the GBIF occurrence API
  downloadQuery <- paste(c("{",
    paste("\t\"creator\":\"", gbifCredentials[1], "\",", sep = ""),
    paste("\t\"notification_address\":[\"", gbifCredentials[3], "\"],", sep = ""),
    "\t\"predicate\":{",
    "\t\t\"type\":\"and\",",
    "\t\t\"predicates\":[",
    "\t\t\t{\"type\":\"or\", \"predicates\":[",
    paste("\t\t\t\t{\"type\":\"equals\", \"key\":\"TAXON_KEY\", \"value\":\"", inSpeciesIDs, "\"}", sep = "", collapse = ",\n"),
    "\t\t\t]},",
    "\t\t\t{\"type\":\"equals\", \"key\":\"HAS_COORDINATE\", \"value\":\"TRUE\"},",
    "\t\t\t{\"type\":\"equals\", \"key\":\"HAS_GEOSPATIAL_ISSUE\", \"value\":\"FALSE\"},",
    paste("\t\t\t{\"type\":\"greaterThanOrEquals\", \"key\":\"DECIMAL_LONGITUDE\", \"value\":\"", as.character(longLimits[1]), "\"},", sep = ""),
    paste("\t\t\t{\"type\":\"lessThanOrEquals\", \"key\":\"DECIMAL_LONGITUDE\", \"value\":\"", as.character(longLimits[2]), "\"},", sep = ""),
    paste("\t\t\t{\"type\":\"greaterThanOrEquals\", \"key\":\"DECIMAL_LATITUDE\", \"value\":\"", as.character(latLimits[1]), "\"},", sep = ""),
    paste("\t\t\t{\"type\":\"lessThanOrEquals\", \"key\":\"DECIMAL_LATITUDE\", \"value\":\"", as.character(latLimits[2]), "\"}", sep = ""),
    yearQuery,
    "\t\t]",
    "\t}",
    "}\n"), collapse = "\n")
  ### 1.4 ==== Download GBIF occurrence data ====
  # Submit the request; a failure here is most often a credentials problem
  downloadID <- tryCatch(occ_download(body = downloadQuery,
    user = gbifCredentials[1], pwd = gbifCredentials[2], email = gbifCredentials[3]),
    error = function(err) {
      stop(paste("error encountered during initialisation of GBIF download:", err, "(check login credentials)", sep = " "))
    })
  # Poll GBIF until the server-side preparation finishes or the overall
  # timeout is reached.
  # BUG FIX: use the scalar short-circuit operator '&&' (was vectorised '&')
  # so the status queries are skipped once the timeout has been exceeded
  totalTime <- 0
  while(totalTime < inMaxGBIFTime && (
    occ_download_meta(downloadID)$status == "RUNNING" ||
    occ_download_meta(downloadID)$status == "PREPARING"
  )) {
    Sys.sleep(min(inMaxGBIFTime - totalTime, inAttemptGBIFTime))
    totalTime <- totalTime + inAttemptGBIFTime
  }
  if(!(occ_download_meta(downloadID)$status %in% c("SUCCEEDED", "SUCCESS"))) {
    stop("unable to acquire occurrence data from GBIF")
  }
  # Download the prepared archive and import the occurrence records into R
  occDataRaw <- tryCatch(
    as.data.frame(occ_download_import(occ_download_get(as.character(downloadID), path = inOutFolder),
      downloadID, path = inOutFolder, quote = "")),
    error = function(err) {
      stop(paste("error encountered during download of occurrence data:", err, sep = " "))
    })
  # Cache the raw data alongside the downloaded archive before returning it
  saveRDS(occDataRaw, paste(inOutFolder, "occDataRaw.rds", sep = "/"))
  occDataRaw
}
## Resolve a vector of species names against the GBIF taxonomic backbone,
## caching results in a TSV file so already-resolved names are not re-queried.
## Returns a tibble with columns speciesName, resolvedName, taxonKey, dirName
## (dirName is a filesystem/variable-safe version of the species name).
## Unresolved names are dropped unless keepUnresolved = TRUE.
gbifResolveNames <- function(species, nameCacheFile = NULL, useCache = TRUE, keepUnresolved = FALSE) {
  emptyList <- tibble::tibble(speciesName = character(), resolvedName = character(),
    taxonKey = character(), dirName = character())
  # Load the cached name table when available; an old cache missing the
  # dirName column is discarded and rebuilt from scratch
  if(!is.null(nameCacheFile) && file.exists(nameCacheFile) && useCache) {
    speciesList <- readr::read_tsv(nameCacheFile, col_types = "cccc")
    if(is.null(speciesList$dirName)) {
      speciesList <- emptyList
    }
  } else {
    speciesList <- emptyList
  }
  # Only query GBIF for names not already present in the cache
  speciesMissing <- setdiff(species, speciesList$speciesName)
  for(sp in speciesMissing) {
    gbifName <- rgbif::name_backbone(sp, strict = TRUE, rank = "SPECIES")
    taxonKey <- NA
    resolvedName <- NA
    if(gbifName$matchType == "EXACT") {
      if(gbifName$status == "SYNONYM") {
        # Synonyms resolve to the accepted taxon; guard against a response
        # that lacks acceptedUsageKey (NULL would break add_row below)
        taxonKey <- if(is.null(gbifName$acceptedUsageKey)) NA else gbifName$acceptedUsageKey
      } else {
        taxonKey <- gbifName$usageKey
      }
      resolvedName <- gbifName$species
    }
    # Derive a directory name that is also a valid R variable name:
    # transliterate to ASCII, replace dashes/whitespace, then make.names()
    dirName <- iconv(sp, from = "UTF-8", to = "ASCII//TRANSLIT")
    dirName <- gsub("-|\\s+", "_", dirName)
    dirName <- make.names(dirName)
    speciesList <- tibble::add_row(speciesList,
      speciesName = as.character(sp),
      resolvedName = as.character(resolvedName),
      taxonKey = as.character(taxonKey),
      dirName = as.character(dirName))
  }
  # Persist the (possibly extended) cache; failure to write is not fatal
  if(!is.null(nameCacheFile)) {
    tryCatch(
      readr::write_tsv(speciesList, nameCacheFile),
      error = function(err) {
        warning(paste("Unable to save species names to:", nameCacheFile, sep = " "))
      })
  }
  if(keepUnresolved == FALSE) {
    speciesList <- speciesList[!is.na(speciesList$taxonKey), ]
  }
  speciesList
}
## 1. ------ FUNCTION TO CONVERT INLA MESH OBJECTS TO SPATIAL POLYGONS ------
#' @title Converts the Delaunay Triangulation in INLA Mesh Objects to SpatialPolygons
#'
#' @description
#' Converts the constructed Delaunay triangulation from a two-dimensional INLA \link[INLA::inla.mesh.create]{mesh} object
#' into a \code{\link[sp::SpatialPolygons]{SpatialPolygons}} object. This can be useful if you want to save the mesh
#' in standard vector file formats or perform further processing on the mesh.
#'
#' @param meshInput An \code{\link[INLA::inla.mesh.create]{inla.mesh}} object. This mesh must be two-dimensional.
#'
#' @return A \code{\link[sp::SpatialPolygons]{SpatialPolygons}} object containing the Delaunay triangulation from
#' the input mesh. Note that inner and outer boundary information is not present in the output.
#'
#' @examples
#' @author Joseph D. Chipperfield, \email{joseph.chipperfield@@nmbu.no}
#' @seealso \code{\link[sp:SpatialPolygons]{SpatialPolygons}}
#' @export
#'
inlaMeshToSpatialPolygons <- function(meshInput) {
  # Coerce the input to an inla.mesh and fail early with a readable message
  inMeshInput <- tryCatch(as(meshInput, "inla.mesh"), error = function(err) {
    stop(paste("invalid input mesh:", err, sep = " "))
  })
  # Only planar (R2) meshes can be converted to 2D polygons
  if(inMeshInput$manifold != "R2") {
    stop("invalid input mesh: mesh must be two-dimensional")
  }
  # Each row of graph$tv holds the three vertex indices of one Delaunay
  # triangle; turn each into a closed Polygons object.
  # BUG FIX: seq_len() is safe for a zero-row triangle matrix, where the
  # previous 1:nrow(...) would have produced the bogus sequence c(1, 0)
  delaunayPolygons <- lapply(X = seq_len(nrow(inMeshInput$graph$tv)), FUN = function(curRowIndex, pointCoords, pointIndices) {
    # Retrieve the vertex coordinates of the current triangle
    curIndices <- pointIndices[curRowIndex, ]
    # Repeat the first vertex so the triangle ring is closed
    Polygons(list(Polygon(
      pointCoords[c(curIndices, curIndices[1]), ], hole = FALSE
    )), ID = paste("Delaunay", curRowIndex, sep = "_"))
  }, pointCoords = inMeshInput$loc[, c(1, 2)], pointIndices = inMeshInput$graph$tv)
  # Bundle the triangles into a single SpatialPolygons object, carrying over
  # the coordinate reference system stored on the mesh
  SpatialPolygons(delaunayPolygons, proj4string = inMeshInput$crs)
}
## Load all covariate rasters from a directory into a single RasterBrick,
## optionally excluding files (e.g. the template raster), and attach layer
## descriptions read from an optional tab-separated 'layers.txt' file.
## Returns a list with elements 'raster' (RasterBrick) and 'descriptions'
## (named list of descriptions, or NULL when no layers.txt is present).
loadCovariates <- function(covariateDir, includeFiles = '*.tif', excludeFiles = 'template.tif') {
  # Collect the matching raster files, dropping any excluded patterns
  files <- list.files(covariateDir, pattern = glob2rx(includeFiles))
  if(!is.null(excludeFiles)) {
    files <- grep(glob2rx(excludeFiles), files, value = TRUE, invert = TRUE)
  }
  files <- paste(covariateDir, files, sep = "/")
  rbrick <- raster::brick(lapply(files, raster::raster))
  # Optional per-layer descriptions: two tab-separated columns (layer, description)
  layersFile <- paste(covariateDir, 'layers.txt', sep = "/")
  if(file.exists(layersFile)) {
    layerDesc <- readr::read_tsv(layersFile, col_names = c("layer", "description"), col_types = "cc")
    description <- as.list(layerDesc$description)
    names(description) <- layerDesc$layer
  } else {
    # BUG FIX: was 'desc <- NULL', which left 'description' undefined and
    # made the return statement error whenever layers.txt was absent
    description <- NULL
  }
  list(raster = rbrick, descriptions = description)
}
##
## Cache interface.
##
## Write raw occurrence records to the cache directory, one gzipped CSV per
## taxon (named <taxonKey>.csv.gz), keeping only the columns needed downstream.
writeOccurrences <- function(occDataRaw, cacheDir) {
  speciesList <- unique(occDataRaw$taxonKey)
  if(!dir.exists(cacheDir)) {
    dir.create(cacheDir, recursive = TRUE)
  }
  # dir.create may fail (e.g. permissions); re-check rather than write blindly
  if(!dir.exists(cacheDir)) {
    stop("Unable to save occurences cache")
  }
  # Columns retained in the per-taxon cache files
  keepCols <- c("taxonKey", "decimalLongitude", "decimalLatitude", "species",
    "family", "year", "gbifID", "speciesKey", "lifeStage", "institutionCode")
  for(taxonID in speciesList) {
    taxonCacheFile <- paste0(cacheDir, "/", taxonID, ".csv.gz")
    speciesOccTMP <- occDataRaw[occDataRaw$taxonKey == taxonID, ][keepCols]
    write_csv(speciesOccTMP, gzfile(taxonCacheFile))
  }
}
## Get the cache status for a list of taxonKeys.
## Partition a set of taxonKeys into those with a fresh cache file in
## cacheDir (younger than maxCacheDays) and those that must be downloaded.
## Returns a list with elements 'missing' and 'available'.
getDownloadCacheStatus <- function(taxonIds, cacheDir, maxCacheDays = 365) {
  if(!is.null(cacheDir) && !dir.exists(cacheDir)) {
    stop("Unable to access occurences cache")
  }
  taxaMissing <- c()
  taxaFound <- c()
  for(taxonID in unlist(taxonIds)) {
    taxonCacheFile <- paste0(cacheDir, "/", taxonID, ".csv.gz")
    # A cache entry is valid when its file exists and is younger than
    # maxCacheDays.
    # BUG FIX: the age was previously computed as mtime - Sys.time()
    # (always negative, so stale files were never detected) and the
    # resulting difftime was compared without pinning its units; compute
    # an explicit age in seconds instead.
    cacheAgeSecs <- if(file.exists(taxonCacheFile)) {
      as.numeric(difftime(Sys.time(), file.info(taxonCacheFile)$mtime, units = "secs"))
    } else {
      Inf
    }
    if(cacheAgeSecs < maxCacheDays * 86400) {
      taxaFound <- c(taxaFound, taxonID)
    } else {
      taxaMissing <- c(taxaMissing, taxonID)
    }
  }
  list(missing = taxaMissing, available = taxaFound)
}
## Load cached occurrence records for a set of taxonKeys, returning an sf
## point layer (WGS84) plus the lists of taxa found in / missing from the cache.
loadOccurences <- function(taxonIDs, cacheDir = NULL, maxCacheDays = 365) {
  # BUG FIX: the status query previously hard-coded cacheDir = NULL and
  # maxCacheDays = 365, ignoring the arguments supplied by the caller
  cacheStatus <- getDownloadCacheStatus(taxonIDs, cacheDir = cacheDir, maxCacheDays = maxCacheDays)
  if(!is.null(cacheDir) && !dir.exists(cacheDir)) {
    dir.create(cacheDir, recursive = TRUE)
  }
  taxaMissing <- c()
  taxaFound <- c()
  species.df <- data.frame(taxonId = integer(), longitude = double(), latitude = double())
  for(taxonID in unlist(taxonIDs)) {
    taxonCacheFile <- paste0(cacheDir, "/", taxonID, ".csv.gz")
    # BUG FIX: cache age was computed as mtime - Sys.time() (always
    # negative, so stale caches were never skipped); compare an explicit
    # age in seconds against the maxCacheDays limit instead
    cacheFresh <- file.exists(taxonCacheFile) &&
      as.numeric(difftime(Sys.time(), file.info(taxonCacheFile)$mtime, units = "secs")) < maxCacheDays * 86400
    if(cacheFresh) {
      df <- read_csv(gzfile(taxonCacheFile), col_types = cols(taxonKey = col_integer(),
        decimalLongitude = col_double(), decimalLatitude = col_double()))
      species.df <- rbind(species.df, cbind(df$taxonKey, df$decimalLongitude, df$decimalLatitude))
      taxaFound <- c(taxaFound, taxonID)
    } else {
      taxaMissing <- c(taxaMissing, taxonID)
    }
  }
  names(species.df) <- c("taxonKey", "longitude", "latitude")
  # Promote the point records to an sf object in WGS84 (GBIF native CRS)
  occurrences <- st_as_sf(species.df, coords = c(2, 3), crs = 4326)
  list(
    occurrences = occurrences,
    taxaFound = taxaFound,
    taxaMissing = taxaMissing
  )
}
##
## Create gridded occurrences from point data and write gridded and points used in grid to the
## per taxon directory.
##
# Returns a list: geom = the points that fall inside the template grid,
# raster = a RasterBrick with one per-taxon layer of occurrence counts per cell.
occurrencesToGrid <- function(occurrences, gridTemplate, speciesInfo, outputLocation = NULL, saveRaster = FALSE, savePoints = FALSE) {
# Saving was requested but no destination given: warn once, disable both flags
if((saveRaster ||savePoints) && is.null(outputLocation)) {
warning("outputLocation is not set, ignoring saveRaster and savePoint flags")
saveRaster=FALSE
savePoints=FALSE
}
# Points within template
# (points falling on NA cells of the template are discarded)
occPoints <- occurrences[!is.na(raster::extract(gridTemplate, occurrences)),]
taxaFound <- unique(occPoints$taxonKey)
# Build one density layer per taxon present in the filtered points
rasterLayers<- lapply(X=taxaFound, FUN=function(taxonID) {
taxonOcc <- dplyr::filter(occPoints, taxonKey == taxonID)
# Count points per cell; 'gridTemplate * 0' supplies a zero-filled base grid
# so cells without points stay 0 under update=TRUE rather than NA
taxonLayer <- raster::rasterize(taxonOcc,
gridTemplate * 0,
fun='count', field='taxonKey', update=TRUE)
# Re-apply the template mask so cells outside the study area remain NA
taxonLayer <- raster::mask(taxonLayer, gridTemplate)
# NOTE(review): assumes speciesInfo behaves like a tibble, where single-column
# selection keeps a one-column table so [["dirName"]] works below — a plain
# data.frame would drop to a bare vector here; confirm with callers
curTaxonInfo <- speciesInfo[speciesInfo$taxonKey==taxonID,"dirName"]
dirName <- paste(outputLocation,curTaxonInfo[["dirName"]],sep="/")
# NOTE(review): the per-taxon directory is created even when neither
# saveRaster nor savePoints is set — confirm this side effect is intended
if(!dir.exists(dirName)) dir.create(dirName,recursive=TRUE)
if(saveRaster && !is.null(curTaxonInfo[["dirName"]])) {
message(paste("Writing gridded occurrences", dirName))
raster::writeRaster(taxonLayer, paste(dirName,"GriddedOccurrences.tif",sep="/"), overwrite=TRUE)
}
if(savePoints && !is.null(curTaxonInfo[["dirName"]])) {
sf::st_write(taxonOcc, paste(outputLocation,curTaxonInfo[["dirName"]],"occurrences.shp",sep="/"), delete_layer=TRUE)
}
# Name the layer after the taxon's directory name for the output brick
names(taxonLayer) <- curTaxonInfo[["dirName"]]
taxonLayer
})
occRaster = raster::brick(rasterLayers)
list(geom=occPoints,
raster=occRaster)
}
## 1. ------ FUNCTION TO MODEL OCCUPANCY USING INLA SPATIAL GLM
#' @title Produce Species Distribution Models
#'
#' @description
#' This function performs a generalised linear regression model with a spatial SPDE random effect using INLA on
#' provided occurrence data and a set of linear covariates.
#'
#' @param occurrenceData A \code{\link[sp::SpatialGridDataFrame]{SpatialGridDataFrame}} containing the gridded
#' occurrence data. All values in the associated \code{data.frame} greater than zero will be treated as a one
#' for the purposes of the model.
#' @param climateData A \code{\link[sp::SpatialGridDataFrame]{SpatialGridDataFrame}} containing the climate data.
#' @param alpha A \code{numeric} scalar containing the SPDE fractional operator order.
#' @param meshParameters A \code{list} containing the parameters to use in the construction of the spatial mesh
#' to approximate the random effects. These parameters are passed to the \code{\link[INLA::inla.mesh.2d]{inla.mesh.2d}}
#' and \code{\link[INLA::inla.nonconvex.hull]{inla.nonconvex.hull}} functions.
#' @param meshBoundary A \code{\link[sp::SpatialPolygons]{SpatialPolygons}} object containing the boundary of the
#' mesh. If this is \code{NULL} then instead construct a border polygon using the \code{\link[INLA::inla.nonconvex.hull]{inla.nonconvex.hull}}
#' function and parameters passed to \code{meshParameters}.
#' @param responseDensity An \code{integer} scalar. The number of sampling locations to use when building the
#' response curves.
#' @param outFolder A character scalar denoting the folder to place the uncompressed and processed
#' WORLDCLIM data.
#' @param createGeoTIFF A logical scalar. If \code{TRUE} then a series of GeoTIFF files are created
#' in the \code{outFolder} directory with the model predictions for each species.
#' @param createRObFile A logical scalar. If \code{TRUE} then an R object file (with extension .rds) is
#' created in the \code{outFolder} with the fitted model object for each species.
#'
#' @return
#'
#' @examples
#' @author Joseph D. Chipperfield, \email{joseph.chipperfield@@nmbu.no}
#' @seealso \code{\link[sp::SpatialGridDataFrame]{SpatialGridDataFrame}} \code{\link[INLA::inla.mesh.2d]{inla.mesh.2d}}
#' \code{\link[INLA::inla.nonconvex.hull]{inla.nonconvex.hull}}
#' @export
#'
runINLASpatialGLM <- function(occurrenceData, climateData, alpha = 1.5, meshParameters = list(), meshBoundary = NULL, responseDensity = 100,
outFolder = tempdir(), createGeoTIFF = TRUE, createRObFile = TRUE) {
### 1.1 ==== Sanity check the function inputs ====
# Sanity check the output generation flags
inCreateGeoTIFF <- tryCatch(as.logical(createGeoTIFF), error = function(err) {
stop(paste("invalid entry for the GeoTIFF creation flag:", err, sep = " "))
})
if(length(inCreateGeoTIFF) <= 0) {
stop("invalid entry for the GeoTIFF creation flag: zero vector length")
} else if(length(inCreateGeoTIFF) > 1) {
warning("GeoTIFF creation flag vector length greater than one: only the first element will be used")
inCreateGeoTIFF <- inCreateGeoTIFF[1]
}
if(any(is.na(inCreateGeoTIFF))) {
stop("invalid entry for the GeoTIFF creation flag: NA values present")
}
inCreateRObFile <- tryCatch(as.logical(createRObFile), error = function(err) {
stop(paste("invalid entry for the R object file creation flag:", err, sep = " "))
})
if(length(inCreateRObFile) <= 0) {
stop("invalid entry for the R object file creation flag: zero vector length")
} else if(length(inCreateRObFile) > 1) {
warning("R object creation flag vector length greater than one: only the first element will be used")
inCreateRObFile <- inCreateRObFile[1]
}
if(any(is.na(inCreateRObFile))) {
stop("invalid entry for the R object creation flag: NA values present")
}
# Sanity check the location of the output folder
inOutFolder <- tryCatch(as.character(outFolder), error = function(err) {
stop(paste("invalid entry for the output folder location:", err, sep = " "))
})
if(length(inOutFolder) <= 0) {
stop("invalid entry for the output folder location: zero vector length")
} else if(length(inOutFolder) > 1) {
warning("length of vector specifying the location of the ouput folder is greater than one: only the first element will be used")
inOutFolder <- inOutFolder[1]
}
# Check that the folder exists
if(!dir.exists(inOutFolder)) {
stop("output directory does not exist")
}
# Check the occurrence data
inOccurrenceData <- tryCatch(as(occurrenceData, "SpatialGridDataFrame"), error = function(err) {
stop(paste("invalid input for the occurrence data:", err, sep = " "))
})
# Check the climate data
inClimateData <- tryCatch(as(climateData, "SpatialGridDataFrame"), error = function(err) {
stop(paste("invalid input for the environmental covariates:", err, sep = " "))
})
# Ensure that they have the same grid topology and coordinate reference system
if(!identical(as(inOccurrenceData, "SpatialGrid"), as(inClimateData, "SpatialGrid"))) {
stop("occurrence data and environmental covariate data do not share the same grid topology and/or coordinate projection system")
}
# Test the value of the fractional operator order
inAlpha <- tryCatch(as.double(alpha), error = function(err) {
stop(paste("invalid input for the fractional operator order:", err, sep = " "))
})
if(is.null(inAlpha) || length(inAlpha) <= 0) {
stop("invalid input for the fractional operator order: vector has zero length")
} else if(length(inAlpha) > 1) {
warning("fractional operator order specification vector length greater than one: only the first element will be used")
inAlpha <- inAlpha[1]
}