Use linters line_length and multiple_dots
MartinRoth committed Jul 2, 2017
1 parent d27afc4 commit 4276fa3
Showing 15 changed files with 118 additions and 68 deletions.
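
This commit enables two more lintr checks by dropping their exclusions from inst/.lintr: line_length_linter (80 characters by default) and multiple_dots_linter, which flags object names containing more than one dot. Most of the diff therefore wraps long calls across several lines or marks unavoidable long lines with # nolint. A minimal sketch, not part of the commit, of re-running the checks as now configured (lint_package() and with_defaults() are lintr functions):

library(lintr)

# Keep the two exclusions that remain in inst/.lintr; line_length_linter and
# multiple_dots_linter are part of lintr's defaults, so omitting them from the
# exclusion list (as this commit does) turns them back on.
lint_package(linters = with_defaults(camel_case_linter = NULL,
                                     commented_code_linter = NULL))
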
13 changes: 7 additions & 6 deletions R/EOBS.R
@@ -84,7 +84,8 @@ GetEOBS <- function(filename, variable, area, period, na.rm) {
result[, year := as.numeric(format(time, "%Y"))]
result[, month := as.numeric(format(time, "%m"))]
result[, day := as.numeric(format(time, "%d"))]
setcolorder(result, c("time", "year", "month", "day", "lat", "lon", variable, "pointID"))
setcolorder(result, c("time", "year", "month", "day", "lat", "lon",
variable, "pointID"))
return(result)
}

@@ -109,7 +110,7 @@ SanitizeInputEOBS <- function(variable, period, area, grid) {
stop()
},
error = function(e) {
stop("Period should be either Numeric, timeBased or ISO-8601 style.")
stop("Period should be either Numeric, timeBased or ISO-8601 style.") # nolint
})
if (!class(area) %in% c("SpatialPolygons", "SpatialPolygonsDataFrame")) {
stop("Area should be of class SpatialPolygons or SpatialPolygonsDataFrame.")
@@ -164,7 +165,7 @@ GetEobsBbox <- function(filename, variableName, bbox, period){
# the bounding box
validRange <- list()
validRange$time <- which(findInterval(values$time,
periodBoundaries(values$time, period)) == 1)
periodBoundaries(values$time, period)) == 1)
validRange$lat <- which(findInterval(values$lat, bbox[2, ]) == 1)
validRange$lon <- which(findInterval(values$lon, bbox[1, ]) == 1)

@@ -198,11 +199,11 @@ CreateDataTableMelt <- function(variable, validValues) {
if (length(validValues$time) > 1) {
meltedValues <- reshape2::melt(validValues[[variable]],
varnames = c("lon", "lat", "time"))
result <- as.data.table(meltedValues)
result <- as.data.table(meltedValues) # nolint
} else {
meltedValues <- reshape2::melt(validValues[[variable]],
varnames = c("lon", "lat"))
result <- as.data.table(meltedValues)
result <- as.data.table(meltedValues) # nolint
result[, time := 1]
}
setkey(result, lon, lat)
@@ -230,7 +231,7 @@ removeOutsiders <- function(data, area) {
by = pointID][, list(lon, lat)]
points <- sp::SpatialPoints(coords, area@proj4string)
index <- data[, unique(pointID)][which(!is.na(sp::over(points,
as(area, "SpatialPolygons"))))]
as(area, "SpatialPolygons"))))]
data <- data[pointID %in% index]
setkey(data, lon, lat)
return(data[, pointID := .GRP, by = key(data)])
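
The trailing # nolint comments added above suppress every lintr check on that single line; they are used where a long stop() message, URL, or reference path cannot reasonably be wrapped. lintr also supports block suppression with # nolint start / # nolint end, which inst/MetaDataDefinition/MetaDataDefinition.R uses further down. A minimal sketch with made-up names:

# Single-line suppression: no linter fires on this line.
stop("A deliberately long error message that would otherwise trip line_length_linter") # nolint

# Block suppression: everything between the markers is skipped.
# nolint start
some.legacy.name <- function(x) x
# nolint end
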
30 changes: 20 additions & 10 deletions R/ImportData.R
@@ -28,7 +28,9 @@ HomogenizedPrecipitation <- function(stationId, periodStart=1910) {
HomogenPrecip <- function(location, period, whichSet = "automatic", path="") {
cl <- match.call()
SanitizeInput(type = "HomogenPrecip", location, period, whichSet)
if (is.null(path)) tmp <- PrecipitationDownload(location, period, whichSet, cl)
if (is.null(path)) {
tmp <- PrecipitationDownload(location, period, whichSet, cl)
}
else {
fileName <- SpecifyFileName("HomogenPrecip", path, location, period)
if (!file.exists(fileName)) {
@@ -46,7 +48,8 @@ PrecipitationDownload <- function(location, period, whichSet, call) {
DownloadMessage("HomogenPrecip")
periodStart <- HomogenPrecipPeriodStart(period)
if (is.numeric(location)) {
tmpStart <- ifelse(stationMetaData[list(location), longRecord] & whichSet != 1951, 1910, 1951)
tmpStart <- ifelse(stationMetaData[list(location), longRecord] &
whichSet != 1951, 1910, 1951)
tmp <- HomogenizedPrecipitation(location, tmpStart)
} else {
standardCRSstring <- "+proj=longlat +ellps=WGS84"
@@ -57,11 +60,14 @@ PrecipitationDownload <- function(location, period, whichSet, call) {
if (periodStart == 1910 | whichSet == 1910) {
tmpMetaData <- tmpMetaData[longRecord == TRUE, ]
}
stationLocations <- sp::SpatialPoints(tmpMetaData[, list(lon, lat)], CRS(standardCRSstring))
tmpMetaData[, inArea := sp::over(stationLocations, as(location, "SpatialPolygons"))]
stationLocations <- sp::SpatialPoints(tmpMetaData[, list(lon, lat)],
CRS(standardCRSstring))
tmpMetaData[, inArea := sp::over(stationLocations,
as(location, "SpatialPolygons"))]
tmpMetaData <- na.omit(tmpMetaData)
tmp <- foreach(i = 1 : tmpMetaData[, .N], .combine = "rbind") %do% {
tmpStart <- ifelse(tmpMetaData[i, longRecord] & whichSet != 1951, 1910, 1951)
tmpStart <- ifelse(tmpMetaData[i, longRecord] &
whichSet != 1951, 1910, 1951)
HomogenizedPrecipitation(tmpMetaData[i, stationId], tmpStart)
}
}
@@ -76,9 +82,11 @@ DownloadMessage <- function(name) {
}

DownloadMessageContent <- function(name) {
urlQuake <- "www.knmi.nl/kennis-en-datacentrum/dataset/aardbevingscatalogus"
urlClimExp <- "www.climexp.knmi.nl"
switch(name,
"Earthquakes" = return("www.knmi.nl/kennis-en-datacentrum/dataset/aardbevingscatalogus"),
"HomogenPrecip" = return("www.climexp.knmi.nl")
"Earthquakes" = return(urlQuake),
"HomogenPrecip" = return(urlClimExp)
)
}

@@ -92,8 +100,10 @@ HomogenPrecipDates <- function(period) {
#' Loads the KNMI earthquake catalogue
#' @param type Type of catalogue c('induced', 'tectonic')
#' @param area Inheriting from spatial polygon
#' @param period Either numeric, timeBased or ISO-8601 style (see \code{\link[xts]{.subset.xts}})
#' @param path for saving data (if set to NULL data are always downloaded but not saved)
#' @param period Either numeric, timeBased or ISO-8601 style
#' (see \code{\link[xts]{.subset.xts}})
#' @param path for saving data (if set to NULL data are always downloaded
#' but not saved)
#' @return data.table with rows being the single events
#' @export
#' @examples
@@ -155,7 +165,7 @@ IsInArea <- function(points, area) {
tmp <- sp::over(points, area)
index <- as.vector(!is.na(tmp[, 1, drop = FALSE]))
} else {
stop("Area should be of class `SpatialPolygons' or `SpatialPolygonsDataFrame'")
stop("Area should be of class `SpatialPolygons' or `SpatialPolygonsDataFrame'") # nolint
}
if (class(index) != "logical" & length(points) != length(index)) {
stop("index should be a logical of the same length as points")
2 changes: 1 addition & 1 deletion R/KIS.R
@@ -99,7 +99,7 @@ ExecuteKISRecipe <- function(recipeName, period) {

flog.info("Start data download.")
download.file(url, destFile, method = "wget", quiet = T,
extra = c('--header="Content-Type:application/x-www-form-urlencoded"',
extra = c('--header="Content-Type:application/x-www-form-urlencoded"', # nolint
paste0('--post-file="', recipeName, '"')))
flog.info("Download finished.")

8 changes: 5 additions & 3 deletions R/Sanitation.R
@@ -24,7 +24,7 @@ SanitizeHomogenPrecip <- function(location, period, whichSet) {
stop()
},
error = function(e) {
stop("Period should be either Numeric, timeBased or ISO-8601 style.")
stop("Period should be either Numeric, timeBased or ISO-8601 style.") # nolint
})
periodStart <- HomogenPrecipPeriodStart(period)
isStationId <- FALSE
@@ -38,15 +38,17 @@ SanitizeHomogenPrecip <- function(location, period, whichSet) {
}
}
if (!(isStationId | isArea)) {
stop("Location should be either valid station id or spatial polygon, with non-empty intersection.")
stop("Location should be either valid station id or spatial polygon, with non-empty intersection.") # nolint
}
if (isStationId) CheckStationId(location, periodStart)
if (whichSet != "automatic") {
if (periodStart < whichSet) {
lastDate <- as.Date(xts::.parseISO8601(period)$last.time)
warning(paste0("Period is restricted to 1951-01-01/", lastDate))
}
else if (periodStart > whichSet & isArea) message("You could consider more stations for the given period by choosing whichSelect='automatic'")
else if (periodStart > whichSet & isArea) {
message("You could consider more stations for the given period by choosing whichSelect='automatic'") # nolint
}
}
}

3 changes: 2 additions & 1 deletion R/SpecifyFileName.R
@@ -51,7 +51,8 @@ GetFullySpecifiedArea <- function(name, area = NULL) {
GetMaxDomain <- function(name) {
lon <- lat <- NULL
switch (name,
"HomogenPrecip" = return(sp::bbox(sp::SpatialPoints(stationMetaData[, cbind(lon, lat)]))),
"HomogenPrecip" = return(sp::bbox(sp::SpatialPoints(stationMetaData[,
cbind(lon, lat)]))),
"InducedQuakes" = return(EarthquakesBoundaryBox),
"TectonicQuakes" = return(EarthquakesBoundaryBox))
}
2 changes: 1 addition & 1 deletion R/TechnicalImport.R
@@ -9,7 +9,7 @@ ReadZippedFile <- function(url, colNames) {

UpdateJsonTable <- function(jsonTable) {
depth <- lat <- lon <- mag <- NULL
tmp <- as.data.table(jsonTable)
tmp <- as.data.table(jsonTable) # nolint
tmp[, date := as.Date(date, tz = "CET")]
tmp[, depth := as.numeric(depth)]
tmp[, lat := as.numeric(lat)]
2 changes: 0 additions & 2 deletions inst/.lintr
@@ -1,7 +1,5 @@
linters: with_defaults(
camel_case_linter = NULL, # 283
line_length_linter = NULL, # 64
commented_code_linter = NULL, # 27
multiple_dots_linter = NULL, # 3
NULL
)
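
With the two exclusions removed, the remaining inst/.lintr (reconstructed from the hunk above) reduces to:

linters: with_defaults(
  camel_case_linter = NULL, # 283
  commented_code_linter = NULL, # 27
  NULL
  )
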
2 changes: 2 additions & 0 deletions inst/MetaDataDefinition/MetaDataDefinition.R
@@ -1,3 +1,4 @@
# nolint start
dataDescription <- list()
dataDescription$Earthquakes <- "Induced Earthquakes"
dataDescription$HomogenPrecip <- "Obtained from <a href='http:https://climexp.knmi.nl'>KNMI</a>
@@ -17,6 +18,7 @@ availableDataSets <- c("Earthquakes", "HomogenPrecip")
stationMetaData <- data.table::fread("./inst/MetaDataDefinition/stationMetaData.csv",
stringsAsFactors = FALSE)
data.table::setkey(stationMetaData, stationId)
# nolint end

devtools::use_data(dataDescription, dataCitation, dataLicense,
availableDataSets, stationMetaData,
6 changes: 4 additions & 2 deletions man/Earthquakes.Rd

Some generated files are not rendered by default.

6 changes: 4 additions & 2 deletions tests/testthat/test-DataSources.R
@@ -8,8 +8,10 @@ today <- as.Date(Sys.time())
test_that("Actuality", {
skip_on_travis()
skip_on_appveyor()
expect_gt(HomogenPrecip(11, paste0(today - 45, "/", today), path = NULL)[, .N], 0)
recentQuakes <- Earthquakes("induced", NULL, paste0(today - 14, "/", today), path = NULL)
expect_gt(HomogenPrecip(11, paste0(today - 45, "/", today),
path = NULL)[, .N], 0)
recentQuakes <- Earthquakes("induced", NULL, paste0(today - 14, "/", today),
path = NULL)
expect_gt(recentQuakes[, .N], 0)
expect_match(License(recentQuakes), "Open data")
})
20 changes: 13 additions & 7 deletions tests/testthat/test-EOBS.R
@@ -5,18 +5,24 @@ adm0 <- raster::getData("GADM", country = "NL", level = 0)

test_that("EOBS error messages", {
expect_error(EOBS("foo"), "Variable foo not known.")
expect_error(EOBS("tg", "A", "foo"), "Period should be either Numeric, timeBased or ISO-8601 style.")
expect_error(EOBS("tg", "2014", "foo"), "Area should be of class SpatialPolygons or SpatialPolygonsDataFrame.")
expect_error(EOBS("tg", "2014", adm0, "foo"), "Grid should be specified correctly.")
expect_error(EOBS("tg", "A", "foo"),
"Period should be either Numeric, timeBased or ISO-8601 style.")
expect_error(EOBS("tg", "2014", "foo"),
"Area should be of class SpatialPolygons or SpatialPolygonsDataFrame.")
expect_error(EOBS("tg", "2014", adm0, "foo"),
"Grid should be specified correctly.")
})

test_that("EOBS regression tests", {
skip_on_appveyor()
expect_equal_to_reference(EOBS("tg", "2014", adm0, "0.50reg"), file = "EOBSreference/output.rds")
expect_equal_to_reference(EOBS("rr", "2014", adm0, "0.50reg"), file = "EOBSreference/output_rr.rds")
expect_equal_to_reference(EOBS("tg", "2014", adm0, "0.50reg"),
file = "EOBSreference/output.rds")
expect_equal_to_reference(EOBS("rr", "2014", adm0, "0.50reg"),
file = "EOBSreference/output_rr.rds")
expect_equal_to_reference(EOBS("tg", "2015-06-01", adm0, grid = "0.50reg"),
file = "EOBSreference/output_one_timestep.rds")
expect_equal_to_reference(EOBSLocal("tg", "tg_0.50deg_reg_v12.0_plus_2015_ANN_avg.nc",
"2000/2015", adm0),
expect_equal_to_reference(EOBSLocal("tg",
"tg_0.50deg_reg_v12.0_plus_2015_ANN_avg.nc",
"2000/2015", adm0),
file = "EOBSreference/output_local.rds")
})
9 changes: 6 additions & 3 deletions tests/testthat/test-KISdata.R
@@ -10,11 +10,13 @@ test_that("Obtain temperature", {
skip_on_travis()
skip_if_not(grepl("knmi.nl", node))
expect_match(WriteKISRecipe("TG", "260_H", "2016"), "KIStable.txt")
expect_error(KIS("rr", "260_H", "2016"), "Must be element of set {'TG', 'MOR_10'}.", fixed = TRUE)
expect_error(KIS("rr", "260_H", "2016"),
"Must be element of set {'TG', 'MOR_10'}.", fixed = TRUE)
result <- KIS("TG", "260_H", "2016-08/2016-09")
expect_equal_to_reference(result, file = "testOutput/temperatureDeBilt.rds")
result <- KIS("TG", "310_H", "2016-08/2016-09")
expect_equal_to_reference(result, file = "testOutput/temperatureVlissingen.rds")
expect_equal_to_reference(result,
file = "testOutput/temperatureVlissingen.rds")
})

test_that("Obtain MOR_10", {
@@ -26,5 +28,6 @@ test_that("Obtain MOR_10", {

test_that("Get error outside", {
skip_if_not(!grepl("knmi.nl", node))
expect_error(KIS("TG", "260_H", "2016"), "function works only inside KNMI", fixed = TRUE)
expect_error(KIS("TG", "260_H", "2016"),
"function works only inside KNMI", fixed = TRUE)
})
2 changes: 1 addition & 1 deletion tests/testthat/testEarthquakes.R
@@ -16,7 +16,7 @@ test_that("Earthquakes catalogue gives the same results", {
check.attributes = FALSE)
expect_equal_to_reference(Earthquakes("induced", Groningen, "1980/2015-11-15",
path = NULL),
file = "./referenceOutput/outputQuakesGroningen.rds",
file = "./referenceOutput/outputQuakesGroningen.rds", # nolint
check.attributes = FALSE)
})
