Use linters: single_quotes, infix_spaces, commas_linter
MartinRoth committed Jul 2, 2017
1 parent 1a6e9fc commit d27afc4
Showing 16 changed files with 99 additions and 98 deletions.
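The three linters named in the commit message enforce double quotes over single quotes, spaces around infix operators (a == b rather than a==b), and a space after commas (x[1, 2] rather than x[1,2]). As a rough sketch, assuming the lintr 1.x API of the time, the remaining violations could have been listed with a call like the hypothetical one below; the diff that follows is the mechanical result of fixing them.

library(lintr)

# Run only the three newly enforced linters over the package sources.
lint_package(linters = list(single_quotes = single_quotes_linter,
                            infix_spaces  = infix_spaces_linter,
                            commas        = commas_linter))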
67 changes: 34 additions & 33 deletions R/EOBS.R
@@ -95,12 +95,13 @@ GetEOBS <- function(filename, variable, area, period, na.rm) {
# @param area Area
# @param grid Grid
SanitizeInputEOBS <- function(variable, period, area, grid) {
if (variable %in% c('tg_stderr', 'tn_stderr', 'tx_stderr', 'pp_stderr',
'rr_stderr')) {
if (variable %in% c("tg_stderr", "tn_stderr", "tx_stderr", "pp_stderr",
"rr_stderr")) {
stop("Standard error of variables not yet implemented.")
}
else if (!variable %in% c('tg', 'tn', "tx", "pp", 'rr', 'tg_stderr', 'tn_stderr',
'tx_stderr', 'pp_stderr', 'rr_stderr')) {
else if (!variable %in% c("tg", "tn", "tx", "pp", "rr", "tg_stderr",
"tn_stderr", "tx_stderr", "pp_stderr",
"rr_stderr")) {
stop(paste("Variable", variable, "not known."))
}
tryCatch(xts::.parseISO8601(period),
@@ -110,7 +111,7 @@ SanitizeInputEOBS <- function(variable, period, area, grid) {
error = function(e) {
stop("Period should be either Numeric, timeBased or ISO-8601 style.")
})
if (!class(area) %in% c("SpatialPolygons","SpatialPolygonsDataFrame")) {
if (!class(area) %in% c("SpatialPolygons", "SpatialPolygonsDataFrame")) {
stop("Area should be of class SpatialPolygons or SpatialPolygonsDataFrame.")
}
if (!grid %in% c("0.25reg", "0.50reg", "0.25rot", "0.50rot")) {
@@ -122,25 +123,25 @@ SanitizeInputEOBS <- function(variable, period, area, grid) {
# @param variableName Variable name
# @param grid Grid
specifyURL <- function(variableName, grid) {
url <- 'http://opendap.knmi.nl/knmi/thredds/dodsC/e-obs_'
if (grid=="0.50reg") {
url <- paste(url, '0.50regular/', sep="")
ending <- '_0.50deg_reg_v15.0.nc'
url <- "http:https://opendap.knmi.nl/knmi/thredds/dodsC/e-obs_"
if (grid == "0.50reg") {
url <- paste0(url, "0.50regular/")
ending <- "_0.50deg_reg_v15.0.nc"
}
if (grid=="0.25reg") {
url <- paste(url, '0.25regular/', sep="")
ending <- '_0.25deg_reg_v15.0.nc'
if (grid == "0.25reg") {
url <- paste0(url, "0.25regular/")
ending <- "_0.25deg_reg_v15.0.nc"
}
url <- paste(url, variableName, ending, sep="")
url <- paste(url, variableName, ending, sep = "")
return(url)
}

# Get the EOBS netcdf dimensions
GetEobsDimensions <- function(ncdfConnection) {
values <- list()
values$lat <- ncdf4::ncvar_get(ncdfConnection, varid = 'latitude')
values$lon <- ncdf4::ncvar_get(ncdfConnection, varid = 'longitude')
values$time <- ncdf4::ncvar_get(ncdfConnection, varid = 'time')
values$lat <- ncdf4::ncvar_get(ncdfConnection, varid = "latitude")
values$lon <- ncdf4::ncvar_get(ncdfConnection, varid = "longitude")
values$time <- ncdf4::ncvar_get(ncdfConnection, varid = "time")
return(values)
}
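GetEobsDimensions simply reads the three coordinate variables from an open netCDF handle. A hypothetical usage sketch (specifyURL is the helper above; the time axis is stored as days since 1950-01-01, as the origin used further down confirms):

nc <- ncdf4::nc_open(specifyURL("tg", "0.50reg"))
dims <- GetEobsDimensions(nc)  # list with $lat, $lon and $time
ncdf4::nc_close(nc)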

@@ -163,14 +164,14 @@ GetEobsBbox <- function(filename, variableName, bbox, period){
# the bounding box
validRange <- list()
validRange$time <- which(findInterval(values$time,
periodBoundaries(values$time, period))==1)
validRange$lat <- which(findInterval(values$lat, bbox[2,])==1)
validRange$lon <- which(findInterval(values$lon, bbox[1,])==1)
periodBoundaries(values$time, period)) == 1)
validRange$lat <- which(findInterval(values$lat, bbox[2, ]) == 1)
validRange$lon <- which(findInterval(values$lon, bbox[1, ]) == 1)

# Make a selection of indices which fall in our subsetting window
# E.g. translate degrees to indices of arrays.
determineCount <- function(x) {
return(c(x[1], tail(x,1) - x[1] + 1))
return(c(x[1], tail(x, 1) - x[1] + 1))
}
count <- rbind(determineCount(validRange$lon),
determineCount(validRange$lat),
@@ -182,10 +183,10 @@ GetEobsBbox <- function(filename, variableName, bbox, period){
validValues$lat <- values$lat[validRange$lat]
validValues$lon <- values$lon[validRange$lon]
validValues$time <- as.Date(values$time[validRange$time],
origin="1950-01-01")
origin = "1950-01-01")
validValues[[variableName]] <- ncdf4::ncvar_get(dataset, variableName,
start=count[, 1],
count=count[, 2])
start = count[, 1],
count = count[, 2])

# Close the data set and return data.table created from the valid values
ncdf4::nc_close(dataset)
@@ -196,18 +197,18 @@ CreateDataTableMelt <- function(variable, validValues) {
time <- lon <- lat <- pointID <- value <- V1 <- NULL
if (length(validValues$time) > 1) {
meltedValues <- reshape2::melt(validValues[[variable]],
varnames=c("lon", "lat", "time"))
varnames = c("lon", "lat", "time"))
result <- as.data.table(meltedValues)
} else {
meltedValues <- reshape2::melt(validValues[[variable]],
varnames=c("lon", "lat"))
varnames = c("lon", "lat"))
result <- as.data.table(meltedValues)
result[, time := 1]
}
setkey(result, lon, lat)
result[, pointID:=.GRP, by = key(result)]
result[, pointID := .GRP, by = key(result)]
setkey(result, pointID)
index <- result[, !all(is.na(value)), by = pointID][V1==TRUE, pointID]
index <- result[, !all(is.na(value)), by = pointID][V1 == TRUE, pointID]
result <- result[pointID %in% index, ]
result[, pointID := NULL]
result[, lon := validValues$lon[lon]]
@@ -224,15 +225,15 @@ CreateDataTableMelt <- function(variable, validValues) {
removeOutsiders <- function(data, area) {
lon <- lat <- pointID <- NULL
setkey(data, lon, lat)
data[, pointID:=.GRP, by=key(data)]
data[, pointID := .GRP, by = key(data)]
coords <- data[, list(lon = unique(lon), lat = unique(lat)),
by = pointID][, list(lon, lat)]
points <- sp::SpatialPoints(coords, area@proj4string)
index <- data[, unique(pointID)][which(!is.na(sp::over(points,
as(area, 'SpatialPolygons'))))]
as(area, "SpatialPolygons"))))]
data <- data[pointID %in% index]
setkey(data, lon, lat)
return(data[, pointID:=.GRP, by=key(data)])
return(data[, pointID := .GRP, by = key(data)])
}

# Removes all rows with NAs
@@ -241,16 +242,16 @@ removeOutsiders <- function(data, area) {
removeNAvalues <- function(data) {
lon <- lat <- pointID <- NULL
# We don't check if time is NA (it should not) but date * 0 is not defined
data <- data[complete.cases(data[,!"time", with=FALSE]*0)]
data <- data[complete.cases(data[, !"time", with = FALSE] * 0)]
setkey(data, lon, lat)
data[, pointID:=.GRP, by=key(data)]
data[, pointID := .GRP, by = key(data)]
}
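The `* 0` in removeNAvalues is a small trick: multiplying by zero turns Inf into NaN while leaving NA as NA, so complete.cases() drops rows with any non-finite value. Date columns do not support multiplication, hence the time column is excluded first. A one-line illustration:

complete.cases(c(1, NA, Inf) * 0)
# [1]  TRUE FALSE FALSE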

# To define the valid range
# @param time Time
# @param period Period
periodBoundaries <- function(time, period) {
xts <- xts::xts(time, as.Date(time, origin="1950-01-01"))
xts <- xts::xts(time, as.Date(time, origin = "1950-01-01"))
interval <- range(as.numeric(xts[period]))
interval[2] <- interval[2] + 1
return(interval)
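A recurring cleanup in specifyURL above is swapping paste(..., sep = "") for the equivalent, more idiomatic paste0(); the two forms produce identical strings:

paste("e-obs_", "0.50regular/", sep = "")  # "e-obs_0.50regular/"
paste0("e-obs_", "0.50regular/")           # same result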
16 changes: 8 additions & 8 deletions R/ImportData.R
@@ -9,7 +9,7 @@ HomogenizedPrecipitation <- function(stationId, periodStart=1910) {
url <- SpecifyUrlForHomogenPrecipZipped(stationId, periodStart)
precip <- data.table(ReadZippedFile(url, c("date", "pr")))
precip[, stationId := stationId]
precip[, date := as.Date(paste(date), '%Y%m%d')]
precip[, date := as.Date(paste(date), "%Y%m%d")]
setcolorder(precip, c("date", "stationId", "pr"))
return(precip)
}
@@ -54,19 +54,19 @@ PrecipitationDownload <- function(location, period, whichSet, call) {
location <- sp::spTransform(location, CRS(standardCRSstring))
}
tmpMetaData <- stationMetaData
if (periodStart==1910 | whichSet==1910) {
tmpMetaData <- tmpMetaData[longRecord==TRUE, ]
if (periodStart == 1910 | whichSet == 1910) {
tmpMetaData <- tmpMetaData[longRecord == TRUE, ]
}
stationLocations <- sp::SpatialPoints(tmpMetaData[, list(lon, lat)], CRS(standardCRSstring))
tmpMetaData[, inArea := sp::over(stationLocations, as(location, 'SpatialPolygons'))]
tmpMetaData[, inArea := sp::over(stationLocations, as(location, "SpatialPolygons"))]
tmpMetaData <- na.omit(tmpMetaData)
tmp <- foreach(i = 1 : tmpMetaData[, .N], .combine = "rbind") %do% {
tmpStart <- ifelse(tmpMetaData[i, longRecord] & whichSet != 1951, 1910, 1951)
HomogenizedPrecipitation(tmpMetaData[i, stationId], tmpStart)
}
}
setkey(tmp, date)
tmp <- tmp[date %in% HomogenPrecipDates(period),]
tmp <- tmp[date %in% HomogenPrecipDates(period), ]
setkey(tmp, stationId, date)
KnmiData(tmp, call, "HomogenPrecip")
}
@@ -85,7 +85,7 @@ DownloadMessageContent <- function(name) {
#' @importFrom xts .subset.xts
#' @importFrom xts .parseISO8601
HomogenPrecipDates <- function(period) {
tmp <- xts::.parseISO8601(period, tz="GEZ")
tmp <- xts::.parseISO8601(period, tz = "GEZ")
return(seq.Date(as.Date(tmp$first.time), as.Date(tmp$last.time), by = "day"))
}
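HomogenPrecipDates expands an ISO-8601 period string into a daily date sequence, for example (expected behaviour, not output captured from this commit):

HomogenPrecipDates("2017-01-01/2017-01-03")
# [1] "2017-01-01" "2017-01-02" "2017-01-03"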

@@ -109,7 +109,7 @@ Earthquakes <- function(type="induced", area = NULL, period = NULL, path = "") {
fileName <- SpecifyFileNameEarthquakes(type, path, area, period)
if (!file.exists(fileName)) {
tmp <- EarthquakesDownload(type, area, period, cl)
saveRDS(tmp, file=fileName)
saveRDS(tmp, file = fileName)
} else {
tmp <- readRDS(fileName)
}
@@ -123,7 +123,7 @@ EarthquakesDownload <- function(type, area, period, call) {
jsonTable <- jsonlite::fromJSON(URL)$events
tmp <- UpdateJsonTable(jsonTable)
if (!is.null(area)) tmp <- ClipQuakes(tmp, area)
if (!is.null(period)) tmp <- tmp[date %in% HomogenPrecipDates(period),]
if (!is.null(period)) tmp <- tmp[date %in% HomogenPrecipDates(period), ]
KnmiData(tmp, call, "Earthquakes")
}

14 changes: 8 additions & 6 deletions R/KIS.R
@@ -51,16 +51,17 @@ WriteKISRecipe <- function(var, locationID, period) {
# FIXME: Ensure that the recipe file is deleted
recipeName <- "KIStable.txt"

if (var == 'TG') {
if (var == "TG") {
dataSeries <- "REH1"
unit <- "graad C"
} else if (var == 'MOR_10') {
} else if (var == "MOR_10") {
dataSeries <- "TOA"
unit <- "m"
} else {
stop(paste0("Variable ", var, " not defined."))
}

# nolint start
recipe <- 'recipe=' %>%
paste0('{"datasetserieselements":[{"datasetseries":"', dataSeries, '",') %>%
paste0('"element":"', var, '","unit":"', unit, '"}],') %>%
@@ -78,6 +79,7 @@ WriteKISRecipe <- function(var, locationID, period) {
paste0('"condition":"AMOUNT","value":null}]},') %>%
paste0('"displaysettings":{"showMetaData":false,"sort":"DateStationTime"}}') %>%
str_replace_all('%', '%25')
# nolint end

writeLines(recipe, recipeName)
return(recipeName)
@@ -89,11 +91,11 @@ CorrectDataFormat <- function(xtsObject) {

ExecuteKISRecipe <- function(recipeName, period) {
parsedPeriod <- .parseISO8601(period)
url <- 'http://kisapp.knmi.nl:8080/servlet/download/table/'
url <- "http:https://kisapp.knmi.nl:8080/servlet/download/table/"
url <- paste0(url, CorrectDataFormat(parsedPeriod$first.time + 1),
'/', CorrectDataFormat(parsedPeriod$last.time + 1),
'/', 'CSV')
destFile <- 'KIStable.csv'
"/", CorrectDataFormat(parsedPeriod$last.time + 1),
"/", "CSV")
destFile <- "KIStable.csv"

flog.info("Start data download.")
download.file(url, destFile, method = "wget", quiet = T,
2 changes: 1 addition & 1 deletion R/KnmiDataClass.R
@@ -20,7 +20,7 @@ KnmiData <- function(data, call, type) {
version = utils::packageVersion("knmiR"),
timeStamp = utils::timestamp(format(Sys.time(),
"%Y-%m-%d %H:%M:%S"),
quiet=TRUE),
quiet = TRUE),
class = append("KnmiData", class(data)))
}

2 changes: 1 addition & 1 deletion R/MetaData.R
@@ -23,4 +23,4 @@
#' Earthquakes maximal domain
#' @format bbox i.e. 2*2 matrix
#'
EarthquakesBoundaryBox <- matrix(c(2, 50, 9, 55), nrow=2)
EarthquakesBoundaryBox <- matrix(c(2, 50, 9, 55), nrow = 2)
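Since matrix() fills column-wise, this puts the longitude range in row 1 and the latitude range in row 2, matching the 2*2 bbox convention used by sp and by GetEobsBbox above:

EarthquakesBoundaryBox
#      [,1] [,2]
# [1,]    2    9    <- lon (west, east)
# [2,]   50   55    <- lat (south, north)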
2 changes: 1 addition & 1 deletion R/Sanitation.R
@@ -2,7 +2,7 @@ CheckStationId <- function(stationId, periodStart) {
longRecord <- NULL
if (!stationId %in% stationMetaData$stationId) stop("stationId not available")
else if (periodStart == 1910 &
!stationId %in% stationMetaData[longRecord==TRUE, stationId]) {
!stationId %in% stationMetaData[longRecord == TRUE, stationId]) {
stop("stationId not available for periodStart 1910")
}
}
6 changes: 3 additions & 3 deletions R/SpecifyFileName.R
@@ -22,7 +22,7 @@ GetFullySpecifiedPeriod <- function(name, period = NULL) {
if (is.null(period)) {
return(paste0(startDate, "_", endDate))
} else {
parsedPeriod <- lapply(xts::.parseISO8601(period, tz="GEZ"), as.Date)
parsedPeriod <- lapply(xts::.parseISO8601(period, tz = "GEZ"), as.Date)
return(paste0(max(startDate, parsedPeriod$first.time), "_",
min(endDate, parsedPeriod$last.time)))
}
@@ -41,8 +41,8 @@ GetFullySpecifiedArea <- function(name, area = NULL) {
if (extends(class(area), "SpatialPolygons")) {
return(paste0("AreaHash_", digest::digest(area)))
} else if (class(area) == "matrix") {
return(paste0("bbox_", round(area[1,1], 2), "_", round(area[1,2], 2),
"_", round(area[2,1], 2), "_", round(area[2,2], 2)))
return(paste0("bbox_", round(area[1, 1], 2), "_", round(area[1, 2], 2),
"_", round(area[2, 1], 2), "_", round(area[2, 2], 2)))
} else if (class(area) == "numeric") {
return(paste0("StationId_", area))
} else stop("Area is not set correctly")
14 changes: 7 additions & 7 deletions R/UrlSpecifications.R
@@ -1,19 +1,19 @@
SpecifyUrlForHomogenPrecipZipped <- function(stationId, periodStart) {
CheckStationId(stationId, periodStart)
url <- "http:https://climexp.knmi.nl/KNMIData/precip"
if (stationId < 100) stationIdString <- paste(0, stationId, "hom", sep="")
else stationIdString <- paste(stationId, "hom", sep="")
url <- paste(url, stationIdString, sep="")
if (periodStart == 1910) url <- paste(url, '1910.dat.gz', sep="")
else if (periodStart == 1951) url <- paste(url, '1951.dat.gz', sep="")
if (stationId < 100) stationIdString <- paste(0, stationId, "hom", sep = "")
else stationIdString <- paste(stationId, "hom", sep = "")
url <- paste(url, stationIdString, sep = "")
if (periodStart == 1910) url <- paste(url, "1910.dat.gz", sep = "")
else if (periodStart == 1951) url <- paste(url, "1951.dat.gz", sep = "")
else stop("periodStart should be 1910 or 1951")
return(url)
}

SpecifyUrlEarthquakes <- function(type) {
if (type=="induced") {
if (type == "induced") {
URL <- "http:https://cdn.knmi.nl/knmi/map/page/seismologie/all_induced.json"
} else if (type=="tectonic") {
} else if (type == "tectonic") {
URL <- "http:https://cdn.knmi.nl/knmi/map/page/seismologie/all_tectonic.json"
} else stop("Catalogue type not known.")
return(URL)
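For a concrete picture of SpecifyUrlForHomogenPrecipZipped: station ids below 100 get a leading zero, so station 11 (the one used in the test suite below) with periodStart 1910 yields, by construction:

SpecifyUrlForHomogenPrecipZipped(11, 1910)
# [1] "http://climexp.knmi.nl/KNMIData/precip011hom1910.dat.gz"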
3 changes: 0 additions & 3 deletions inst/.lintr
@@ -1,10 +1,7 @@
linters: with_defaults(
camel_case_linter = NULL, # 283
single_quotes_linter = NULL, # 71
line_length_linter = NULL, # 64
infix_spaces_linter = NULL, # 61
commented_code_linter = NULL, # 27
commas_linter = NULL, # 15
multiple_dots_linter = NULL, # 3
NULL
)
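After this commit the exclusion list in inst/.lintr reduces to the four remaining linters (reconstructed from the hunk above; the trailing numbers count the findings that were open when the exclusions were written):

linters: with_defaults(
  camel_case_linter = NULL, # 283
  line_length_linter = NULL, # 64
  commented_code_linter = NULL, # 27
  multiple_dots_linter = NULL, # 3
  NULL
)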
7 changes: 4 additions & 3 deletions inst/MetaDataDefinition/MetaDataDefinition.R
@@ -10,13 +10,14 @@ dataCitation$HomogenPrecip <- strwrap("Please cite as Buishand, T.A., G. DeMarti

dataLicense <- list()
dataLicense$Earthquakes <- "Open data"
dataLicense$HomogenPrecip <- strwrap('THESE DATA CAN BE USED FREELY PROVIDED THAT THE FOLLOWING SOURCE IS ACKNOWLEDGED: ROYAL NETHERLANDS METEOROLOGICAL INSTITUTE')
dataLicense$HomogenPrecip <- strwrap("THESE DATA CAN BE USED FREELY PROVIDED THAT THE FOLLOWING SOURCE IS ACKNOWLEDGED: ROYAL NETHERLANDS METEOROLOGICAL INSTITUTE")

availableDataSets <- c("Earthquakes", "HomogenPrecip")

stationMetaData <- data.table::fread("./inst/MetaDataDefinition/stationMetaData.csv", stringsAsFactors = FALSE)
stationMetaData <- data.table::fread("./inst/MetaDataDefinition/stationMetaData.csv",
stringsAsFactors = FALSE)
data.table::setkey(stationMetaData, stationId)

devtools::use_data(dataDescription, dataCitation, dataLicense,
availableDataSets, stationMetaData,
internal=TRUE, overwrite=TRUE)
internal = TRUE, overwrite = TRUE)
4 changes: 2 additions & 2 deletions tests/testthat/test-DataSources.R
@@ -8,8 +8,8 @@ today <- as.Date(Sys.time())
test_that("Actuality", {
skip_on_travis()
skip_on_appveyor()
expect_gt(HomogenPrecip(11, paste0(today-45, "/", today), path = NULL)[, .N], 0)
recentQuakes <- Earthquakes("induced", NULL, paste0(today-14, "/", today), path=NULL)
expect_gt(HomogenPrecip(11, paste0(today - 45, "/", today), path = NULL)[, .N], 0)
recentQuakes <- Earthquakes("induced", NULL, paste0(today - 14, "/", today), path = NULL)
expect_gt(recentQuakes[, .N], 0)
expect_match(License(recentQuakes), "Open data")
})
12 changes: 6 additions & 6 deletions tests/testthat/test-EOBS.R
@@ -1,19 +1,19 @@
context("EOBS")

library(knmiR)
adm0 <- raster::getData('GADM', country='NL', level=0)
adm0 <- raster::getData("GADM", country = "NL", level = 0)

test_that("EOBS error messages", {
expect_error(EOBS("foo"), "Variable foo not known.")
expect_error(EOBS('tg', 'A', "foo"), "Period should be either Numeric, timeBased or ISO-8601 style.")
expect_error(EOBS('tg', '2014', "foo"), "Area should be of class SpatialPolygons or SpatialPolygonsDataFrame.")
expect_error(EOBS('tg', '2014', adm0, "foo"), "Grid should be specified correctly.")
expect_error(EOBS("tg", "A", "foo"), "Period should be either Numeric, timeBased or ISO-8601 style.")
expect_error(EOBS("tg", "2014", "foo"), "Area should be of class SpatialPolygons or SpatialPolygonsDataFrame.")
expect_error(EOBS("tg", "2014", adm0, "foo"), "Grid should be specified correctly.")
})

test_that("EOBS regression tests", {
skip_on_appveyor()
expect_equal_to_reference(EOBS('tg', '2014', adm0, '0.50reg'), file="EOBSreference/output.rds")
expect_equal_to_reference(EOBS('rr', '2014', adm0, '0.50reg'), file="EOBSreference/output_rr.rds")
expect_equal_to_reference(EOBS("tg", "2014", adm0, "0.50reg"), file = "EOBSreference/output.rds")
expect_equal_to_reference(EOBS("rr", "2014", adm0, "0.50reg"), file = "EOBSreference/output_rr.rds")
expect_equal_to_reference(EOBS("tg", "2015-06-01", adm0, grid = "0.50reg"),
file = "EOBSreference/output_one_timestep.rds")
expect_equal_to_reference(EOBSLocal("tg", "tg_0.50deg_reg_v12.0_plus_2015_ANN_avg.nc",