Merge branch 'develop' into fix_warnings_uncertainty
moki1202 authored Nov 19, 2023
2 parents 53f01d3 + 91531ae commit 577b0a2
Showing 16 changed files with 113 additions and 58 deletions.
4 changes: 2 additions & 2 deletions docker/depends/pecan.depends.R
@@ -9,6 +9,7 @@ Sys.setenv(RLIB = rlib)

# install remotes first in case packages are references in dependencies
remotes::install_github(c(
'adokter/suntools',
'araiho/linkages_package',
'chuhousen/amerifluxr',
'ebimodeling/[email protected]',
@@ -76,7 +77,6 @@ wanted <- c(
'magic',
'magrittr',
'maps',
'maptools',
'markdown',
'MASS',
'Matrix',
@@ -113,7 +113,6 @@ wanted <- c(
'reshape',
'reshape2',
'reticulate',
'rgdal',
'rjags',
'rjson',
'rlang',
@@ -130,6 +129,7 @@ wanted <- c(
'stats',
'stringi',
'stringr',
'suntools',
'swfscMisc',
'terra',
'testthat',
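For reference, a minimal sketch of how the new suntools dependency can be installed from GitHub with remotes, matching the 'adokter/suntools' entry added above (the conditional check is illustrative, not part of the build script):

    # install the GitHub-hosted suntools package only if it is missing
    if (!requireNamespace("suntools", quietly = TRUE)) {
      remotes::install_github("adokter/suntools")
    }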
6 changes: 4 additions & 2 deletions modules/data.atmosphere/DESCRIPTION
@@ -36,7 +36,6 @@ Imports:
jsonlite,
lubridate (>= 1.6.0),
magrittr,
maptools,
MASS,
mgcv,
ncdf4 (>= 1.15),
@@ -49,10 +48,12 @@ Imports:
raster,
REddyProc,
reshape2,
rgdal,
rlang (>= 0.2.0),
sf,
sp,
stringr (>= 1.1.0),
suntools,
terra,
testthat (>= 2.0.0),
tibble,
tidyr,
@@ -69,6 +70,7 @@ Suggests:
progress,
reticulate
Remotes:
github::adokter/suntools,
github::chuhousen/amerifluxr,
github::ropensci/geonames,
github::ropensci/nneo
2 changes: 1 addition & 1 deletion modules/data.atmosphere/NAMESPACE
@@ -113,5 +113,5 @@ export(write_noaa_gefs_netcdf)
import(dplyr)
import(tidyselect)
importFrom(magrittr,"%>%")
importFrom(rgdal,checkCRSArgs)
importFrom(rlang,.data)
importFrom(sf,st_crs)
29 changes: 17 additions & 12 deletions modules/data.atmosphere/R/GEFS_helper_functions.R
@@ -243,25 +243,30 @@ process_gridded_noaa_download <- function(lat_list,

for(hr in 1:length(curr_hours)){
file_name <- paste0(base_filename2, curr_hours[hr])
grib_file_name <- paste0(working_directory,"/", file_name,".grib")

if(file.exists(paste0(working_directory,"/", file_name,".grib"))){
grib <- rgdal::readGDAL(paste0(working_directory,"/", file_name,".grib"), silent = TRUE)
lat_lon <- sp::coordinates(grib)
if(file.exists(grib_file_name)){
grib_data <- terra::rast(grib_file_name)

## Convert to data frame
grib_data_df <- terra::as.data.frame(grib_data, xy=TRUE)
lat_lon <- grib_data_df[, c("x", "y")]

for(s in 1:length(site_id)){

index <- which(lat_lon[,2] == lats[s] & lat_lon[,1] == lons[s])

pressfc[s, hr] <- grib$band1[index]
tmp2m[s, hr] <- grib$band2[index]
rh2m[s, hr] <- grib$band3[index]
ugrd10m[s, hr] <- grib$band4[index]
vgrd10m[s, hr] <- grib$band5[index]
pressfc[s, hr] <- grib_data_df$`SFC=Ground or water surface; Pressure [Pa]`[index]
tmp2m[s, hr] <- grib_data_df$`2[m] HTGL=Specified height level above ground; Temperature [C]`[index]
rh2m[s, hr] <- grib_data_df$`2[m] HTGL=Specified height level above ground; Relative humidity [%]`[index]
ugrd10m[s, hr] <- grib_data_df$`10[m] HTGL=Specified height level above ground; u-component of wind [m/s]`[index]
vgrd10m[s, hr] <- grib_data_df$`10[m] HTGL=Specified height level above ground; v-component of wind [m/s]`[index]

if(curr_hours[hr] != "000"){
apcpsfc[s, hr] <- grib$band6[index]
tcdcclm[s, hr] <- grib$band7[index]
dswrfsfc[s, hr] <- grib$band8[index]
dlwrfsfc[s, hr] <- grib$band9[index]
apcpsfc[s, hr] <- grib_data_df$`SFC=Ground or water surface; 03 hr Total precipitation [kg/(m^2)]`[index]
tcdcclm[s, hr] <- grib_data_df$`RESERVED(10) (Reserved); Total cloud cover [%]`[index]
dswrfsfc[s, hr] <- grib_data_df$`SFC=Ground or water surface; Downward Short-Wave Rad. Flux [W/(m^2)]`[index]
dlwrfsfc[s, hr] <- grib_data_df$`SFC=Ground or water surface; Downward Long-Wave Rad. Flux [W/(m^2)]`[index]
}
}
}
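For context, a minimal sketch of the terra-based GRIB workflow that the rewritten loop above relies on. The file name and site coordinates are hypothetical, and only one of the band descriptions from the hunk is shown:

    # read a GEFS GRIB2 file as a multi-band SpatRaster (terra reads GRIB via GDAL)
    grib_data <- terra::rast("gep01.t00z.pgrb2a.0p50.f003.grib")   # hypothetical file

    # flatten to a data frame with one row per grid cell, keeping x/y coordinates
    grib_data_df <- terra::as.data.frame(grib_data, xy = TRUE)

    # locate the cell matching a site's longitude (x) and latitude (y)
    index <- which(grib_data_df$x == -72.25 & grib_data_df$y == 42.5)

    # pull a variable by its GRIB band description, as in the loop above
    tmp2m <- grib_data_df$`2[m] HTGL=Specified height level above ground; Temperature [C]`[index]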
4 changes: 2 additions & 2 deletions modules/data.atmosphere/R/download.NARR_site.R
@@ -465,9 +465,9 @@ latlon2narr <- function(nc, lat.in, lon.in) {
#' @inheritParams get_NARR_thredds
#' @return `sp::SpatialPoints` object containing transformed x and y
#' coordinates, in km, which should match NARR coordinates
#' @importFrom rgdal checkCRSArgs
#' @importFrom sf st_crs
# ^not used directly here, but needed by sp::CRS.
# sp lists rgdal in Suggests rather than Imports,
# sp lists sf in Suggests rather than Imports,
# so importing it here to ensure it's available at run time
#' @author Alexey Shiklomanov
#' @export
@@ -216,11 +216,11 @@ met_temporal_downscale.Gaussian_ensemble <- function(in.path, in.prefix, outfold
day <- as.POSIXct(sprintf("%s 12:00:00", date), tz = tz)
sequence <- seq(from = day, length.out = span, by = "days")

sunrise <- maptools::sunriset(lon.lat, sequence, direction = "sunrise",
sunrise <- suntools::sunriset(lon.lat, sequence, direction = "sunrise",
POSIXct.out = TRUE)
sunset <- maptools::sunriset(lon.lat, sequence, direction = "sunset",
sunset <- suntools::sunriset(lon.lat, sequence, direction = "sunset",
POSIXct.out = TRUE)
solar_noon <- maptools::solarnoon(lon.lat, sequence, POSIXct.out = TRUE)
solar_noon <- suntools::solarnoon(lon.lat, sequence, POSIXct.out = TRUE)

data.frame(date = as.Date(sunrise$time), sunrise = as.numeric(format(sunrise$time, "%H%M")),
solarnoon = as.numeric(format(solar_noon$time, "%H%M")),
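For context, a minimal sketch of the suntools calls that replace the retired maptools functions above. Coordinates, dates, and time zone are hypothetical; suntools is assumed to accept the same matrix-of-coordinates input that maptools did:

    lon.lat  <- matrix(c(-72.17, 42.54), nrow = 1)   # lon, lat in degrees
    sequence <- seq(as.POSIXct("2023-06-01 12:00:00", tz = "UTC"), length.out = 3, by = "days")

    sunrise    <- suntools::sunriset(lon.lat, sequence, direction = "sunrise", POSIXct.out = TRUE)
    sunset     <- suntools::sunriset(lon.lat, sequence, direction = "sunset",  POSIXct.out = TRUE)
    solar_noon <- suntools::solarnoon(lon.lat, sequence, POSIXct.out = TRUE)

    # each result carries a POSIXct $time column, formatted to HHMM strings above
    format(sunrise$time, "%H%M")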
4 changes: 2 additions & 2 deletions modules/data.atmosphere/tests/Rcheck_reference.log
@@ -76,13 +76,13 @@ Found the following (possibly) invalid URLs:
The Date field is over a month old.
* checking package namespace information ... OK
* checking package dependencies ... WARNING
Imports includes 39 non-default packages.
Imports includes 40 non-default packages.
Importing from so many packages makes the package vulnerable to any of
them becoming unavailable. Move as many as possible to Suggests and
use conditionally.

* checking package dependencies ... NOTE
Imports includes 39 non-default packages.
Imports includes 40 non-default packages.
Importing from so many packages makes the package vulnerable to any of
them becoming unavailable. Move as many as possible to Suggests and
use conditionally.
1 change: 0 additions & 1 deletion modules/data.land/DESCRIPTION
@@ -61,7 +61,6 @@ Suggests:
PEcAn.settings,
redland,
raster,
rgdal,
RPostgreSQL,
testthat (>= 1.0.2)
License: BSD_3_clause + file LICENSE
8 changes: 4 additions & 4 deletions modules/data.land/R/gis.functions.R
@@ -68,14 +68,13 @@ shp2kml <- function(dir, ext, kmz = FALSE, proj4 = NULL, color = NULL, NameField
# Read in shapefile(s) & get coordinates/projection info shp.file <-
# readShapeSpatial(file.path(dir,i),verbose=TRUE) coordinates(test) <- ~X+Y

layers <- rgdal::ogrListLayers(file.path(dir, i))
info <- rgdal::ogrInfo(file.path(dir, i), layers)
layers <- sf::st_layers(file.path(dir, i))
# shp.file <- readOGR(file.path(dir,i),layer=layers) # no need to read in file

# Display vector info to the console
print("")
print(paste0("Input layers: ", layers))
print(paste0("Input projection info: ", info$p4s))
print(paste0("Input layers: ", layers$name))
print(paste0("Input projection info: ", layers$crs[[1]]$input))
print("")

# Write out kml/kmz using plotKML package if (is.null(color)){ color <- 'grey70' }
@@ -133,6 +132,7 @@ get.attributes <- function(file, coords) {

#library(fields)
#require(rgdal)
# note that OGR support is now provided by the sf and terra packages among others

# print('NOT IMPLEMENTED YET') subset_layer(file,coords)
} # get.attributes
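For context, a minimal sketch of the sf-based layer inspection used in the rewritten shp2kml() above; the shapefile path is hypothetical and the field names follow the usage in the hunk (sf >= 1.0):

    layers <- sf::st_layers("data/sites.shp")   # hypothetical vector file

    layers$name            # layer names, replacing rgdal::ogrListLayers()
    layers$crs[[1]]$input  # CRS of the first layer as a string, replacing info$p4s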
4 changes: 2 additions & 2 deletions modules/data.mining/DESCRIPTION
@@ -1,6 +1,6 @@
Package: PEcAn.data.mining
Type: Package
Title: PEcAn functions used for exploring model residuals and structures
Title: PEcAn Functions Used for Exploring Model Residuals and Structures
Description: (Temporary description) PEcAn functions used for exploring model residuals and structures
Version: 1.7.2
Date: 2021-10-04
@@ -21,4 +21,4 @@ LazyLoad: yes
LazyData: FALSE
Collate:
Encoding: UTF-8
RoxygenNote: 6.1.1
RoxygenNote: 7.1.2
49 changes: 49 additions & 0 deletions modules/data.mining/tests/Rcheck_reference.log
@@ -0,0 +1,49 @@
* using log directory ‘/tmp/Rtmpyv5R27/PEcAn.data.mining.Rcheck’
* using R version 4.1.3 (2022-03-10)
* using platform: x86_64-pc-linux-gnu (64-bit)
* using session charset: UTF-8
* using options ‘--no-manual --as-cran’
* checking for file ‘PEcAn.data.mining/DESCRIPTION’ ... OK
* checking extension type ... Package
* this is package ‘PEcAn.data.mining’ version ‘1.7.2’
* package encoding: UTF-8
* checking package namespace information ... OK
* checking package dependencies ... OK
* checking if this is a source package ... OK
* checking if there is a namespace ... OK
* checking for executable files ... OK
* checking for hidden files and directories ... OK
* checking for portable file names ... OK
* checking for sufficient/correct file permissions ... OK
* checking serialization versions ... OK
* checking whether package ‘PEcAn.data.mining’ can be installed ... OK
* checking installed package size ... OK
* checking package directory ... OK
* checking for future file timestamps ... OK
* checking DESCRIPTION meta-information ... NOTE
Malformed Description field: should contain one or more complete sentences.
* checking top-level files ... OK
* checking for left-over files ... OK
* checking index information ... OK
* checking package subdirectories ... OK
* checking whether the package can be loaded ... OK
* checking whether the package can be loaded with stated dependencies ... OK
* checking whether the package can be unloaded cleanly ... OK
* checking whether the namespace can be loaded with stated dependencies ... OK
* checking whether the namespace can be unloaded cleanly ... OK
* checking loading without being on the library search path ... OK
* checking examples ... NONE
* checking for unstated dependencies in ‘tests’ ... OK
* checking tests ...
Running ‘testthat.R’
OK
* checking for non-standard things in the check directory ... OK
* checking for detritus in the temp directory ... OK
* DONE

Status: 1 NOTE
See
‘/tmp/Rtmpyv5R27/PEcAn.data.mining.Rcheck/00check.log’
for details.


4 changes: 2 additions & 2 deletions modules/data.remote/DESCRIPTION
@@ -20,14 +20,14 @@ Imports:
PEcAn.utils,
purrr,
XML,
raster,
sp,
MODISTools (>= 1.1.0),
reticulate,
PEcAn.logger,
magrittr,
PEcAn.remote,
stringr (>= 1.1.0),
terra,
doParallel,
parallel,
foreach
@@ -36,7 +36,7 @@ Suggests:
dplyr,
ggplot2,
lubridate,
rgdal,
raster,
reshape,
testthat (>= 1.0.2),
tibble
38 changes: 19 additions & 19 deletions modules/data.remote/R/NLCD.R
@@ -23,10 +23,9 @@ download.NLCD <- function(outdir, year = 2011, con = NULL) {

## before downloading, check if the file already exists on this host
if (!is.null(con)) {
library(PEcAn.DB)
chk <- dbfile.check(type = "Input", id = input.id, con = con)
chk <- PEcAn.DB::dbfile.check(type = "Input", id = input.id, con = con)
if (nrow(chk) > 0) {
machines <- db.query(paste("SELECT * from machines where id in (",
machines <- PEcAn.DB::db.query(paste("SELECT * from machines where id in (",
paste(chk$machine_id, sep = ","), ")"), con)
if (PEcAn.remote::fqdn() %in% machines$hostname) {
## record already exists on this host
@@ -49,7 +48,7 @@ download.NLCD <- function(outdir, year = 2011, con = NULL) {
prefix <- table(sapply(strsplit(dir(data_dir), ".", fixed = TRUE), function(x) { x[1] }))
prefix <- names(which.max(prefix))
site.id <- 1000000676
return(dbfile.insert(data_dir, in.prefix = prefix, type = "Input", input.id, con,
return(PEcAn.DB::dbfile.insert(data_dir, in.prefix = prefix, type = "Input", input.id, con,
reuse = TRUE))
}
return(data_dir)
@@ -68,21 +67,19 @@ download.NLCD <- function(outdir, year = 2011, con = NULL) {
##'
##' @description Based on codes from Christy Rollinson and from Max Joseph (http://mbjoseph.github.io/2014/11/08/nlcd.html)
extract_NLCD <- function(buffer, coords, data_dir = NULL, con = NULL, year = 2011) {
library(raster)
require(rgdal)


if (!is.null(con)) {
library(PEcAn.DB)

if (year == 2001) {
input.id <- 1000000482
} else if (year == 2011) {
input.id <- 1000000483
} else {
print(paste("Year not yet supported: ", year))
}
chk <- dbfile.check(type = "Input", id = input.id, con = con)
chk <- PEcAn.DB::dbfile.check(type = "Input", id = input.id, con = con)
if (nrow(chk) > 0) {
machines <- db.query(paste("SELECT * from machines where id in (",
machines <- PEcAn.DB::db.query(paste("SELECT * from machines where id in (",
paste(chk$machine_id, sep = ","), ")"), con)
if (PEcAn.remote::fqdn() %in% machines$hostname) {
## record already exists on this host
@@ -104,21 +101,24 @@ extract_NLCD <- function(buffer, coords, data_dir = NULL, con = NULL, year = 201
print(paste("File not found:", filename))
return(NULL)
}
nlcd <- raster(filename)

# WARNING: the following extraction previously used raster and sp package functions
# this new implementation with terra functions has not been thoroughly tested
nlcd <- terra::rast(filename)

# transform points
sites <- SpatialPoints(coords = coords, proj4string = CRS("+proj=longlat +datum=WGS84"))
sites <- spTransform(sites, crs(nlcd))
sites <- terra::vect(coords, geom=c("long", "lat"), crs="+proj=longlat +datum=WGS84")
sites <- terra::buffer(sites, width=buffer)

# extract
sum.raw <- table(extract(nlcd, sites, buffer = buffer))
sum.raw <- table(terra::extract(nlcd, sites))
summ <- prop.table(sum.raw)
mydf <- data.frame(cover = names(summ), percent = as.vector(summ), count = as.vector(sum.raw))
mydf <- data.frame(cover.name = colnames(summ), percent = as.vector(summ), count = as.vector(sum.raw))
mydf <- mydf[mydf$count!=0,]

# land cover number to name conversions
cover.table <- nlcd@data@attributes[[1]]
cover.names <- cover.table[as.numeric(as.character(mydf$cover)) + 1, grep("Land", names(cover.table))]
mydf$cover.name <- cover.names
# land cover name to number conversions
nlcd_levels <- terra::levels(nlcd)[[1]]
mydf$cover <- nlcd_levels$value[nlcd_levels$`Land Cover Class` %in% mydf$cover.name]

return(mydf)
} # extract_NLCD
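For context, a minimal standalone sketch of the terra-based extraction pattern adopted above (flagged in the commit itself as not thoroughly tested). The raster path, site coordinates, and buffer width are hypothetical:

    nlcd   <- terra::rast("nlcd_2011_landcover.img")   # hypothetical NLCD raster
    coords <- data.frame(long = -72.17, lat = 42.54)

    sites <- terra::vect(coords, geom = c("long", "lat"), crs = "+proj=longlat +datum=WGS84")
    sites <- terra::buffer(sites, width = 1000)        # buffer width in metres

    # tabulate land-cover classes within the buffer and convert counts to proportions
    vals    <- terra::extract(nlcd, sites)             # data frame: ID plus cover class per cell
    sum.raw <- table(vals[[2]])
    summ    <- prop.table(sum.raw)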
2 changes: 1 addition & 1 deletion modules/data.remote/R/call_MODIS.R
@@ -283,7 +283,7 @@ call_MODIS <- function(var, product,
fname <- paste(site_info$site_id[i], "/", product, "_", band, "_", start_date, "-", end_date, "_unfiltered.csv", sep = "")
}
fname <- file.path(outdir, fname)
write.csv(site, fname, row.names = FALSE)
utils::write.csv(site, fname, row.names = FALSE)
}

}
8 changes: 4 additions & 4 deletions modules/data.remote/R/download.thredds.R
@@ -69,7 +69,7 @@ download.thredds.AGB <- function(outdir = NULL, site_ids, run_parallel = FALSE,
# option to save output dataset to directory for user.
if (!(is.null(outdir)))
{
write.csv(output, file = paste0(outdir, "THREDDS_", sub("^([^.]*).*", "\\1",basename(files[1])), "_site_", site, ".csv"), row.names = FALSE)
utils::write.csv(output, file = paste0(outdir, "THREDDS_", sub("^([^.]*).*", "\\1",basename(files[1])), "_site_", site, ".csv"), row.names = FALSE)
}

return(output)
@@ -82,12 +82,12 @@
} else {
ncores <- parallel::detectCores() -1
}
require(doParallel)

PEcAn.logger::logger.info(paste0("Running in parallel with: ", ncores))
cl = parallel::makeCluster(ncores)
doParallel::registerDoParallel(cl)
data = foreach(i = seq_along(mylat), .combine = rbind) %dopar% get_data(i)
stopCluster(cl)
data = foreach::foreach(i = seq_along(mylat), .combine = rbind) %dopar% get_data(i)
parallel::stopCluster(cl)

} else {
# setup sequential run
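For context, a minimal self-contained sketch of the fully namespaced foreach/doParallel pattern used above; mylat and get_data() are hypothetical stand-ins for objects defined earlier in download.thredds.AGB(), and assigning %dopar% locally is one way to use the operator without attaching foreach:

    # hypothetical stand-ins for objects built earlier in the function
    mylat    <- c(42.5, 45.2, 46.1)
    get_data <- function(i) data.frame(site = i, value = i * 2)

    ncores <- max(parallel::detectCores() - 1, 1)
    cl <- parallel::makeCluster(ncores)
    doParallel::registerDoParallel(cl)

    `%dopar%` <- foreach::`%dopar%`   # make the operator available without library(foreach)
    data <- foreach::foreach(i = seq_along(mylat), .combine = rbind) %dopar% get_data(i)

    parallel::stopCluster(cl)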