diff --git a/DESCRIPTION b/DESCRIPTION
index d4eafcd9..06a62f3d 100644
--- a/DESCRIPTION
+++ b/DESCRIPTION
@@ -19,7 +19,6 @@ Imports:
     graphics,
     flextable,
     bit64,
-    PearsonDS,
     caTools,
     lubridate,
     plyr,
@@ -34,3 +33,5 @@ Imports:
     sf,
     lmomco,
     e1071
+Suggests:
+    dataRetrieval
diff --git a/NAMESPACE b/NAMESPACE
index 0c828728..4c937ebd 100644
--- a/NAMESPACE
+++ b/NAMESPACE
@@ -79,7 +79,6 @@ export(water.month.default)
 export(water.month.numeric)
 export(water.year)
 export(xQy)
-import(PearsonDS)
 import(ggplot2)
 import(sqldf)
 import(stringr)
diff --git a/R/DFLOW_xQy.R b/R/DFLOW_xQy.R
index 08070823..e5eae50d 100644
--- a/R/DFLOW_xQy.R
+++ b/R/DFLOW_xQy.R
@@ -101,7 +101,7 @@ xQyComp <- function(xQy_ann, y){
   n <- length(logFlows)
   #Step 1: Find statistics of log distribution of dates and flows
   u <- mean(logFlows)
-  s <- sqrt(var(logFlows))
+  s <- sqrt(stats::var(logFlows))
 
   #Find skewness using the appropriate method. In DFLOW example, matches type
   #1 but SW toolbox uses type 2
@@ -153,8 +153,9 @@ xQyComp <- function(xQy_ann, y){
 #' the next analysis year. This makes choosing the correct analysis season
 #' very important.\cr
 #' This is functionally equivalent to the VPDES' DFLOW R Shiny application.
-#' @param gageDataIn A data frame containing a date and flow column of daily
-#' streamflows
+#' @param gageDataIn A data frame or 1-D zoo timeseries containing a date and
+#' flow column of daily streamflows. If a zoo is passed in, the flowColumn
+#' and dateColumn arguments are ignored.
 #' @param flowColumn The column name in gageDataIn that contains the flow values
 #' @param dateColumn The column name in gageDataIn that contains the date
 #' values. Will be converted to the date class.
@@ -187,24 +188,34 @@ xQyComp <- function(xQy_ann, y){
 #' the number of days in each analysis year on a non-leap year
 #' @export xQy
 #' @examples
-#' #gageDat <- dataRetrieval::readNWISdv("01631000","00060")
-#' #gageDat <- dataRetrieval::renameNWISColumns(gageDat)
-#' #gageDat <- gageDat[!grepl("P",gageDat$Flow_cd),]
-#' #low_flows <- xQy(gageDataIn = gageDat,
-#' #                 flowColumn = "Flow", dateColumn = "Date",
-#' #                 AYS = "07-13", AYE = "02-15",
-#' #                 startYear = NULL, endYear = NULL,
-#' #                 x = 7, y = 10,
-#' #                 IncludeSummerFlow = F)
-#' #low_flows$Flows
+#' gageDat <- dataRetrieval::readNWISdv("01631000","00060")
+#' gageDat <- dataRetrieval::renameNWISColumns(gageDat)
+#' gageDat <- gageDat[!grepl("P",gageDat$Flow_cd),]
+#' low_flows <- xQy(gageDataIn = gageDat,
+#'                  flowColumn = "Flow", dateColumn = "Date",
+#'                  AYS = "07-13", AYE = "02-15",
+#'                  startYear = NULL, endYear = NULL,
+#'                  x = 7, y = 10,
+#'                  IncludeSummerFlow = FALSE)
+#' low_flows$Flows
 xQy <- function(gageDataIn, flowColumn = "Flow", dateColumn = "Date",
-                AYS = "10-01", AYE = "09-30",
+                AYS = "04-01", AYE = "03-31",
                 startYear = NULL, endYear = NULL,
                 x = 7, y = 10,
                 IncludeSummerFlow = FALSE){
-  #Create a simplified copy of the dataset to manipulate
-  gageData <- gageDataIn[,c(dateColumn,flowColumn)]
-  names(gageData) <- c("Date", "Flow")
+  #If the user has passed in a zoo time series, convert to data frame and ensure a
+  #flow and date column exist. This was added for backwards compatibility with
+  #existing OM summary workflows
+  if(zoo::is.zoo(gageDataIn)){
+    gageData <- as.data.frame(gageDataIn)
+    names(gageData) <- "Flow"
+    gageData$Date <- as.Date(zoo::index(gageDataIn))
+  }else{
+    #Create a simplified copy of the dataset to manipulate
+    gageData <- gageDataIn[,c(dateColumn,flowColumn)]
+    names(gageData) <- c("Date", "Flow")
+  }
+
   #Treat negative flows as missing flows, per SW toolbox
   gageData$Flow[gageData$Flow < 0] <- NA
   #Ensure date column exists
@@ -513,7 +524,7 @@ xQy <- function(gageDataIn, flowColumn = "Flow", dateColumn = "Date",
   #uses all data and is not concerned with complete analysis years. This is also
   #easier in terms of communication with external groups just checking gages
   #every day
-  flowCDF <- ecdf(gageData$Flow)
+  flowCDF <- stats::ecdf(gageData$Flow)
   out_xQy$pctg <- flowCDF(out_xQy$xQy)
   out_1Q10$pctg <- flowCDF(out_1Q10$xQy)
   out_7Q10$pctg <- flowCDF(out_7Q10$xQy)
@@ -553,12 +564,12 @@ xQy <- function(gageDataIn, flowColumn = "Flow", dateColumn = "Date",
 }
 
 ## Testing ####
-gageDat <- dataRetrieval::readNWISdv("01631000","00060")
-gageDat <- dataRetrieval::renameNWISColumns(gageDat)
-gageDat <- gageDat[!grepl("P",gageDat$Flow_cd),]
-low_flows <- xQy(gageDataIn = gageDat,
-                 flowColumn = "Flow", dateColumn = "Date",
-                 AYS = "03-01", AYE = "07-31",
-                 startYear = NULL, endYear = NULL,
-                 x = 8, y = 12,
-                 IncludeSummerFlow = F)
+# gageDat <- dataRetrieval::readNWISdv("01631000","00060")
+# gageDat <- dataRetrieval::renameNWISColumns(gageDat)
+# gageDat <- gageDat[!grepl("P",gageDat$Flow_cd),]
+# low_flows <- xQy(gageDataIn = gageDat,
+#                  flowColumn = "Flow", dateColumn = "Date",
+#                  AYS = "03-01", AYE = "07-31",
+#                  startYear = NULL, endYear = NULL,
+#                  x = 8, y = 12,
+#                  IncludeSummerFlow = F)
diff --git a/R/RomEntity.R b/R/RomEntity.R
index 6de0f5da..4b16e969 100644
--- a/R/RomEntity.R
+++ b/R/RomEntity.R
@@ -216,6 +216,7 @@ RomEntity <- R6Class(
 #' @param varkey which varkey? defaults to guess Constant and AlphanumericConstant
 #' @param data_matrix dataframe contained rows/cols
 #' @param remote look at remote datasource?
+ #' @param proptext text to set on the property
 #' @returns the property object for this entity
 set_prop = function(
   propname, propcode=NULL,propvalue=NULL,varkey=NULL,
diff --git a/R/TNC_IHA_Ports.R b/R/TNC_IHA_Ports.R
index b354e2f7..24593084 100644
--- a/R/TNC_IHA_Ports.R
+++ b/R/TNC_IHA_Ports.R
@@ -133,14 +133,14 @@ water.year <- function (x) {
 #'@references
 #'\url{http://www.nature.org/initiatives/freshwater/conservationtools/art17004.html}
 #' @examples
-#'# #Get data for NF Shenandoah Mount Jackson
-#'# flows <- dataRetrieval::readNWISdv("01633000",parameterCd = "00060")
-#'# flows <- dataRetrieval::renameNWISColumns(flows)
-#'# #Convert flows to zoo
-#'# flows_zoo <- zoo::as.zoo(x = flows$Flow)
-#'# zoo::index(flows_zoo) <- flows$Date
-#'# #Use group 1 to get the minimum monthly flows:
-#'# hydrotools::group1(flows_zoo,"water",FUN = min)
+#'#Get data for NF Shenandoah Mount Jackson
+#'flows <- dataRetrieval::readNWISdv("01633000",parameterCd = "00060")
+#'flows <- dataRetrieval::renameNWISColumns(flows)
+#'#Convert flows to zoo
+#'flows_zoo <- zoo::as.zoo(x = flows$Flow)
+#'zoo::index(flows_zoo) <- flows$Date
+#'#Use group 1 to get the minimum monthly flows:
+#'group1(flows_zoo,"water",FUN = min)
 #'@importFrom zoo index coredata is.zoo
 #'@importFrom lubridate year month
 #'@export
@@ -202,14 +202,14 @@ group1 <- function (
 #'@author jason.e.law@@gmail.com (imported to Hydrotools by Connor Brogan,connor.brogan@@deq.virginia.gov)
 #'@references \url{http://www.conservationgateway.org/Files/Pages/indicators-hydrologic-altaspx47.aspx}
 #'@examples
-#'# #Get data for NF Shenandoah Mount Jackson
-#'# flows <- dataRetrieval::readNWISdv("01633000",parameterCd = "00060")
-#'# flows <- dataRetrieval::renameNWISColumns(flows)
-#'# #Convert flows to zoo
-#'# flows_zoo <- zoo::as.zoo(x = flows$Flow)
-#'# zoo::index(flows_zoo) <- flows$Date
-#'# #Use group 2 to get critical period flows and stats:
-#'# hydrotools::group2(flows_zoo,"water",mimic.tnc = TRUE)
+#' #Get data for NF Shenandoah Mount Jackson
+#' flows <- dataRetrieval::readNWISdv("01633000",parameterCd = "00060")
+#' flows <- dataRetrieval::renameNWISColumns(flows)
+#' #Convert flows to zoo
+#' flows_zoo <- zoo::as.zoo(x = flows$Flow)
+#' zoo::index(flows_zoo) <- flows$Date
+#' #Use group 2 to get critical period flows and stats:
+#' group2(flows_zoo,"water",mimic.tnc = TRUE)
 #'@importFrom plyr ddply '.'
 #'@importFrom zoo coredata index
 #'@importFrom lubridate year
diff --git a/R/cia_utils.R b/R/cia_utils.R
index d0b09a3a..b098339b 100644
--- a/R/cia_utils.R
+++ b/R/cia_utils.R
@@ -10,7 +10,8 @@
 #' @export om_find_dh_elid
 om_find_dh_elid <- function(elid, ds) {
-  model_search_elid <- fn$sqldf(
+  model_search_elid <- sqldf(
+    paste0(
     "select a.name, a.hydrocode,
       CASE
         WHEN a.hydroid IS NULL THEN 'model'
@@ -36,8 +37,7 @@ om_find_dh_elid <- function(elid, ds) {
       and b.entity_type = 'dh_properties'
     )
     where e.propname = 'om_element_connection'
-      and e.propvalue = $elid
-    ",
+      and e.propvalue = ",elid),
     connection = ds$connection
   )
   if (nrow(model_search_elid) == 0) {
@@ -400,7 +400,7 @@ fn_ALL.upstream <- function(
   getAllSegmentsOnly = TRUE
 ){
   #AllSegList should be a data frame and must contain a river segment column:
-  if(class(AllSegList) != "data.frame" ||
+  if(!inherits(AllSegList, "data.frame") ||
      length(AllSegList$riverseg) == 0){
     stop("The input DATA.FRAME for AllSegList must contain a column with
          the name of \"riverseg\". This column MUST contain all river segments
@@ -431,7 +431,7 @@ fn_ALL.upstream <- function(
   #fn_upstream()
   if(!is.null(riv.seg) &&
      length(riv.seg) > 0 &&
-     class(riv.seg) == 'character'
+     inherits(riv.seg, 'character')
   ){
     #Get only river segments specified by user
     segDataFrame <- segDataFrame[segDataFrame$riverseg %in% riv.seg,]
@@ -609,7 +609,7 @@ fn_downstream <- function(riv.seg, AllSegList) {
   SegDownstream <- ModelSegments$Downstream[SegDownstream]
   SegDownstream <- strsplit(as.character(SegDownstream), "\\+")
   SegDownstream <- try(SegDownstream[[1]], silent = TRUE)
-  if (class(SegDownstream)=='try-error') {
+  if (inherits(SegDownstream, 'try-error')) {
     SegDownstream <- NA
   }
   return(SegDownstream)
@@ -679,38 +679,44 @@ om_ts_diff <- function(df1, df2, col1, col2, op = "<>") {
 # fn_iha_7q10
-#'
 #' @name fn_iha_7q10
 #' @title fn_iha_7q10
-#' @description provide the 7q10 from a given flow timeseries
-#' @param zoots a timeseries flormatted in zoo (required by IHA)
-#' @return singel numeric value for 7q10
-#' @import PearsonDS
+#' @description Calculate the 7Q10 from a flow timeseries
+#' @details
+#' This function was originally inspired by the Nature Conservancy's IHA package
+#' and relied on \code{group2()}. However, now this function is just a simple
+#' wrapper of \code{xQy()} to ensure consistency with the rest of DEQ. It takes
+#' in a data frame or zoo timeseries and outputs the 7Q10 as a single
+#' numeric value.
+#' @param zoots a timeseries formatted in zoo or a data frame that contains a
+#' date and flow column, to be specified by user
+#' @param flowColumnIn If a dataframe is provided to the function, this is the
+#' name of the column that contains the flow data for analysis. Ignored if a
+#' zoo timeseries was provided.
+#' @param dateColumnIn If a dataframe is provided to the function, this is the
+#' name of the column that contains the dates. Ignored if a zoo timeseries was
+#' provided.
+#' @return single numeric value for 7Q10
 #' @export fn_iha_7q10
-#' @examples NA
-#' @seealso NA
-fn_iha_7q10 <- function(zoots) {
-  g2 <- group2(zoots)
-  #print("Group 2, 7-day low flow results ")
-  #print(g2["7 Day Min"])
-  x <- as.vector(as.matrix(g2["7 Day Min"]))
-  # fudge 0 values
-  # correct for zeroes?? If so, use this loop:
-  # This is not an "approved" method - we need to see how the GS/other authorities handles this
-  for (k in 1:length(x)) {
-    if (x[k] <= 0) {
-      x[k] <- 0.00000001
-      print (paste("Found 0.0 average in year", g2["year"], sep = " "))
-    }
-  }
-  x <- log(x)
-  if (length(x) <= 1) {
-    return(exp(x[1]))
-  } else {
-    pars <- PearsonDS:::pearsonIIIfitML(x)
-    x7q10 <- exp(qpearsonIII(0.1, params = pars$par))
-    return(x7q10);
-  }
+#' @examples
+#' flows <- dataRetrieval::readNWISdv("01631000","00060")
+#' flows <- dataRetrieval::renameNWISColumns(flows)
+#' #Convert flows to zoo
+#' flows_zoo <- zoo::as.zoo(x = flows$Flow)
+#' zoo::index(flows_zoo) <- flows$Date
+#' fn_iha_7q10(flows_zoo)
+#' @seealso xQy
+fn_iha_7q10 <- function(zoots, flowColumnIn = "Flow", dateColumnIn = "Date") {
+  #Calculate critical low flows from the zoo time series
+  low_flows <- xQy(gageDataIn = zoots, flowColumn = flowColumnIn,
+                   dateColumn = dateColumnIn,
+                   AYS = "04-01", AYE = "03-31",
+                   startYear = NULL, endYear = NULL,
+                   x = 7, y = 10,
+                   IncludeSummerFlow = FALSE)
+  #Return the 7Q10
+  out_7Q10 <- low_flows$Flows$n7Q10
+  return(out_7Q10)
 }
@@ -732,7 +738,7 @@ fn_iha_mlf <- function(zoots, targetmo, q=0.5) {
   # calculates the 50th percentile - this is the August Low Flow
   # August Low Flow = median flow of the annual minimum flows in August for a chosen time period
   message("Performing quantile analysis")
-  x <- quantile(g1vec, q, na.rm = TRUE);
+  x <- stats::quantile(g1vec, q, na.rm = TRUE);
   return(as.numeric(x));
 }
@@ -755,7 +761,7 @@ fn_iha_flow_extreme <- function(flows, metric, stat='min', wyear_type='calendar'
   } else if (stat == 'max') {
     ndx = which.max(as.numeric(metric_flows[,metric]));
   } else if (stat == 'median') {
-    ndx = which(as.numeric(metric_flows[,metric]) == median(as.numeric(metric_flows[,metric])))
+    ndx = which(as.numeric(metric_flows[,metric]) == stats::median(as.numeric(metric_flows[,metric])))
   }
   metric_flows_Qout = round(g2flows[ndx,metric],6);
diff --git a/R/dHVariablePluginDefault.R b/R/dHVariablePluginDefault.R
index 7cb832df..8a834b9a 100644
--- a/R/dHVariablePluginDefault.R
+++ b/R/dHVariablePluginDefault.R
@@ -553,6 +553,8 @@ dHOMwaterSupplyModelNode <- R6Class(
       return(export)
     },
+    #' @param entity The object to find impoundment on
+    #' @return FALSE, not yet implemented
     get_impoundment = function(entity) {
       message("Warning: get_impoundment() not yet implemented.")
       # check for full model impoundment, sub-comp impoundment, or link to feature model
@@ -588,11 +590,17 @@ dHOMWaterSystemTieredFlowBy <- R6Class(
       return(export)
     },
+    #'@param l The list to convert to matrix
+    #'@return A matrix of values from the list, with one value per row
     list2matrix = function(l) {
       matrix_vals = data.frame(matrix(unlist(l), nrow = length(l), byrow = TRUE)) |>
         setNames(names(l[[1]]))
       return(matrix_vals)
     },
+    #'@param entity The object to store the translated JSON on via set_matrix()
+    #'@param om_json The JSON to translate to a matrix and set on entity
+    #'@return The entity, though the method is primarily designed to set
+    #'  values on the entity rather than return anything useful
     translateOMtoDH = function(entity, om_json) {
       matrix_vals = self$list2matrix(om_json$processors$flowby$rule_matrix$matrix_rowcol)
       entity$set_matrix(matrix_vals)
diff --git a/R/fac_utils.R b/R/fac_utils.R
index 0413401c..26d6bac3 100644
--- a/R/fac_utils.R
+++ b/R/fac_utils.R
@@ -44,7 +44,7 @@ om_flow_table <- function(df2sum, q_col = "Qout", mo_col = "month", rdigits = 1)
   for (i in index(month.abb)) {
     moname <- month.abb[i]
     drows <- df2sum[df2sum[,mo_col] == i,]
-    q_drows <- quantile(drows[,q_col], probs=c(0,0.05,0.1,0.25, 0.3, 0.5), na.rm=TRUE)
+    q_drows <- stats::quantile(drows[,q_col], probs=c(0,0.05,0.1,0.25, 0.3, 0.5), na.rm=TRUE)
     q_mean <- mean(drows[,q_col])
     intake_summary_tbl[i,'Min'] <- round(as.numeric(q_drows["0%"]),rdigits)
     intake_summary_tbl[i,"fivep"] <- round(as.numeric(q_drows["5%"]),rdigits)
diff --git a/R/imp_utils.R b/R/imp_utils.R
index 4861a788..b18f71c0 100644
--- a/R/imp_utils.R
+++ b/R/imp_utils.R
@@ -22,24 +22,24 @@ fn_plot_impoundment_flux <- function(
   ymn <- 1
   ymx <- 100
   # par(mar = c(5,5,2,5))
-  par(mar = c(8.8,5,0.5,5))
+  graphics::par(mar = c(8.8,5,0.5,5))
   plot(
     dat[,pur_col] * 100.0,
     ylim=c(ymn,ymx),
     ylab="Reservoir Usable Storage (%)",
     xlab=paste("Model Flow Period",min(index(dat)),"to",max(index(dat)))
   )
-  par(new = TRUE)
+  graphics::par(new = TRUE)
   plot(dat[,Qin_col],col='blue', axes=FALSE, xlab="", ylab="")
-  lines(dat[,Qout_col],col='green')
-  lines(dat[,wd_col] * 1.547,col='red')
+  graphics::lines(dat[,Qout_col],col='green')
+  graphics::lines(dat[,wd_col] * 1.547,col='red')
   graphics::axis(side = 4)
-  mtext(side = 4, line = 3, 'Flow/Demand (cfs)')
+  graphics::mtext(side = 4, line = 3, 'Flow/Demand (cfs)')
   if (legend_on == TRUE) {
-    legend("bottom",inset=-0.36, xpd=TRUE, c("Reservoir Usable Storage","Inflow","Outflow","Demand"),
-           col = c("black", "blue", "green","red"),
-           lty = c(1,1,1,1),
-           bg='white',cex=0.8)
+    graphics::legend("bottom",inset=-0.36, xpd=TRUE, c("Reservoir Usable Storage","Inflow","Outflow","Demand"),
+                     col = c("black", "blue", "green","red"),
+                     lty = c(1,1,1,1),
+                     bg='white',cex=0.8)
   }
 }
diff --git a/R/om_get_rundata.R b/R/om_get_rundata.R
index c0555bf6..7d553d69 100644
--- a/R/om_get_rundata.R
+++ b/R/om_get_rundata.R
@@ -47,7 +47,7 @@ om_get_rundata <- function(elid, runid, site='http://deq2.bse.vt.edu',
   edate <- as.POSIXct(edate,tz = "EST")
 
   #Get the window of interest from the timeseries
-  dat <- window(dat, start = sdate, end = edate);
+  dat <- stats::window(dat, start = sdate, end = edate);
 
   #Change mode of zoo to numeric e.g. Convert all fields to numeric
   mode(dat) <- 'numeric'
diff --git a/R/om_vahydro_metric_grid.R b/R/om_vahydro_metric_grid.R
index 779dbae5..ef3a14cf 100644
--- a/R/om_vahydro_metric_grid.R
+++ b/R/om_vahydro_metric_grid.R
@@ -64,7 +64,7 @@ om_vahydro_metric_grid <- function (
       httr::add_headers(HTTP_X_CSRF_TOKEN = token),
       encode = "xml", httr::content_type("text/csv")
     );
-    dat <- content(rawdat)
+    dat <- httr::content(rawdat)
   } else {
     if (ds$connection_type == 'odbc') {
       #message("om_vahydro_metric_grid() called using ODBC ")
@@ -73,7 +73,7 @@ om_vahydro_metric_grid <- function (
         message(prop_sql)
       }
       message(paste("retrieving via ODBC"))
-      dat <- dbGetQuery(conn = ds$connection, prop_sql)
+      dat <- DBI::dbGetQuery(conn = ds$connection, prop_sql)
       #message(paste("returned", nrow(dat),"rows"))
     } else {
       message(paste("retrieving ", url))
diff --git a/R/utils.R b/R/utils.R
index 176b92da..fd63fdc8 100644
--- a/R/utils.R
+++ b/R/utils.R
@@ -31,8 +31,8 @@ fn_get_rundata <- function(
   filename<-paste(urlbase, elementid, "&variables=", varname, "&runid=", runid, "&startdate=1984-10-01&enddate=2005-09-30", sep = "")
   message(paste("From ", filename));
-  dat = try(read.table(filename, header = TRUE, sep = ","))
-  if (class(dat)=='try-error') {
+  dat = try(utils::read.table(filename, header = TRUE, sep = ","))
+  if (inherits(dat, 'try-error')) {
     # what to do if file empty
     message(paste("Error: problem reading file ", filename))
     return (FALSE);
@@ -71,7 +71,7 @@ fn_remove_model_warmup <- function(dat) {
   #ensure there are associated timezones
   sdate <- as.POSIXct(sdate,tz = "EST")
   edate <- as.POSIXct(edate,tz = "EST")
-  dat <- window(dat, start = sdate, end = edate)
+  dat <- stats::window(dat, start = sdate, end = edate)
   return(dat)
 }
@@ -103,8 +103,8 @@ fn_get_runfile_info <- function(
   message(paste("Getting Info for run ", runid, " for element ", elementid))
   # creates the whole url by pasting the element and run ids into it
   filename<-paste(urlbase, elementid, "&runid=", runid, "&startdate=1984-10-01&enddate=2005-09-30", sep = "")
   message(paste("From ", filename))
-  finfo = try(read.csv(filename, header = TRUE, sep = "\t")) ;
-  if (class(finfo)=='try-error') {
+  finfo = try(utils::read.csv(filename, header = TRUE, sep = "\t")) ;
+  if (inherits(finfo, 'try-error')) {
     # what to do if file empty
     message(paste("Error: retrieving ", filename))
     return (FALSE);
@@ -162,8 +162,8 @@ fn_get_runfile <- function(
   # re-download if the remote is newer than the local
   if (finfo$compressed == 1) {
     message(paste("Downloading Compressed Run File ", filename));
-    drez <- try(download.file(filename,'tempfile',mode="wb", method = "libcurl"))
-    if ((drez == FALSE) | class(drez)=='try-error') {
+    drez <- try(utils::download.file(filename,'tempfile',mode="wb", method = "libcurl"))
+    if ((drez == FALSE) | inherits(drez, 'try-error')) {
       message(paste("Download for", filename, "failed."))
       return(FALSE)
     }
@@ -179,8 +179,8 @@ fn_get_runfile <- function(
   } else {
     # does not exist locally
     message(paste("Downloading Run File ", filename));
-    drez <- try(download.file(filename,'tempfile',mode="wb", method = "libcurl"))
-    if (is.logical(drez) | class(drez)=='try-error') {
+    drez <- try(utils::download.file(filename,'tempfile',mode="wb", method = "libcurl"))
+    if (is.logical(drez) | inherits(drez, 'try-error')) {
       message(paste("Download for", filename, "failed."))
       return(FALSE)
     }
@@ -189,8 +189,8 @@ fn_get_runfile <- function(
       filename <- utils::unzip ('tempfile');
     }
   }
-  dat = try(read.table( filename, header = TRUE, sep = ","))
-  if (is.logical(dat) | class(dat)=='try-error') {
+  dat = try(utils::read.table( filename, header = TRUE, sep = ","))
+  if (is.logical(dat) | inherits(dat, 'try-error')) {
     # what to do if file empty
     message(paste("Error: empty file ", filename))
     return (FALSE);
diff --git a/R/wshd_utils.R b/R/wshd_utils.R
index 0864390d..542c3e10 100644
--- a/R/wshd_utils.R
+++ b/R/wshd_utils.R
@@ -14,8 +14,8 @@
 #'@param da Numeric. The drainage area of your channel.
 #'@return A list with the bank full stage (h), bank full width (bf), base width
 #' (b), side slope (z), and mannings roughness (n)
-#'@example
-#'#usgs_bankfull_properties(prov = 1,da = 10)
+#'@examples
+#'usgs_bankfull_properties(prov = 1,da = 10)
 #'@export
 usgs_bankfull_properties <- function(prov, da) {
   #Provincial Channel Geometry
@@ -264,7 +264,7 @@ simple_wshed_map <- function(ds,wshdHydroid, findUpstream = FALSE,
   )))
   #point buffer, which may have x,y buffers
   if(!is.null(config$pointBuffer)){
-    if(class(config$pointBuffer) == "numeric" & length(config$pointBuffer) <= 2){
+    if(inherits(config$pointBuffer, "numeric") & length(config$pointBuffer) <= 2){
       pointLabelLocations <- sweep(pointLabelLocations,2,config$pointBuffer)
     }else if(
       !is.null(config$pointBuffer) &&
@@ -275,7 +275,7 @@ simple_wshed_map <- function(ds,wshdHydroid, findUpstream = FALSE,
   }
   #watershed buffer, which may be specific to each watershed or a generic buffer
   #vector
-  if(class(config$wshdBuffer) == "numeric" & length(config$wshdBuffer) <= 2){
+  if(inherits(config$wshdBuffer, "numeric") & length(config$wshdBuffer) <= 2){
     wshdLabelLocations <- sweep(wshdLabelLocations,2,config$wshdBuffer)
   }else if(
     !is.null(config$wshdBuffer) &&
diff --git a/README.md b/README.md
index 3d0b8abc..88f9072a 100644
--- a/README.md
+++ b/README.md
@@ -111,6 +111,8 @@ This package is in active development.
 ## Release notes
 ### 1.0.7 11/13/2025
 1. Fixed a variable name error in the `xQy()` function that was preventing the code from being able to handle non-standard flow data frames
+2. Enabled examples in `xQy()`, `group1()`, `group2()`, and `fn_iha_7q10()`
+3. Updated `fn_iha_7q10()` to use `xQy()` for consistency
 ### 1.0.6 10/06/2025
 1. Bug fix to variable plugin for timeseries file objects by setting the correct object class to send to OM.
diff --git a/man/RomEntity.Rd b/man/RomEntity.Rd
index 891e4add..e44d66c5 100644
--- a/man/RomEntity.Rd
+++ b/man/RomEntity.Rd
@@ -282,6 +282,8 @@ case of new prop creation)}
 \item{\code{data_matrix}}{dataframe contained rows/cols}
 
 \item{\code{remote}}{look at remote datasource?}
+
+\item{\code{proptext}}{text to set on the property}
 }
 \if{html}{\out{}}
 }
diff --git a/man/dHOMWaterSystemTieredFlowBy.Rd b/man/dHOMWaterSystemTieredFlowBy.Rd
index 02e61ad7..270c9aba 100644
--- a/man/dHOMWaterSystemTieredFlowBy.Rd
+++ b/man/dHOMWaterSystemTieredFlowBy.Rd
@@ -80,6 +80,16 @@ an updated config if necessary or FALSE if it fails
 \if{html}{\out{