LDC Walk Through
2019-09-09
1 Load Duration Curve R Code - Walk Through
1.1 Loading Calculations
The loading calculation function is called tmdlCalcs(). It requires a workbook with specific tabs and headers, as well as an argument indicating whether the output should be exported from the function.
We’ll define the parameters (and load the packages) in this bookdown so you can see the steps in real time.
The first step is to define the output object (a list of dataframes) and the helper equations needed.
calcs <- list()
## Calculation functions needed for plotting and assessment ##
perc.red <- function(x,y){100-x/y*100} # percent reduction equation where x = capacity and y = observed
flow_perc <- function(x){(1-percent_rank(x))*100} # gives each flow measurement a percent rank (what percentage of flows are higher than value?)
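As a quick sanity check (the numbers here are hypothetical, not part of the function), the helpers behave like this:
perc.red(50, 200) # a capacity of 50 against an observed load of 200 implies a 75% reduction
flow_perc(c(5, 10, 20, 40)) # 100.00 66.67 33.33 0.00 -- the largest flow gets the lowest exceedance percentile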
The next step is to read in the workbook and define the parameters contained in the input tab.
### Load the dataset from the workbook ###
wb.dat <- openxlsx::loadWorkbook(wb_path)
### Obtain criteria from specs.dat sheet or function inputs ###
if(!"Inputs"%in%wb.dat$sheet_names){stop("Workbook is missing 'Inputs' tab. Please refer to template for required tab contents/format.")}
specs.dat <- openxlsx::readWorkbook(wb.dat, sheet="Inputs",startRow=1)
specs.dat
## Parameter Value Aggregating.Function
## 1 Max Criterion 409.0 gmean
## 2 Geometric Mean Criterion 126.0 <NA>
## 3 Correction Factor 24465715.0 <NA>
## 4 Margin of Safety 0.1 <NA>
## 5 Rec Season Start 121.0 <NA>
## 6 Rec Season End 304.0 <NA>
## 7 Irrigation Season Start 135.0 <NA>
## 8 Irrigation Season End 288.0 <NA>
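Note that the season start and end values are day-of-year numbers, which yday() compares against sample dates later on. In a non-leap year, the rec season above runs from day 121 (May 1) through day 304 (October 31):
lubridate::yday(as.Date("2019-05-01")) # 121, the Rec Season Start above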
# Add to list
calcs$Inputs <- specs.dat
geom_crit = specs.dat[specs.dat$Parameter=="Geometric Mean Criterion","Value"]
max_crit = specs.dat[specs.dat$Parameter=="Max Criterion","Value"]
cf = specs.dat[specs.dat$Parameter=="Correction Factor","Value"]
mos = specs.dat[specs.dat$Parameter=="Margin of Safety","Value"]
rec_ssn = c(specs.dat[specs.dat$Parameter=="Rec Season Start","Value"],specs.dat[specs.dat$Parameter=="Rec Season End","Value"])
irg_ssn = c(specs.dat[specs.dat$Parameter=="Irrigation Season Start","Value"],specs.dat[specs.dat$Parameter=="Irrigation Season End","Value"])
aggFun = specs.dat[1,"Aggregating.Function"]
if(aggFun=="gmean"){
aggFun = function(x){exp(mean(log(x)))}
}
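# Note: the geometric mean damps extreme values relative to the arithmetic mean,
# e.g. mean(c(10, 20, 2000)) is ~676.7 while exp(mean(log(c(10, 20, 2000)))) is ~73.7,
# which is why gmean is typically used for bacteria data (example values are hypothetical)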
### Obtain site order from site_order sheet ###
if("Site_order"%in%wb.dat$sheet_names){
site.order = openxlsx::readWorkbook(wb.dat, sheet="Site_order", startRow = 1)
if(dim(site.order)[1]>0){
calcs$Site_order = site.order
head(site.order)
}
}
## Monitoring.Location.ID Order
## 1 4955310 1
## 2 4955330 2
## 3 4954380 3
## 4 4954390 4
## 5 4954482 5
## 6 4954480 6
# Load Param concentrations
param.dat <- openxlsx::readWorkbook(wb.dat,sheet="Param_data",startRow=1)
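# openxlsx reads Excel dates as serial numbers; origin 1899-12-30 converts them to R Dates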
param.dat$Activity.Start.Date <- as.Date(param.dat$Activity.Start.Date, origin="1899-12-30")
### CHECK THAT UNITS ARE CONSISTENT
if(length(unique(param.dat$Result.Unit))>1){
stop("Multiple units found in parameter data. Convert units before proceeding.")
}
# Add to list
calcs$Param_data <- param.dat
head(param.dat)
## Organization.ID Activity.ID Monitoring.Location.ID
## 1 UTAHDWQ_WQX FRTMDL121216-4954390-1212-4/2-E 4954390
## 2 UTAHDWQ_WQX FRTMDL121216-4954390-1212-4/2-E 4954390
## 3 UTAHDWQ_WQX FRTMDL121216-4954480-1212-4/2-E 4954480
## 4 UTAHDWQ_WQX FRTMDL121216-4954480-1212-4/2-E 4954480
## 5 UTAHDWQ_WQX FRTMDL102916-4954770-1029-4/2-E 4954770
## 6 UTAHDWQ_WQX FRTMDL102916-4954770-1029-4/2-E 4954770
## Monitoring.Location.Name
## 1 FREMONT RIVER AT SR-12 CROSSING AB CONFL W/ FISH CREEK
## 2 FREMONT RIVER AT SR-12 CROSSING AB CONFL W/ FISH CREEK
## 3 FREMONT RIVER AT CRNP CAMPGROUND AB ROAD XING AT FRUITA
## 4 FREMONT RIVER AT CRNP CAMPGROUND AB ROAD XING AT FRUITA
## 5 SULPHUR CREEK AB CONFL W/ FREMONT RIVER IN PICNIC AREA
## 6 SULPHUR CREEK AB CONFL W/ FREMONT RIVER IN PICNIC AREA
## Monitoring.Location.State Monitoring.Location.Latitude
## 1 Utah 38.26998
## 2 Utah 38.26998
## 3 Utah 38.28359
## 4 Utah 38.28359
## 5 Utah 38.28720
## 6 Utah 38.28720
## Monitoring.Location.Longitude Monitoring.Location.Type Activity.Type
## 1 -111.3755 River/Stream Sample-Routine
## 2 -111.3755 River/Stream Sample-Routine
## 3 -111.2479 River/Stream Sample-Routine
## 4 -111.2479 River/Stream Sample-Routine
## 5 -111.2477 River/Stream Sample-Routine
## 6 -111.2477 River/Stream Sample-Routine
## Activity.Start.Date Activity.Start.Time Project.ID1
## 1 2016-12-12 367.4583 302
## 2 2016-12-12 367.4583 302
## 3 2016-12-12 367.5069 302
## 4 2016-12-12 367.5069 302
## 5 2016-10-29 367.5347 302
## 6 2016-10-29 367.5347 302
## Activity.Conducting.Organization1 Relative.Depth Depth/Height
## 1 Division of Water Quality Surface NA
## 2 Division of Water Quality Surface NA
## 3 Division of Water Quality Surface NA
## 4 Division of Water Quality Surface NA
## 5 Division of Water Quality Surface NA
## 6 Division of Water Quality Surface NA
## Depth/Height.Unit Top.Depth/Height Top.Depth/Height.Unit
## 1 NA NA NA
## 2 NA NA NA
## 3 NA NA NA
## 4 NA NA NA
## 5 NA NA NA
## 6 NA NA NA
## Bottom.Depth/Height Bottom.Depth/Height.Measure.Unit Activity.Media
## 1 NA NA Water
## 2 NA NA Water
## 3 NA NA Water
## 4 NA NA Water
## 5 NA NA Water
## 6 NA NA Water
## Activity.Comment Sample.Collection.Method.ID
## 1 NA DWQ-001
## 2 NA DWQ-001
## 3 NA DWQ-001
## 4 NA DWQ-001
## 5 NA DWQ-001
## 6 NA DWQ-001
## Sample.Collection.Method.Context Sample.Collection.Method.Name
## 1 UTAHDWQ Water Grab Sampling
## 2 UTAHDWQ Water Grab Sampling
## 3 UTAHDWQ Water Grab Sampling
## 4 UTAHDWQ Water Grab Sampling
## 5 UTAHDWQ Water Grab Sampling
## 6 UTAHDWQ Water Grab Sampling
## Data.Logger.Line Characteristic.Name CAS.# Method.Speciation
## 1 NA Escherichia coli 68583-22-2 NA
## 2 NA Escherichia coli 68583-22-2 NA
## 3 NA Escherichia coli 68583-22-2 NA
## 4 NA Escherichia coli 68583-22-2 NA
## 5 NA Escherichia coli 68583-22-2 NA
## 6 NA Escherichia coli 68583-22-2 NA
## Sample.Fraction Detection.Condition Result.Qualifier Result.Value
## 1 NA <NA> <NA> 51.2
## 2 NA <NA> <NA> 51.2
## 3 NA <NA> <NA> 52.9
## 4 NA <NA> <NA> 52.9
## 5 NA <NA> <NA> 20.1
## 6 NA <NA> <NA> 20.1
## Result.Unit Result.Value.Type Result.Status Time.Basis Statistical.Base
## 1 MPN/100ml Calculated Accepted NA NA
## 2 MPN/100ml Calculated Accepted NA NA
## 3 MPN/100ml Calculated Accepted NA NA
## 4 MPN/100ml Calculated Accepted NA NA
## 5 MPN/100ml Calculated Accepted NA NA
## 6 MPN/100ml Calculated Accepted NA NA
## Statistic.N.Value Result.Comment Substance.Dilution.Factor1
## 1 NA NA 1
## 2 NA NA 1
## 3 NA NA 1
## 4 NA NA 1
## 5 NA NA 1
## 6 NA NA 1
## Laboratory.Comment.Code Detection.Limit.Type1 Detection.Limit.Value1
## 1 NA <NA> NA
## 2 NA <NA> NA
## 3 NA <NA> NA
## 4 NA <NA> NA
## 5 NA <NA> NA
## 6 NA <NA> NA
## Detection.Limit.Unit1 Detection.Limit.Type2 Detection.Limit.Value2
## 1 <NA> NA NA
## 2 <NA> NA NA
## 3 <NA> NA NA
## 4 <NA> NA NA
## 5 <NA> NA NA
## 6 <NA> NA NA
## Detection.Limit.Unit2 Precision Bias Confidence.Interval
## 1 NA NA NA NA
## 2 NA NA NA NA
## 3 NA NA NA NA
## 4 NA NA NA NA
## 5 NA NA NA NA
## 6 NA NA NA NA
## Upper.Confidence.Limit Lower.Confidence.Limit Analytical.Method.ID
## 1 NA NA COLILERT
## 2 NA NA COLILERT
## 3 NA NA COLILERT
## 4 NA NA COLILERT
## 5 NA NA COLILERT
## 6 NA NA COLILERT
## Analytical.Method.Context
## 1 IDEXX
## 2 IDEXX
## 3 IDEXX
## 4 IDEXX
## 5 IDEXX
## 6 IDEXX
## Analytical.Method.Name Laboratory.Name
## 1 Coliform/E. coli Enzyme substrate test; ONPG-MUG test NA
## 2 Coliform/E. coli Enzyme substrate test; ONPG-MUG test NA
## 3 Coliform/E. coli Enzyme substrate test; ONPG-MUG test NA
## 4 Coliform/E. coli Enzyme substrate test; ONPG-MUG test NA
## 5 Coliform/E. coli Enzyme substrate test; ONPG-MUG test NA
## 6 Coliform/E. coli Enzyme substrate test; ONPG-MUG test NA
## Analysis.Start.Date
## 1 NA
## 2 NA
## 3 NA
## 4 NA
## 5 NA
## 6 NA
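### Flow data are optional: the flo.dat flag below records whether a Flow_data tab exists so the loading calculations can be skipped when it is absent ###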
if("Flow_data"%in%wb.dat$sheet_names){
flow.dat <- openxlsx::readWorkbook(wb.dat, sheet="Flow_data", startRow=1)
flow.dat$Activity.Start.Date <- as.Date(flow.dat$Activity.Start.Date, origin="1899-12-30")
if(length(unique(flow.dat$Result.Unit))>1){
stop("Multiple units found in flow data. Convert units before proceeding.")
}
flo.dat = TRUE
head(flow.dat)
}else{flo.dat=FALSE}
## Organization.ID Activity.ID Monitoring.Location.ID
## 1 UTAHDWQ_WQX CBI041513-4954360-0424-4-D 4954360
## 2 UTAHDWQ_WQX CBI082113-4954360-0828-4-D 4954360
## 3 UTAHDWQ_WQX NPSETAL060514-4954770-0611-4-D 4954770
## 4 UTAHDWQ_WQX NPSETAL051115-4954770-0512-4-D 4954770
## 5 UTAHDWQ_WQX NPSETAL060516-4954770-0614-4-D 4954770
## 6 UTAHDWQ_WQX NPSETAL041116-4954770-0411-4-D 4954770
## Monitoring.Location.Name
## 1 FREMONT R AT HICKMAN BRIDGE TRAILHEAD
## 2 FREMONT R AT HICKMAN BRIDGE TRAILHEAD
## 3 SULPHUR CREEK AB CONFL W/ FREMONT RIVER IN PICNIC AREA
## 4 SULPHUR CREEK AB CONFL W/ FREMONT RIVER IN PICNIC AREA
## 5 SULPHUR CREEK AB CONFL W/ FREMONT RIVER IN PICNIC AREA
## 6 SULPHUR CREEK AB CONFL W/ FREMONT RIVER IN PICNIC AREA
## Monitoring.Location.State Monitoring.Location.Latitude
## 1 Utah 38.28859
## 2 Utah 38.28859
## 3 Utah 38.28720
## 4 Utah 38.28720
## 5 Utah 38.28720
## 6 Utah 38.28720
## Monitoring.Location.Longitude Monitoring.Location.Type Activity.Type
## 1 -111.2352 River/Stream Field Msr/Obs
## 2 -111.2352 River/Stream Field Msr/Obs
## 3 -111.2477 River/Stream Field Msr/Obs
## 4 -111.2477 River/Stream Field Msr/Obs
## 5 -111.2477 River/Stream Field Msr/Obs
## 6 -111.2477 River/Stream Field Msr/Obs
## Activity.Start.Date Activity.Start.Time Project.ID1
## 1 2013-04-24 367.5889 856
## 2 2013-08-28 367.4229 302
## 3 2014-06-11 367.4792 306
## 4 2015-05-12 367.6354 306
## 5 2016-06-14 367.5312 306
## 6 2016-04-11 367.5486 306
## Activity.Conducting.Organization1 Relative.Depth Depth/Height
## 1 Division of Water Quality Surface NA
## 2 Division of Water Quality Surface NA
## 3 National Park Service Surface NA
## 4 National Park Service <NA> NA
## 5 National Park Service Surface NA
## 6 National Park Service Surface NA
## Depth/Height.Unit Top.Depth/Height Top.Depth/Height.Unit
## 1 NA NA NA
## 2 NA NA NA
## 3 NA NA NA
## 4 NA NA NA
## 5 NA NA NA
## 6 NA NA NA
## Bottom.Depth/Height Bottom.Depth/Height.Measure.Unit Activity.Media
## 1 NA NA Water
## 2 NA NA Water
## 3 NA NA Water
## 4 NA NA Water
## 5 NA NA Water
## 6 NA NA Water
## Activity.Comment Sample.Collection.Method.ID
## 1 <NA> DWQ-001
## 2 <NA> DWQ-001
## 3 <NA> DWQ-001
## 4 <NA> DWQ-001
## 5 <NA> DWQ-001
## 6 <NA> DWQ-001
## Sample.Collection.Method.Context Sample.Collection.Method.Name
## 1 UTAHDWQ Water Grab Sampling
## 2 UTAHDWQ Water Grab Sampling
## 3 UTAHDWQ Water Grab Sampling
## 4 UTAHDWQ Water Grab Sampling
## 5 UTAHDWQ Water Grab Sampling
## 6 UTAHDWQ Water Grab Sampling
## Data.Logger.Line Characteristic.Name CAS.# Method.Speciation
## 1 NA Flow NA NA
## 2 NA Flow NA NA
## 3 NA Flow NA NA
## 4 NA Flow NA NA
## 5 NA Flow NA NA
## 6 NA Flow NA NA
## Sample.Fraction Detection.Condition Result.Qualifier Result.Value
## 1 NA NA NA 46.7
## 2 NA NA NA 74.4
## 3 NA NA NA 7.72
## 4 NA NA NA 8.33
## 5 NA NA NA 9.01
## 6 NA NA NA 5.86
## Result.Unit Result.Value.Type Result.Status Time.Basis Statistical.Base
## 1 ft3/sec Actual Accepted NA NA
## 2 ft3/sec Actual Accepted NA NA
## 3 ft3/sec Actual Accepted NA NA
## 4 ft3/sec Actual Accepted NA NA
## 5 ft3/sec Actual Accepted NA NA
## 6 ft3/sec Actual Accepted NA NA
## Statistic.N.Value Result.Comment Substance.Dilution.Factor1
## 1 NA NA NA
## 2 NA NA NA
## 3 NA NA NA
## 4 NA NA NA
## 5 NA NA NA
## 6 NA NA NA
## Laboratory.Comment.Code Detection.Limit.Type1 Detection.Limit.Value1
## 1 NA NA NA
## 2 NA NA NA
## 3 NA NA NA
## 4 NA NA NA
## 5 NA NA NA
## 6 NA NA NA
## Detection.Limit.Unit1 Detection.Limit.Type2 Detection.Limit.Value2
## 1 NA NA NA
## 2 NA NA NA
## 3 NA NA NA
## 4 NA NA NA
## 5 NA NA NA
## 6 NA NA NA
## Detection.Limit.Unit2 Precision Bias Confidence.Interval
## 1 NA NA NA NA
## 2 NA NA NA NA
## 3 NA NA NA NA
## 4 NA NA NA NA
## 5 NA NA NA NA
## 6 NA NA NA NA
## Upper.Confidence.Limit Lower.Confidence.Limit Analytical.Method.ID
## 1 NA NA FIELD MEASURES
## 2 NA NA FIELD MEASURES
## 3 NA NA FIELD MEASURES
## 4 NA NA FIELD MEASURES
## 5 NA NA FIELD MEASURES
## 6 NA NA FIELD MEASURES
## Analytical.Method.Context Analytical.Method.Name
## 1 UTAHDWQ_WQX Field Measurements performed by Utah DWQ
## 2 UTAHDWQ_WQX Field Measurements performed by Utah DWQ
## 3 UTAHDWQ_WQX Field Measurements performed by Utah DWQ
## 4 UTAHDWQ_WQX Field Measurements performed by Utah DWQ
## 5 UTAHDWQ_WQX Field Measurements performed by Utah DWQ
## 6 UTAHDWQ_WQX Field Measurements performed by Utah DWQ
## Laboratory.Name Analysis.Start.Date
## 1 NA NA
## 2 NA NA
## 3 NA NA
## 4 NA NA
## 5 NA NA
## 6 NA NA
Then, we make some substitutions to ensure character results are converted to numeric for calculations, and rename columns so that flow columns and concentration columns remain separate when merging the dataframes together.
### Convert censored results ("<1" and ">2419.6") to numeric detection limit values
param.dat$Result.Value=gsub("<1",1,param.dat$Result.Value)
param.dat$Result.Value=gsub(">2419.6",2420,param.dat$Result.Value)
param.dat$Result.Value = as.numeric(param.dat$Result.Value)
#param.dat = param.dat[!is.na(param.dat$Result.Value),]
names(param.dat)[names(param.dat)=="Characteristic.Name"] = "Parameter.Name"
names(param.dat)[names(param.dat)=="Result.Value"] = "Parameter.Value"
names(param.dat)[names(param.dat)=="Result.Unit"] = "Parameter.Unit"
In AWQMS, when a concentration falls below or above the detection limits, the result value is reported in another column (Detection.Limit.Value1), so we substitute the detection limits into the parameter value column.
# Fill in concentrations present at detection limits
param.dat$Parameter.Value = ifelse(is.na(param.dat$Parameter.Value),param.dat$Detection.Limit.Value1,param.dat$Parameter.Value)
Then, we use the aggregating function supplied in the Inputs tab to aggregate to daily values by site, date, and parameter.
param.day.mean <- aggregate(Parameter.Value~Activity.Start.Date+Monitoring.Location.ID+Monitoring.Location.Name+Parameter.Name+Parameter.Unit, data=param.dat, FUN=aggFun)
names(param.day.mean)[names(param.day.mean)=="Parameter.Value"] <- "Parameter.Value_Mean"
head(param.day.mean)
## Activity.Start.Date Monitoring.Location.ID
## 1 2016-10-29 4954680
## 2 2016-12-12 4954680
## 3 2017-01-11 4954680
## 4 2017-03-23 4954680
## 5 2017-04-27 4954680
## 6 2017-05-25 4954680
## Monitoring.Location.Name Parameter.Name Parameter.Unit
## 1 FISH CK @ COUNTY RD XING TO TEASDALE Escherichia coli MPN/100ml
## 2 FISH CK @ COUNTY RD XING TO TEASDALE Escherichia coli MPN/100ml
## 3 FISH CK @ COUNTY RD XING TO TEASDALE Escherichia coli MPN/100ml
## 4 FISH CK @ COUNTY RD XING TO TEASDALE Escherichia coli MPN/100ml
## 5 FISH CK @ COUNTY RD XING TO TEASDALE Escherichia coli MPN/100ml
## 6 FISH CK @ COUNTY RD XING TO TEASDALE Escherichia coli MPN/100ml
## Parameter.Value_Mean
## 1 706.852799
## 2 30.106312
## 3 49.483331
## 4 255.666247
## 5 6.873864
## 6 4.014972
The next steps determine which concentrations fall within each calendar season, recreation season, and irrigation season.
# Determine calendar season for LDC - taken from https://stackoverflow.com/questions/9500114/find-which-season-a-particular-date-belongs-to
getSeason <- function(DATES) {
WS <- as.Date("2012-12-15", format = "%Y-%m-%d") # Winter Solstice
SE <- as.Date("2012-3-15", format = "%Y-%m-%d") # Spring Equinox
SS <- as.Date("2012-6-15", format = "%Y-%m-%d") # Summer Solstice
FE <- as.Date("2012-9-15", format = "%Y-%m-%d") # Fall Equinox
# Convert dates from any year to 2012 dates
d <- as.Date(strftime(DATES, format="2012-%m-%d"))
ifelse (d >= WS | d < SE, "Winter",
ifelse (d >= SE & d < SS, "Spring",
ifelse (d >= SS & d < FE, "Summer", "Fall")))}
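# e.g. getSeason(as.Date(c("2019-01-15", "2019-07-04"))) returns "Winter" "Summer"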
param.day.mean$CalSeason <- getSeason(param.day.mean$Activity.Start.Date)
# Determine if each point falls within the rec season
#rec_ssn1 = yday(as.Date(rec_ssn, "%m-%d"))
param.day.mean$Rec_Season = ifelse(yday(param.day.mean$Activity.Start.Date)>=rec_ssn[1]&yday(param.day.mean$Activity.Start.Date)<=rec_ssn[2],"Rec Season","Not Rec Season")
# Determine if each point falls within the irrigation season
#irg_ssn1 = yday(as.Date(irg_ssn, "%m-%d"))
param.day.mean$Irg_Season = ifelse(yday(param.day.mean$Activity.Start.Date)>=irg_ssn[1]&yday(param.day.mean$Activity.Start.Date)<=irg_ssn[2],"Irrigation Season","Not Irrigation Season")
# Add to list
calcs$Daily_Mean_Data <- param.day.mean
head(param.day.mean)
## Activity.Start.Date Monitoring.Location.ID
## 1 2016-10-29 4954680
## 2 2016-12-12 4954680
## 3 2017-01-11 4954680
## 4 2017-03-23 4954680
## 5 2017-04-27 4954680
## 6 2017-05-25 4954680
## Monitoring.Location.Name Parameter.Name Parameter.Unit
## 1 FISH CK @ COUNTY RD XING TO TEASDALE Escherichia coli MPN/100ml
## 2 FISH CK @ COUNTY RD XING TO TEASDALE Escherichia coli MPN/100ml
## 3 FISH CK @ COUNTY RD XING TO TEASDALE Escherichia coli MPN/100ml
## 4 FISH CK @ COUNTY RD XING TO TEASDALE Escherichia coli MPN/100ml
## 5 FISH CK @ COUNTY RD XING TO TEASDALE Escherichia coli MPN/100ml
## 6 FISH CK @ COUNTY RD XING TO TEASDALE Escherichia coli MPN/100ml
## Parameter.Value_Mean CalSeason Rec_Season Irg_Season
## 1 706.852799 Fall Rec Season Not Irrigation Season
## 2 30.106312 Fall Not Rec Season Not Irrigation Season
## 3 49.483331 Winter Not Rec Season Not Irrigation Season
## 4 255.666247 Spring Not Rec Season Not Irrigation Season
## 5 6.873864 Spring Not Rec Season Not Irrigation Season
## 6 4.014972 Spring Rec Season Irrigation Season
Finally, the heavy lifting begins with the loading calculations (performed only when flow data are present). After the function creates the loading dataset, it aggregates loading (usually via means) by month (across ALL data) and by each season. A sample count (Ncount) for each site is also calculated by month, rec season, and irrigation season.
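Before stepping through the code, it helps to unpack the TMDL equation used below. The correction factor from the Inputs tab (24,465,715) converts flow in ft3/sec times concentration in MPN/100ml into MPN/day: 1 ft3 is 28,316.8 ml, so (28,316.8 ml / 100 ml) * 86,400 sec/day = 24,465,715. A quick worked example (the 10 cfs flow is made up; the criterion and margin of safety come from the Inputs tab):
# Hypothetical 10 cfs flow at the 126 MPN/100ml geometric mean criterion,
# less the 10% margin of safety, expressed in GigaMPN/day
(10 * 126 * 24465715) * (1 - 0.1) / 1e9 # ~27.7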
if(flo.dat){
# Add to list
calcs$Flow_data <- flow.dat
flow.dat = flow.dat[,c("Activity.Start.Date","Monitoring.Location.ID","Characteristic.Name","Result.Value","Result.Unit")]
names(flow.dat)[names(flow.dat)=="Result.Value"] = "Flow.Value"
flow.dat$Flow.Value = as.numeric(flow.dat$Flow.Value)
names(flow.dat)[names(flow.dat)=="Result.Unit"] = "Flow.Unit"
flow.dat.mean <- aggregate(Flow.Value~Activity.Start.Date+Monitoring.Location.ID+Characteristic.Name+Flow.Unit, data=flow.dat, FUN=mean)
## Create loading dataset
param.flow.dat <- merge(flow.dat.mean,param.day.mean, all.x=TRUE)
head(param.flow.dat)
param.flow.dat$TMDL <- ((param.flow.dat$Flow.Value*geom_crit*cf)*(1-mos))/1000000000
units = sub("\\/.*", "", param.day.mean$Parameter.Unit[1])
param.flow.dat$Units = paste0("Giga",units,"/day")
param.flow.dat$Observed_Loading <- (param.flow.dat$Flow.Value*param.flow.dat$Parameter.Value_Mean*cf)/1000000000
param.flow.dat$Exceeds <- ifelse(param.flow.dat$Observed_Loading>param.flow.dat$TMDL,"yes","no")
# Flow Percentile calc function
ldc_func <- function(x){
x$Flow_Percentile = flow_perc(x$Flow.Value)
out = x
return(out)
}
# Apply percentile calc to data
ldc.dat <- plyr::ddply(.data=param.flow.dat, .(Monitoring.Location.ID), .fun=ldc_func)
# Add to list
calcs$LDC_Data <- ldc.dat
head(ldc.dat)
# Continue forward with loadings only for records with Result.Value concentrations
param.ldc <- ldc.dat[!is.na(ldc.dat$Parameter.Value_Mean),]
head(param.ldc)
## Loading by month ##
param.ldc$month <- month(param.ldc$Activity.Start.Date, label=TRUE)
ol_mo <- aggregate(Observed_Loading~month+Monitoring.Location.ID, dat=param.ldc, FUN=aggFun)
tmdl_mo <- aggregate(TMDL~month+Monitoring.Location.ID, dat=param.ldc, FUN=aggFun)
n_mo <- aggregate(TMDL~month+Monitoring.Location.ID, dat = param.ldc, FUN=length)
names(n_mo)[names(n_mo)=="TMDL"]<- "Ncount_mo_L"
mo_load <- merge(ol_mo,tmdl_mo, all=TRUE)
mo_load <- merge(mo_load, n_mo, all=TRUE)
mo_load <- mo_load[order(mo_load$month),]
mo_load$Percent_Reduction_L <- ifelse(mo_load$Observed_Loading>mo_load$TMDL,round(perc.red(mo_load$TMDL,mo_load$Observed_Loading), digits=0),0)
mo_load
## Loading by rec season ##
param.ldc$Year = year(param.ldc$Activity.Start.Date)
ol_rec <- aggregate(Observed_Loading~Year+Monitoring.Location.ID+Rec_Season, dat=param.ldc, FUN=aggFun)
tmdl_rec <- aggregate(TMDL~Year+Monitoring.Location.ID+Rec_Season, dat=param.ldc, FUN=aggFun)
n_rec <- aggregate(TMDL~Year+Monitoring.Location.ID+Rec_Season, dat = param.ldc, FUN=length)
names(n_rec)[names(n_rec)=="TMDL"]<- "Ncount_rec_L"
rec_load <- merge(ol_rec,tmdl_rec, all=TRUE)
rec_load <- merge(rec_load, n_rec, all= TRUE)
rec_load$Percent_Reduction_L <- ifelse(rec_load$Observed_Loading>rec_load$TMDL,round(perc.red(rec_load$TMDL,rec_load$Observed_Loading), digits=0),0)
rec_load
## Loading by irrigation season ##
ol_irg <- aggregate(Observed_Loading~Year+Monitoring.Location.ID+Irg_Season, dat=param.ldc, FUN=aggFun)
tmdl_irg <- aggregate(TMDL~Year+Monitoring.Location.ID+Irg_Season, dat=param.ldc, FUN=aggFun)
n_irg <- aggregate(TMDL~Year+Monitoring.Location.ID+Irg_Season, dat = param.ldc, FUN=length)
names(n_irg)[names(n_irg)=="TMDL"]<- "Ncount_irg_L"
irg_load <- merge(ol_irg,tmdl_irg, all=TRUE)
irg_load <- merge(irg_load, n_irg, all=TRUE)
irg_load$Percent_Reduction_L <- ifelse(irg_load$Observed_Loading>irg_load$TMDL,round(perc.red(irg_load$TMDL,irg_load$Observed_Loading), digits=0),0)
}else{mo_load = data.frame(Monitoring.Location.ID=unique(param.day.mean$Monitoring.Location.ID))
rec_load = data.frame(Monitoring.Location.ID=unique(param.day.mean$Monitoring.Location.ID))
irg_load = data.frame(Monitoring.Location.ID=unique(param.day.mean$Monitoring.Location.ID))}
I wanted to make this function flexible enough to take data with and without flow, so the next section of the function calculates monthly means, rec/non-rec means, and irrigation/non-irrigation means on the concentration data.
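As a concrete check of the percent reduction calculation, the April geometric mean at site 4955330 in the output below (~369 MPN/100ml) exceeds the 126 MPN/100ml criterion, so:
round(perc.red(126, 369.19)) # 66, matching Percent_Reduction_C for that row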
## Concentration by month ##
param.day.mean$month <- lubridate::month(param.day.mean$Activity.Start.Date, label=TRUE)
concen_mo <- aggregate(Parameter.Value_Mean~month+Monitoring.Location.ID, dat=param.day.mean, FUN=aggFun)
concen_mo_n <- aggregate(Parameter.Value_Mean~month+Monitoring.Location.ID, dat=param.day.mean, FUN=length)
names(concen_mo_n)[names(concen_mo_n)=="Parameter.Value_Mean"] <- "Ncount_mo_C"
concen_mo = merge(concen_mo, concen_mo_n, all=TRUE)
concen_mo$Percent_Reduction_C <- ifelse(concen_mo$Parameter.Value_Mean>geom_crit,round(perc.red(geom_crit,concen_mo$Parameter.Value_Mean), digits=0),0)
concen_mo
## month Monitoring.Location.ID Parameter.Value_Mean Ncount_mo_C
## 1 Apr 4954360 20.966991 2
## 2 Apr 4954380 170.042305 2
## 3 Apr 4954390 51.571737 2
## 4 Apr 4954480 14.181071 2
## 5 Apr 4954482 16.865010 2
## 6 Apr 4954680 3.121760 2
## 7 Apr 4954770 28.157992 3
## 8 Apr 4954772 107.680333 2
## 9 Apr 4955310 127.948034 2
## 10 Apr 4955330 369.193277 2
## 11 Aug 4954360 317.281736 1
## 12 Aug 4954380 125.449193 1
## 13 Aug 4954390 233.311873 1
## 14 Aug 4954480 165.185350 1
## 15 Aug 4954482 67.952042 1
## 16 Aug 4954680 40.516416 1
## 17 Aug 4954770 181.847500 2
## 18 Aug 4954772 97.791615 1
## 19 Aug 4955310 547.500000 1
## 20 Aug 4955330 96.159451 1
## 21 Dec 4954360 21.646917 2
## 22 Dec 4954380 28.027733 2
## 23 Dec 4954390 26.633883 2
## 24 Dec 4954480 25.488152 2
## 25 Dec 4954482 22.782952 2
## 26 Dec 4954680 6.541338 2
## 27 Dec 4954770 16.750983 2
## 28 Dec 4954772 26.231547 2
## 29 Dec 4955310 238.799406 2
## 30 Dec 4955330 56.031652 2
## 31 Feb 4954360 35.219182 1
## 32 Feb 4954380 13.759437 1
## 33 Feb 4954390 27.832844 1
## 34 Feb 4954480 31.843657 1
## 35 Feb 4954482 31.943178 1
## 36 Feb 4954680 1.000000 1
## 37 Feb 4954770 2.486202 1
## 38 Feb 4954772 2.480040 1
## 39 Feb 4955310 117.582483 1
## 40 Feb 4955330 112.926607 1
## 41 Jan 4954360 18.291263 2
## 42 Jan 4954380 42.210978 2
## 43 Jan 4954390 22.715387 2
## 44 Jan 4954480 29.081330 2
## 45 Jan 4954482 29.882240 2
## 46 Jan 4954680 9.303788 2
## 47 Jan 4954770 9.938657 2
## 48 Jan 4954772 28.602744 2
## 49 Jan 4955310 293.273052 2
## 50 Jan 4955330 177.532043 2
## 51 Jul 4954360 479.598200 2
## 52 Jul 4954380 356.117757 2
## 53 Jul 4954390 1220.698584 2
## 54 Jul 4954480 483.086599 2
## 55 Jul 4954482 573.439904 2
## 56 Jul 4954680 154.526731 2
## 57 Jul 4954770 207.602690 3
## 58 Jul 4954772 120.676082 2
## 59 Jul 4955310 1595.930659 2
## 60 Jul 4955330 1373.768681 2
## 61 Jun 4954360 145.881060 2
## 62 Jun 4954380 234.323704 2
## 63 Jun 4954390 224.485557 2
## 64 Jun 4954480 177.820849 2
## 65 Jun 4954482 81.520879 2
## 66 Jun 4954680 22.712134 2
## 67 Jun 4954770 334.741907 3
## 68 Jun 4954772 16.734573 2
## 69 Jun 4955310 906.907569 2
## 70 Jun 4955330 1012.779953 1
## 71 Mar 4954360 40.330491 2
## 72 Mar 4954380 123.984783 2
## 73 Mar 4954390 62.000722 2
## 74 Mar 4954480 22.512135 2
## 75 Mar 4954482 51.038278 2
## 76 Mar 4954680 30.147727 2
## 77 Mar 4954770 16.280452 2
## 78 Mar 4954772 23.857038 2
## 79 Mar 4955310 159.434945 2
## 80 Mar 4955330 620.248501 2
## 81 May 4954360 33.965495 2
## 82 May 4954380 116.845018 2
## 83 May 4954390 65.508336 2
## 84 May 4954480 10.816740 2
## 85 May 4954482 8.311468 2
## 86 May 4954680 2.003739 2
## 87 May 4954770 55.573618 3
## 88 May 4954772 33.756613 2
## 89 May 4955310 42.272456 2
## 90 May 4955330 68.481464 2
## 91 Nov 4954360 52.385907 1
## 92 Nov 4954380 84.742471 1
## 93 Nov 4954390 78.453580 1
## 94 Nov 4954480 55.009795 1
## 95 Nov 4954482 51.605506 1
## 96 Nov 4954680 1.749286 1
## 97 Nov 4954770 79.633695 2
## 98 Nov 4954772 98.528651 1
## 99 Nov 4955310 325.663986 1
## 100 Nov 4955330 79.404220 1
## 101 Oct 4954360 65.817932 2
## 102 Oct 4954380 52.258838 2
## 103 Oct 4954390 64.506835 2
## 104 Oct 4954480 64.202463 2
## 105 Oct 4954482 59.230708 2
## 106 Oct 4954680 59.273278 2
## 107 Oct 4954770 53.600544 3
## 108 Oct 4954772 42.983857 2
## 109 Oct 4955310 278.757348 2
## 110 Oct 4955330 266.591947 2
## 111 Sep 4954360 112.618826 1
## 112 Sep 4954380 115.730981 1
## 113 Sep 4954390 123.652578 1
## 114 Sep 4954480 74.649581 1
## 115 Sep 4954482 86.540742 1
## 116 Sep 4954680 75.217019 1
## 117 Sep 4954770 152.103681 2
## 118 Sep 4954772 219.443615 1
## 119 Sep 4955310 1052.615143 1
## 120 Sep 4955330 112.837937 1
## Percent_Reduction_C
## 1 0
## 2 26
## 3 0
## 4 0
## 5 0
## 6 0
## 7 0
## 8 0
## 9 2
## 10 66
## 11 60
## 12 0
## 13 46
## 14 24
## 15 0
## 16 0
## 17 31
## 18 0
## 19 77
## 20 0
## 21 0
## 22 0
## 23 0
## 24 0
## 25 0
## 26 0
## 27 0
## 28 0
## 29 47
## 30 0
## 31 0
## 32 0
## 33 0
## 34 0
## 35 0
## 36 0
## 37 0
## 38 0
## 39 0
## 40 0
## 41 0
## 42 0
## 43 0
## 44 0
## 45 0
## 46 0
## 47 0
## 48 0
## 49 57
## 50 29
## 51 74
## 52 65
## 53 90
## 54 74
## 55 78
## 56 18
## 57 39
## 58 0
## 59 92
## 60 91
## 61 14
## 62 46
## 63 44
## 64 29
## 65 0
## 66 0
## 67 62
## 68 0
## 69 86
## 70 88
## 71 0
## 72 0
## 73 0
## 74 0
## 75 0
## 76 0
## 77 0
## 78 0
## 79 21
## 80 80
## 81 0
## 82 0
## 83 0
## 84 0
## 85 0
## 86 0
## 87 0
## 88 0
## 89 0
## 90 0
## 91 0
## 92 0
## 93 0
## 94 0
## 95 0
## 96 0
## 97 0
## 98 0
## 99 61
## 100 0
## 101 0
## 102 0
## 103 0
## 104 0
## 105 0
## 106 0
## 107 0
## 108 0
## 109 55
## 110 53
## 111 0
## 112 0
## 113 0
## 114 0
## 115 0
## 116 0
## 117 17
## 118 43
## 119 88
## 120 0
## Concentration by rec season ##
param.day.mean$Year <- lubridate::year(param.day.mean$Activity.Start.Date)
concen_rec <- aggregate(Parameter.Value_Mean~Rec_Season+Monitoring.Location.ID+Year, dat=param.day.mean, FUN=aggFun)
concen_rec_n <- aggregate(Parameter.Value_Mean~Rec_Season+Monitoring.Location.ID+Year, dat=param.day.mean, FUN=length)
names(concen_rec_n)[names(concen_rec_n)=="Parameter.Value_Mean"] <- "Ncount_rec_C"
concen_rec = merge(concen_rec, concen_rec_n, all=TRUE)
concen_rec$Percent_Reduction_C <- ifelse(concen_rec$Parameter.Value_Mean>geom_crit,round(perc.red(geom_crit,concen_rec$Parameter.Value_Mean), digits=0),0)
concen_rec
## Rec_Season Monitoring.Location.ID Year Parameter.Value_Mean
## 1 Not Rec Season 4954360 2016 52.007692
## 2 Not Rec Season 4954360 2017 37.906285
## 3 Not Rec Season 4954360 2018 15.014027
## 4 Not Rec Season 4954380 2016 30.063932
## 5 Not Rec Season 4954380 2017 77.984348
## 6 Not Rec Season 4954380 2018 53.780405
## 7 Not Rec Season 4954390 2016 45.198230
## 8 Not Rec Season 4954390 2017 43.491037
## 9 Not Rec Season 4954390 2018 32.831754
## 10 Not Rec Season 4954480 2016 48.790368
## 11 Not Rec Season 4954480 2017 31.669349
## 12 Not Rec Season 4954480 2018 15.849752
## 13 Not Rec Season 4954482 2016 46.008695
## 14 Not Rec Season 4954482 2017 39.723322
## 15 Not Rec Season 4954482 2018 18.780385
## 16 Not Rec Season 4954680 2016 30.106312
## 17 Not Rec Season 4954680 2017 11.667396
## 18 Not Rec Season 4954680 2018 1.723152
## 19 Not Rec Season 4954770 2016 47.256316
## 20 Not Rec Season 4954770 2017 33.225625
## 21 Not Rec Season 4954770 2018 5.416408
## 22 Not Rec Season 4954772 2016 87.928039
## 23 Not Rec Season 4954772 2017 50.178924
## 24 Not Rec Season 4954772 2018 13.421902
## 25 Not Rec Season 4955310 2016 348.097745
## 26 Not Rec Season 4955310 2017 243.574459
## 27 Not Rec Season 4955310 2018 127.210914
## 28 Not Rec Season 4955330 2016 26.200000
## 29 Not Rec Season 4955330 2017 327.828292
## 30 Not Rec Season 4955330 2018 147.160565
## 31 Rec Season 4954360 2016 51.660043
## 32 Rec Season 4954360 2017 166.682578
## 33 Rec Season 4954360 2018 92.404355
## 34 Rec Season 4954380 2016 36.400000
## 35 Rec Season 4954380 2017 143.342440
## 36 Rec Season 4954380 2018 228.547070
## 37 Rec Season 4954390 2016 39.470495
## 38 Rec Season 4954390 2017 204.422711
## 39 Rec Season 4954390 2018 237.700605
## 40 Rec Season 4954480 2016 59.652577
## 41 Rec Season 4954480 2017 120.705219
## 42 Rec Season 4954480 2018 61.959555
## 43 Rec Season 4954482 2016 42.530577
## 44 Rec Season 4954482 2017 87.323200
## 45 Rec Season 4954482 2018 54.865873
## 46 Rec Season 4954680 2016 706.852799
## 47 Rec Season 4954680 2017 24.670315
## 48 Rec Season 4954680 2018 14.922142
## 49 Rec Season 4954770 2016 41.470409
## 50 Rec Season 4954770 2017 162.766889
## 51 Rec Season 4954770 2018 122.926222
## 52 Rec Season 4954772 2016 17.879597
## 53 Rec Season 4954772 2017 58.074308
## 54 Rec Season 4954772 2018 64.523946
## 55 Rec Season 4955310 2016 225.302597
## 56 Rec Season 4955310 2017 658.593044
## 57 Rec Season 4955310 2018 208.914605
## 58 Rec Season 4955330 2016 178.229795
## 59 Rec Season 4955330 2017 249.114442
## 60 Rec Season 4955330 2018 343.203395
## Ncount_rec_C Percent_Reduction_C
## 1 1 0
## 2 5 0
## 3 4 0
## 4 1 0
## 5 5 0
## 6 4 0
## 7 1 0
## 8 5 0
## 9 4 0
## 10 1 0
## 11 5 0
## 12 4 0
## 13 1 0
## 14 5 0
## 15 4 0
## 16 1 0
## 17 5 0
## 18 4 0
## 19 2 0
## 20 6 0
## 21 4 0
## 22 1 0
## 23 5 0
## 24 4 0
## 25 1 64
## 26 5 48
## 27 4 1
## 28 1 0
## 29 5 62
## 30 4 14
## 31 1 0
## 32 6 24
## 33 3 0
## 34 1 0
## 35 6 12
## 36 3 45
## 37 1 0
## 38 6 38
## 39 3 47
## 40 1 0
## 41 6 0
## 42 3 0
## 43 1 0
## 44 6 0
## 45 3 0
## 46 1 82
## 47 6 0
## 48 3 0
## 49 2 0
## 50 11 23
## 51 3 0
## 52 1 0
## 53 6 0
## 54 3 0
## 55 1 44
## 56 6 81
## 57 3 40
## 58 1 29
## 59 5 49
## 60 3 63
## Concentration by irrigation season ##
concen_irg <- aggregate(Parameter.Value_Mean~Irg_Season+Monitoring.Location.ID+Year, dat=param.day.mean, FUN=aggFun)
concen_irg_n <- aggregate(Parameter.Value_Mean~Irg_Season+Monitoring.Location.ID+Year, dat=param.day.mean, FUN=length)
names(concen_irg_n)[names(concen_irg_n)=="Parameter.Value_Mean"] <- "Ncount_irg_C"
concen_irg = merge(concen_irg, concen_irg_n, all=TRUE)
concen_irg$Percent_Reduction_C <- ifelse(concen_irg$Parameter.Value_Mean>geom_crit,round(perc.red(geom_crit,concen_irg$Parameter.Value_Mean), digits=0),0)
At this point, if loading data also exist, these aggregated values are merged with the loading values calculated above.
# Merge monthly data and write to new datasheet
month.dat = merge(concen_mo,mo_load, all=TRUE)
# Add to list
calcs$Monthly_Data <- month.dat
# Merge rec season data and write to new datasheet
rec.dat = merge(rec_load,concen_rec, all=TRUE)
# Add to list
calcs$Rec_Season_Data <- rec.dat
# Merge irrigation season data and write to new datasheet
irg.dat = merge(irg_load, concen_irg, all=TRUE)
# Add to list
calcs$Irg_Season_Data <- irg.dat
And finally, if you are using tmdlCalcs() as a standalone function, you have the option to export a workbook containing the objects it creates.
############################ SAVE WORKBOOK FILE WITH NEW SHEETS #########################
if(exportfromfunc){
# Create workbooks for each data frame
# Daily means
if(!any(wb.dat$sheet_names=="Daily_Mean_Data")){
addWorksheet(wb.dat, "Daily_Mean_Data", gridLines = TRUE)
writeData(wb.dat, sheet = "Daily_Mean_Data", param.day.mean, rowNames = FALSE)}
# Monthly means
if(!any(wb.dat$sheet_names=="Monthly_Data")){
addWorksheet(wb.dat, "Monthly_Data", gridLines = TRUE)
writeData(wb.dat, sheet = "Monthly_Data", month.dat, rowNames = FALSE)}
# Rec Season means
if(!any(wb.dat$sheet_names=="Rec_Season_Data")){
addWorksheet(wb.dat, "Rec_Season_Data", gridLines = TRUE)
writeData(wb.dat, sheet = "Rec_Season_Data", rec.dat, rowNames = FALSE)}
# Irrigation Season means
if(!any(wb.dat$sheet_names=="Irg_Season_Data")){
addWorksheet(wb.dat, "Irg_Season_Data", gridLines = TRUE)
writeData(wb.dat, sheet = "Irg_Season_Data", irg.dat, rowNames = FALSE)}
if(flo.dat){
if(!any(wb.dat$sheet_names=="LDC_Data")){
addWorksheet(wb.dat, "LDC_Data", gridLines = TRUE)
writeData(wb.dat, sheet = "LDC_Data", ldc.dat, rowNames = FALSE)}
}
saveWorkbook(wb.dat, paste0(unlist(strsplit(wb_path,".xlsx")),"_",Sys.Date(),".xlsx"), overwrite = TRUE)}
The function also returns the list of objects created in R. The proper syntax for using the function is shown below.
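A minimal standalone call would look something like this (the file name is a placeholder; wb_path and exportfromfunc are the two inputs referenced throughout the function body):
calcs <- tmdlCalcs(wb_path = "ecoli_tmdl_workbook.xlsx", exportfromfunc = TRUE)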
Here’s what’s happening with the load duration curve in the shiny app:
output$LDC <- renderPlot({
req(input$pt_type)
req(input$ldcsite)
ldcdata = workbook$LDC_Data
x = ldcdata[ldcdata$Monitoring.Location.ID==input$ldcsite,]
flow.plot <- x[order(x$Flow_Percentile),]
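# Assign flow regimes by cascading overwrites (the lowest matching threshold wins):
# <=10 High, 10-40 Moist, 40-60 Mid-Range, 60-90 Dry, >90 Low flows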
flow.plot$Regime = "Low"
flow.plot$Regime[flow.plot$Flow_Percentile<=90] = "Dry"
flow.plot$Regime[flow.plot$Flow_Percentile<=60] = "Mid"
flow.plot$Regime[flow.plot$Flow_Percentile<=40] = "Moist"
flow.plot$Regime[flow.plot$Flow_Percentile<=10] = "High"
# Percent of observations exceeding the TMDL within a flow regime
exc_regime <- function(x){
x = x[!is.na(x$Observed_Loading),]
round(length(x$Observed_Loading[x$Observed_Loading>x$TMDL])/length(x$Observed_Loading)*100, digits = 0)
}
flow_exc = plyr::ddply(.data = flow.plot, .variables = c("Regime"), .fun = exc_regime)
names(flow_exc)[names(flow_exc)=="V1"] = "Percent Exceedance"
flow_exc$`Percent Exceedance` = paste(flow_exc$`Percent Exceedance`,"%")
# Pull out observed loadings (E.coli data)
param.loads <- x[!is.na(x$Parameter.Value_Mean),]
plot(1, type="n", xlab="Flow Exceedance Percentile", ylab="Load (GigaMPN/day)", xlim=c(0, 100), ylim=c(0,max(c(param.loads$Observed_Loading, param.loads$TMDL))), main=paste("Load Duration Curve:",x$Monitoring.Location.ID[1]))
abline(v=10, lty=2)
abline(v=40, lty=2)
abline(v=60, lty=2)
abline(v=90, lty=2)
text(5, max(param.loads$Observed_Loading)-.3*max(param.loads$Observed_Loading),paste0("High \n Flows \n (",flow_exc$`Percent Exceedance`[flow_exc$Regime=="High"],")"))
text(25, max(param.loads$Observed_Loading)-.3*max(param.loads$Observed_Loading), paste0("Moist \n Conditions \n (",flow_exc$`Percent Exceedance`[flow_exc$Regime=="Moist"],")"))
text(50, max(param.loads$Observed_Loading)-.3*max(param.loads$Observed_Loading), paste0("Mid-Range \n Flows \n (",flow_exc$`Percent Exceedance`[flow_exc$Regime=="Mid"],")"))
text(75, max(param.loads$Observed_Loading)-.3*max(param.loads$Observed_Loading), paste0("Dry \n Conditions \n (",flow_exc$`Percent Exceedance`[flow_exc$Regime=="Dry"],")"))
text(95, max(param.loads$Observed_Loading)-.3*max(param.loads$Observed_Loading),paste0("Low \n Flows \n (",flow_exc$`Percent Exceedance`[flow_exc$Regime=="Low"],")"))
lines(flow.plot$TMDL~flow.plot$Flow_Percentile, col="firebrick3", lwd=2)
# Plot types
if(input$ldc_type == "Scatterplot"){
if(input$pt_type=="Calendar Seasons"){
colpal <- colorspace::sequential_hcl(4)
wine <- param.loads[param.loads$CalSeason=="Winter",]
spre <- param.loads[param.loads$CalSeason=="Spring",]
sume <- param.loads[param.loads$CalSeason=="Summer",]
fale <- param.loads[param.loads$CalSeason=="Fall",]
points(wine$Observed_Loading~wine$Flow_Percentile, pch=21, col="black", bg=colpal[4], cex=2)
points(spre$Observed_Loading~spre$Flow_Percentile, pch=21, col="black", bg=colpal[3], cex=2)
points(sume$Observed_Loading~sume$Flow_Percentile, pch=21, col="black", bg=colpal[2], cex=2)
points(fale$Observed_Loading~fale$Flow_Percentile, pch=21, col="black", bg=colpal[1], cex=2)
legend("topright",legend=c("TMDL", "Loading - Winter", "Loading - Spring", "Loading - Summer","Loading - Fall"), bty="n", col=c("firebrick3","black","black","black","black"), lty=c(1,NA,NA,NA,NA),lwd=c(2,NA,NA,NA,NA),pch=c(NA,21,21,21,21), pt.bg=c(NA,colpal[4],colpal[3],colpal[2],colpal[1]), pt.cex=c(NA,2,2,2,2),cex=1)
}
# Point colors
if(input$pt_type=="Recreation Seasons"){
colpal <- colorspace::rainbow_hcl(2)
rec <- param.loads[param.loads$Rec_Season=="Rec Season",]
nonrec <- param.loads[param.loads$Rec_Season=="Not Rec Season",]
points(rec$Observed_Loading~rec$Flow_Percentile, pch=21, col="black", bg=colpal[1], cex=2)
points(nonrec$Observed_Loading~nonrec$Flow_Percentile, pch=21, col="black", bg=colpal[2], cex=2)
legend("topright",legend=c("TMDL","Loading - Rec", "Loading - Non-Rec"), bty="n", col=c("firebrick3","black","black"), lty=c(1,NA,NA),lwd=c(2,NA,NA),pch=c(NA,21,21), pt.bg=c(NA,colpal), pt.cex=c(NA,2,2),cex=1)
}
if(input$pt_type=="Irrigation Seasons"){
colpal <- colorspace::terrain_hcl(2)
irg <- param.loads[param.loads$Irg_Season=="Irrigation Season",]
nonirg <- param.loads[param.loads$Irg_Season=="Not Irrigation Season",]
points(irg$Observed_Loading~irg$Flow_Percentile, pch=21, col="black", bg=colpal[1], cex=2)
points(nonirg$Observed_Loading~nonirg$Flow_Percentile, pch=21, col="black", bg=colpal[2], cex=2)
legend("topright",legend=c("TMDL", "Loading - Irrigation", "Loading - No Irrigation"), bty="n", col=c("firebrick3","black","black"), lty=c(1,NA,NA),lwd=c(2,NA,NA),pch=c(NA,21,21), pt.bg=c(NA,colpal), pt.cex=c(NA,2,2),cex=1)
}
}else{
colpal <- colorspace::sequential_hcl(4)
# Add boxplots
param.loads$Flow_Cat = "High"
param.loads$Flow_Cat[param.loads$Flow_Percentile>10&param.loads$Flow_Percentile<=40] = "Moist"
param.loads$Flow_Cat[param.loads$Flow_Percentile>40&param.loads$Flow_Percentile<=60] = "MidRange"
param.loads$Flow_Cat[param.loads$Flow_Percentile>60&param.loads$Flow_Percentile<=90] = "Dry"
param.loads$Flow_Cat[param.loads$Flow_Percentile>90&param.loads$Flow_Percentile<=100] = "Low"
param.loads$Flow_Cat = factor(param.loads$Flow_Cat, levels= c("High","Moist", "MidRange","Dry","Low"))
boxplot(param.loads$Observed_Loading~param.loads$Flow_Cat, col=ggplot2::alpha(colpal[2],0.7), at = c(5,25,50,75,95),lty=1, xaxt="n", frame=FALSE, boxwex = 5, add=TRUE)
legend("topright",legend=c("TMDL", "Observed Loading"), bty="n", col=c("firebrick3","black"), lty=c(1,NA),lwd=c(2,NA),pch=c(NA,22), pt.bg=c(NA,ggplot2::alpha(colpal[2],0.7)), pt.cex=c(NA,2),cex=1)
}
})
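# renderDT comes from the DT package and renders the LDC_Data sheet as an interactive table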
output$LDC_Data <- renderDT(workbook$LDC_Data,
rownames = FALSE,selection='none',filter="top",
options = list(scrollY = '300px', paging = FALSE, scrollX=TRUE))
}