Displaying differences for changeset
 
display as  

src/java/cfa/CDPHE_lowFlowStats.java

@@ -7,10 +7,11 @@
 import java.text.ParseException;
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
+import java.util.Calendar;
 import java.util.Date;
 
 /**
-* Last Updated: 16-December-2014
+* Last Updated: 6-January-2015
 * @author Tyler Wible
 * @since 16-December-2014
 */
@@ -20,7 +21,7 @@
         return "cdphe_lowflow.csv";
     }
     /**
-     * Calculate the CDPHE 'extreme value' design flow (see DFLOW user manual) 
+     * Calculate the CDPHE "extreme value" design flow (see DFLOW user manual) 
      * based on an 'm'-day average low flow of each year fitted to a Log-Pearson
      * Type III distribution and interpolated for a return period of 'R' years
      * @param mainFolder  the file path where the summary will be saved
@@ -148,7 +149,229 @@
         return designFlow;
     }
     /**
-     * Calculate the CDPHE 'human health' design flow (see DFLOW user manual) 
+     * Calculate the CDPHE "biologically-based" design flow (see DFLOW user manual) 
+     * which is an 'm'-day harmonic average low flow based on an allowed excursion count
+     * @param mainFolder  the file path where the summary will be saved
+     * @param stationID  the station ID for the current station, used to label the output
+     * @param stationName  the station name for the current station, used to label the  output
+     * @param flowData  flow data, column1 = dates (format yyyy-mm-dd), column2 = flow values
+     * @param m  the number of days to average for annual low flow analysis (m-day average)
+     * @param R  the desired return period of the m-day low flow (in years)
+     * @param clusterLength  the length of time for the definition of a 'cluster' of excursion 
+     * periods (default is 120). All excursion periods within this amount of time of each other 
+     * will be counted, subject to the clusterCountMax below.
+     * @param clusterCountMax  the upper count limit on how many excursion periods will be 
+     * counted within an excursion cluster (default is 5)
+     * @return the biologically-based design low flow [cfs]
+     * @throws IOException
+     * @throws ParseException 
+     */
+    public double CDPHE_Biological(String mainFolder,
+                                   String stationID,
+                                   String stationName,
+                                   String[][] flowData,
+                                   int m,
+                                   int R,
+                                   int clusterLength,
+                                   int clusterCountMax) throws IOException, ParseException{
+        DoubleArray doubleArray =  new DoubleArray();
+        DoubleMath doubleMath = new DoubleMath();
+        
+        //Get today's date for output purposes
+        Date currentDate = new Date();
+        SimpleDateFormat desiredDateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm");
+        String today = desiredDateFormat.format(currentDate);
+        stationName = stationName.replace(",", "");
+        
+        //Initialize the summary result table
+        String[][] statsSummaryTable = new String[6][2];
+        statsSummaryTable[0][0] = "CDPHE DFlow Statistics for " + stationID + "; " + stationName + "; created on " + today;
+//        statsSummaryTable[1][0] = "Analysis Period";
+        statsSummaryTable[2][0] = "Biologically-based Design Low Flow, (" + m + "-day minimum) [cfs]";
+//        statsSummaryTable[3][0] = "Annual Low Flow (" + m + "-day minimum) [cfs]";
+        statsSummaryTable[4][0] = "Low Flow Statistics based on 'DFLOW' User's Manual:";
+        statsSummaryTable[5][0] = "Rossman, L.A. 'DFLOW User's Manual' U.S. Environmental Protection Agency Cincinnati, Ohio 45265.";
+        
+        statsSummaryTable[0][1] = "";
+        statsSummaryTable[1][1] = "";
+        statsSummaryTable[2][1] = "to be added later";
+        statsSummaryTable[3][1] = "--";
+        statsSummaryTable[4][1] = "";
+        statsSummaryTable[5][1] = "";
+        
+        //Calculate m-day statistics
+        Object[] resultArray = doubleArray.getMdayData(flowData, m, "harmonic");
+        ArrayList<String> average_Mday_date = (ArrayList<String>) resultArray[0];
+        ArrayList<Double> average_Mday = (ArrayList<Double>) resultArray[1];
+        
+        //Get extreme-value based design flow (as a trial design flow to start searching for the biologically based one, note this still uses an arithmetic mean)
+        double trialDFLOW = CDPHE_ExtremeValue(mainFolder, stationID, stationName, flowData, m, R, "04-01");
+        double trialDFLOW_excursions = countExcursions(average_Mday_date, average_Mday, m, trialDFLOW, clusterLength, clusterCountMax);
+        
+        //Get allowed number of excursions
+        double A = flowData.length/(R * 365.25);
+        
+        //Loop until the Method of False Position (Carnahan et al., 1969) converges for the limits of the design flow
+        double flow_lowerBound = 0, flow_upperBound = trialDFLOW, excursions_lowerBound = 0, excursions_upperBound = trialDFLOW_excursions;
+        double designFlow = -1;
+        boolean convergance = false;
+        while(!convergance){
+            
+            //Check for convergence
+            if( Math.abs(flow_upperBound - flow_lowerBound) <= (0.005*flow_lowerBound) ){
+                designFlow = flow_upperBound;
+                convergance = true;
+            }else if( Math.abs(A - excursions_lowerBound) <= (0.005*A) ){
+                designFlow = flow_lowerBound;
+                convergance = true;
+            }else if( Math.abs(A - excursions_upperBound) <= (0.005*A) ){
+                designFlow = flow_upperBound;
+                convergance = true;
+            }else{
+                //If convergence is not met, interpolate a new trial design flow and count its excursions
+                trialDFLOW = flow_lowerBound + ( ((flow_upperBound - flow_lowerBound) * (A - excursions_lowerBound)) / (excursions_upperBound - excursions_lowerBound) );
+                trialDFLOW_excursions = countExcursions(average_Mday_date, average_Mday, m, trialDFLOW, clusterLength, clusterCountMax);
+                if(trialDFLOW_excursions <= A){
+                    flow_lowerBound = trialDFLOW;
+                    excursions_lowerBound = trialDFLOW_excursions;
+                }else{
+                    flow_upperBound = trialDFLOW;
+                    excursions_upperBound = trialDFLOW_excursions;
+                }
+            }
+        }
+        
+        //Call file writer for outputs of flow statistics
+        statsSummaryTable[2][0] = String.valueOf(doubleMath.round(designFlow, 3));
+        writeStatsSummaryFile(mainFolder, statsSummaryTable);
+        
+        return designFlow;
+    }
+    /**
+     * Counts excursions of m-day average flows below the trialDFLOW based on an 
+     * excursion cluster length (excursionClusterLength) and a maximum number of 
+     * excursions counted per excursion cluster (clusterCountMax)
+     * @param averageMday_date  a list of begin and end dates of the m-day average flows, format: "yyyy-mm-dd to yyyy-mm-dd"
+     * @param averageMday  a list of m-day averages corresponding to the dates in the averageMday_date array (aka must be same size)
+     * @param m  the m-day averaging number
+     * @param trialDFLOW  trial design flow, m-day averages below this will be counted as excursions
+     * @param clusterLength  the size of a cluster (default is 120 days)
+     * @param clusterCountMax  the maximum number of excursions per cluster to be counted (default is 5)
+     * @return the total excursion count for the given trial design flow
+     */
+    private double countExcursions(ArrayList<String> averageMday_date, ArrayList<Double> averageMday, int m, double trialDFLOW, int clusterLength, double clusterCountMax){
+        DoubleArray doubleArray = new DoubleArray();
+        DoubleMath doubleMath = new DoubleMath();
+        
+        //Determine number of excursion periods
+        ArrayList<String> periodList_dates = new ArrayList<String>();
+        ArrayList<Integer> periodList_lengths = new ArrayList<Integer>();
+        for(int i=0; i<averageMday.size(); i++){
+            //Check if m-day average flow is below the design flow (aka the start of an excursion period)
+            if(averageMday.get(i) < trialDFLOW){
+                //Check if this excursion period is a new one or the extension of the previous one
+                if(periodList_dates.size() > 0){
+                    //Get the date of the last excursion period
+                    String lastPeriodDates = periodList_dates.get(periodList_dates.size() - 1);
+                    String lastPeriod_beginDate = lastPeriodDates.substring(0,10);
+                    Calendar lastPeriod_endDate = doubleArray.getCalendar(lastPeriodDates.substring(lastPeriodDates.length() - 10), "daily");
+                    Calendar newPeriod_beginDate = doubleArray.getCalendar(averageMday_date.get(i).substring(0,10), "daily");
+                    String newPeriod_endDate = averageMday_date.get(i).substring(averageMday_date.get(i).length() - 10);
+                    
+                    if(lastPeriod_endDate.after(newPeriod_beginDate) || lastPeriod_endDate.equals(newPeriod_beginDate)){
+                        //If the last-entered excursion period ends 'after' the new excursion period then this is just a continuation of it
+                        //(aka excursion period 1 starts on day 1 and lasts until day 4, excursion period 2 starts on day 2 and lasts until day 5,
+                        // therefore the whole thing is just 1 excursion period that lasts from day 1 to day 5)
+                        periodList_dates.set(periodList_dates.size() - 1, lastPeriod_beginDate + " to " + newPeriod_endDate);
+                        periodList_lengths.set(periodList_dates.size() - 1, getExcursionLength(lastPeriod_beginDate, newPeriod_endDate));
+                    }else{
+                        //This is a new excursion period, so add it to the list
+                        periodList_dates.add(averageMday_date.get(i));
+                        periodList_lengths.add(m);
+                    }
+                    
+                }else{
+                    //If this is the first excursion period, add it to the list
+                    periodList_dates.add(averageMday_date.get(i));
+                    periodList_lengths.add(m);
+                }
+            }
+        }
+        
+        //Group the excursion periods into excursion clusters
+        ArrayList<String> clusterList_dates = new ArrayList<String>();
+        ArrayList<Double> clusterList_lengths = new ArrayList<Double>();
+        double m_double = m;
+        for(int i=0; i<periodList_dates.size(); i++){
+            //Get the dates of the excursion period
+            String period_beginDate_String = periodList_dates.get(i).substring(0,10);
+            Calendar period_beginDate = doubleArray.getCalendar(period_beginDate_String, "daily");
+            //Check if this is the first excursion cluster or not
+            if(clusterList_dates.size() > 0){
+                //Check if this excursion period is within the cluster or not
+                String clusterDates = clusterList_dates.get(clusterList_dates.size() - 1);
+                Calendar cluster_endDate = doubleArray.getCalendar(clusterDates.substring(clusterDates.length() - 10), "daily");
+
+                if(period_beginDate.before(cluster_endDate) || period_beginDate.equals(cluster_endDate)){
+                    //If the latest excursion period starts before the end of the current excursion cluster, add its 
+                    //length to the cluster length (up to the maximum cluster count limit)
+                    double periodLength = periodList_lengths.get(i);
+                    double clusterCount = clusterList_lengths.get(clusterList_lengths.size() - 1) + (periodLength / m_double);
+                    if(clusterCount > clusterCountMax){
+                        clusterList_lengths.set(clusterList_lengths.size() - 1, clusterCountMax);
+                    }else{
+                        clusterList_lengths.set(clusterList_lengths.size() - 1, clusterCount);
+                    }
+                }else{
+                    //This excursion period is in a new excursion cluster, so add it to the list
+                    String clusterEndDate = doubleArray.getDay(period_beginDate_String, clusterLength);
+                    double periodLength = periodList_lengths.get(i);
+                    double clusterCount = periodLength / m_double;
+                    if(clusterCount > clusterCountMax){
+                        clusterList_lengths.add(clusterCountMax);
+                    }else{
+                        clusterList_lengths.add(clusterCount);
+                    }
+                    clusterList_dates.add(period_beginDate_String + " to " + clusterEndDate);
+                }
+            }else{
+                //If this is the first excursion cluster, determine its end date
+                String clusterEndDate = doubleArray.getDay(period_beginDate_String, clusterLength);
+                double periodLength = periodList_lengths.get(i);
+                double clusterCount = periodLength / m_double;
+                if(clusterCount > clusterCountMax){
+                    clusterList_lengths.add(clusterCountMax);
+                }else{
+                    clusterList_lengths.add(clusterCount);
+                }
+                clusterList_dates.add(period_beginDate_String + " to " + clusterEndDate);
+            }
+        }
+        
+        //Sum the sizes of the excursion clusters
+        double excursionCountTotal = doubleMath.sum(clusterList_lengths);
+        return excursionCountTotal;
+    }
+    /**
+     * Counts how many days exist between the provided begin date and end date (beginDate cannot equal endDate)
+     * @param beginDate  the begin date (format yyyy-MM-dd)
+     * @param endDate  the end date (format yyyy-MM-dd)
+     * @return the number of days from beginDate to endDate, inclusive
+     */
+    private int getExcursionLength(String beginDate, String endDate){
+        DoubleArray doubleArray = new DoubleArray();
+        
+        //Determine how many days pass before "nextDay" == "endDay"
+        String nextDay = doubleArray.getDay(beginDate, 1);
+        int excursionLength = 2;
+        while(!nextDay.equals(endDate)){
+            nextDay = doubleArray.getDay(nextDay, 1);
+            excursionLength++;
+        }
+        return excursionLength;
+    }
+    /**
+     * Calculate the CDPHE "human health" design flow (see DFLOW user manual) 
      * which is the harmonic mean of the flows
      * @param mainFolder  the file path where the summary will be saved
      * @param stationID  the station ID for the current station, used to label the output

src/java/cfa/DoubleArray.java

@@ -49,7 +49,7 @@
     }
 }
 /**
-* Last Updated: 16-December-2014
+* Last Updated: 6-January-2015
 * @author Tyler Wible
 * @since 21-June-2012
 */
@@ -1459,29 +1459,22 @@
     }
     /**
      * Gets the day that is 'direction' amount before/after the provided date
-     * @param date  the current date
+     * @param date  the current date (formatted yyyy-MM-dd)
      * @param direction  the magnitude and direction of days from 'date' that is desired 
      * (i.e. a value of -2 would yield the date of 2 days ago while a value of 1 will yield tomorrow's date)
      * @return returns the string value of the date that is 'direction' away formatted as yyyy-MM-dd
      */
     public String getDay(String date, int direction){
         //Parse Date
-        double year = Double.parseDouble(date.substring(0,4));
-        double month = Double.parseDouble(date.substring(5,7));
-        double day = Double.parseDouble(date.substring(8));
-        int yearInt = (int) year;
-        int monthInt = (int) month;
-        int dayInt = (int) day;
+        Calendar calendar = getCalendar(date, "daily");
         
         //Get next date
-        Calendar calendar = new GregorianCalendar(yearInt, monthInt, dayInt);
         calendar.add(Calendar.DAY_OF_MONTH, direction);
         
         //Parse next date
-        yearInt = calendar.get(Calendar.YEAR);
-        monthInt = calendar.get(Calendar.MONTH);
-        dayInt = calendar.get(Calendar.DAY_OF_MONTH);
-        
+        int yearInt = calendar.get(Calendar.YEAR);
+        int monthInt = calendar.get(Calendar.MONTH) + 1;//because calendar months are zero based (aka 0 = January, etc.)
+        int dayInt = calendar.get(Calendar.DAY_OF_MONTH);
         
         //Check for a single digit month, if so make it a 2 digit month starting with a zero
         String monthString = String.valueOf(monthInt);
@@ -1501,6 +1494,58 @@
         return nextDate;
     }
     /**
+     * Converts the provided date into a calendar object
+     * @param date  the current date (formatted yyyy-MM-dd)
+     * @param dateFormat  a flag for what format the date is in:
+     * "15-min" for 'yyyy-MM-dd HH:mm' format
+     * "daily" for 'yyyy-MM-dd' format
+     * "monthly" for 'yyyy-MM' format
+     * "yearly" for 'yyyy' format
+     * @return
+     */
+    public Calendar getCalendar(String date, String dateFormat){
+        //Parse Date
+        Calendar newDate = Calendar.getInstance();
+        if(dateFormat.equalsIgnoreCase("15-min")){//yyyy-MM-dd HH:mm
+                double min = Double.parseDouble(date.substring(14));
+                double hr = Double.parseDouble(date.substring(11,13));
+                double d = Double.parseDouble(date.substring(8,10));
+                double m = Double.parseDouble(date.substring(5,7));
+                double y = Double.parseDouble(date.substring(0,4));
+                int year = (int) y;
+                int month = (int) m;
+                int day = (int) d;
+                int hour = (int) hr;
+                int minute = (int) min;
+                newDate.set(year, month - 1, day, hour, minute, 0);
+        }else if(dateFormat.equalsIgnoreCase("daily")){//yyyy-MM-dd
+                double d = Double.parseDouble(date.substring(8));
+                double m = Double.parseDouble(date.substring(5,7));
+                double y = Double.parseDouble(date.substring(0,4));
+                int year = (int) y;
+                int month = (int) m;
+                int day = (int) d;
+                newDate.set(year, month - 1, day, 12, 0, 0);
+        }else if(dateFormat.equalsIgnoreCase("monthly")){//yyyy-MM
+                double m = Double.parseDouble(date.substring(5,7));
+                double y = Double.parseDouble(date.substring(0,4));
+                int year = (int) y;
+                int month = (int) m;
+                newDate.set(year, month - 1, 1, 12, 0, 0);
+        }else if(dateFormat.equalsIgnoreCase("yearly")){//yyyy
+                double y = Double.parseDouble(date.substring(0,4));
+                int year = (int) y;
+                newDate.set(year, 0, 1, 12, 0, 0);
+        }
+        
+    //Update the remaining fields of the calendar (day of year, etc.) by adding 
+        //+1 to the day and then removing it so the entire calendar is the correct date
+        newDate.add(Calendar.DAY_OF_MONTH, 1);
+        newDate.add(Calendar.DAY_OF_MONTH, -1);
+        
+        return newDate;
+    }
+    /**
      * Determines if the provided year (as an integer) is a leap year or not taking into 
      * account for leap years every 4 years, not every 100 years, and leap years every 
      * 400 years (this has to do with round off errors in the length of a day that propagate 

src/java/cfa/FlowStatistics.java

@@ -13,7 +13,7 @@
 import java.util.GregorianCalendar;
 
 /**
-* Last Updated: 16-December-2014
+* Last Updated: 6-January-2015
 * @author Tyler Wible
 * @since 29-June-2011
 */
@@ -79,178 +79,188 @@
         stationName = stationName.replace(",", "");
         
         //Initialize the summary result table
-        int summarySize = 50;
-        if(showMonthlyTF) summarySize = 170;
+        int summarySize = 60;
+        if(showMonthlyTF) summarySize = 180;
         
         String[][] statsSummaryTable = new String[summarySize][1];
         statsSummaryTable[0][0] = "Flow Statistics for " + stationID + "; " + stationName + "; created on " + today;
         statsSummaryTable[1][0] = "Analysis Period (calendar year)";
-        statsSummaryTable[2][0] = "Maximum (1-day) [cfs]";
-        statsSummaryTable[3][0] = "Date of Maximum (1-day)";
-        statsSummaryTable[4][0] = "Minimum (1-day) [cfs]";
-        statsSummaryTable[5][0] = "Date of Minimum (1-day)";
-        statsSummaryTable[6][0] = "Maximum (3-day) [cfs]";
-        statsSummaryTable[7][0] = "Dates of Maximum (3-day)";
-        statsSummaryTable[8][0] = "Minimum (3-day) [cfs]";
-        statsSummaryTable[9][0] = "Dates of Minimum (3-day)";
-        statsSummaryTable[10][0] = "Maximum (7-day) [cfs]";
-        statsSummaryTable[11][0] = "Dates of Maximum (7-day)";
-        statsSummaryTable[12][0] = "Minimum (7-day) [cfs]";
-        statsSummaryTable[13][0] = "Dates of Minimum (7-day)";
-        statsSummaryTable[14][0] = "Minimum (7-day) / Annual Average  [cfs]";
-        statsSummaryTable[15][0] = "Maximum (30-day) [cfs]";
-        statsSummaryTable[16][0] = "Dates of Maximum (30-day)";
-        statsSummaryTable[17][0] = "Minimum (30-day) [cfs]";
-        statsSummaryTable[18][0] = "Dates of Minimum (30-day)";
-        statsSummaryTable[19][0] = "Maximum (90-day) [cfs]";
-        statsSummaryTable[20][0] = "Dates of Maximum (90-day)";
-        statsSummaryTable[21][0] = "Minimum (90-day) [cfs]";
-        statsSummaryTable[22][0] = "Dates of Minimum (90-day)";
-        statsSummaryTable[23][0] = "Number of Zero Flow Days";
-        statsSummaryTable[24][0] = "Number of Flow Reversals";
-        statsSummaryTable[25][0] = "Number of Flow Rises";
-        statsSummaryTable[26][0] = "Number of Flow Falls";
-        statsSummaryTable[27][0] = "Number of High Pulses (> " + highPercentile + " percentile)";
-        statsSummaryTable[28][0] = "Threshold for High Pulses (> " + highPercentile + " percentile) [cfs]";
-        statsSummaryTable[29][0] = "Average Duration of High Pulses (> " + highPercentile + " percentile) [days]";
-        statsSummaryTable[30][0] = "Number of Low Pulses (< " + lowPercentile + " percentile)";
-        statsSummaryTable[31][0] = "Threshold for Low Pulses (< " + lowPercentile + " percentile) [cfs]";
-        statsSummaryTable[32][0] = "Average Duration of Low Pulses (< " + lowPercentile + " percentile) [days]";
-        statsSummaryTable[33][0] = "Average Positive Difference Between Consecutive Days [cfs]";
-        statsSummaryTable[34][0] = "Average Negative Difference Between Consecutive Days [cfs]";
-        statsSummaryTable[35][0] = "Temporal Centriod of Discharge [Day of Calendary Year]";
-        statsSummaryTable[36][0] = "Season " + seasonBegin + " to " + seasonEnd + " (Maximum) [cfs]";
-        statsSummaryTable[37][0] = "Season " + seasonBegin + " to " + seasonEnd + " (Minimum) [cfs]";
-        statsSummaryTable[38][0] = "Season " + seasonBegin + " to " + seasonEnd + " (Upper Quartile) [cfs]";
-        statsSummaryTable[39][0] = "Season " + seasonBegin + " to " + seasonEnd + " (Lower Quartile) [cfs]";
-        statsSummaryTable[40][0] = "Season " + seasonBegin + " to " + seasonEnd + " (Median) [cfs]";
-        statsSummaryTable[41][0] = "Season " + seasonBegin + " to " + seasonEnd + " (Average) [cfs]";
-        statsSummaryTable[42][0] = "Season " + seasonBegin + " to " + seasonEnd + " (Standard Deviation) [cfs]";
-        statsSummaryTable[43][0] = "Season " + seasonBegin + " to " + seasonEnd + " (Variance)";
-        statsSummaryTable[44][0] = "Season " + seasonBegin + " to " + seasonEnd + " (Skewness)";
-        statsSummaryTable[45][0] = "Season " + seasonBegin + " to " + seasonEnd + " (Coefficient of Variation)";
+        statsSummaryTable[2][0] = "Maximum [cfs]";
+        statsSummaryTable[3][0] = "Minimum [cfs]";
+        statsSummaryTable[4][0] = "Upper Quartile [cfs]";
+        statsSummaryTable[5][0] = "Lower Quartile [cfs]";
+        statsSummaryTable[6][0] = "Median [cfs]";
+        statsSummaryTable[7][0] = "Average [cfs]";
+        statsSummaryTable[8][0] = "Standard Deviation [cfs]";
+        statsSummaryTable[9][0] = "Variance";
+        statsSummaryTable[10][0] = "Skewness";
+        statsSummaryTable[11][0] = "Coefficient of Variation";
+        statsSummaryTable[12][0] = "Maximum (1-day) [cfs]";
+        statsSummaryTable[13][0] = "Date of Maximum (1-day)";
+        statsSummaryTable[14][0] = "Minimum (1-day) [cfs]";
+        statsSummaryTable[15][0] = "Date of Minimum (1-day)";
+        statsSummaryTable[16][0] = "Maximum (3-day) [cfs]";
+        statsSummaryTable[17][0] = "Dates of Maximum (3-day)";
+        statsSummaryTable[18][0] = "Minimum (3-day) [cfs]";
+        statsSummaryTable[19][0] = "Dates of Minimum (3-day)";
+        statsSummaryTable[20][0] = "Maximum (7-day) [cfs]";
+        statsSummaryTable[21][0] = "Dates of Maximum (7-day)";
+        statsSummaryTable[22][0] = "Minimum (7-day) [cfs]";
+        statsSummaryTable[23][0] = "Dates of Minimum (7-day)";
+        statsSummaryTable[24][0] = "Minimum (7-day) / Annual Average  [cfs]";
+        statsSummaryTable[25][0] = "Maximum (30-day) [cfs]";
+        statsSummaryTable[26][0] = "Dates of Maximum (30-day)";
+        statsSummaryTable[27][0] = "Minimum (30-day) [cfs]";
+        statsSummaryTable[28][0] = "Dates of Minimum (30-day)";
+        statsSummaryTable[29][0] = "Maximum (90-day) [cfs]";
+        statsSummaryTable[30][0] = "Dates of Maximum (90-day)";
+        statsSummaryTable[31][0] = "Minimum (90-day) [cfs]";
+        statsSummaryTable[32][0] = "Dates of Minimum (90-day)";
+        statsSummaryTable[33][0] = "Number of Zero Flow Days";
+        statsSummaryTable[34][0] = "Number of Flow Reversals";
+        statsSummaryTable[35][0] = "Number of Flow Rises";
+        statsSummaryTable[36][0] = "Number of Flow Falls";
+        statsSummaryTable[37][0] = "Number of High Pulses (> " + highPercentile + " percentile)";
+        statsSummaryTable[38][0] = "Threshold for High Pulses (> " + highPercentile + " percentile) [cfs]";
+        statsSummaryTable[39][0] = "Average Duration of High Pulses (> " + highPercentile + " percentile) [days]";
+        statsSummaryTable[40][0] = "Number of Low Pulses (< " + lowPercentile + " percentile)";
+        statsSummaryTable[41][0] = "Threshold for Low Pulses (< " + lowPercentile + " percentile) [cfs]";
+        statsSummaryTable[42][0] = "Average Duration of Low Pulses (< " + lowPercentile + " percentile) [days]";
+        statsSummaryTable[43][0] = "Average Positive Difference Between Consecutive Days [cfs]";
+        statsSummaryTable[44][0] = "Average Negative Difference Between Consecutive Days [cfs]";
+        statsSummaryTable[45][0] = "Temporal Centriod of Discharge [Day of Calendary Year]";
+        statsSummaryTable[46][0] = "Season " + seasonBegin + " to " + seasonEnd + " (Maximum) [cfs]";
+        statsSummaryTable[47][0] = "Season " + seasonBegin + " to " + seasonEnd + " (Minimum) [cfs]";
+        statsSummaryTable[48][0] = "Season " + seasonBegin + " to " + seasonEnd + " (Upper Quartile) [cfs]";
+        statsSummaryTable[49][0] = "Season " + seasonBegin + " to " + seasonEnd + " (Lower Quartile) [cfs]";
+        statsSummaryTable[50][0] = "Season " + seasonBegin + " to " + seasonEnd + " (Median) [cfs]";
+        statsSummaryTable[51][0] = "Season " + seasonBegin + " to " + seasonEnd + " (Average) [cfs]";
+        statsSummaryTable[52][0] = "Season " + seasonBegin + " to " + seasonEnd + " (Standard Deviation) [cfs]";
+        statsSummaryTable[53][0] = "Season " + seasonBegin + " to " + seasonEnd + " (Variance)";
+        statsSummaryTable[54][0] = "Season " + seasonBegin + " to " + seasonEnd + " (Skewness)";
+        statsSummaryTable[55][0] = "Season " + seasonBegin + " to " + seasonEnd + " (Coefficient of Variation)";
         
         if(showMonthlyTF){
-            statsSummaryTable[46][0] = "January (Maximum) [cfs]";
-            statsSummaryTable[47][0] = "January (Minimum) [cfs]";
-            statsSummaryTable[48][0] = "January (Upper Quartile) [cfs]";
-            statsSummaryTable[49][0] = "January (Lower Quartile) [cfs]";
-            statsSummaryTable[50][0] = "January (Median) [cfs]";
-            statsSummaryTable[51][0] = "January (Average) [cfs]";
-            statsSummaryTable[52][0] = "January (Standard Deviation) [cfs]";
-            statsSummaryTable[53][0] = "January (Variance)";
-            statsSummaryTable[54][0] = "January (Skewness)";
-            statsSummaryTable[55][0] = "January (Coefficient of Variation)";
-            statsSummaryTable[56][0] = "February (Maximum) [cfs]";
-            statsSummaryTable[57][0] = "February (Minimum) [cfs]";
-            statsSummaryTable[58][0] = "February (Upper Quartile) [cfs]";
-            statsSummaryTable[59][0] = "February (Lower Quartile) [cfs]";
-            statsSummaryTable[60][0] = "February (Median) [cfs]";
-            statsSummaryTable[61][0] = "February (Average) [cfs]";
-            statsSummaryTable[62][0] = "February (Standard Deviation) [cfs]";
-            statsSummaryTable[63][0] = "February (Variance)";
-            statsSummaryTable[64][0] = "February (Skewness)";
-            statsSummaryTable[65][0] = "February (Coefficient of Variation)";
-            statsSummaryTable[66][0] = "March (Maximum) [cfs]";
-            statsSummaryTable[67][0] = "March (Minimum) [cfs]";
-            statsSummaryTable[68][0] = "March (Upper Quartile) [cfs]";
-            statsSummaryTable[69][0] = "March (Lower Quartile) [cfs]";
-            statsSummaryTable[70][0] = "March (Median) [cfs]";
-            statsSummaryTable[71][0] = "March (Average) [cfs]";
-            statsSummaryTable[72][0] = "March (Standard Deviation) [cfs]";
-            statsSummaryTable[73][0] = "March (Variance)";
-            statsSummaryTable[74][0] = "March (Skewness)";
-            statsSummaryTable[75][0] = "March (Coefficient of Variation)";
-            statsSummaryTable[76][0] = "April (Maximum) [cfs]";
-            statsSummaryTable[77][0] = "April (Minimum) [cfs]";
-            statsSummaryTable[78][0] = "April (Upper Quartile) [cfs]";
-            statsSummaryTable[79][0] = "April (Lower Quartile) [cfs]";
-            statsSummaryTable[80][0] = "April (Median) [cfs]";
-            statsSummaryTable[81][0] = "April (Average) [cfs]";
-            statsSummaryTable[82][0] = "April (Standard Deviation) [cfs]";
-            statsSummaryTable[83][0] = "April (Variance)";
-            statsSummaryTable[84][0] = "April (Skewness)";
-            statsSummaryTable[85][0] = "April (Coefficient of Variation)";
-            statsSummaryTable[86][0] = "May (Maximum) [cfs]";
-            statsSummaryTable[87][0] = "May (Minimum) [cfs]";
-            statsSummaryTable[88][0] = "May (Upper Quartile) [cfs]";
-            statsSummaryTable[89][0] = "May (Lower Quartile) [cfs]";
-            statsSummaryTable[90][0] = "May (Median) [cfs]";
-            statsSummaryTable[91][0] = "May (Average) [cfs]";
-            statsSummaryTable[92][0] = "May (Standard Deviation) [cfs]";
-            statsSummaryTable[93][0] = "May (Variance)";
-            statsSummaryTable[94][0] = "May (Skewness)";
-            statsSummaryTable[95][0] = "May (Coefficient of Variation)";
-            statsSummaryTable[96][0] = "June (Maximum) [cfs]";
-            statsSummaryTable[97][0] = "June (Minimum) [cfs]";
-            statsSummaryTable[98][0] = "June (Upper Quartile) [cfs]";
-            statsSummaryTable[99][0] = "June (Lower Quartile) [cfs]";
+            statsSummaryTable[56][0] = "January (Maximum) [cfs]";
+            statsSummaryTable[57][0] = "January (Minimum) [cfs]";
+            statsSummaryTable[58][0] = "January (Upper Quartile) [cfs]";
+            statsSummaryTable[59][0] = "January (Lower Quartile) [cfs]";
+            statsSummaryTable[60][0] = "January (Median) [cfs]";
+            statsSummaryTable[61][0] = "January (Average) [cfs]";
+            statsSummaryTable[62][0] = "January (Standard Deviation) [cfs]";
+            statsSummaryTable[63][0] = "January (Variance)";
+            statsSummaryTable[64][0] = "January (Skewness)";
+            statsSummaryTable[65][0] = "January (Coefficient of Variation)";
+            statsSummaryTable[66][0] = "February (Maximum) [cfs]";
+            statsSummaryTable[67][0] = "February (Minimum) [cfs]";
+            statsSummaryTable[68][0] = "February (Upper Quartile) [cfs]";
+            statsSummaryTable[69][0] = "February (Lower Quartile) [cfs]";
+            statsSummaryTable[70][0] = "February (Median) [cfs]";
+            statsSummaryTable[71][0] = "February (Average) [cfs]";
+            statsSummaryTable[72][0] = "February (Standard Deviation) [cfs]";
+            statsSummaryTable[73][0] = "February (Variance)";
+            statsSummaryTable[74][0] = "February (Skewness)";
+            statsSummaryTable[75][0] = "February (Coefficient of Variation)";
+            statsSummaryTable[76][0] = "March (Maximum) [cfs]";
+            statsSummaryTable[77][0] = "March (Minimum) [cfs]";
+            statsSummaryTable[78][0] = "March (Upper Quartile) [cfs]";
+            statsSummaryTable[79][0] = "March (Lower Quartile) [cfs]";
+            statsSummaryTable[80][0] = "March (Median) [cfs]";
+            statsSummaryTable[81][0] = "March (Average) [cfs]";
+            statsSummaryTable[82][0] = "March (Standard Deviation) [cfs]";
+            statsSummaryTable[83][0] = "March (Variance)";
+            statsSummaryTable[84][0] = "March (Skewness)";
+            statsSummaryTable[85][0] = "March (Coefficient of Variation)";
+            statsSummaryTable[86][0] = "April (Maximum) [cfs]";
+            statsSummaryTable[87][0] = "April (Minimum) [cfs]";
+            statsSummaryTable[88][0] = "April (Upper Quartile) [cfs]";
+            statsSummaryTable[89][0] = "April (Lower Quartile) [cfs]";
+            statsSummaryTable[90][0] = "April (Median) [cfs]";
+            statsSummaryTable[91][0] = "April (Average) [cfs]";
+            statsSummaryTable[92][0] = "April (Standard Deviation) [cfs]";
+            statsSummaryTable[93][0] = "April (Variance)";
+            statsSummaryTable[94][0] = "April (Skewness)";
+            statsSummaryTable[95][0] = "April (Coefficient of Variation)";
+            statsSummaryTable[96][0] = "May (Maximum) [cfs]";
+            statsSummaryTable[97][0] = "May (Minimum) [cfs]";
+            statsSummaryTable[98][0] = "May (Upper Quartile) [cfs]";
+            statsSummaryTable[99][0] = "May (Lower Quartile) [cfs]";
+            statsSummaryTable[100][0] = "May (Median) [cfs]";
+            statsSummaryTable[101][0] = "May (Average) [cfs]";
+            statsSummaryTable[102][0] = "May (Standard Deviation) [cfs]";
+            statsSummaryTable[103][0] = "May (Variance)";
+            statsSummaryTable[104][0] = "May (Skewness)";
+            statsSummaryTable[105][0] = "May (Coefficient of Variation)";
+            statsSummaryTable[106][0] = "June (Maximum) [cfs]";
+            statsSummaryTable[107][0] = "June (Minimum) [cfs]";
+            statsSummaryTable[108][0] = "June (Upper Quartile) [cfs]";
+            statsSummaryTable[109][0] = "June (Lower Quartile) [cfs]";
-            statsSummaryTable[100][0] = "June (Median) [cfs]";
+            statsSummaryTable[110][0] = "June (Median) [cfs]";
-            statsSummaryTable[101][0] = "June (Average) [cfs]";
-            statsSummaryTable[102][0] = "June (Standard Deviation) [cfs]";
-            statsSummaryTable[103][0] = "June (Variance)";
-            statsSummaryTable[104][0] = "June (Skewness)";
-            statsSummaryTable[105][0] = "June (Coefficient of Variation)";
-            statsSummaryTable[106][0] = "July (Maximum) [cfs]";
-            statsSummaryTable[107][0] = "July (Minimum) [cfs]";
-            statsSummaryTable[108][0] = "July (Upper Quartile) [cfs]";
-            statsSummaryTable[109][0] = "July (Lower Quartile) [cfs]";
-            statsSummaryTable[110][0] = "July (Median) [cfs]";
-            statsSummaryTable[111][0] = "July (Average) [cfs]";
-            statsSummaryTable[112][0] = "July (Standard Deviation) [cfs]";
-            statsSummaryTable[113][0] = "July (Variance)";
-            statsSummaryTable[114][0] = "July (Skewness)";
-            statsSummaryTable[115][0] = "July (Coefficient of Variation)";
-            statsSummaryTable[116][0] = "August (Maximum) [cfs]";
-            statsSummaryTable[117][0] = "August (Minimum) [cfs]";
-            statsSummaryTable[118][0] = "August (Upper Quartile) [cfs]";
-            statsSummaryTable[119][0] = "August (Lower Quartile) [cfs]";
-            statsSummaryTable[120][0] = "August (Median) [cfs]";
-            statsSummaryTable[121][0] = "August (Average) [cfs]";
-            statsSummaryTable[122][0] = "August (Standard Deviation) [cfs]";
-            statsSummaryTable[123][0] = "August (Variance)";
-            statsSummaryTable[124][0] = "August (Skewness)";
-            statsSummaryTable[125][0] = "August (Coefficient of Variation)";
-            statsSummaryTable[126][0] = "September (Maximum) [cfs]";
-            statsSummaryTable[127][0] = "September (Minimum) [cfs]";
-            statsSummaryTable[128][0] = "September (Upper Quartile) [cfs]";
-            statsSummaryTable[129][0] = "September (Lower Quartile) [cfs]";
-            statsSummaryTable[130][0] = "September (Median) [cfs]";
-            statsSummaryTable[131][0] = "September (Average) [cfs]";
-            statsSummaryTable[132][0] = "September (Standard Deviation) [cfs]";
-            statsSummaryTable[133][0] = "September (Variance)";
-            statsSummaryTable[134][0] = "September (Skewness)";
-            statsSummaryTable[135][0] = "September (Coefficient of Variation)";
-            statsSummaryTable[136][0] = "October (Maximum) [cfs]";
-            statsSummaryTable[137][0] = "October (Minimum) [cfs]";
-            statsSummaryTable[138][0] = "October (Upper Quartile) [cfs]";
-            statsSummaryTable[139][0] = "October (Lower Quartile) [cfs]";
-            statsSummaryTable[140][0] = "October (Median) [cfs]";
-            statsSummaryTable[141][0] = "October (Average) [cfs]";
-            statsSummaryTable[142][0] = "October (Standard Deviation) [cfs]";
-            statsSummaryTable[143][0] = "October (Variance)";
-            statsSummaryTable[144][0] = "October (Skewness)";
-            statsSummaryTable[145][0] = "October (Coefficient of Variation)";
-            statsSummaryTable[146][0] = "November (Maximum) [cfs]";
-            statsSummaryTable[147][0] = "November (Minimum) [cfs]";
-            statsSummaryTable[148][0] = "November (Upper Quartile) [cfs]";
-            statsSummaryTable[149][0] = "November (Lower Quartile) [cfs]";
-            statsSummaryTable[150][0] = "November (Median) [cfs]";
-            statsSummaryTable[151][0] = "November (Average) [cfs]";
-            statsSummaryTable[152][0] = "November (Standard Deviation) [cfs]";
-            statsSummaryTable[153][0] = "November (Variance)";
-            statsSummaryTable[154][0] = "November (Skewness)";
-            statsSummaryTable[155][0] = "November (Coefficient of Variation)";
-            statsSummaryTable[156][0] = "December (Maximum) [cfs]";
-            statsSummaryTable[157][0] = "December (Minimum) [cfs]";
-            statsSummaryTable[158][0] = "December (Upper Quartile) [cfs]";
-            statsSummaryTable[159][0] = "December (Lower Quartile) [cfs]";
-            statsSummaryTable[160][0] = "December (Median) [cfs]";
-            statsSummaryTable[161][0] = "December (Average) [cfs]";
-            statsSummaryTable[162][0] = "December (Standard Deviation) [cfs]";
-            statsSummaryTable[163][0] = "December (Variance)";
-            statsSummaryTable[164][0] = "December (Skewness)";
-            statsSummaryTable[165][0] = "December (Coefficient of Variation)";
+            statsSummaryTable[111][0] = "June (Average) [cfs]";
+            statsSummaryTable[112][0] = "June (Standard Deviation) [cfs]";
+            statsSummaryTable[113][0] = "June (Variance)";
+            statsSummaryTable[114][0] = "June (Skewness)";
+            statsSummaryTable[115][0] = "June (Coefficient of Variation)";
+            statsSummaryTable[116][0] = "July (Maximum) [cfs]";
+            statsSummaryTable[117][0] = "July (Minimum) [cfs]";
+            statsSummaryTable[118][0] = "July (Upper Quartile) [cfs]";
+            statsSummaryTable[119][0] = "July (Lower Quartile) [cfs]";
+            statsSummaryTable[120][0] = "July (Median) [cfs]";
+            statsSummaryTable[121][0] = "July (Average) [cfs]";
+            statsSummaryTable[122][0] = "July (Standard Deviation) [cfs]";
+            statsSummaryTable[123][0] = "July (Variance)";
+            statsSummaryTable[124][0] = "July (Skewness)";
+            statsSummaryTable[125][0] = "July (Coefficient of Variation)";
+            statsSummaryTable[126][0] = "August (Maximum) [cfs]";
+            statsSummaryTable[127][0] = "August (Minimum) [cfs]";
+            statsSummaryTable[128][0] = "August (Upper Quartile) [cfs]";
+            statsSummaryTable[129][0] = "August (Lower Quartile) [cfs]";
+            statsSummaryTable[130][0] = "August (Median) [cfs]";
+            statsSummaryTable[131][0] = "August (Average) [cfs]";
+            statsSummaryTable[132][0] = "August (Standard Deviation) [cfs]";
+            statsSummaryTable[133][0] = "August (Variance)";
+            statsSummaryTable[134][0] = "August (Skewness)";
+            statsSummaryTable[135][0] = "August (Coefficient of Variation)";
+            statsSummaryTable[136][0] = "September (Maximum) [cfs]";
+            statsSummaryTable[137][0] = "September (Minimum) [cfs]";
+            statsSummaryTable[138][0] = "September (Upper Quartile) [cfs]";
+            statsSummaryTable[139][0] = "September (Lower Quartile) [cfs]";
+            statsSummaryTable[140][0] = "September (Median) [cfs]";
+            statsSummaryTable[141][0] = "September (Average) [cfs]";
+            statsSummaryTable[142][0] = "September (Standard Deviation) [cfs]";
+            statsSummaryTable[143][0] = "September (Variance)";
+            statsSummaryTable[144][0] = "September (Skewness)";
+            statsSummaryTable[145][0] = "September (Coefficient of Variation)";
+            statsSummaryTable[146][0] = "October (Maximum) [cfs]";
+            statsSummaryTable[147][0] = "October (Minimum) [cfs]";
+            statsSummaryTable[148][0] = "October (Upper Quartile) [cfs]";
+            statsSummaryTable[149][0] = "October (Lower Quartile) [cfs]";
+            statsSummaryTable[150][0] = "October (Median) [cfs]";
+            statsSummaryTable[151][0] = "October (Average) [cfs]";
+            statsSummaryTable[152][0] = "October (Standard Deviation) [cfs]";
+            statsSummaryTable[153][0] = "October (Variance)";
+            statsSummaryTable[154][0] = "October (Skewness)";
+            statsSummaryTable[155][0] = "October (Coefficient of Variation)";
+            statsSummaryTable[156][0] = "November (Maximum) [cfs]";
+            statsSummaryTable[157][0] = "November (Minimum) [cfs]";
+            statsSummaryTable[158][0] = "November (Upper Quartile) [cfs]";
+            statsSummaryTable[159][0] = "November (Lower Quartile) [cfs]";
+            statsSummaryTable[160][0] = "November (Median) [cfs]";
+            statsSummaryTable[161][0] = "November (Average) [cfs]";
+            statsSummaryTable[162][0] = "November (Standard Deviation) [cfs]";
+            statsSummaryTable[163][0] = "November (Variance)";
+            statsSummaryTable[164][0] = "November (Skewness)";
+            statsSummaryTable[165][0] = "November (Coefficient of Variation)";
+            statsSummaryTable[166][0] = "December (Maximum) [cfs]";
+            statsSummaryTable[167][0] = "December (Minimum) [cfs]";
+            statsSummaryTable[168][0] = "December (Upper Quartile) [cfs]";
+            statsSummaryTable[169][0] = "December (Lower Quartile) [cfs]";
+            statsSummaryTable[170][0] = "December (Median) [cfs]";
+            statsSummaryTable[171][0] = "December (Average) [cfs]";
+            statsSummaryTable[172][0] = "December (Standard Deviation) [cfs]";
+            statsSummaryTable[173][0] = "December (Variance)";
+            statsSummaryTable[174][0] = "December (Skewness)";
+            statsSummaryTable[175][0] = "December (Coefficient of Variation)";
         }
         statsSummaryTable[statsSummaryTable.length - 4][0] = "Flow Statistics based on Indicators of Hydrologic Alteration from:";
-        statsSummaryTable[statsSummaryTable.length - 3][0] = "B.D. Richter; J.V. Baumgartner; J. Powell; D.P. Braun. 1996. 'A Method For Assessing Hydrologic Aleration Within Ecosystems.' Conservation Biology 10(4): 1163-1174.";
+        statsSummaryTable[statsSummaryTable.length - 3][0] = "B.D. Richter; J.V. Baumgartner; J. Powell; D.P. Braun. 1996. 'A Method For Assessing Hydrologic Alteration Within Ecosystems.' Conservation Biology 10(4): 1163-1174.";
@@ -502,7 +512,7 @@
         ArrayList<Double> dec_data = new ArrayList<Double>();
         
         //Calculate 1-day statistics
-        ArrayList<Double> average_1Day = new ArrayList<Double>();
+        ArrayList<Double> allData = new ArrayList<Double>();
         ArrayList<Double> diffPositive = new ArrayList<Double>();
         ArrayList<Double> diffNegative = new ArrayList<Double>();
         double max_1day = -9999;
@@ -519,7 +529,7 @@
             double month = Double.parseDouble(flowData[i][0].substring(5,7));
             int month_int =  (int) month;
             double value = Double.parseDouble(flowData[i][1]);
-            average_1Day.add(value);
+            allData.add(value);
             
             //Store data for monthly averages
             switch (month_int) {
@@ -596,7 +606,7 @@
             oldValue = value;
             date1 = date2;
         }
-        double mean_all = doubleMath.meanArithmetic(average_1Day);
+        double mean_all = doubleMath.meanArithmetic(allData);
         double centroid = centroidSum / sum;
         
         //Calculate 3-day statistics
@@ -645,8 +655,8 @@
         }
         
         //Calculate Pulse Information
-        double highLimit = doubleMath.Percentile_function(average_1Day, highPercentile);
-        double lowLimit = doubleMath.Percentile_function(average_1Day, lowPercentile);
+        double highLimit = doubleMath.Percentile_function(allData, highPercentile);
+        double lowLimit = doubleMath.Percentile_function(allData, lowPercentile);
         ArrayList<Double> highPulses = new ArrayList<Double>();
         ArrayList<Double> lowPulses = new ArrayList<Double>();
         int ctr_highPulse = 0, ctr_lowPulse = 0;
@@ -716,48 +726,58 @@
         
         
         //Build Flow Statistics Summary for output
-        int summarySize = 50;
-        if(showMonthlyTF) summarySize = 170;
+        int summarySize = 60;
+        if(showMonthlyTF) summarySize = 180;
         String[] additionalSummary = new String[summarySize];
         additionalSummary[0] = "";//blank
         additionalSummary[1] = dataHeader;//Method
-        additionalSummary[2] = String.valueOf(doubleMath.round(max_1day,3));//Maximum (1-day)
-        additionalSummary[3] = max_1day_date;//Date of Maximum (1-day)
-        additionalSummary[4] = String.valueOf(doubleMath.round(min_1day,3));//Minimum (1-day)
-        additionalSummary[5] = min_1day_date;//Date of Minimum (1-day)
-        additionalSummary[6] = String.valueOf(doubleMath.round(max_3day,3));//Maximum (3-day)
-        additionalSummary[7] = max_3day_date;//Dates of Maximum (3-day)
-        additionalSummary[8] = String.valueOf(doubleMath.round(min_3day,3));//Minimum (3-day)
-        additionalSummary[9] = min_3day_date;//Dates of Minimum (3-day)
-        additionalSummary[10] = String.valueOf(doubleMath.round(max_7day,3));//Maximum (7-day)
-        additionalSummary[11] = max_7day_date;//Dates of Maximum (7-day)
-        additionalSummary[12] = String.valueOf(doubleMath.round(min_7day,3));//Minimum (7-day)
-        additionalSummary[13] = min_7day_date;//Dates of Minimum (7-day)
-        additionalSummary[14] = String.valueOf(doubleMath.round(min_7day_ave,3));//Minimum (7-day)
-        additionalSummary[15] = String.valueOf(doubleMath.round(max_30day,3));//Maximum (30-day)
-        additionalSummary[16] = max_30day_date;//Dates of Maximum (30-day)
-        additionalSummary[17] = String.valueOf(doubleMath.round(min_30day,3));//Minimum (30-day)
-        additionalSummary[18] = min_30day_date;//Dates of Minimum (30-day)
-        additionalSummary[19] = String.valueOf(doubleMath.round(max_90day,3));//Maximum (90-day)
-        additionalSummary[20] = max_90day_date;//Dates of Maximum (90-day)
-        additionalSummary[21] = String.valueOf(doubleMath.round(min_90day,3));//Minimum (90-day)
-        additionalSummary[22] = min_90day_date;//Dates of Minimum (90-day)
-        additionalSummary[23] = String.valueOf(ctr_zero);//Number of Zero Flow Days
-        additionalSummary[24] = String.valueOf(ctr_reversals);//Number of Flow Reversals
-        additionalSummary[25] = String.valueOf(ctr_rises);//Number of Flow Rises
-        additionalSummary[26] = String.valueOf(ctr_falls);//Number of Flow Falls
-        additionalSummary[27] = String.valueOf(ctr_highPulse);//Number of High Pulses
-        additionalSummary[28] = String.valueOf(doubleMath.round(highLimit,1));//Threshold for High Pulses
-        additionalSummary[29] = String.valueOf(doubleMath.round(doubleMath.meanArithmetic(highPulses), 3));//Average Duration of High Pulses
-        additionalSummary[30] = String.valueOf(ctr_lowPulse);//Number of Low Pulses
-        additionalSummary[31] = String.valueOf(doubleMath.round(lowLimit,1));//Threshold for Low Pulses
-        additionalSummary[32] = String.valueOf(doubleMath.round(doubleMath.meanArithmetic(lowPulses), 3));//Average Duration of Low Pulses
-        additionalSummary[33] = String.valueOf(doubleMath.round(doubleMath.meanArithmetic(diffPositive),3));//Average Positive Difference Between Consecutive Days
-        additionalSummary[34] = String.valueOf(doubleMath.round(doubleMath.meanArithmetic(diffNegative),3));//Average Negative Difference Between Consecutive Days
-        additionalSummary[35] = String.valueOf(doubleMath.round(centroid,2));//Temporal centroid of annual discharge (Julian day, not water-year day)
+        additionalSummary[2] = String.valueOf(doubleMath.round(doubleMath.max(allData), 3));//Maximum, overall
+        additionalSummary[3] = String.valueOf(doubleMath.round(doubleMath.min(allData), 3));//Minimum, overall
+        additionalSummary[4] = String.valueOf(doubleMath.round(doubleMath.Percentile_function(allData,0.75), 3));//Upper Quartile, overall
+        additionalSummary[5] = String.valueOf(doubleMath.round(doubleMath.Percentile_function(allData,0.25), 3));//Lower Quartile, overall
+        additionalSummary[6] = String.valueOf(doubleMath.round(doubleMath.Median(allData), 3));//Median, overall
+        additionalSummary[7] = String.valueOf(doubleMath.round(doubleMath.meanArithmetic(allData), 3));//Average, overall
+        additionalSummary[8] = String.valueOf(doubleMath.round(doubleMath.StandardDeviationSample(allData), 3));//Standard Deviation, overall
+        additionalSummary[9] = String.valueOf(doubleMath.round(doubleMath.VarianceSample(allData), 3));//Variance, overall
+        additionalSummary[10] = String.valueOf(doubleMath.round(doubleMath.SkewnessSample(allData), 3));//Skewness, overall
+        additionalSummary[11] = String.valueOf(doubleMath.round(doubleMath.CoefficientOfVariation(allData), 3));//Coefficient of Variation, overall
+        additionalSummary[12] = String.valueOf(doubleMath.round(max_1day,3));//Maximum (1-day)
+        additionalSummary[13] = max_1day_date;//Date of Maximum (1-day)
+        additionalSummary[14] = String.valueOf(doubleMath.round(min_1day,3));//Minimum (1-day)
+        additionalSummary[15] = min_1day_date;//Date of Minimum (1-day)
+        additionalSummary[16] = String.valueOf(doubleMath.round(max_3day,3));//Maximum (3-day)
+        additionalSummary[17] = max_3day_date;//Dates of Maximum (3-day)
+        additionalSummary[18] = String.valueOf(doubleMath.round(min_3day,3));//Minimum (3-day)
+        additionalSummary[19] = min_3day_date;//Dates of Minimum (3-day)
+        additionalSummary[20] = String.valueOf(doubleMath.round(max_7day,3));//Maximum (7-day)
+        additionalSummary[21] = max_7day_date;//Dates of Maximum (7-day)
+        additionalSummary[22] = String.valueOf(doubleMath.round(min_7day,3));//Minimum (7-day)
+        additionalSummary[23] = min_7day_date;//Dates of Minimum (7-day)
+        additionalSummary[24] = String.valueOf(doubleMath.round(min_7day_ave,3));//Average of 7-day minimums (min_7day_ave) -- distinct from slot [22] Minimum (7-day)
+        additionalSummary[25] = String.valueOf(doubleMath.round(max_30day,3));//Maximum (30-day)
+        additionalSummary[26] = max_30day_date;//Dates of Maximum (30-day)
+        additionalSummary[27] = String.valueOf(doubleMath.round(min_30day,3));//Minimum (30-day)
+        additionalSummary[28] = min_30day_date;//Dates of Minimum (30-day)
+        additionalSummary[29] = String.valueOf(doubleMath.round(max_90day,3));//Maximum (90-day)
+        additionalSummary[30] = max_90day_date;//Dates of Maximum (90-day)
+        additionalSummary[31] = String.valueOf(doubleMath.round(min_90day,3));//Minimum (90-day)
+        additionalSummary[32] = min_90day_date;//Dates of Minimum (90-day)
+        additionalSummary[33] = String.valueOf(ctr_zero);//Number of Zero Flow Days
+        additionalSummary[34] = String.valueOf(ctr_reversals);//Number of Flow Reversals
+        additionalSummary[35] = String.valueOf(ctr_rises);//Number of Flow Rises
+        additionalSummary[36] = String.valueOf(ctr_falls);//Number of Flow Falls
+        additionalSummary[37] = String.valueOf(ctr_highPulse);//Number of High Pulses
+        additionalSummary[38] = String.valueOf(doubleMath.round(highLimit,1));//Threshold for High Pulses
+        additionalSummary[39] = String.valueOf(doubleMath.round(doubleMath.meanArithmetic(highPulses), 3));//Average Duration of High Pulses
+        additionalSummary[40] = String.valueOf(ctr_lowPulse);//Number of Low Pulses
+        additionalSummary[41] = String.valueOf(doubleMath.round(lowLimit,1));//Threshold for Low Pulses
+        additionalSummary[42] = String.valueOf(doubleMath.round(doubleMath.meanArithmetic(lowPulses), 3));//Average Duration of Low Pulses
+        additionalSummary[43] = String.valueOf(doubleMath.round(doubleMath.meanArithmetic(diffPositive),3));//Average Positive Difference Between Consecutive Days
+        additionalSummary[44] = String.valueOf(doubleMath.round(doubleMath.meanArithmetic(diffNegative),3));//Average Negative Difference Between Consecutive Days
+        additionalSummary[45] = String.valueOf(doubleMath.round(centroid,2));//Temporal centroid of annual discharge (Julian day, not water-year day)
         
         //Add seasonal stats summary
-        int index = 36;
+        int index = 46;
         resultArray = addSimpleStatsSummary(additionalSummary, seasonal_data, index);
         additionalSummary = (String[]) resultArray[0];
         index = (int) resultArray[1];

src/java/cfa/guiTimeseries_Model.java

@@ -40,7 +40,7 @@
 import org.jfree.data.xy.XYSeriesCollection;
 
 /**
-* Last Updated: 16-December-2014
+* Last Updated: 6-January-2015
 * @author Tyler Wible
 * @since 24-June-2011
 */
@@ -70,6 +70,13 @@
     double highPercentile = 0.75;
     double lowPercentile = 0.25;
     boolean showMonthlyStatsTF = false;
+    boolean calcCDPHElowflowTF = true;
+    String CDPHE_lowFlowType = "all";//"extreme-value";//"biological";//"human-health";//
+    int CDPHE_m = 3;//m-day average                          //only used if CDPHE_lowFlowType == "all" or "extreme-value" or "biological"
+    int CDPHE_R = 10;//R-year return period for cdphe flows  //only used if CDPHE_lowFlowType == "all" or "extreme-value" or "biological"
+    String CDPHE_waterYearBegin = "04-01";//"MM-dd"          //only used if CDPHE_lowFlowType == "all" or "extreme-value"
+    int CDPHE_clusterLength = 120;                           //only used if CDPHE_lowFlowType == "all" or "biological"
+    int CDPHE_clusterCountMax = 5;                           //only used if CDPHE_lowFlowType == "all" or "biological"
     String userData = "";//"Date\tFlow\n1999-04-29\t8.3\n1999-05-09\t60.2\n1999-05-29\t20.1";//
     boolean mergeDatasets = false;//true;// 
     String mergeMethod = "user";//"public";//"max";//"average";//"min";//
@@ -80,6 +87,9 @@
     String end = "?";
     String units = "?";
     String dataSource = "?";
+    double extremeValueDFLOW = -1;
+    double biologicalDFLOW = -1;
+    double humanHealthDFLOW = -1;
     double max = -1;
     double min = -1;
     double upperQuartile = -1;
@@ -205,6 +215,15 @@
     public String getMax(){
         return String.valueOf(max);
     }
+    public String getCDPHE_ExtremeValueDFLOW(){
+        return String.valueOf(extremeValueDFLOW);
+    }
+    public String getCDPHE_BiologicalDFLOW(){
+        return String.valueOf(biologicalDFLOW);
+    }
+    public String getCDPHE_HumanHealthDFLOW(){
+        return String.valueOf(humanHealthDFLOW);
+    }
     public String getMin(){
         return String.valueOf(min);
     }
@@ -417,6 +436,27 @@
     public void setShowMonthlyStatsTF(boolean showMonthlyStatsTF) {
         this.showMonthlyStatsTF = showMonthlyStatsTF;
     }
+    public void setCalcCDPHElowflowTF(boolean calcCDPHElowflowTF) {
+        this.calcCDPHElowflowTF = calcCDPHElowflowTF;
+    }
+    public void setCDPHE_lowFlowType(String CDPHE_lowFlowType) {
+        this.CDPHE_lowFlowType = CDPHE_lowFlowType;
+    }
+    public void setCDPHE_m(int CDPHE_m) {
+        this.CDPHE_m = CDPHE_m;
+    }
+    public void setCDPHE_R(int CDPHE_R) {
+        this.CDPHE_R = CDPHE_R;
+    }
+    public void setCDPHE_waterYearBegin(String CDPHE_waterYearBegin) {
+        this.CDPHE_waterYearBegin = CDPHE_waterYearBegin;
+    }
+    public void setCDPHE_clusterLength(int CDPHE_clusterLength) {
+        this.CDPHE_clusterLength = CDPHE_clusterLength;
+    }
+    public void setCDPHE_clusterCountMax(int CDPHE_clusterCountMax) {
+        this.CDPHE_clusterCountMax = CDPHE_clusterCountMax;
+    }
     public void setUserData(String userData) {
         this.userData = userData;
     }
@@ -1834,6 +1874,29 @@
             //Calculate Hydrologic Indicators of Alteration
             flowStats.calculateAllStatisticsSummaries(mainFolder, stationID, stationName, sortedData_combined, highPercentile, lowPercentile, 0, 0, showMonthlyStatsTF,
                         seasonBegin, seasonEnd, period1Begin, period1End, period2Begin, period2End, period3Begin, period3End);
+            
+            //Calculate CDPHE design low flow
+            if(calcCDPHElowflowTF){
+                CDPHE_lowFlowStats cdphe_lowflow = new CDPHE_lowFlowStats();
+                if(CDPHE_lowFlowType.equalsIgnoreCase("extreme-value")){
+                    //Calculate the Extreme-value based design flow:
+                    this.extremeValueDFLOW = cdphe_lowflow.CDPHE_ExtremeValue(mainFolder, stationID, stationName, sortedData_combined, CDPHE_m, CDPHE_R, CDPHE_waterYearBegin);
+                    
+                }else if(CDPHE_lowFlowType.equalsIgnoreCase("biological")){
+                    //Calculate the Biologically based design flow:
+                    this.biologicalDFLOW = cdphe_lowflow.CDPHE_Biological(mainFolder, stationID, stationName, sortedData_combined, CDPHE_m, CDPHE_R, CDPHE_clusterLength, CDPHE_clusterCountMax);
+                    
+                }else if(CDPHE_lowFlowType.equalsIgnoreCase("human-health")){
+                    //Calculate the Human-health based design flow:
+                    this.humanHealthDFLOW = cdphe_lowflow.CDPHE_HumanHealth(mainFolder, stationID, stationName, sortedData_combined);
+                    
+                }else if(CDPHE_lowFlowType.equalsIgnoreCase("all")){
+                    //Calculate all 3 design flows
+                    this.extremeValueDFLOW = cdphe_lowflow.CDPHE_ExtremeValue(mainFolder, stationID, stationName, sortedData_combined, CDPHE_m, CDPHE_R, CDPHE_waterYearBegin);
+                    this.biologicalDFLOW = cdphe_lowflow.CDPHE_Biological(mainFolder, stationID, stationName, sortedData_combined, CDPHE_m, CDPHE_R, CDPHE_clusterLength, CDPHE_clusterCountMax);
+                    this.humanHealthDFLOW = cdphe_lowflow.CDPHE_HumanHealth(mainFolder, stationID, stationName, sortedData_combined);
+                }
+            }
         }else{
             //Report that there cannot be flow statistics for water quality data
             String[][] errorMessage = {{"Error"," Cannot compute flow statistics as indicators of hydrologic alteration using water quality data."},{"This output is only applicable for flow analysis.",""}};

src/java/m/cfa/Baseflow_V1_0Req.json

@@ -54,7 +54,7 @@
     },
     {
       "name": "user_data",
-      "description": "String containing user uploaded data with one line header, first column dates, second column values in a tab-delimited format.",
+      "description": "String containing user uploaded data with one line header, first column dates (yyyy-mm-dd format), second column values in a tab-delimited format. Only used if merge_datasets == 'true' or database == 'UserData'.",
       "value": ""
     },
     {

src/java/m/cfa/Drought_V1_0Req.json

@@ -59,7 +59,7 @@
     },
     {
       "name": "user_data",
-      "description": "String containing user uploaded data with one line header, first column dates, second column values in a tab-delimited format.",
+      "description": "String containing user uploaded data with one line header, first column dates (yyyy-mm-dd format), second column values in a tab-delimited format. Only used if merge_datasets == 'true' or database == 'UserData'.",
       "value": ""
     },
     {

src/java/m/cfa/DurationCurve_V1_0Req.json

@@ -116,7 +116,7 @@
     },
     {
       "name": "user_data",
-      "description": "String containing user uploaded data with one line header, first column dates, second column values in a tab-delimited format.",
+      "description": "String containing user uploaded data with one line header, first column dates (yyyy-mm-dd format), second column flow values in a tab-delimited format. If analysis type == 'ldc' then this must be followed by a '$$' delimiter then the water quality data with one line header, first column dates (yyyy-mm-dd format), second column water quality test values in a tab-delimited format. Only used if merge_datasets == 'true' or database == 'UserData'.",
       "value": ""
     },
     {

src/java/m/cfa/Flood_V1_0Req.json

@@ -65,7 +65,7 @@
     },
     {
       "name": "user_data",
-      "description": "String containing user uploaded data with one line header, first column dates, second column values in a tab-delimited format.",
+      "description": "String containing user uploaded data with one line header, first column dates (yyyy-mm-dd format), second column values in a tab-delimited format. Only used if merge_datasets == 'true' or database == 'UserData'.",
       "value": ""
     },
     {

src/java/m/cfa/LOADEST_V1_0Req.json

@@ -73,7 +73,7 @@
     },
     {
       "name": "user_data",
-      "description": "String containing user uploaded data with one line header, first column dates, second column values in a tab-delimited format.",
+      "description": "String containing user uploaded data with one line header, first column dates (yyyy-mm-dd format), second column flow values in a tab-delimited format followed by a '$$' delimiter then the water quality data with one line header, first column dates (yyyy-mm-dd format), second column water quality test values in a tab-delimited format. Only used if merge_datasets == 'true' or database == 'UserData'.",
       "value": ""
     },
     {

src/java/m/cfa/Timeseries15min_V1_0Req.json

@@ -33,7 +33,7 @@
     },
     {
       "name": "user_data",
-      "description": "String containing user uploaded data with one line header, first column dates, second column values in a tab-delimited format.",
+      "description": "String containing user uploaded data with one line header, first column dates (yyyy-mm-dd format), second column values in a tab-delimited format. Only used if merge_datasets == 'true' or database == 'UserData'.",
       "value": ""
     },
     {

src/java/m/cfa/Timeseries_V1_0.java

@@ -52,6 +52,13 @@
                 model.setHighPercentile(m.get("highPercentile").getDouble(VALUE));
                 model.setLowPercentile(m.get("lowPercentile").getDouble(VALUE));
                 model.setShowMonthlyStatsTF(m.get("showMonthlyStatsTF").getBoolean(VALUE));
+                model.setCalcCDPHElowflowTF(m.get("calcCDPHElowflowTF").getBoolean(VALUE));
+                model.setCDPHE_lowFlowType(m.get("CDPHE_lowFlowType").getString(VALUE));
+                model.setCDPHE_m(m.get("CDPHE_m").getInt(VALUE));
+                model.setCDPHE_R(m.get("CDPHE_R").getInt(VALUE));
+                model.setCDPHE_waterYearBegin(m.get("CDPHE_waterYearBegin").getString(VALUE));
+                model.setCDPHE_clusterLength(m.get("CDPHE_clusterLength").getInt(VALUE));
+                model.setCDPHE_clusterCountMax(m.get("CDPHE_clusterCountMax").getInt(VALUE));
                 model.setUserData(m.get("user_data").getString(VALUE));
                 model.setMergeDatasets(m.get("merge_datasets").getBoolean(VALUE));
                 model.setMergeMethod(m.get("merge_method").getString(VALUE));
@@ -89,6 +96,10 @@
         result.put(JSONUtils.data("end", model.getEnd()));
         result.put(JSONUtils.data("units", model.getUnits()));
         result.put(JSONUtils.data("data_source", model.getDataSource()));
+        //Get CDPHE low flow results
+        result.put(JSONUtils.data("cdphe_extremeValueDFLOW", model.getCDPHE_ExtremeValueDFLOW()));
+        result.put(JSONUtils.data("cdphe_biologicalDFLOW", model.getCDPHE_BiologicalDFLOW()));
+        result.put(JSONUtils.data("cdphe_humanHealthDFLOW", model.getCDPHE_HumanHealthDFLOW()));
         //Get all data results
         result.put(JSONUtils.data("max", model.getMax()));
         result.put(JSONUtils.data("min", model.getMin()));

src/java/m/cfa/Timeseries_V1_0Req.json

@@ -115,12 +115,51 @@
     },
     {
       "name": "showMonthlyStatsTF",
-      "description": "If true, and the 'wq_test' is flow, then monthly flow statistics will be included in the flow-statistics result file If false, only annual and period of record statistics will be included.",
+      "description": "If true, and wq_test == 'flow', then monthly flow statistics will be included in the flow-statistics result file. If false, only annual and period of record statistics will be included.",
       "value": false
     },
     {
+      "name": "calcCDPHElowflowTF",
+      "description": "If true, and wq_test == 'flow', then design low-flow(s) will be calculated using the procedure outlined by the Colorado Department of Public Health and Environment's (CDPHE) DFLOW program and the inputs provided. If false, no CDPHE design flow(s) will be calculated.",
+      "value": false
+    },
+    {
+      "name": "CDPHE_lowFlowType",
+      "description": "The type of CDPHE low flow to be calculated, 'all' performs all the low flow calculations (all | extreme-value | biological | human-health). Only used if calcCDPHElowflowTF is True.",
+      "value": "all"
+    },
+    {
+      "name": "CDPHE_m",
+      "description": "The m-day average to be taken (default for 'chronic' water quality criteria is 4 days, default for 'acute' water quality criteria is 1 day). Only used if calcCDPHElowflowTF is True and CDPHE_lowFlowType == 'all' or 'extreme-value' or 'biological'.",
+      "unit": "days",
+      "value": 4
+    },
+    {
+      "name": "CDPHE_R",
+      "description": "The return period of the design low flow to be calculated. Only used if calcCDPHElowflowTF is True and CDPHE_lowFlowType == 'all' or 'extreme-value' or 'biological'.",
+      "unit": "years",
+      "value": 10
+    },
+    {
+      "name": "CDPHE_waterYearBegin",
+      "description": "Start date of the 'water year' to be analyzed, formatted MM-dd (ex. June 1st would be entered as '06-01'). Only used if calcCDPHElowflowTF is True and CDPHE_lowFlowType == 'all' or 'extreme-value'.",
+      "value": "04-01"
+    },
+    {
+      "name": "CDPHE_clusterLength",
+      "description": "The length of an excursion 'cluster' of multiple low flow excursion periods (default is 120 days). Only used if calcCDPHElowflowTF is True and CDPHE_lowFlowType == 'all' or 'biological'.",
+      "unit": "days",
+      "value": 120
+    },
+    {
+      "name": "CDPHE_clusterCountMax",
+      "description": "The maximum number of low flow excursions counted per excursion cluster (default is 5). Only used if calcCDPHElowflowTF is True and CDPHE_lowFlowType == 'all' or 'biological'.",
+      "unit": "",
+      "value": 5
+    },
+    {
       "name": "user_data",
-      "description": "String containing user uploaded data with one line header, first column dates, second column values in a tab-delimited format.",
+      "description": "String containing user uploaded data with one line header, first column dates (yyyy-mm-dd format), second column values in a tab-delimited format. Only used if merge_datasets == 'true' or database == 'UserData'.",
       "value": ""
     },
     {