guiFlood_Model.java [src/java/cfa] Revision: 4daefd1ac3a5cce6d2af07d219b133db7ce0b7a4  Date: Thu Sep 26 16:17:42 MDT 2013
package cfa;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import org.apache.commons.math.ArgumentOutsideDomainException;

/**
* Last Updated: 26-August-2013
* @author Tyler Wible
* @since 13-June-2012
*/
public class guiFlood_Model {
    String mainFolder = "C:/Projects/TylerWible/CodeDirectories/NetBeans/CSIP/data/CFA";//The output location of the graph
    String organizationName = "USGS";//"Colorado Dept. of Public Health & Environment";//
    String stationID = "07374000";//"16557000";//"11501000";//"000028";//
    String stationName = "Current StationName: " + stationID;
    String analysisType = "B17";//Which method to use for flood analysis (currently only B17 is supported)
    String beginDate = "";
    String endDate = "";
    double gg = 0.23490029573440552;//Generalized regional skew
    double MSERbar = 0.302;//Mean square error of the generalized regional skew estimate
    boolean showLargeFloods = false;//Whether or not to show the five largest floods
    boolean plotref = true;//Whether the legend is shown inside the graph (true = inside, false = outside)
    boolean plottype = true;//Whether to show the reference lines and flow values for the 2, 5, 10, 25, and 100 year floods
    String userData = "";//"Date\tFlow\n1999-04-29\t80000.3\n1999-05-09\t60.2\n1999-05-29\t20.1";
    boolean mergeDatasets = false;//true;//
    String mergeMethod = "user";//"public";//"max";//"average";//"min";//
    
    //Outputs
    String len = "-1";
    String start = "?";
    String end = "?";
    
    //Gets
    public File getOutput() {
        return new File(mainFolder, "flood_summary.txt");
    }
    public String getGraph() {
        return "flood_graph.jpg";
    }
    public String getLen() {
        return len;
    }
    public String getStart() {
        return start;
    }
    public String getEnd() {
        return end;
    }
    
    
    //Sets
    public void setMainFolder(String mainFolder) {
        this.mainFolder = mainFolder;
    }
    public void setOrganizationName(String organizationName) {
        this.organizationName = organizationName;
    }
    public void setStationID(String stationID) {
        this.stationID = stationID;
    }
    public void setStationName(String stationName) {
        this.stationName = stationName;
    }
    public void setAnalysisType(String analysisType) {
        this.analysisType = analysisType;
    }
    public void setBeginDate(String beginDate) {
        this.beginDate = beginDate;
    }
    public void setEndDate(String endDate) {
        this.endDate = endDate;
    }
    public void setSkewness(double gg) {
        this.gg = gg;
    }
    public void setMeanSquareError(double MSERbar) {
        this.MSERbar = MSERbar;
    }
    public void setShowLargeFloods(boolean showLargeFloods) {
        this.showLargeFloods = showLargeFloods;
    }
    public void setPlotType(boolean plottype) {
        this.plottype = plottype;
    }
    public void setPlotReference(boolean plotref) {
        this.plotref = plotref;
    }
    public void setUserData(String userData) {
        this.userData = userData;
    }
    public void setMergeDatasets(boolean mergeDatasets) {
        this.mergeDatasets = mergeDatasets;
    }
    public void setMergeMethod(String mergeMethod) {
        this.mergeMethod = mergeMethod;
    }
    /**
     * Converts the string peak-flow records into a double array.  For each record it extracts the 
     * year from the date in the first column and converts it to a double, then converts the 
     * corresponding flow value in the second column to a double.  Only records whose year falls 
     * within the user-defined date range are kept.
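     * <p>Illustrative sketch (the records and values below are hypothetical):
     * <pre>{@code
     * ArrayList<String> peaks = new ArrayList<String>();
     * peaks.add("1998-05-12\t4200.0");
     * peaks.add("1999-04-29\t80000.3");
     * double[][] annual = convertUSGSpeakData(peaks, "1999-01-01", "2000-12-31");
     * // annual == {{1999.0, 80000.3}}; the 1998 record falls outside the date range
     * }</pre>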
     * @param stringData  the {@code ArrayList<String>} of tab-delimited records containing dates 
     * (YYYY-mm-dd) in the first column and flow values (cfs) in the second column
     * @param beginDate  the user-defined begin date (YYYY-mm-dd)
     * @param endDate  the user-defined end date (YYYY-mm-dd)
     * @return a double[][] array with one row per record inside the date range, containing 
     * years in the first column and flow values (cfs) in the second column
     */
    public double[][] convertUSGSpeakData(ArrayList<String> stringData, String beginDate, String endDate){
        String beginYear = beginDate.substring(0,4);
        String endYear = endDate.substring(0,4);
        int ctr = 0;

        System.out.println("Current Peak Data: ");
        for(int i=0; i<stringData.size(); i++){
            System.out.println(stringData.get(i));
            String[] f = stringData.get(i).split("\t");
            String year = f[0].substring(0,4);

            //Only keep flood data years within the user defined date range
            if(year.compareToIgnoreCase(beginYear) >=0 && year.compareToIgnoreCase(endYear) <= 0){
                ctr++;
            }
        }
        if(ctr == 1){
            System.out.println("There is " + ctr + " flood year in the current analysis");
        }else{
            System.out.println("There are " + ctr + " flood years in the current analysis");
        }

        //Initialize the return array
        double[][] doubleData = new double[ctr][2];
        ctr = 0;

        for(int i=0; i<stringData.size(); i++){
            String[] f = stringData.get(i).split("\t");
            String year = f[0].substring(0,4);

            //Only keep flood data years within the user defined date range
            if(year.compareToIgnoreCase(beginYear) >=0 && year.compareToIgnoreCase(endYear) <= 0){
                //convert the strings into doubles
                doubleData[ctr][0] = Double.valueOf(year);//year
                doubleData[ctr][1] = Double.valueOf(f[1]);//flow value
                ctr++;
            }
        }

        return doubleData;
    }
    /**
     * Builds the list of unique years contained in the flow data (e.g., from STORET), then 
     * calculates the annual maximum flow value for each unique year and returns the years and 
     * peak flow values as a double array.
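     * <p>Illustrative sketch (the daily records below are hypothetical):
     * <pre>{@code
     * String[][] daily = {
     *     {"1999-05-09", "60.2"},
     *     {"1999-05-29", "20.1"},
     *     {"2000-06-01", "75.0"}
     * };
     * double[][] annualMax = convertSTORETpeakData(daily);
     * // annualMax == {{1999.0, 60.2}, {2000.0, 75.0}}
     * }</pre>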
     * @param sortableData  String[][] array of flow records (e.g., the unzipped STORET download or an 
     * uploaded user file) containing dates (YYYY-mm-dd) in the first column and flow values (cfs) in 
     * the second column
     * @return a double[][] array with one row per unique year, containing years in the first column 
     * and annual maximum flow values (cfs) in the second column
     */
    public double[][] convertSTORETpeakData(String[][] sortableData){

        Arrays.sort(sortableData, new DateComparator());
        //Find a list of unique years for which an annual maximum flow will be calculated later
        int ctr=0;
        for(int i=0; i<(sortableData.length); i++){
            if(i == 0){
                ctr++;
                continue;
            }
            String year1 = sortableData[i-1][0].substring(0,4);
            String year2 = sortableData[i][0].substring(0,4);
            if (!year1.equals(year2)){
                ctr++;
            }
        }
        String[] uniqueYears = new String[ctr];
        ctr=0;
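        //Second pass: record each unique year (rows were sorted chronologically above, so equal years are adjacent)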
        for(int i=0; i<(sortableData.length); i++){
            if(i==0){
                uniqueYears[ctr] = sortableData[i][0].substring(0,4);
                ctr++;
                continue;
            }
            String year1 = sortableData[i-1][0].substring(0,4);
            String year2 = sortableData[i][0].substring(0,4);
            if (!year1.equals(year2)){
                uniqueYears[ctr] = sortableData[i][0].substring(0,4);
                ctr++;
            }
        }


        //Loop through and find the annual maximum flow value for each unique year
        double[][] peakFlowData = new double[uniqueYears.length][2];
        for(int i=0; i<uniqueYears.length; i++){
            peakFlowData[i][0] = Double.parseDouble(uniqueYears[i]);
            peakFlowData[i][1] = 0;
            for(int j=0; j<sortableData.length; j++){
                String currentYear = sortableData[j][0].substring(0,4);
                if(uniqueYears[i].equals(currentYear)){
                    double flowValue = Double.parseDouble(sortableData[j][1]);
                    if(Double.compare(flowValue, peakFlowData[i][1]) > 0){//If current value larger than "max" change the max to the current value
                        peakFlowData[i][1] = flowValue;
                    }
                }
            }
        }

        return peakFlowData;
    }
    /**
     * Writes out the dynamically created summary table to be displayed to the user along with the 
     * flood graph.  Each row is written as a single tab-delimited line prefixed with "$$" so the 
     * interface can split the rows apart later.
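     * <p>Illustrative sketch (the row contents are hypothetical): the row
     * {@code {"100-yr flood", "12345.6"}} is written to flood_summary.txt as the single
     * tab-delimited line {@code "$$100-yr flood\t12345.6"}.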
     * @param dynamicSummary  String[][] array whose rows become the lines of the text file
     * @param partialpath  the folder in which flood_summary.txt is written
     * @throws IOException if the summary file cannot be written
     */
    public void writeSummary(String[][] dynamicSummary, String partialpath) throws IOException{
        String path = partialpath + File.separator + "flood_summary.txt";
        FileWriter writer =  new FileWriter(path, false);
        PrintWriter print_line = new PrintWriter(writer);

        //Output data to text file
        String currentLine = "";
        for(int i=0; i < dynamicSummary.length; i++) {
            for(int j=0; j<dynamicSummary[i].length; j++){
                if(j == 0){
                    currentLine = dynamicSummary[i][j];
                }else{
                    currentLine = currentLine + "\t" + dynamicSummary[i][j];
                }
            }
            print_line.printf("%s%n", "$$" + currentLine);//Separate the rows with $$ to make substrings easier later in the interface
            System.out.println(currentLine);
        }
        print_line.close();
        writer.close();
        System.out.println("Text File located at:\t" + path);
    }
    /**
     * Reports an error by wrapping the supplied message in an IOException and throwing it, which 
     * aborts the current analysis.
     * @param error  the error message to report
     * @throws IOException always, carrying the supplied error message
     */
    public void writeError(String error) throws IOException{
        //Output error
        throw new IOException("Error encountered. Please see the following message for details: \n" + error);
    }
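    /**
     * Runs the flood frequency analysis.  Missing begin/end dates default to 1900-01-01 and the 
     * current date.  Peak annual flow data are retrieved from USGS, STORET, or an uploaded user 
     * file (optionally merged with the uploaded user data), a Bulletin 17B analysis is performed, 
     * the summary table is written to flood_summary.txt, and the record length and first/last 
     * years are stored for the interface.
     * @throws ArgumentOutsideDomainException propagated from the underlying flood-frequency calculations
     * @throws IOException if no data are found, the analysis reports an error, or the summary cannot be written
     */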
    public void run() throws ArgumentOutsideDomainException, IOException {
        //If no date input, make it the maximum of available data
        if(beginDate == null || beginDate.equalsIgnoreCase("")){
            beginDate = "1900-01-01";
        }
        if(endDate == null || endDate.equalsIgnoreCase("")){
            // Pull current date for upper limit of data search
            DateFormat desiredDateFormat = new SimpleDateFormat("yyyy-MM-dd");
            Date currentDate = new Date();
            endDate = desiredDateFormat.format(currentDate);
        }
        System.out.println("inside guiFlood_Model");
        DoubleArray doubleArray = new DoubleArray();
        DurationCurve durationCurve =  new DurationCurve();
        User_Data user_Data = new User_Data();
        USGS_Data usgs_Data = new USGS_Data();
        STORET_Data storet_Data = new STORET_Data();

        //Decide which analysis to perform
        if(analysisType.equalsIgnoreCase("B17")){
            double[][] peakFlowData = new double[0][0];
            if(mergeDatasets){
                //Get the user data to combine to public data for flow analysis
                String[][] sortableData_user = user_Data.readUserFile(userData, beginDate, endDate);
                if(sortableData_user.length==0){
                    String errorMessage = "There is no available uploaded data for station '" + stationID + "' and the specified date range";
                    writeError(errorMessage);
                }
                String[][] sortedData_user = durationCurve.removeDuplicateDates(sortableData_user);
                double[][] peakFlowData_user = convertSTORETpeakData(sortedData_user);
                
                if(organizationName.equalsIgnoreCase("USGS")){
                    System.out.println("calling getUSGSPeakData");
                    //Search for USGS peak flow data
                    ArrayList<String> peakData = usgs_Data.getUSGSPeakData(stationID);
                    peakFlowData = convertUSGSpeakData(peakData, beginDate, endDate);

                }else{
                    //Search for STORET peak flow data
                    System.out.println("calling downloadSTORET");
                    String zip_location = storet_Data.downloadSTORET(mainFolder, organizationName, stationID, "flow", beginDate, endDate);

                    //Unzip results file and extract all flow data
                    String[][] peakData = storet_Data.Unzip_STORETDownloadFiles(zip_location, "flow", true);
                    
                    //Calculate the peak annual values for the complete data record from STORET
                    peakFlowData = convertSTORETpeakData(peakData);
                }
                
                //Merge the public and user datasets using mergeMethod ("user", "public", "max", "average", or "min"); if the user data is empty nothing is merged
                peakFlowData = doubleArray.mergeData(peakFlowData, peakFlowData_user, mergeMethod);

                
            }else{
                //Get the data for flood analysis
                if(organizationName.equalsIgnoreCase("USGS")){
                    System.out.println("calling getUSGSPeakData");
                    //Search for USGS peak flow data
                    ArrayList<String> peakData = usgs_Data.getUSGSPeakData(stationID);
                    peakFlowData = convertUSGSpeakData(peakData, beginDate, endDate);

                }else if(organizationName.equalsIgnoreCase("UserData")){
                    //Read the user-uploaded data and use it for the flood frequency analysis
                    String[][] flowData = user_Data.readUserFile(userData, beginDate, endDate);

                    //Calculate the peak annual values for the complete data record from the user file 
                    //(because the data returned by the STORET search and the UserData search share the 
                    //same format, this STORET function can be used on UserData)
                    peakFlowData = convertSTORETpeakData(flowData);

                }else{
                    //Search for STORET peak flow data
                    System.out.println("calling downloadSTORET");
                    String zip_location = storet_Data.downloadSTORET(mainFolder, organizationName, stationID, "flow", beginDate, endDate);

                    //Unzip results file and extract all flow data
                    String[][] peakData = storet_Data.Unzip_STORETDownloadFiles(zip_location, "flow", true);

                    //Calculate the peak annual values for the complete data record from STORET
                    peakFlowData = convertSTORETpeakData(peakData);
                }
            }
            
            //Check if any data exists
            if (peakFlowData.length==0){
                String errorMessage = "There is no available flow data for station '" + stationID + "' and the specified date range. Error: Flood0001";
                writeError(errorMessage);
            }
            
            //Build the graph title, dropping the station name if the full title would be too long
            String graphTitle = stationID + "-" + stationName + " Agency: " + organizationName;
            if(graphTitle.length() > 65){
                graphTitle = stationID + " Agency: " + organizationName;
            }
            
            //Run Bulletin 17 function and return graph
            System.out.println("Calling B17");
            Bulletin17B bulletin17B = new Bulletin17B();
            String[][] dataSummary = bulletin17B.b17(peakFlowData, gg, MSERbar, mainFolder, graphTitle, showLargeFloods, plotref, plottype);
            if(dataSummary[0][0].contains("Error")){
                writeError(dataSummary[0][0]);
            }

            //Append summary of inputs to the data summary
            this.len = String.valueOf(peakFlowData.length);
            this.start = String.valueOf(peakFlowData[0][0]);
            this.end = String.valueOf(peakFlowData[peakFlowData.length - 1][0]);

            //Write out the data summary to be displayed with the graph
            writeSummary(dataSummary, mainFolder);
            
        }else{
            throw new IOException("Error: Flood method is not 'B17'");
        }
    }
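    /**
     * Stand-alone entry point: builds a guiFlood_Model with its default inputs (optionally 
     * overridden via the commented-out argument parsing below) and runs the flood analysis.
     * @param args  currently unused; see the commented block below for the intended argument order
     * @throws ArgumentOutsideDomainException propagated from the underlying flood-frequency calculations
     * @throws IOException if data retrieval or output writing fails
     */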
    public static void main(String[] args) throws ArgumentOutsideDomainException, IOException{
        guiFlood_Model floodModel = new guiFlood_Model();
        
        //Set Inputs
//        assert args.length > 0;
//        floodModel.setMainFolder(args[0]);    //The output location of the graph
//        floodModel.setFileName(args[1]);      //The name of the output graph and summary text file
//        floodModel.setFilePath2(args[2]);     //The file location of the B17 csv files containing the various tables (pscale, KNtable, etc.)
//        floodModel.setOrganizationName(args[3]);  //Supervising organization of the station (only used for STORET stations)
//        floodModel.setStationID(args[4]);         //The station ID used to retrieve the station's flood data
//        floodModel.setStationName(args[5]);       //The station name used to title the graph
//        floodModel.setAnalysisType(args[6]);      //Which method to use for flood analysis (currently only B17 is supported)
//        floodModel.setBeginDate(args[7]);         //begin date of analysis
//        floodModel.setEndDate(args[8]);           //end date of analysis
//        floodModel.setSkewness(Double.parseDouble(args[9]));          //Generalized regional skew
//        floodModel.setMeanSquareError(Double.parseDouble(args[10]));  //the error value for the generalized regional skew estimation
//        floodModel.setShowLargeFloods(Boolean.parseBoolean(args[11]));//Whether or not to show the five largest floods
//        floodModel.setPlotType(Boolean.parseBoolean(args[12]));       //Whether or not to show the reference lines and flow values for the 2, 5, 10, 25, and 100 year floods
//        floodModel.setPlotReference(Boolean.parseBoolean(args[13]));  //Whether or not the legend is shown inside the graph (true = inside, false = outside)
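        //Illustrative sketch (kept commented out, matching the block above): programmatic configuration 
        //using only the setters defined in this class; the folder, dates, and values below are placeholders.
//        floodModel.setMainFolder("C:/path/to/output");        //The output location of the graph and summary
//        floodModel.setOrganizationName("USGS");               //Supervising organization of the station (only used for STORET stations)
//        floodModel.setStationID("07374000");                  //The station ID used to retrieve the station's flood data
//        floodModel.setBeginDate("1990-01-01");                //Begin date of analysis (YYYY-mm-dd)
//        floodModel.setEndDate("2010-12-31");                  //End date of analysis (YYYY-mm-dd)
//        floodModel.setSkewness(0.2349);                       //Generalized regional skew
//        floodModel.setMeanSquareError(0.302);                 //Mean square error of the generalized regional skew estimate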
        
        //Run Model
        floodModel.run();

    }
}