// guiFlood_Model.java [src/java/cfa] Revision: af6981c8e32f99e7dfe23929483fbaa0c6d867f3  Date: Wed Feb 05 09:43:06 MST 2014
package cfa;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import org.apache.commons.math.ArgumentOutsideDomainException;

/**
* Last Updated: 5-February-2014
* @author Tyler Wible
* @since 13-June-2012
*/
public class guiFlood_Model {
    //Inputs (defaults are development values; override via the setters below)
    String mainFolder = "C:/Projects/TylerWible/CodeDirectories/NetBeans/CSIP/data/CFA";//The output location of the graph and summary file
    String organizationName = "USGS";//Supervising organization of the station (non-"USGS" values imply the STORET database)
    String stationID = "07374000";//Station whose peak-flow record is analyzed
    String stationName = "Current StationName: " + stationID;//Display name used in the graph title
    String analysisType = "B17";//Which method to use for flood analysis (currently only B17 is supported)
    String beginDate = "";//Analysis start date (yyyy-MM-dd); blank means use all available data
    String endDate = "";//Analysis end date (yyyy-MM-dd); blank means today
    double gg = 0.23490029573440552;//Generalized regional skew
    double MSERbar = 0.302;//Mean square error of the generalized regional skew estimate
    boolean showLargeFloods = false;//Whether to highlight the five largest floods on the graph
    boolean plotref = true;//Whether to plot reference lines for the 2, 5, 10, 25, and 100 year floods
    boolean plottype = true;//Whether the legend is shown inside the graph (true = inside, false = outside)
    String userData = "";//Optional user-supplied records, tab/newline delimited, e.g. "Date\tFlow\n1999-04-29\t80000.3"
    boolean mergeDatasets = false;//Whether to merge userData with the public record
    String mergeMethod = "user";//Merge strategy: "user", "public", "max", "average", or "min"
    
    //Outputs (populated by run(); sentinels until a run succeeds)
    String len = "-1";//Number of annual peaks in the public record
    String start = "?";//First entry of the public peak-flow record
    String end = "?";//Last entry of the public peak-flow record
    
    //Gets
    /** @return the summary text file written by {@link #writeSummary} inside mainFolder */
    public File getOutput() {
        return new File(mainFolder, "flood_summary.txt");
    }
    /** @return the file name of the graph produced by the Bulletin 17B analysis */
    public String getGraph() {
        return "flood_graph.jpg";
    }
    /** @return number of annual peaks in the public record ("-1" before run()) */
    public String getLen() {
        return len;
    }
    /** @return first entry of the public peak-flow record ("?" before run()) */
    public String getStart() {
        return start;
    }
    /** @return last entry of the public peak-flow record ("?" before run()) */
    public String getEnd() {
        return end;
    }
    
    
    //Sets
    public void setMainFolder(String mainFolder) {
        this.mainFolder = mainFolder;
    }
    public void setOrganizationName(String organizationName) {
        this.organizationName = organizationName;
    }
    public void setStationID(String stationID) {
        this.stationID = stationID;
    }
    public void setStationName(String stationName) {
        this.stationName = stationName;
    }
    public void setAnalysisType(String analysisType) {
        this.analysisType = analysisType;
    }
    public void setBeginDate(String beginDate) {
        this.beginDate = beginDate;
    }
    public void setEndDate(String endDate) {
        this.endDate = endDate;
    }
    public void setSkewness(double gg) {
        this.gg = gg;
    }
    public void setMeanSquareError(double MSERbar) {
        this.MSERbar = MSERbar;
    }
    public void setShowLargeFloods(boolean showLargeFloods) {
        this.showLargeFloods = showLargeFloods;
    }
    public void setPlotType(boolean plottype) {
        this.plottype = plottype;
    }
    public void setPlotReference(boolean plotref) {
        this.plotref = plotref;
    }
    public void setUserData(String userData) {
        this.userData = userData;
    }
    public void setMergeDatasets(boolean mergeDatasets) {
        this.mergeDatasets = mergeDatasets;
    }
    public void setMergeMethod(String mergeMethod) {
        this.mergeMethod = mergeMethod;
    }
    /**
     * Writes out the dynamically created summary table to be displayed to the user along with
     * the flood graph. Each row is tab-joined and prefixed with "$$" so the interface can split
     * the rows back apart later; each row is also echoed to stdout.
     * @param dynamicSummary  String[][] array to be written as each line of the text file
     * @param partialpath  the folder in which flood_summary.txt is created (overwritten if present)
     * @throws IOException if the file cannot be created or written
     */
    public void writeSummary(String[][] dynamicSummary, String partialpath) throws IOException{
        String path = partialpath + File.separator + "flood_summary.txt";
        //try-with-resources guarantees the writer is closed even if printing a row fails
        //(the original leaked the FileWriter/PrintWriter on any exception mid-write)
        try (PrintWriter print_line = new PrintWriter(new FileWriter(path, false))) {
            for (String[] row : dynamicSummary) {
                //Build the line fresh per row (the original reused the previous row's text
                //when a row was empty, because the accumulator lived outside the loop)
                StringBuilder currentLine = new StringBuilder();
                for (int j = 0; j < row.length; j++) {
                    if (j > 0) {
                        currentLine.append('\t');
                    }
                    currentLine.append(row[j]);
                }
                //Separate the rows with $$ to make substrings easier later in the interface
                print_line.printf("%s" + "%n", "$$" + currentLine);
                System.out.println(currentLine);
            }
        }
        System.out.println("Text File located at:\t" + path);
    }
    /**
     * Converts the collected error lines into a single IOException so the caller/interface
     * reports the failure to the user. (Despite the name, nothing is written to disk.)
     * @param error  list of lines describing the error; joined with newlines. An empty list
     *               no longer throws IndexOutOfBoundsException — it produces an empty message.
     * @throws IOException always — this is the method's purpose
     */
    public void writeError(ArrayList<String> error) throws IOException{
        StringBuilder errorContents = new StringBuilder();
        for (int i = 0; i < error.size(); i++) {
            if (i > 0) {
                errorContents.append('\n');
            }
            errorContents.append(error.get(i));
        }
        throw new IOException("Error encountered. Please see the following message for details: \n" + errorContents);
    }
    /**
     * Runs the configured flood-frequency analysis: defaults the date range if unset, fetches
     * the public peak-flow record, optionally merges user-supplied data, performs the Bulletin
     * 17B analysis (which produces the graph), records the record length/date range, and writes
     * the summary table via {@link #writeSummary}.
     * @throws IOException if no data is available, the analysis reports an error, the summary
     *         cannot be written, or analysisType is not "B17"
     * @throws ArgumentOutsideDomainException propagated from the underlying analysis
     */
    public void run() throws ArgumentOutsideDomainException, IOException {
        //If no date input, make it the maximum of available data
        if(beginDate == null || beginDate.equalsIgnoreCase("")){
            beginDate = "1900-01-01";
        }
        if(endDate == null || endDate.equalsIgnoreCase("")){
            // Pull current date for upper limit of data search
            DateFormat desiredDateFormat = new SimpleDateFormat("yyyy-MM-dd");
            endDate = desiredDateFormat.format(new Date());
        }
        
        //Decide which analysis to perform (only Bulletin 17B is implemented)
        if(!analysisType.equalsIgnoreCase("B17")){
            throw new IOException("Error: Flood analysis method specified is not 'B17'");
        }
        
        //Fetch the public peak-flow record for the station
        Data data = new Data();
        double[][] peakFlowData = data.extractFloodData(mainFolder, organizationName, stationID, beginDate, endDate, userData);
        
        //Check if merging the datasets is desired, if so get the user data
        double[][] peakFlowData_user = new double[0][0];
        if(mergeDatasets){
            //Get the user data to combine with public data for flow analysis
            User_Data user_Data = new User_Data();
            String[][] sortableData_user = user_Data.readUserFile(userData, beginDate, endDate);
            
            //Remove duplicate dates and convert the timeseries into an annual peak series
            DoubleArray doubleArray = new DoubleArray();
            String[][] sortedData_user = doubleArray.removeDuplicateDates(sortableData_user);
            peakFlowData_user = doubleArray.convertSTORETpeakData(sortedData_user);
        }
        
        //Merge the two datasets (if user data is empty nothing will be merged)
        DoubleArray doubleArray = new DoubleArray();
        double[][] peakFlowData_combined = doubleArray.mergeData(peakFlowData, peakFlowData_user, mergeMethod);
        if(peakFlowData_combined.length == 0){
            ArrayList<String> errorMessage = new ArrayList<>();
            if(peakFlowData.length == 0){
                String database = "USGS";
                if(!organizationName.equals("USGS")){
                    database = "STORET";
                }
                //FIX: added the missing space before "database" (was "...STORETdatabase...")
                errorMessage.add("There is no available flood data in the " + database + " database for station '" + stationID + "' and the specified date range.");
            }
            if(peakFlowData_user.length == 0){
                errorMessage.add("There is no available uploaded flow data for station '" + stationID + "' and the specified date range");
            }
            writeError(errorMessage);
        }
        
        //Shorten the graph title when the station name would make it too long to render
        String graphTitle = stationID + "-" + stationName + " Agency: " + organizationName;
        if(graphTitle.length() > 65){
            graphTitle = stationID + " Agency: " + organizationName;
        }
        
        //Run Bulletin 17 function and return graph
        Bulletin17B bulletin17B = new Bulletin17B();
        String[][] dataSummary = bulletin17B.b17(peakFlowData_combined, gg, MSERbar, mainFolder, graphTitle, showLargeFloods, plotref, plottype);
        if(dataSummary[0][0].contains("Error")){
            ArrayList<String> errorMessage = new ArrayList<>();
            errorMessage.add(dataSummary[0][0]);
            writeError(errorMessage);
        }

        //Record the public-record length and date range for the interface
        this.len = String.valueOf(peakFlowData.length);
        //FIX: guard the index — the public record may be empty when only user data was
        //supplied (the combined check above passes, but peakFlowData[0][0] would throw)
        if(peakFlowData.length > 0){
            this.start = String.valueOf(peakFlowData[0][0]);
            this.end = String.valueOf(peakFlowData[peakFlowData.length - 1][0]);
        }

        //Write out the data summary to be displayed with the graph
        writeSummary(dataSummary, mainFolder);
    }
    /**
     * Command-line entry point. Inputs currently come from the hard-coded field defaults;
     * configure a run programmatically via the set* methods (mainFolder, organizationName,
     * stationID/Name, analysisType, dates, skewness, mean square error, plotting flags,
     * and the merge options) before calling {@link #run()}.
     */
    public static void main(String[] args) throws ArgumentOutsideDomainException, IOException{
        guiFlood_Model floodModel = new guiFlood_Model();
        
        //Run Model with the default (or setter-configured) inputs
        floodModel.run();
    }
}