guiFlood_Model.java [src/java/cfa] Revision: 0541b55839a0a37eb36186d5fa85e782d5e5b42d  Date: Thu Jan 15 09:23:29 MST 2015
package cfa;

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import org.apache.commons.math.ArgumentOutsideDomainException;

/**
* Last Updated: 15-January-2015
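* <p>Retrieves peak flow data for a gauging station and performs a Bulletin 17B flood
* frequency analysis, producing a summary table and graph for display.
* <p>A minimal usage sketch (assumes the default field values below and that the output
* folder already exists; the printed label is illustrative):
* <pre>{@code
* guiFlood_Model model = new guiFlood_Model();
* model.setDatabase("USGS");
* model.setStationID("06764880");
* model.setStationName("South Platte River at Roscoe, Nebr.");
* model.run();
* System.out.println("Number of peak flow records: " + model.getLen());
* }</pre>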
* @author Tyler Wible
* @since 13-June-2012
*/
public class guiFlood_Model {
    String mainFolder = "C:/Projects/TylerWible/CodeDirectories/NetBeans/CSIP/data/CFA";//The output location of the graph
    String database = "USGS";//"CDWR";//"STORET";//"UserData";//
    String organizationName = "USGS";//"Co. Division of Water Resources";//"Colorado Dept. of Public Health & Environment";//
    String stationID = "06764880";//"16557000";//"11501000";//"CLAGRECO";//"000028";//
    String stationName = "South Platte River at Roscoe, Nebr.";//"Cache La Poudre Near Greeley";//"BIG THOMPSON R NEAR MOUTH";//
    String analysisType = "B17";//Which method to use for flood analysis (currently only B17 is supported)
    String beginDate = "";
    String endDate = "";
    double gg = 0.23490029573440552;
    double MSERbar = 0.302;
    boolean showLargeFloods = false;
    boolean plotref = true;
    boolean plottype = true;
    String userData  = "";//"Date\tFlow\n2012-04-29\t80000.3\n2013-05-09\t60.2\n2014-05-29\t20.1";
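    //userData, when supplied, is expected as tab-delimited text with a "Date\tFlow" header row
    //and yyyy-MM-dd dates, as in the commented example above (format inferred from that example)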
    boolean mergeDatasets = false;//true;//
    String mergeMethod = "user";//"public";//"max";//"average";//"min";//
    
    //Outputs
    String len = "-1";
    String start = "?";
    String end = "?";
    String dataSource = "?";
    
    //Gets
    public File getOutput() {
        return new File(mainFolder, "flood_summary.txt");
    }
    public String getGraph() {
        return "flood_graph.jpg";
    }
    public String getLen() {
        return len;
    }
    public String getStart() {
        return start;
    }
    public String getEnd() {
        return end;
    }
    public String getDataSource(){
        return dataSource;
    }
    
    
    //Sets
    public void setMainFolder(String mainFolder) {
        this.mainFolder = mainFolder;
    }
    public void setDatabase(String database) {
        this.database = database;
    }
    public void setOrganizationName(String organizationName) {
        this.organizationName = organizationName;
    }
    public void setStationID(String stationID) {
        this.stationID = stationID;
    }
    public void setStationName(String stationName) {
        this.stationName = stationName;
    }
    public void setAnalysisType(String analysisType) {
        this.analysisType = analysisType;
    }
    public void setBeginDate(String beginDate) {
        this.beginDate = beginDate;
    }
    public void setEndDate(String endDate) {
        this.endDate = endDate;
    }
    public void setSkewness(double gg) {
        this.gg = gg;
    }
    public void setMeanSquareError(double MSERbar) {
        this.MSERbar = MSERbar;
    }
    public void setShowLargeFloods(boolean showLargeFloods) {
        this.showLargeFloods = showLargeFloods;
    }
    public void setPlotType(boolean plottype) {
        this.plottype = plottype;
    }
    public void setPlotReference(boolean plotref) {
        this.plotref = plotref;
    }
    public void setUserData(String userData) {
        this.userData = userData;
    }
    public void setMergeDatasets(boolean mergeDatasets) {
        this.mergeDatasets = mergeDatasets;
    }
    public void setMergeMethod(String mergeMethod) {
        this.mergeMethod = mergeMethod;
    }
    /**
     * Writes the dynamically created summary table to a text file so it can be displayed
     * to the user alongside the flood graph. Each row of the array becomes one tab-delimited
     * line of the file, and the method returns once the file has been written.
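     * <p>A minimal call sketch, where {@code model} is a configured guiFlood_Model instance
     * (the table contents below are illustrative placeholders, not actual model output):
     * <pre>{@code
     * String[][] table = {
     *     {"Return Period (yr)", "Flow (cfs)"},
     *     {"100", "12000"}
     * };
     * model.writeSummary(table);
     * }</pre>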
     * @param dynamicSummary  string[][] array to be written as each line of the text file
     * @throws IOException
     */
    public void writeSummary(String[][] dynamicSummary) throws IOException{
        String path = mainFolder + File.separator + getOutput().getName();
        FileWriter writer =  new FileWriter(path, false);
        PrintWriter print_line = new PrintWriter(writer);

        //Output data to text file
        String currentLine = "";
        for(int i=0; i < dynamicSummary.length; i++) {
            for(int j=0; j<dynamicSummary[i].length; j++){
                if(j == 0){
                    currentLine = dynamicSummary[i][j];
                }else{
                    currentLine = currentLine + "\t" + dynamicSummary[i][j];
                }
            }
            print_line.printf("%s\r\n", currentLine);//Write each tab-delimited row on its own line
            System.out.println(currentLine);
        }
        print_line.close();
        writer.close();
        System.out.println("Text File located at:\t" + path);
    }
    /**
     * Combines the given error lines into a single message and reports it by throwing an
     * IOException, which ends the model run.
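     * <p>Example call, where {@code model} is a guiFlood_Model instance (the message text is
     * illustrative only):
     * <pre>{@code
     * ArrayList<String> msg = new ArrayList<String>();
     * msg.add("There is no available flood data for the requested station.");
     * model.writeError(msg); //throws IOException containing the combined message
     * }</pre>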
     * @param error  list of strings, each forming one line of the error message
     * @throws IOException
     */
    public void writeError(ArrayList<String> error) throws IOException{
        //Combine the error lines into a single message
        String errorContents = error.get(0);
        for(int i=1; i<error.size(); i++){
            errorContents = errorContents + "\n" + error.get(i);
        }
        throw new IOException("Error encountered. Please see the following message for details: \n" + errorContents);
    }
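    /**
     * Runs the flood frequency analysis: fills in default begin/end dates if none were given,
     * retrieves peak flow data (optionally merging an uploaded user dataset), performs the
     * Bulletin 17B computation, records the data source citation, and writes the summary
     * table for display alongside the graph.
     * @throws ArgumentOutsideDomainException propagated from the underlying Bulletin 17B computation
     * @throws IOException if no flow data is available for the requested station and date range,
     *         or if the summary file cannot be written
     * @throws Exception propagated from data retrieval or the Bulletin 17B computation
     */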
    public void run() throws ArgumentOutsideDomainException, IOException, Exception {
        //If no date input was given, default to the widest possible date range
        if(beginDate == null || beginDate.equalsIgnoreCase("")){
            beginDate = "1850-01-01";
        }
        if(endDate == null || endDate.equalsIgnoreCase("")){
            // Pull current date for upper limit of data search
            DateFormat desiredDateFormat = new SimpleDateFormat("yyyy-MM-dd");
            Date currentDate = new Date();
            endDate = desiredDateFormat.format(currentDate);
        }
        
        //Decide which analysis to perform
        if(analysisType.equalsIgnoreCase("B17")){
            //Check if any flow data exists
            Data data = new Data();
            double[][] peakFlowData = data.extractFloodData(mainFolder, database, organizationName, stationID, beginDate, endDate, userData);
            
            //Check if merging the datasets is desired; if so, retrieve the user data
            double[][] peakFlowData_user = new double[0][0];
            if(mergeDatasets){
                peakFlowData_user = data.extractFloodData(mainFolder, "UserData", "", "", beginDate, endDate, userData);
            }
            
            //Merge the two datasets (if user data is empty nothing will be merged)
            DoubleArray doubleArray = new DoubleArray();
            double[][] peakFlowData_combined = doubleArray.mergeData(peakFlowData, peakFlowData_user, mergeMethod);
            if(peakFlowData_combined.length == 0){
                ArrayList<String> errorMessage = new ArrayList<String>();
                if(peakFlowData.length == 0){
                    errorMessage.add("There is no available flood data in the " + database + " database for station '" + stationID + "' and the specified date range.");
                    if(database.equalsIgnoreCase("CDWR")){
                        errorMessage.add("The CDWR database is sensitive to the begin date used, try specifying a later begin date");
                    }
                }
                if(mergeDatasets && peakFlowData_user.length == 0){
                    errorMessage.add("There is no available uploaded flow data for station '" + stationID + "' and the specified date range");
                }
                writeError(errorMessage);
            }
            
            //Run Bulletin 17 function and return graph
            Bulletin17B bulletin17B = new Bulletin17B();
            String[][] dataSummary = bulletin17B.b17(peakFlowData_combined, gg, MSERbar, mainFolder, database, stationID, stationName, showLargeFloods, plotref, plottype);

            //Get today's date for the source reference
            Date currentDate = new Date();
            SimpleDateFormat sourceDateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm");
            String today = sourceDateFormat.format(currentDate);
            if(database.equalsIgnoreCase("USGS")){
                this.dataSource = "Stream flow data retrieved from the U.S. Geological Survey, National Water Information System: Web Interface. http://waterdata.usgs.gov/nwis, accessed: " + today;
            }else if(database.equalsIgnoreCase("STORET")){
                this.dataSource = "Stream flow data retrieved from the U.S. Environmental Protection Agency, STORET. http://www.epa.gov/storet/index.html accessed: " + today;
            }else if(database.equalsIgnoreCase("CDWR")){
                this.dataSource = "Stream flow data retrieved from the Colorado Division of Water Resources, CDWR. http://www.dwr.state.co.us accessed: " + today;
            }
            this.len = String.valueOf(peakFlowData_combined.length);
            this.start = String.valueOf(peakFlowData_combined[0][0]);
            this.end = String.valueOf(peakFlowData_combined[peakFlowData_combined.length - 1][0]);

            //Write out the data summary to be displayed with the graph
            writeSummary(dataSummary);
            
        }else{
            throw new IOException("Error: Flood analysis method '" + analysisType + "' is not supported; currently only 'B17' is available");
        }
    }
    public static void main(String[] args) throws ArgumentOutsideDomainException, IOException, Exception{
        guiFlood_Model floodModel = new guiFlood_Model();
        
        //Set Inputs
//        assert args.length > 0;
//        floodModel.setMainFolder(args[0]);    //The output location of the graph
//        floodModel.setFileName(args[1]);      //The name of the output graph and summary text file
//        floodModel.setFilePath2(args[2]);     //The file location of the B17 csv files containing the various tables (pscale, KNtable, etc.)
//        floodModel.setOrganizationName(args[3]);  //Supervising organization of the station (only used for STORET stations)
//        floodModel.setStationID(args[4]);         //The station ID used to retrieve the station's flood data
//        floodModel.setStationName(args[5]);       //The station name used to title the graph
//        floodModel.setAnalysisType(args[6]);      //Which method to use for flood analysis (currently only B17 is supported)
//        floodModel.setBeginDate(args[7]);         //begin date of analysis
//        floodModel.setEndDate(args[8]);           //end date of analysis
//        floodModel.setSkewness(Double.parseDouble(args[9]));          //Generalized regional skew
//        floodModel.setMeanSquareError(Double.parseDouble(args[10]));  //the error value for the generalized regional skew estimation
//        floodModel.setShowLargeFloods(Boolean.parseBoolean(args[11]));//Whether or not to show the five largest floods
//        floodModel.setPlotType(Boolean.parseBoolean(args[12]));       //Whether or not to show the reference lines and flow values for the 2, 5, 10, 25, and 100 year floods
//        floodModel.setPlotReference(Boolean.parseBoolean(args[13]));  //Whether or not the legend is shown inside the graph (true = inside, false = outside)
        
        //Run Model
        floodModel.run();

    }
}