// guiFlood_Model.java [src/java/m/cfa/flood] Revision: fae2c38c5ab71042ad506de8b970cded23828934  Date: Tue Nov 20 11:38:25 MST 2018
package m.cfa.flood;

import WaterData.WaterData;
import WaterData.WaterDataInterface;
import m.cfa.DoubleArray;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;

/**
* Last Updated: 20-June-2017
* @author Tyler Wible
* @since 13-June-2012
*/
public class guiFlood_Model {
    //Inputs — defaults are sample values used when run standalone; the trailing
    //commented alternatives show example values for the other supported databases.
    String directory = "F:/Projects/TylerWible/CodeDirectories/NetBeans/data/CFA";
    String database = "USGS";//"CDWR";//"STORET";//"CDSN";//"UserData";//
    String orgId = "n/a";//"n/a";//"21COL001";//"CITYFTCO_WQX";//"n/a";//
    String stationId = "06764880";//"CLAGRECO";//"000028";//"1EFF";//"n/a";//
    String stationName = "South Platte River at Roscoe, Nebr.";//"Cache La Poudre Near Greeley";//"BIG THOMPSON R NEAR MOUTH";//"n/a";//"n/a";//
    String analysisType = "B17";
    String startDate = "";
    String endDate = "";
    double gg = 0.23490029573440552;  //station skew coefficient passed to the B17 analysis
    double MSERbar = 0.302;           //mean square error of the (regional) skew estimate
    boolean showLargeFloods = false;
    boolean plotref = true;
    boolean plottype = true;
    boolean mergeDatasets = false;//true;//
    String mergeMethod = "user";//"public";//"max";//"average";//"min";//
    String userData  = "";//"Date\tFlow\n2012-04-29\t80000.3\n2013-05-09\t60.2\n2014-05-29\t20.1";
    
    //Outputs — populated by run()
    String len = "-1";
    String start = "?";
    String end = "?";
    String dataSource = "?";
    String skewErrorMessage = "?";
    
    //Gets
    public File getOutput(){ return new File(directory, "flood_summary.txt"); }
    public String getGraph(){ return "flood_graph.jpg"; }
    public String getLen(){ return len; }
    public String getStart(){ return start; }
    public String getEnd(){ return end; }
    public String getDataSource(){ return dataSource; }
    public String getSkewErrorMessage(){ return skewErrorMessage; }
    
    //Sets
    public void setDirectory(String directory_str){ directory = directory_str; }
    public void setDatabase(String database_str){ database = database_str; }
    public void setOrganizationID(String orgId_str){ orgId = orgId_str; }
    public void setStationId(String stationId_str){ stationId = stationId_str; }
    public void setStationName(String stationName_str){ stationName = stationName_str; }
    public void setAnalysisType(String analysisType_str){ analysisType = analysisType_str; }
    public void setStartDate(String startDate_str){ startDate = startDate_str; }
    public void setEndDate(String endDate_str){ endDate = endDate_str; }
    public void setSkewness(double gg_dbl){ gg = gg_dbl; }
    public void setMeanSquareError(double MSERbar_dbl){ MSERbar = MSERbar_dbl; }
    public void setShowLargeFloods(boolean showLargeFloods_TF){ showLargeFloods = showLargeFloods_TF; }
    public void setPlotType(boolean plottype_TF){ plottype = plottype_TF; }
    public void setPlotReference(boolean plotref_TF){ plotref = plotref_TF; }
    public void setMergeDatasets(boolean mergeDatasets_TF){ mergeDatasets = mergeDatasets_TF; }
    public void setMergeMethod(String mergeMethod_str){ mergeMethod = mergeMethod_str; }
    public void setUserData(String userData_str){ userData = userData_str; }
    /**
     * Writes out the dynamically created summary table to be displayed to the user along with the flood graph.
     * Each row of the array becomes one tab-delimited line in the summary text file.
     * This function exits when completed.
     * @param dynamicSummary  string[][] array to be written as each line of the text file
     * @throws IOException  if the summary file cannot be created or written
     */
    public void writeSummary(String[][] dynamicSummary) throws IOException{
        String path = directory + File.separator + getOutput().getName();
        //try-with-resources guarantees the file handle is released even if a write fails
        try (FileWriter writer = new FileWriter(path, false);
             PrintWriter print_line = new PrintWriter(writer)) {
            //Output data to text file: one tab-delimited row per line, CRLF-terminated
            for(int i=0; i < dynamicSummary.length; i++) {
                StringBuilder currentLine = new StringBuilder();
                for(int j=0; j<dynamicSummary[i].length; j++){
                    if(j > 0){
                        currentLine.append("\t");
                    }
                    currentLine.append(dynamicSummary[i][j]);
                }
                print_line.printf("%s" + "\r\n", currentLine.toString());
            }
        }
        System.out.println("Text File located at:\t" + path);
    }
    /**
     * Collects the accumulated error lines into a single message and reports it to the caller
     * by throwing an IOException (this method never returns normally).
     * @param error  list of strings, one per line of the error message; must be non-empty
     * @throws IOException  always — carries the newline-joined error message
     */
    public void writeError(ArrayList<String> error) throws IOException{
        //Join the accumulated error lines into one newline-separated message
        StringBuilder errorContents = new StringBuilder(error.get(0));
        for(int i=1; i<error.size(); i++){
            errorContents.append("\n").append(error.get(i));
        }
        throw new IOException("Error encountered. Please see the following message for details: \n" + errorContents.toString());
    }
    /**
     * Runs the configured flood-frequency analysis: pulls peak-flow data from the selected
     * database (optionally merged with user-uploaded data), performs the Bulletin 17B
     * analysis, records the data-source citation and summary statistics, and writes the
     * summary table for display.
     * @param storetResourceFile  resource file handed through to the WaterData library
     *                            (used by the STORET back end; may be null otherwise)
     * @throws IOException  if no flood data is available, the analysis type is not 'B17',
     *                      or the summary file cannot be written
     * @throws Exception  propagated from the WaterData library or the B17 analysis
     */
    public void run(File storetResourceFile) throws IOException, Exception {
        //If no date input, make it the maximum of available data
        if(startDate == null || startDate.equalsIgnoreCase("")){
            startDate = "1850-01-01";
        }
        if(endDate == null || endDate.equalsIgnoreCase("")){
            // Pull current date for upper limit of data search
            DateFormat desiredDateFormat = new SimpleDateFormat("yyyy-MM-dd");
            Date currentDate = new Date();
            endDate = desiredDateFormat.format(currentDate);
        }
        
        //Decide which analysis to perform
        if(analysisType.equalsIgnoreCase("B17")){
            //Check if any flow data exists
            WaterDataInterface waterLib = WaterData.getNewWaterDataInterface(database, userData, storetResourceFile);
            double[][] peakFlowData = waterLib.extractFloodData_formatted(directory, orgId, stationId, startDate, endDate);
            
            //Check if merging the datasets is desired, if so get the user data
            double[][] peakFlowData_user = new double[0][0];
            if(mergeDatasets){
                WaterDataInterface waterLibUser = WaterData.getNewWaterDataInterface("UserData", userData, storetResourceFile);
                peakFlowData_user = waterLibUser.extractFloodData_formatted(directory, orgId, stationId, startDate, endDate);
            }
            
            //Merge the two datasets (if user data is empty nothing will be merged)
            double[][] peakFlowData_combined = DoubleArray.mergeData(peakFlowData, peakFlowData_user, mergeMethod);
            if(peakFlowData_combined.length == 0){
                ArrayList<String> errorMessage = new ArrayList<>();
                if(peakFlowData.length == 0){
                    errorMessage.add("There is no available flood data in the " + database + " database for station '" + stationId + "' and the specified date range.");
                    if(database.equalsIgnoreCase("CDWR")){
                        errorMessage.add("The CDWR database is sensitive to the begin date used, try specifying a later begin date");
                    }
                }
                if(peakFlowData_user.length == 0){
                    errorMessage.add("There is no available uploaded flow data for station '" + stationId + "' and the specified date range");
                }
                writeError(errorMessage);
            }
            
            //Run Bulletin 17 function and return graph
            Bulletin17B bulletin17B = new Bulletin17B();
            String[][] dataSummary = bulletin17B.b17(peakFlowData_combined, gg, MSERbar, directory, database, stationId, stationName, showLargeFloods, plotref, plottype);

            //Get today's date for the source reference
            Date currentDate = new Date();
            SimpleDateFormat sourceDateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm");
            String today = sourceDateFormat.format(currentDate);
            //Attribute the data to its source; anything other than USGS/STORET/CDWR is user-provided
            if(database.equalsIgnoreCase("USGS")){
                dataSource = "Stream flow data retrieved from the U.S. Geological Survey, National Water Information System: Web Interface. http://waterdata.usgs.gov/nwis, accessed: " + today;
            }else if(database.equalsIgnoreCase("STORET")){
                //BUG FIX: this branch previously tested "CDWR", which made the real CDWR
                //branch below unreachable and mis-attributed CDWR data to EPA STORET
                dataSource = "Stream flow data retrieved from the U.S. Environmental Protection Agency, STORET. http://www.epa.gov/storet/index.html accessed: " + today;
            }else if(database.equalsIgnoreCase("CDWR")){
                dataSource = "Stream flow data retrieved from the Colorado Division of Water Resources, CDWR. http://www.dwr.state.co.us accessed: " + today;
            }else{
                dataSource = "Stream flow data provided by the user. Flow analysis accessed: " + today;
            }
            //Record summary statistics: record count and first/last water-year in the series
            len = String.valueOf(peakFlowData_combined.length);
            start = String.valueOf(peakFlowData_combined[0][0]);
            end = String.valueOf(peakFlowData_combined[peakFlowData_combined.length - 1][0]);
            skewErrorMessage = bulletin17B.skewErrorMessage;

            //Write out the data summary to be displayed with the graph
            writeSummary(dataSummary);
            
        }else{
            throw new IOException("Error: Flood analysis method specified is not 'B17'");
        }
    }
    public static void main(String[] args) throws IOException, Exception{
        //Run Model
        guiFlood_Model floodModel = new guiFlood_Model();
        floodModel.run(null);
    }
}