package m.cfa.flood;

import WaterData.WaterData;
import WaterData.WaterDataInterface;
import csip.api.server.Executable;
import m.cfa.DoubleArray;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.text.DateFormat;
import java.text.DecimalFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.Scanner;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;

/**
 * Last Updated: 9-April-2019
 *
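 * Illustrative usage (a minimal sketch; the directory and station values
 * below are placeholders, and a CSIP {@link Executable} must be supplied
 * before a C17 run):
 * <pre>
 * guiFlood_Model floodModel = new guiFlood_Model();
 * floodModel.setDirectory( "/path/to/workdir" ); //working directory for all input/output files
 * floodModel.setDatabase( "USGS" );
 * floodModel.setStationId( "06764880" );
 * floodModel.setAnalysisType( "B17" ); //or "C17" (also call setExecutable)
 * floodModel.run();
 * </pre>
 *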
 * @author Tyler Wible
 * @since 13-June-2012
 */
public class guiFlood_Model {

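    //Inputs (the hard-coded values below are sample defaults for local testing)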
    String directory = "C:/Projects/TylerWible_repos/NetBeans/data/CFA";
    String database = "USGS";//"CDWR";//"STORET";//"CDSN";//"UserData";//
    String orgId = "n/a";//"n/a";//"21COL001";//"CITYFTCO_WQX";//"n/a";//
    String stationId = "06764880";//"CLAGRECO";//"000028";//"1EFF";//"n/a";//
    String stationName = "South Platte River at Roscoe, Nebr.";//"Cache La Poudre Near Greeley";//"BIG THOMPSON R NEAR MOUTH";//"n/a";//"n/a";//
    String analysisType = "B17";
    String startDate = "";
    String endDate = "";
    double skewness = 0.23490029573440552;
    double meanSquareError = 0.302;
    boolean showLargeFloods = false;
    boolean plotref = true;
    boolean plottype = true;
    boolean mergeDatasets = false;//true;//
    String mergeMethod = "user";//"public";//"max";//"average";//"min";//
    String userData = "";//"Date\tFlood\n2012-04-29\t80000.3\n2013-05-09\t60.2\n2014-05-29\t20.1\n2015-04-29\t80000.3\n2016-05-09\t60.2\n2017-05-29\t20.1\n2018-04-29\t80000.3\n2019-05-09\t60.2\n2020-05-29\t20.1\n2021-04-29\t80000.3\n2022-05-09\t60.2\n2023-05-29\t20.1";
    Executable c17Exe = null;

    //Outputs
    String len = "-1";
    String start = "?";
    String end = "?";
    String dataSource = "?";
    String skewErrorMessage = "?";
    double stationSkew = Double.NaN;
    double weightedGenSkew = Double.NaN;
    JSONArray lowOutliersStationSkew = new JSONArray();
    JSONArray highOutliersStationSkew = new JSONArray();
    JSONArray lowOutliersWeightedGenSkew = new JSONArray();
    JSONArray highOutliersWeightedGenSkew = new JSONArray();
    private String atSiteSkewOption = "";
    private double highSystematicThreshold = Double.NaN;
    private String lowOutlierMethod = "";
    private double lowOutlierThreshold = Double.NaN;
    JSONArray flowIntervals = new JSONArray();
    JSONArray perceptionThresholds = new JSONArray();
    public Bulletin17CResults stationResult;
    public Bulletin17CResults weightedResult;

    //Gets
    public File getOutputWeightedGenSkew() {
        return new File( directory, "flood_summary.txt" );
    }

    public File getOutputStationSkew() {
        return new File( directory, "flood_summary_stationskew.txt" );
    }

    public String getGraph() {
        return "flood_graph.jpg";
    }

    public String getLen() {
        return len;
    }

    public String getStart() {
        return start;
    }

    public String getEnd() {
        return end;
    }

    public String getDataSource() {
        return dataSource;
    }

    public String getSkewErrorMessage() {
        return skewErrorMessage;
    }

    public String getStationSkew() {
        return String.valueOf( stationSkew );
    }

    public String getWeightedGenSkew() {
        return String.valueOf( weightedGenSkew );
    }

    public JSONArray getLowOutliersStationSkew() {
        return lowOutliersStationSkew;
    }

    public JSONArray getHighOutliersStationSkew() {
        return highOutliersStationSkew;
    }

    public JSONArray getLowOutliersWeightedGenSkew() {
        return lowOutliersWeightedGenSkew;
    }

    public JSONArray getHighOutliersWeightedGenSkew() {
        return highOutliersWeightedGenSkew;
    }

    public String getAtSiteSkewOption() {
        return atSiteSkewOption;
    }

    public double getHighSystematicThreshold() {
        return highSystematicThreshold;
    }

    public String getLowOutlierMethod() {
        return lowOutlierMethod;
    }

    public double getLowOutlierThreshold() {
        return lowOutlierThreshold;
    }

    public Executable getC17Exe() {
        return c17Exe;
    }

    //Sets
    public void setDirectory( String directory_str ) {
        directory = directory_str;
    }

    public void setDatabase( String database_str ) {
        database = database_str;
    }

    public void setOrganizationID( String orgId_str ) {
        orgId = orgId_str;
    }

    public void setStationId( String stationId_str ) {
        stationId = stationId_str;
    }

    public void setStationName( String stationName_str ) {
        stationName = stationName_str;
    }

    public void setAnalysisType( String analysisType_str ) {
        analysisType = analysisType_str;
    }

    public void setStartDate( String startDate_str ) {
        startDate = startDate_str;
    }

    public void setEndDate( String endDate_str ) {
        endDate = endDate_str;
    }

    public void setSkewness( double skewness ) {
        this.skewness = skewness;
    }

    public void setMeanSquareError( double meanSquareError ) {
        this.meanSquareError = meanSquareError;
    }

    public void setShowLargeFloods( boolean showLargeFloods_TF ) {
        showLargeFloods = showLargeFloods_TF;
    }

    public void setPlotType( boolean plottype_TF ) {
        plottype = plottype_TF;
    }

    public void setPlotReference( boolean plotref_TF ) {
        plotref = plotref_TF;
    }

    public void setMergeDatasets( boolean mergeDatasets_TF ) {
        mergeDatasets = mergeDatasets_TF;
    }

    public void setMergeMethod( String mergeMethod_str ) {
        mergeMethod = mergeMethod_str;
    }

    public void setUserData( String userData_str ) {
        userData = userData_str;
    }

    public void setAtSiteSkewOption( String atSiteSkewOption ) {
        this.atSiteSkewOption = atSiteSkewOption;
    }

    public void setHighSystematicThreshold( double highSystematicThreshold ) {
        this.highSystematicThreshold = highSystematicThreshold;
    }

    public void setLowOutlierMethod( String lowOutlierMethod ) {
        this.lowOutlierMethod = lowOutlierMethod;
    }

    public void setLowOutlierThreshold( double lowOutlierThreshold ) {
        this.lowOutlierThreshold = lowOutlierThreshold;
    }
    
    public void setFlowIntervals( JSONArray flowIntervals ) {
        this.flowIntervals = flowIntervals;
    }
    
    public void setPerceptionThresholds( JSONArray perceptionThresholds ) {
        this.perceptionThresholds = perceptionThresholds;
    }

    public void setExecutable( Executable e ) {
        c17Exe = e;
    }

    /**
     * Writes out the dynamically created summary table to be displayed to the
     * user along with the flood graph
     *
     * @param dynamicSummary String[][] array whose rows become the lines of
     * the text file
     * @param resultFileName name of the summary file to create in the working
     * directory
     * @throws IOException
     */
    public void writeSummary( String[][] dynamicSummary, String resultFileName ) throws IOException {
        String path = directory + File.separator + resultFileName;
        try ( FileWriter writer = new FileWriter( path, false ); PrintWriter print_line = new PrintWriter( writer ) ) {
            //Output data to text file, one tab-delimited row per line
            for ( int i = 0; i < dynamicSummary.length; i++ ) {
                String currentLine = dynamicSummary[ i ][ 0 ];
                for ( int j = 1; j < dynamicSummary[ i ].length; j++ ) {
                    currentLine = currentLine + "\t" + dynamicSummary[ i ][ j ];
                }
                print_line.printf( "%s\r\n", currentLine );
            }
        }
        System.out.println( "Text File located at:\t" + path );
    }

    /**
     * Combines the accumulated error messages into a single string and
     * reports it by throwing an IOException
     *
     * @param error list of strings, one per line of the error message
     * @throws IOException always, carrying the combined error message
     */
    public void writeError( ArrayList<String> error ) throws IOException {
        //Combine the individual messages into one newline-separated string
        StringBuilder errorContents = new StringBuilder( error.get( 0 ) );
        for ( int i = 1; i < error.size(); i++ ) {
            errorContents.append( "\n" ).append( error.get( i ) );
        }
        throw new IOException( "Error encountered. Please see the following message for details: \n" + errorContents );
    }

    /**
     * Writes out the input files required by the peakfqsa.exe software: one
     * .spc file for the weighted-skew run, one for the station-skew run, and
     * the c17.cmd file that lists both
     *
     * @param peakFlows double[][] array of [water year, peak discharge] pairs
     * written as the peak flow records
     * @throws IOException
     * @throws JSONException
     */
    private void writeC17inputFiles( double[][] peakFlows ) throws IOException, JSONException {
        //Fix Start and End year to match the USGS data file.
        String startYear = new DecimalFormat( "#" ).format( peakFlows[ 0 ][ 0 ] );
        String endYear = new DecimalFormat( "#" ).format( peakFlows[ peakFlows.length - 1 ][ 0 ] );
        this.setStartDate( startYear + "-01-01" ); //for simplicity, month and day are extracted later anyway.
        this.setEndDate( endYear + "-01-01" );

        //Write input data file for peakfqsa.exe
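        //A sketch of the directives emitted below (values are illustrative):
        //  STATION     06764880
        //  I     c17Weighted.spc
        //  CSV     YES
        //  BEGYEAR     1950
        //  ENDYEAR     2018
        //  ...
        //  THRESHOLD     1950     2018     0     1.00E+010
        //  Q     1950     1230               (simple peak)
        //  QINT     1951     1100     1400   (flow interval)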
        String path = directory + File.separator + "c17Weighted.spc";
        try ( FileWriter fileWriter = new FileWriter( path, false ); PrintWriter writer = new PrintWriter( fileWriter ) ) {
            //First the model options
            writer.println( "STATION     " + this.stationId + "\n" );
            writer.println( "I     c17Weighted.spc\n" );
            writer.println( "CSV     YES\n" );
            writer.println( "BEGYEAR     " + this.startDate.split( "-" )[ 0 ] + "\n" );
            writer.println( "ENDYEAR     " + this.endDate.split( "-" )[ 0 ] + "\n" );
            writer.println( "GENSKEW     " + String.valueOf( this.skewness ) + "\n" );
            writer.println( "SKEWMSE     " + String.valueOf( this.meanSquareError ) + "\n" );
            writer.println( "PP_ALPHA     0.05\n" );
            writer.println( "A_S_SKEW_OPT     " + this.atSiteSkewOption + "\n" ); //TODO: read in from json params
            if ( this.highSystematicThreshold != Double.NaN ) {
                writer.println( "HISYS     " + this.highSystematicThreshold + "\n" ); //TODO: read in from json params
            }
            if ( !this.lowOutlierMethod.isEmpty() ) {
                writer.println( "LOMETHOD     " + this.lowOutlierMethod + "\n" ); //TODO: read in from json params
            }
            if ( this.lowOutlierMethod.equalsIgnoreCase( "FIXED" ) ) {
                writer.println( "LOTHRESH     " + this.lowOutlierThreshold + "\n" ); //TODO: read in from json params
            }
            writer.println( "SKEWOPT     WEIGHTED\n" ); //TODO: Run model twice, once with weighted, once with station
            if (this.perceptionThresholds.length() > 0) {
                for (int i=0; i<this.perceptionThresholds.length(); i++) {
                    JSONObject singlePerceptionThreshold = (JSONObject) this.perceptionThresholds.get(i);
                    //use toString() rather than a String cast so numeric JSON values do not throw ClassCastException
                    String startStr = singlePerceptionThreshold.get("start_year").toString();
                    String endStr = singlePerceptionThreshold.get("end_year").toString();
                    String lowerStr = singlePerceptionThreshold.get("min").toString();
                    String upperStr = singlePerceptionThreshold.get("max").toString();
                    if (upperStr.equalsIgnoreCase("inf")) {
                        upperStr = "1.00E+010";
                    }
                    writer.print( "THRESHOLD     " + startStr + "     " + endStr + "     " + lowerStr + "     " + upperStr + "\n" );
                }
            } else {
                //Default threshold spanning the full record
                writer.print( "THRESHOLD     " + startYear + "     " + endYear + "     0     1.00E+010\n" );
            }

            //Then the peak flow values.
            for ( double[] peakFlow : peakFlows ) {
                //Convert values to strings to lop off decimal places.
                DecimalFormat df = new DecimalFormat( "#" );
                String year = df.format( peakFlow[ 0 ] );
                String rate = df.format( peakFlow[ 1 ] );

                boolean simpleValue = true;
                String lowerFlowInterval = null;
                String upperFlowInterval = null;
                if (this.flowIntervals.length() > 0) {
                    for (int i=0; i<this.flowIntervals.length(); i++) {
                        JSONObject singleFlowInterval = (JSONObject) this.flowIntervals.get(i);
                        double waterYear = Double.parseDouble(singleFlowInterval.get("waterYear").toString());
                        lowerFlowInterval = singleFlowInterval.get("Ql").toString();
                        upperFlowInterval = singleFlowInterval.get("Qu").toString();
                        if ( Double.compare(waterYear, peakFlow[0]) == 0 ){
                            double dischargeRate = Double.parseDouble(rate);
                            double lowerFlowIntervalNumber = Double.parseDouble(lowerFlowInterval);
                            double upperFlowIntervalNumber = Double.parseDouble(upperFlowInterval);
                            if ( Double.compare(lowerFlowIntervalNumber, dischargeRate) != 0 ||
                                    Double.compare(upperFlowIntervalNumber, dischargeRate) != 0) {
                                //If either bound differs from the observed discharge, the record
                                //must be written as a flow interval (QINT) rather than a simple value (Q)
                                simpleValue = false;
                            }
                            break;
                        }
                    }
                }

                if (simpleValue){
                    //write gaged discharge (known) values to file
                    writer.print( "Q     " + year + "     " + rate + "\n" );
                } else {
                    //write flow interval values to file
                    writer.print( "QINT     " + year + "     " + lowerFlowInterval + "     " + upperFlowInterval + "\n" );
                }
            }
        }
        System.out.println( "Weighted spc file located at:\t" + path );

        path = directory + File.separator + "c17Station.spc";
        try ( FileWriter fileWriter = new FileWriter( path, false ); PrintWriter writer = new PrintWriter( fileWriter ) ) {
            writer.print( "STATION     " + this.stationId + "\n" );
            writer.print( "I     c17Station.spc\n" );
            writer.print( "CSV     YES\n" );
            writer.print( "BEGYEAR     " + this.startDate.split( "-" )[ 0 ] + "\n" );
            writer.print( "ENDYEAR     " + this.endDate.split( "-" )[ 0 ] + "\n" );
            writer.print( "GENSKEW     " + String.valueOf( this.skewness ) + "\n" );
            writer.print( "SKEWMSE     " + String.valueOf( this.meanSquareError ) + "\n" );
            writer.print( "PP_ALPHA     0.05\n" );
            writer.print( "A_S_SKEW_OPT     " + this.atSiteSkewOption + "\n" ); //TODO: read in from json params
            if ( !Double.isNaN( this.highSystematicThreshold ) ) { //NaN != NaN, so a direct '!=' comparison is always true
                writer.print( "HISYS     " + this.highSystematicThreshold + "\n" ); //TODO: read in from json params
            }
            if ( !this.lowOutlierMethod.isEmpty() ) {
                writer.print( "LOMETHOD     " + this.lowOutlierMethod + "\n" ); //TODO: read in from json params
            }
            if ( this.lowOutlierMethod.equalsIgnoreCase( "FIXED" ) ) {
                writer.print( "LOTHRESH     " + this.lowOutlierThreshold + "\n" ); //TODO: read in from json params
            }
            writer.print( "SKEWOPT     STATION\n" );
            writer.print( "THRESHOLD     " + startYear + "     " + endYear + "     0     1.00E+010\n" ); //Just something to make it run for now.
            //TODO: writer.println("PCPT_TRESH     ");

            for ( double[] peakFlow : peakFlows ) {
                //Convert values to strings to lop off decimal places.
                DecimalFormat df = new DecimalFormat( "#" );
                String year = df.format( peakFlow[ 0 ] );
                String rate = df.format( peakFlow[ 1 ] );

                //write values to file
                writer.print( "Q     " + year + "     " + rate + "\n" );
            }
        }

        //Write cmd file for peakfqsa.exe listing both .spc runs
        path = directory + File.separator + "c17.cmd";
        try ( FileWriter fileWriter = new FileWriter( path, false ); PrintWriter writer = new PrintWriter( fileWriter ) ) {
            writer.print( "c17Weighted.spc\n" );
            writer.print( "c17Station.spc\n" );
        }
    }

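    /**
     * Parses one CSV output file produced by peakfqsa.exe. Rows following a
     * "PP," header are read as frequency-curve results and rows following a
     * "WY," header as per-water-year results; the "Moments," row supplies the
     * computed skew, stored as station or weighted skew per skewResultType
     *
     * @param path full path of the CSV file to read
     * @param skewResultType "station" or "weighted"
     * @return the parsed Bulletin 17C results
     * @throws FileNotFoundException
     */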
    public Bulletin17CResults readC17outputFile( String path, String skewResultType ) throws FileNotFoundException {
        Bulletin17CResults finalResult = new Bulletin17CResults();

        boolean foundFreqValues = false;
        boolean foundWYValues = false;
        try ( Scanner scanner = new Scanner( new File( path ) ) ) {
            while ( scanner.hasNextLine() ) { //pair hasNextLine() with nextLine()
                String line = scanner.nextLine().trim();
                if ( line.startsWith( "PP," ) ) {
                    foundFreqValues = true;
                } else if ( line.startsWith( "WY," ) ) {
                    foundWYValues = true;
                } else if ( line.isEmpty() ) {
                    foundFreqValues = false;
                    foundWYValues = false;
                } else if ( line.startsWith( "Moments," ) ) {
                    String[] tokens = line.split( "," );
                    //double mean = Double.parseDouble( tokens[ 3 ] );
                    //double variance = Double.parseDouble( tokens[ 3 ] );
                    double resultSkewness = Double.parseDouble( tokens[ 3 ] );
                    if (skewResultType.equalsIgnoreCase("station")) {
                        stationSkew = resultSkewness;
                    } else if (skewResultType.equalsIgnoreCase("weighted")) {
                        weightedGenSkew = resultSkewness;
                    }
                } else if ( foundFreqValues ) {
                    String[] tokens = line.split( "," );
                    Bulletin17CFreqResult res = new Bulletin17CFreqResult();
                    res.probabilityPct = Double.parseDouble( tokens[ 0 ] );
                    res.returnPeriod = Double.parseDouble( tokens[ 1 ] );
                    res.zScore = Double.parseDouble( tokens[ 2 ] );
                    res.estimatedDischarge = Double.parseDouble( tokens[ 3 ] );
                    res.lowerCI = Double.parseDouble( tokens[ 4 ] );
                    res.upperCI = Double.parseDouble( tokens[ 5 ] );
                    finalResult.addResult( res );

                } else if ( foundWYValues ) {
                    String[] tokens = line.split( "," );
                    BulletinC17WYResult res = new BulletinC17WYResult();
                    res.waterYear = Integer.parseInt( tokens[ 0 ] );
                    res.probability = Double.parseDouble( tokens[ 1 ] );
                    res.zScore = Double.parseDouble( tokens[ 2 ] );
                    res.lowDischargeEstimate = Double.parseDouble( tokens[ 3 ] );
                    res.highDischargeEstimate = Double.parseDouble( tokens[ 4 ] );
                    res.fittedDischarge = Double.parseDouble( tokens[ 5 ] );
                    finalResult.addResult( res );
                }
            }
        }

        return finalResult;
    }

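    /**
     * Reads both peakfqsa.exe CSV outputs (the station-skew and weighted-skew
     * runs) from the working directory and stores the parsed results
     *
     * @throws FileNotFoundException
     * @throws IOException
     */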
    public void readC17outputFiles() throws FileNotFoundException, IOException {
        String path = directory + File.separator + "c17Station.csv";
        this.stationResult = readC17outputFile( path , "station");

        path = directory + File.separator + "c17Weighted.csv";
        this.weightedResult = readC17outputFile( path , "weighted");
    }

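    /**
     * Main workflow: fills in default dates if none were given, pulls peak
     * flow data from the configured database (optionally merged with
     * user-uploaded data), then runs either the Bulletin 17B or the Bulletin
     * 17C analysis
     *
     * @throws IOException if no flood data is available or the analysis type
     * is not 'B17' or 'C17'
     * @throws Exception
     */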
    public void run() throws IOException, Exception {
        //If no date input, make it the maximum of available data
        if ( startDate == null || startDate.isEmpty() ) {
            startDate = "1850-01-01";
        }
        if ( endDate == null || endDate.isEmpty() ) {
            // Pull current date for upper limit of data search
            DateFormat desiredDateFormat = new SimpleDateFormat( "yyyy-MM-dd" );
            Date currentDate = new Date();
            endDate = desiredDateFormat.format( currentDate );
        }

        //Check if any flow data exists
        WaterDataInterface waterLib = WaterData.getNewWaterDataInterface( database, userData );
        double[][] peakFlowData = waterLib.extractFloodData_formatted( directory, orgId, stationId, startDate, endDate );
        dataSource = waterLib.getDataSourceCitation();

        //Check if merging the datasets is desired, if so get the user data
        double[][] peakFlowData_user = new double[ 0 ][ 0 ];
        if ( mergeDatasets ) {
            WaterDataInterface waterLibUser = WaterData.getNewWaterDataInterface( "UserData", userData );
            peakFlowData_user = waterLibUser.extractFloodData_formatted( directory, orgId, stationId, startDate, endDate );
        }

        //Merge the two datasets (if user data is empty nothing will be merged)
        double[][] peakFlowData_combined = DoubleArray.mergeData( peakFlowData, peakFlowData_user, mergeMethod );
        if ( peakFlowData_combined.length == 0 ) {
            ArrayList<String> errorMessage = new ArrayList<>();
            if ( peakFlowData.length == 0 ) {
                errorMessage.add( "There is no available flood data in the " + database + " database for station '" + stationId + "' and the specified date range." );
                if ( database.equalsIgnoreCase( "CDWR" ) ) {
                    errorMessage.add( "The CDWR database is sensitive to the begin date used, try specifying a later begin date" );
                }
            }
            if ( peakFlowData_user.length == 0 ) {
                errorMessage.add( "There is no available uploaded flow data for station '" + stationId + "' and the specified date range" );
            }
            writeError( errorMessage );
        }

        //Decide which analysis to perform
        if ( analysisType.equalsIgnoreCase( "b17" ) ) {

            //Run the Bulletin 17B analysis twice and return the graph: once with
            //station skew only (generalized skew = NaN) and once with the weighted generalized skew
            Bulletin17B bulletin17B = new Bulletin17B();
            Object[] returnArrayStationSkew = bulletin17B.b17( peakFlowData_combined, Double.NaN, meanSquareError, directory, database, stationId, stationName, showLargeFloods, plotref, plottype );
            String[][] dataSummaryStationSkew = (String[][]) returnArrayStationSkew[ 0 ];
            stationSkew = (double) returnArrayStationSkew[ 1 ];
            lowOutliersStationSkew = (JSONArray) returnArrayStationSkew[ 2 ];
            highOutliersStationSkew = (JSONArray) returnArrayStationSkew[ 3 ];
            Object[] returnArrayWeightedGenSkew = bulletin17B.b17( peakFlowData_combined, skewness, meanSquareError, directory, database, stationId, stationName, showLargeFloods, plotref, plottype );
            String[][] dataSummaryWeightedGenSkew = (String[][]) returnArrayWeightedGenSkew[ 0 ];
            weightedGenSkew = (double) returnArrayWeightedGenSkew[ 1 ];
            lowOutliersWeightedGenSkew = (JSONArray) returnArrayWeightedGenSkew[ 2 ]; //take these from the weighted-skew run, not the station-skew run
            highOutliersWeightedGenSkew = (JSONArray) returnArrayWeightedGenSkew[ 3 ];
            len = String.valueOf( peakFlowData_combined.length );
            start = String.valueOf( peakFlowData_combined[ 0 ][ 0 ] );
            end = String.valueOf( peakFlowData_combined[ peakFlowData_combined.length - 1 ][ 0 ] );
            skewErrorMessage = bulletin17B.skewErrorMessage;

            //Write out the data summary to be displayed with the graph
            writeSummary( dataSummaryWeightedGenSkew, getOutputWeightedGenSkew().getName() );
            writeSummary( dataSummaryStationSkew, getOutputStationSkew().getName() );

        } else if ( analysisType.equalsIgnoreCase( "c17" ) ) {

            writeC17inputFiles( peakFlowData_combined );
            File inputFile = new File( directory + File.separator + "c17.cmd" );
            c17Exe.addArguments( inputFile.getAbsolutePath() );
            c17Exe.exec();

            readC17outputFiles();

        } else {
            throw new IOException( "Error: Flood analysis method specified is not 'B17' or 'C17'" );
        }
    }

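    /**
     * Local entry point: runs the model with the default (sample) field values
     *
     * @throws Exception
     */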
    public static void main( String[] args ) throws IOException, Exception {
        //Run Model
        guiFlood_Model floodModel = new guiFlood_Model();
        floodModel.run();
    }
}