guiFlood_Model.java [src/java/m/cfa/flood] Revision: dc6ae1f64daf0fe7cbaba1d0e799a57738d47cfb Date: Thu Feb 29 11:40:24 MST 2024
package m.cfa.flood;
import WaterData.WaterData;
import WaterData.WaterDataInterface;
import m.cfa.DoubleArray;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import org.codehaus.jettison.json.JSONArray;
/**
* Last Updated: 9-April-2019
* @author Tyler Wible
* @since 13-June-2012
*/
public class guiFlood_Model {
String directory = "C:/Projects/TylerWible_repos/NetBeans/data/CFA";
String database = "USGS";//"CDWR";//"STORET";//"CDSN";//"UserData";//
String orgId = "n/a";//"n/a";//"21COL001";//"CITYFTCO_WQX";//"n/a";//
String stationId = "06764880";//"CLAGRECO";//"000028";//"1EFF";//"n/a";//
String stationName = "South Platte River at Roscoe, Nebr.";//"Cache La Poudre Near Greeley";//"BIG THOMPSON R NEAR MOUTH";//"n/a";//"n/a";//
String analysisType = "B17";
String startDate = "";
String endDate = "";
double gg = 0.23490029573440552;
double MSERbar = 0.302;
boolean showLargeFloods = false;
boolean plotref = true;
boolean plottype = true;
boolean mergeDatasets = false;//true;//
String mergeMethod = "user";//"public";//"max";//"average";//"min";//
String userData = "";//"Date\tFlood\n2012-04-29\t80000.3\n2013-05-09\t60.2\n2014-05-29\t20.1\n2015-04-29\t80000.3\n2016-05-09\t60.2\n2017-05-29\t20.1\n2018-04-29\t80000.3\n2019-05-09\t60.2\n2020-05-29\t20.1\n2021-04-29\t80000.3\n2022-05-09\t60.2\n2023-05-29\t20.1";
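//Note: userData, when supplied, is expected as tab-delimited "Date\tFlood" rows, as in the commented-out sample above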
//Outputs
String len = "-1";
String start = "?";
String end = "?";
String dataSource = "?";
String skewErrorMessage = "?";
double stationSkew = Double.NaN;
double weightedGenSkew = Double.NaN;
JSONArray lowOutliersStationSkew = new JSONArray();
JSONArray highOutliersStationSkew = new JSONArray();
JSONArray lowOutliersWeightedGenSkew = new JSONArray();
JSONArray highOutliersWeightedGenSkew = new JSONArray();
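//The output fields above are populated by run() and exposed through the getters below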
//Gets
public File getOutputWeightedGenSkew(){ return new File(directory, "flood_summary.txt"); }
public File getOutputStationSkew(){ return new File(directory, "flood_summary_stationskew.txt"); }
public String getGraph(){ return "flood_graph.jpg"; }
public String getLen(){ return len; }
public String getStart(){ return start; }
public String getEnd(){ return end; }
public String getDataSource(){ return dataSource; }
public String getSkewErrorMessage(){ return skewErrorMessage; }
public String getStationSkew(){ return String.valueOf(stationSkew); }
public String getWeightedGenSkew(){ return String.valueOf(weightedGenSkew); }
public JSONArray getLowOutliersStationSkew(){ return lowOutliersStationSkew; }
public JSONArray getHighOutliersStationSkew(){ return highOutliersStationSkew; }
public JSONArray getLowOutliersWeightedGenSkew(){ return lowOutliersWeightedGenSkew; }
public JSONArray getHighOutliersWeightedGenSkew(){ return highOutliersWeightedGenSkew; }
//Sets
public void setDirectory(String directory_str){ directory = directory_str; }
public void setDatabase(String database_str){ database = database_str; }
public void setOrganizationID(String orgId_str){ orgId = orgId_str; }
public void setStationId(String stationId_str){ stationId = stationId_str; }
public void setStationName(String stationName_str){ stationName = stationName_str; }
public void setAnalysisType(String analysisType_str){ analysisType = analysisType_str; }
public void setStartDate(String startDate_str){ startDate = startDate_str; }
public void setEndDate(String endDate_str){ endDate = endDate_str; }
public void setSkewness(double gg_dbl){ gg = gg_dbl; }
public void setMeanSquareError(double MSERbar_dbl){ MSERbar = MSERbar_dbl; }
public void setShowLargeFloods(boolean showLargeFloods_TF){ showLargeFloods = showLargeFloods_TF; }
public void setPlotType(boolean plottype_TF){ plottype = plottype_TF; }
public void setPlotReference(boolean plotref_TF){ plotref = plotref_TF; }
public void setMergeDatasets(boolean mergeDatasets_TF){ mergeDatasets = mergeDatasets_TF; }
public void setMergeMethod(String mergeMethod_str){ mergeMethod = mergeMethod_str; }
public void setUserData(String userData_str){ userData = userData_str; }
/**
* Writes the dynamically created summary table to a text file so it can be displayed to the user alongside the flood graph.
* @param dynamicSummary String[][] array whose rows are written as tab-delimited lines of the text file
* @param resultFileName name of the output file, created inside the model's working directory
* @throws IOException if the file cannot be written
*/
public void writeSummary(String[][] dynamicSummary, String resultFileName) throws IOException{
String path = directory + File.separator + resultFileName;
//Output the data to the text file; try-with-resources closes the writers even if an error occurs
try (FileWriter writer = new FileWriter(path, false);
PrintWriter print_line = new PrintWriter(writer)) {
for(int i=0; i < dynamicSummary.length; i++) {
//Join the columns of each row with tabs; rows are separated with "\r\n" line endings for display in the interface
String currentLine = String.join("\t", dynamicSummary[i]);
print_line.printf("%s\r\n", currentLine);
}
}
System.out.println("Text File located at:\t" + path);
}
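//Illustration only (hypothetical values): an input such as {{"Return Period","Flow"},{"100-yr","12345"}}
//would produce two tab-delimited lines, "Return Period\tFlow" and "100-yr\t12345"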
/**
* Collects the given error messages, one per line, into a single IOException and throws it so the caller can report the problem
* @param error list of strings, each written as one line of the combined error message
* @throws IOException always, carrying the combined error message
*/
public void writeError(ArrayList<String> error) throws IOException{
//Combine the messages into one newline-separated string (safe even when the list is empty)
String errorContents = String.join("\n", error);
throw new IOException("Error encountered. Please see the following message for details: \n" + errorContents);
}
public void run() throws Exception {
//If no dates are given, default the range to span all available data
if(startDate == null || startDate.isEmpty()){
startDate = "1850-01-01";
}
if(endDate == null || endDate.isEmpty()){
// Pull current date for upper limit of data search
DateFormat desiredDateFormat = new SimpleDateFormat("yyyy-MM-dd");
Date currentDate = new Date();
endDate = desiredDateFormat.format(currentDate);
}
//Decide which analysis to perform
if(analysisType.equalsIgnoreCase("B17")){
//Retrieve the peak-flow data from the selected database
WaterDataInterface waterLib = WaterData.getNewWaterDataInterface(database, userData);
double[][] peakFlowData = waterLib.extractFloodData_formatted(directory, orgId, stationId, startDate, endDate);
dataSource = waterLib.getDataSourceCitation();
//If merging datasets is requested, also load the user-supplied data
double[][] peakFlowData_user = new double[0][0];
if(mergeDatasets){
WaterDataInterface waterLibUser = WaterData.getNewWaterDataInterface("UserData", userData);
peakFlowData_user = waterLibUser.extractFloodData_formatted(directory, orgId, stationId, startDate, endDate);
}
//Merge the two datasets (if user data is empty nothing will be merged)
double[][] peakFlowData_combined = DoubleArray.mergeData(peakFlowData, peakFlowData_user, mergeMethod);
if(peakFlowData_combined.length == 0){
ArrayList<String> errorMessage = new ArrayList<>();
if(peakFlowData.length == 0){
errorMessage.add("There is no available flood data in the " + database + " database for station '" + stationId + "' and the specified date range.");
if(database.equalsIgnoreCase("CDWR")){
errorMessage.add("The CDWR database is sensitive to the begin date used, try specifying a later begin date");
}
}
if(mergeDatasets && peakFlowData_user.length == 0){
errorMessage.add("There is no available uploaded flow data for station '" + stationId + "' and the specified date range.");
}
writeError(errorMessage);
}
//Run the Bulletin 17B analysis twice: with Double.NaN for the skew (the station-skew run) and with the supplied generalized skew gg (the weighted-skew run)
Bulletin17B bulletin17B = new Bulletin17B();
Object[] returnArrayStationSkew = bulletin17B.b17(peakFlowData_combined, Double.NaN, MSERbar, directory, database, stationId, stationName, showLargeFloods, plotref, plottype);
String[][] dataSummaryStationSkew = (String[][]) returnArrayStationSkew[0];
stationSkew = (double) returnArrayStationSkew[1];
lowOutliersStationSkew = (JSONArray) returnArrayStationSkew[2];
highOutliersStationSkew = (JSONArray) returnArrayStationSkew[3];
Object[] returnArrayWeightedGenSkew = bulletin17B.b17(peakFlowData_combined, gg, MSERbar, directory, database, stationId, stationName, showLargeFloods, plotref, plottype);
String[][] dataSummaryWeightedGenSkew = (String[][]) returnArrayWeightedGenSkew[0];
weightedGenSkew = (double) returnArrayWeightedGenSkew[1];
lowOutliersWeightedGenSkew = (JSONArray) returnArrayWeightedGenSkew[2];
highOutliersWeightedGenSkew = (JSONArray) returnArrayWeightedGenSkew[3];
len = String.valueOf(peakFlowData_combined.length);
start = String.valueOf(peakFlowData_combined[0][0]);
end = String.valueOf(peakFlowData_combined[peakFlowData_combined.length - 1][0]);
skewErrorMessage = bulletin17B.skewErrorMessage;
//Write out the data summary to be displayed with the graph
writeSummary(dataSummaryWeightedGenSkew, getOutputWeightedGenSkew().getName());
writeSummary(dataSummaryStationSkew, getOutputStationSkew().getName());
}else{
throw new IOException("Error: Unsupported flood analysis method '" + analysisType + "'; only 'B17' is supported");
}
}
public static void main(String[] args) throws Exception{
//Run Model
guiFlood_Model floodModel = new guiFlood_Model();
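//Example (hypothetical dates; database and station values mirror the class defaults): override the hard-coded defaults before running, e.g.:
//floodModel.setDatabase("USGS");
//floodModel.setStationId("06764880");
//floodModel.setStartDate("1950-01-01");
//floodModel.setEndDate("2020-12-31");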
floodModel.run();
}
}