// Bulletin17C.java [src/java/m/cfa/flood] Revision: Date:
package m.cfa.flood;
import WaterData.WaterData;
import WaterData.WaterDataException;
import WaterData.WaterDataInterface;
import csip.api.server.Executable;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.logging.Level;
import java.util.logging.Logger;
import m.cfa.DoubleArray;
import m.cfa.FlowComparator;
/**
* @author Lucas Yaege
*/
public class Bulletin17C {

    // ---- Inputs ----------------------------------------------------------
    // Defaults reflect a sample USGS configuration; alternates kept as
    // trailing comments to document the other supported data sources.
    String workingDirectory = "D:/Projects/Bulletin17C/work";//"/od/projects/cfa/GUI_FlowAnalysis";
    String database = "USGS";//"CDWR";//"STORET";//"CDSN";//"UserData";//
    String orgId = "n/a";//"n/a";//"21COL001";//"CITYFTCO_WQX";//"n/a";//
    String stationId = "06741510";//"CLAGRECO";//"000028";//"1EFF";//"n/a";//
    String stationName = "BIG THOMPSON RIVER AT LOVELAND, CO.";//"Cache La Poudre Near Greeley";//"BIG THOMPSON R NEAR MOUTH";//"n/a";//"n/a";//
    String analysisType = "17C";//"HYSEP";//
    String startDate = "";//"1900-01-01";
    String endDate = "";//"2002-09-01";
    //int ndmin = 10;
    //int ndmax = 10;
    boolean mergeDatasets = false;//true;//
    String mergeMethod = "user";//"public";//"max";//"average";//"min";//
    String userData = "";//"Date\tFlow\n1999-04-29\t8.3\n1999-05-09\t60.2\n1999-05-29\t20.1";

    // ---- Outputs ---------------------------------------------------------
    // Sentinel values ("-1", "?") indicate "not yet computed".
    String len = "-1";
    String start = "?";
    String end = "?";
    String dataSource = "?";
    double bflowStream_max = -1;
    double bflowStream_min = -1;
    double bflowStream_median = -1;
    double bflowStream_mean = -1;
    double bflow1_max = -1;
    double bflow1_min = -1;
    double bflow1_median = -1;
    double bflow1_mean = -1;
    double bflow2_max = -1;
    double bflow2_min = -1;
    double bflow2_median = -1;
    double bflow2_mean = -1;
    double bflow3_max = -1;
    double bflow3_min = -1;
    double bflow3_median = -1;
    double bflow3_mean = -1;

    // ---- Getters ---------------------------------------------------------
    public File getBaseflow_out(){ return new File(workingDirectory, "baseflow.out"); }
    public File getBaseflow_dat(){ return new File(workingDirectory, "baseflow.dat"); }
    public String getGraph() { return "baseflow_graph.jpg"; }
    public File getTimeseriesOutput(){ return new File(workingDirectory, "baseflow_timeseries.out"); }//for use with JSHighCharts
    public String getLen(){ return len; }
    public String getStart(){ return start; }
    public String getEnd(){ return end; }
    public String getDataSource(){ return dataSource; }
    public String getDatabase(){ return this.database; }
    public String getBFLOWstream_Max(){ return String.valueOf(bflowStream_max); }
    public String getBFLOWstream_Min(){ return String.valueOf(bflowStream_min); }
    public String getBFLOWstream_Median(){ return String.valueOf(bflowStream_median); }
    public String getBFLOWstream_Mean(){ return String.valueOf(bflowStream_mean); }
    public String getBFLOWpass1_Max(){ return String.valueOf(bflow1_max); }
    public String getBFLOWpass1_Min(){ return String.valueOf(bflow1_min); }
    public String getBFLOWpass1_Median(){ return String.valueOf(bflow1_median); }
    public String getBFLOWpass1_Mean(){ return String.valueOf(bflow1_mean); }
    public String getBFLOWpass2_Max(){ return String.valueOf(bflow2_max); }
    public String getBFLOWpass2_Min(){ return String.valueOf(bflow2_min); }
    public String getBFLOWpass2_Median(){ return String.valueOf(bflow2_median); }
    public String getBFLOWpass2_Mean(){ return String.valueOf(bflow2_mean); }
    public String getBFLOWpass3_Max(){ return String.valueOf(bflow3_max); }
    public String getBFLOWpass3_Min(){ return String.valueOf(bflow3_min); }
    public String getBFLOWpass3_Median(){ return String.valueOf(bflow3_median); }
    public String getBFLOWpass3_Mean(){ return String.valueOf(bflow3_mean); }

    // ---- Setters ---------------------------------------------------------
    public void setWorkingDirectory(String dir){ this.workingDirectory = dir; }
    public void setDatabase(String database){ this.database = database; }
    public void setOrganizationID(String orgId){ this.orgId = orgId; }
    public void setStationId(String stationId){ this.stationId = stationId; }
    public void setStationName(String stationName){ this.stationName = stationName; }
    public void setAnalysisType(String analysisType){ this.analysisType = analysisType; }
    public void setStartDate(String startDate){ this.startDate = startDate; }
    public void setEndDate(String endDate){ this.endDate = endDate; }
    //public void setNDMIN(int ndmin){ this.ndmin = ndmin; }
    //public void setNDMAX(int ndmax){ this.ndmax = ndmax; }
    public void setMergeDatasets(boolean mergeDatasets){ this.mergeDatasets = mergeDatasets; }
    public void setMergeMethod(String mergeMethod){ this.mergeMethod = mergeMethod; }
    public void setUserData(String userData){ this.userData = userData; }

    /**
     * Aggregates the given error lines into a single newline-separated message
     * and aborts processing by throwing an {@link IOException}. (Despite the
     * name, nothing is written to disk and the JVM is not exited; the caller
     * is expected to surface the exception.)
     *
     * @param error lines of the error message; an empty list produces an
     *              exception with no detail lines rather than a crash
     * @throws IOException always, carrying the joined error message
     */
    public void writeError(ArrayList<String> error) throws IOException{
        // String.join handles the empty-list case that error.get(0) would not.
        String errorContents = String.join("\n", error);
        throw new IOException("Error encountered. Please see the following message for details: \n" + errorContents);
    }

    /**
     * Writes the input files required by the BFLOW.exe software into the
     * working directory: "baseflow.txt" (date/flow pairs) and "file.lst"
     * (the run-control file telling BFLOW which data files to process).
     *
     * @param dynamicSummary rows of [date, flow] written as one line each
     * @throws IOException if either file cannot be written
     */
    private void writeC17inputFiles(String[][] dynamicSummary ) throws IOException {
        //Write input data file for BFLOW
        String path = workingDirectory + File.separator + "baseflow.txt";
        try (FileWriter writer = new FileWriter(path, false); PrintWriter print_line = new PrintWriter(writer)) {
            print_line.printf("%s" + "\r\n", "Date Flow");
            for (String[] dynamicSummary1 : dynamicSummary) {
                print_line.printf("%s" + "\r\n", dynamicSummary1[0] + " " + dynamicSummary1[1]);
            }
        }
        System.out.println("Text File located at:\t" + path);
        //Additionally write out the file "baseflow.lst" required for BFLOW to
        //know what files to run on
        String path2 = workingDirectory + File.separator + "file.lst";
        try (FileWriter writer2 = new FileWriter(path2, false); PrintWriter print_line2 = new PrintWriter(writer2)) {
            print_line2.printf("%s" + "\r\n", "!!Input for baseflow program:");
            //print_line2.printf(" %d !NDMIN: minimum number of days for alpha calculation" + "\r\n", ndmin);
            //print_line2.printf(" %d !NDMAX: maximum number of days for alpha calculation" + "\r\n", ndmax);
            print_line2.printf("%s" + "\r\n", " 1 !IPRINT: daily print option (0-no; 1-yes)");
            print_line2.printf("%s" + "\r\n", "");
            print_line2.printf("%s" + "\r\n", "!!Daily stream data files");
            print_line2.printf("%s" + "\r\n", " baseflow.txt baseflow.out");
        }
        System.out.println("Text File located at:\t" + path2);
    }

    /**
     * Runs the 17C analysis: fetches peak-flow data from the configured
     * database, optionally merges it with user-supplied data, invokes the
     * external 17C executable, and verifies its output file exists.
     *
     * @param e the executable wrapper configured by the caller
     * @return always {@code false} (legacy contract; callers rely on the
     *         exceptions, not the return value, for failure reporting)
     * @throws FileNotFoundException if the executable produced no .out file
     * @throws WaterDataException if the data service fails
     * @throws IOException if no usable flow data exists, or the executable
     *         could not be launched
     */
    public boolean run(Executable e) throws FileNotFoundException, WaterDataException, IOException{
        String fileName = "b17";
        //Check if any flow data exists
        WaterDataInterface waterLib = WaterData.getNewWaterDataInterface(database, userData);
        double[][] peakFlowData = waterLib.extractFloodData_formatted(workingDirectory, orgId, stationId, startDate, endDate);
        dataSource = waterLib.getDataSourceCitation();
        //Check if merging the datasets is desired, if so get the user data
        double[][] peakFlowData_user = new double[0][0];
        if(mergeDatasets){
            WaterDataInterface waterLibUser = WaterData.getNewWaterDataInterface("UserData", userData);
            peakFlowData_user = waterLibUser.extractFloodData_formatted(workingDirectory, orgId, stationId, startDate, endDate);
        }
        //Merge the two datasets (if user data is empty nothing will be merged)
        double[][] peakFlowData_combined = DoubleArray.mergeData(peakFlowData, peakFlowData_user, mergeMethod);
        if(peakFlowData_combined.length == 0){
            ArrayList<String> errorMessage = new ArrayList<>();
            if(peakFlowData.length == 0){
                errorMessage.add("There is no available flood data in the " + database + " database for station '" + stationId + "' and the specified date range.");
                if(database.equalsIgnoreCase("CDWR")){
                    errorMessage.add("The CDWR database is sensitive to the begin date used, try specifying a later begin date");
                }
            }
            if(peakFlowData_user.length == 0){
                errorMessage.add("There is no available uploaded flow data for station '" + stationId + "' and the specified date range");
            }
            writeError(errorMessage);
        }
        //To prep. for graphing sort userdata by flow
        //Arrays.sort(sortedData_user, new FlowComparator());
        //writeC17inputFiles(peakFlowData_combined);
        try {
            e.addArguments(fileName + ".spc");
            e.exec();
        } catch (IOException ex) {
            // Log for diagnostics, then propagate: silently continuing would
            // only surface as a confusing missing-output-file error below.
            Logger.getLogger(Bulletin17C.class.getName()).log(Level.SEVERE, "Error encountered running 17C executable", ex);
            throw ex;
        }
        // NOTE: the original used File.pathSeparator (';'/':' — the PATH-list
        // separator), which built a nonsense path; File(dir, name) joins with
        // the correct directory separator on every platform.
        if (!new File(workingDirectory, fileName + ".out").exists()) {
            throw new FileNotFoundException(fileName + ".out");
        }
        return false;
    }
}