guiBaseflow_Model.java [src/java/cfa] Revision: 4daefd1ac3a5cce6d2af07d219b133db7ce0b7a4 Date: Thu Sep 26 16:17:42 MDT 2013
package cfa;
import csip.Config;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import oms3.util.ProcessComponent;
import utils.BinUtils;
/**
* Last Updated: 26-August-2013
* @author Tyler Wible
* @since 15-June-2012
*/
public class guiBaseflow_Model {
//Inputs
String mainFolder = "C:/Projects/TylerWible/CodeDirectories/NetBeans/CSIP/data/CFA/Baseflow";//"/od/projects/cfa/GUI_FlowAnalysis";
String organizationName = "USGS";
String stationID = "06764880";
String stationName = "USGS BIG THOMPSON RIVER AT LOVELAND, CO.";
double drainageArea = 27.3;//Square miles
String modelType = "BFLOW";//"HYSEP";//
String beginDate = "";//"1900-01-01";
String endDate = "";//"2002-09-01";
int ndmin = 10;
int ndmax = 10;
String userData = "";//"Date\tFlow\n1999-04-29\t8.3\n1999-05-09\t60.2\n1999-05-29\t20.1";
boolean mergeDatasets = false;//true;//
String mergeMethod = "user";//"public";//"max";//"average";//"min";//
//Outputs
String len = "-1";
String start = "?";
String end = "?";
//Gets
public File getBaseflow_out() {
return new File(mainFolder, "baseflow.out");
}
public File getBaseflow_dat() {
return new File(mainFolder, "baseflow.dat");
}
public String getGraph() {
return "baseflow_graph.jpg";
}
public String getLen() {
return len;
}
public String getStart() {
return start;
}
public String getEnd() {
return end;
}
//Sets
public void setMainFolder(String mainFolder) {
this.mainFolder = mainFolder;
}
public void setOrganizationName(String organizationName) {
this.organizationName = organizationName;
}
public void setStationID(String stationID) {
this.stationID = stationID;
}
public void setStationName(String stationName) {
this.stationName = stationName;
}
public void setDrainageArea(double drainageArea) {
this.drainageArea = drainageArea;
}
public void setModelType(String modelType) {
this.modelType = modelType;
}
public void setBeginDate(String beginDate) {
this.beginDate = beginDate;
}
public void setEndDate(String endDate) {
this.endDate = endDate;
}
public void setNDMIN(int ndmin) {
this.ndmin = ndmin;
}
public void setNDMAX(int ndmax) {
this.ndmax = ndmax;
}
public void setUserData(String userData) {
this.userData = userData;
}
public void setMergeDatasets(boolean mergeDatasets) {
this.mergeDatasets = mergeDatasets;
}
public void setMergeMethod(String mergeMethod) {
this.mergeMethod = mergeMethod;
}
/**
* Takes the input String[][] and reformats it into the input expected by
* BFLOW: a two-column, tab-delimited file whose first column holds dates
* (format yyyymmdd) and whose second column holds flow values
*
* @param allFlowData the String[][] output from getting USGS data via
* "USGS_read_FDC.OpenWebpage" or the equivalent STORET data
* @return the original array with the first column of yyyy-mm-dd dates
* reformatted as yyyymmdd
*/
public String[][] reformatBFLOWdata(String[][] allFlowData) {
//Reformat array to match BFLOW inputs
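//Illustrative example (values are hypothetical): an input row of
//{"1999-04-29", "8.3"} becomes {"19990429", "8.3"}.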
String[][] reformatedData = new String[allFlowData.length][2];
for (int i = 0; i < allFlowData.length; i++) {
//Convert dates from yyyy-mm-dd format to yyyymmdd desired by BFLOW
String currentYear = allFlowData[i][0].substring(0, 4);
String currentMonth = allFlowData[i][0].substring(5, 7);
String currentDay = allFlowData[i][0].substring(8, 10);
String newDate = currentYear + currentMonth + currentDay;
reformatedData[i][0] = newDate; //date
reformatedData[i][1] = allFlowData[i][1]; //flow
}
//Record a summary of the dataset (number of records, start date, end date)
this.len = String.valueOf(allFlowData.length);
this.start = allFlowData[0][0];
this.end = allFlowData[allFlowData.length - 1][0];
return reformatedData;
}
/**
* Takes the input String[][] and reformats it into the input expected by
* HYSEP: a USGS WATSTORE '.gsd' file in which each data line holds the
* station ID, the year/month/'week' of the record, and one fixed-width flow
* value per day of that 'week'
*
* @param allFlowData the String[][] output from getting USGS data via
* "USGS_read_FDC.OpenWebpage" or the equivalent STORET data
* @param drainageArea the drainage area (in square miles) of the streamflow
* station of interest. This value is needed because '.gsd' (WATSTORE) files
* store the station's drainage area in the file header
* @return an ArrayList<String> holding the lines of the WATSTORE-formatted
* input file
*/
public ArrayList<String> reformatHYSEPdata(String[][] allFlowData, double drainageArea) {
//Pre-compute the start year and the number of years covered by the input data
int numberOfYears = 1;
int startYear = 1;
if (allFlowData.length > 0) {//Guard against an empty dataset; a single record is still valid
startYear = Integer.parseInt(allFlowData[0][0].substring(0, 4));
String endYear = allFlowData[allFlowData.length - 1][0].substring(0, 4);
numberOfYears = Integer.parseInt(endYear) - startYear + 1;
}
//Reformat the array into a USGS ".gsd" (WATSTORE) file, which can then be passed to USGS' ANNIE program
ArrayList<String> reformatedData = new ArrayList<String>();
//Header?
// ? stationID drainage area (mi^2)
reformatedData.add("H 01472157 " + drainageArea);
// ? stationID station name
reformatedData.add("N 01472157 Fake Station Name and ID");
// ? stationID ?????????? ???
reformatedData.add("2 01472157 0006000003 ENT");//This line is the separater for years?
int ctr = 0;
int currentYear = Integer.parseInt(allFlowData[ctr][0].substring(0, 4));
int currentMonth = Integer.parseInt(allFlowData[ctr][0].substring(5, 7));
int currentDay = Integer.parseInt(allFlowData[ctr][0].substring(8, 10));
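//Illustrative layout of one data record built below (station ID is the
//hard-coded placeholder, flow values are hypothetical):
//"3 01472157 19990401    8.3   60.2-9999.0..." i.e. record code 3, the
//station ID, year + month + 'week' number, then one 7-character flow field
//(or -9999.0 for missing days) per day in that 'week'.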
for (int i = startYear; i < (startYear + numberOfYears); i++) {//Loop years for gsd(WATSTORE) file
for (int j = 1; j <= 12; j++) {//loop months
//Determine the number of days per 'week' in each month: gsd/WATSTORE files
//use 'weeks' of 8 or fewer days so that every month has exactly 4 'weeks',
//keeping a consistent number of lines per year in the file
int[] maxDays;
switch (j) {
case 2://February depends on whether this is a leap year
maxDays = checkLeapYear(i);
break;
case 4:
case 6:
case 9:
case 11://30-day months
maxDays = new int[]{8, 8, 8, 6};
break;
default://31-day months
maxDays = new int[]{8, 8, 8, 7};
break;
}
int dayCounter = 1;
for (int k = 1; k <= 4; k++) {//loop "weeks" in month
String month = String.valueOf(j);
if (j < 10) {
month = "0" + String.valueOf(j);
}
String week = "3 01472157 " + i + month + "0" + k;
for (int l = 1; l <= maxDays[k - 1]; l++) {//loop days in "week"
if (currentYear == i && currentMonth == j && currentDay == dayCounter) {//If the dataset's current date is the date expected then use its corresponding flow value
String flowString = shortenedFlowString(allFlowData[ctr][1]);
week = week + flowString;
//Move to the next day in the dataset if it exists
ctr++;
if (ctr < allFlowData.length) {
currentYear = Integer.parseInt(allFlowData[ctr][0].substring(0, 4));
currentMonth = Integer.parseInt(allFlowData[ctr][0].substring(5, 7));
currentDay = Integer.parseInt(allFlowData[ctr][0].substring(8, 10));
}
} else {//Otherwise insert the missing-value flag
// week = week + " 0.0";
week = week + "-9999.0";
}
dayCounter++;
}
//Add this "week" to the data file
reformatedData.add(week);
}
}
//Add file delimiter for end of year
if (i + 1 < (startYear + numberOfYears)) {
reformatedData.add("2 01472157 0006000003 ENT");
}
}
//Record a summary of the dataset (number of records, start date, end date)
this.len = String.valueOf(allFlowData.length);
this.start = allFlowData[0][0];
this.end = allFlowData[allFlowData.length - 1][0];
return reformatedData;
}
/**
* Determines whether the provided year (as an integer) is a leap year using
* the standard Gregorian rules: years divisible by 4 are leap years, except
* years divisible by 100, which are only leap years when also divisible by
* 400 (these exceptions correct for the fact that a year is not exactly
* 365.25 days long). Modify this method if a different calendar convention
* is needed.
*
* @param currentYear the current year
* @return an int[] containing the number of days in each of the 4 'weeks'
* of February (a WATSTORE file 'week')
*/
public int[] checkLeapYear(int currentYear) {
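//Examples under the standard Gregorian rules: 2000 and 2012 are leap years;
//1900 and 2013 are not.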
//Determine how many days February's last 'week' should have based on whether this is a leap year:
//divisible by 4 means leap year, except years divisible by 100, which are only leap years when also divisible by 400
boolean leapYear = (currentYear % 400 == 0)
|| (currentYear % 100 != 0 && currentYear % 4 == 0);
//Based on the leap year result, determine the number of days in February's 'weeks'
int[] feb = {8, 8, 8, 4};
if (leapYear) {
feb[3] = 5;
}
return feb;
}
/**
* Shortens and pads the provided flow value string so that it fits the
* fixed-width (7 character) flow field of USGS' WATSTORE file format,
* dropping decimals first to minimize the error introduced by truncation
*
* @param currentFlowValue a String.valueOf( current stream flow value )
* @return the flow value truncated if necessary and left-padded with spaces
* to 7 characters
*/
public String shortenedFlowString(String currentFlowValue) {
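//Illustrative examples (hypothetical values): "8.3" is padded to "    8.3";
//"123456.78" is truncated at the decimal point and padded to " 123456".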
if (currentFlowValue.length() > 7) {
//If dropping the decimals is enough to fit the field then truncate at the decimal point
int index = currentFlowValue.indexOf(".");
if (-1 < index && index <= 7) {
currentFlowValue = currentFlowValue.substring(0, index);
} else {
System.err.println("Couldn't shorten flow value: " + currentFlowValue + " properly.");
currentFlowValue = currentFlowValue.substring(0, 7);
}
}
//Left-pad with spaces so every flow value occupies exactly 7 characters in the fixed-width file
currentFlowValue = String.format("%7s", currentFlowValue);
System.out.println(currentFlowValue);
return currentFlowValue;
}
/**
* Writes the BFLOW input files: the two-column flow data file
* ("baseflow.txt") and the control file ("file.lst") that tells BFLOW which
* data file to process, which output file to write, and the NDMIN/NDMAX
* settings to use
*
* @param dynamicSummary String[][] of date/flow rows to be written as the
* lines of the data file
* @param partialpath the folder path into which the files are written
* @param ndmin the minimum number of days used in BFLOW's alpha calculation
* @param ndmax the maximum number of days used in BFLOW's alpha calculation
* @throws IOException
*/
public void writeBFLOWsummary(String[][] dynamicSummary, String partialpath, int ndmin, int ndmax) throws IOException {
//Write input data file for BFLOW
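//Resulting layout of baseflow.txt (flow values are illustrative): a
//"Date Flow" header line followed by one "yyyymmdd flow" pair per line,
//e.g. "19990429 8.3".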
String path = partialpath + "/baseflow.txt";
FileWriter writer = new FileWriter(path, false);
PrintWriter print_line = new PrintWriter(writer);
print_line.printf("%s" + "%n", "Date Flow");
for (int i = 0; i < dynamicSummary.length; i++) {
print_line.printf("%s" + "%n", dynamicSummary[i][0] + " " + dynamicSummary[i][1]);
}
print_line.close();
writer.close();
System.out.println("Text File located at:\t" + path);
//Additionally write out the control file "file.lst" required for BFLOW to
//know which data file to run on and which settings to use
FileWriter writer2 = new FileWriter(partialpath + "/file.lst", false);
PrintWriter print_line2 = new PrintWriter(writer2);
print_line2.printf("%s" + "%n", "!!Input for baseflow program:");
print_line2.printf(" %d !NDMIN: minimum number of days for alpha calculation" + "%n", ndmin);
print_line2.printf(" %d !NDMAX: maximum number of days for alpha calculation" + "%n", ndmax);
print_line2.printf("%s" + "%n", " 1 !IPRINT: daily print option (0-no; 1-yes)");
print_line2.printf("%s" + "%n", "");
print_line2.printf("%s" + "%n", "!!Daily stream data files");
print_line2.printf("%s" + "%n", " baseflow.txt baseflow.out");
print_line2.close();
writer2.close();
System.out.println("Text File located at:\t" + partialpath + "/file.lst");
}
/**
* Writes the HYSEP input file ("hysep.gsd"): each entry of the provided
* list becomes one line of the WATSTORE-formatted data file
*
* @param fileContents ArrayList<String> of lines to be written to the text
* file
* @param partialpath the folder path into which the file is written
* @throws IOException
*/
public void writeHYSEPsummary(ArrayList<String> fileContents, String partialpath) throws IOException {
String path = partialpath + "/hysep.gsd";
FileWriter write = new FileWriter(path, false);
PrintWriter print_line = new PrintWriter(write);
for (int i = 0; i < fileContents.size(); i++) {
print_line.printf("%s" + "%n", fileContents.get(i));
}
print_line.close();
System.out.println("Text File located at:\t" + path);
}
/**
* Combines the provided error message lines and throws an IOException so
* that processing stops and the message is reported to the caller
* @param error ArrayList<String> of lines that make up the error message
* @throws IOException always, containing the combined error message
*/
public void writeError(ArrayList<String> error) throws IOException{
//Output data to text file
String errorContents = error.get(0);
for(int i=1; i<error.size(); i++){
errorContents = errorContents + "\n" + error.get(i);
}
throw new IOException("Error encountered. Please see the following message for details: \n" + errorContents);
}
public void run() throws IOException, InterruptedException {
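//Workflow: default the date range if none was given, retrieve flow data
//(USGS, STORET, or user-uploaded), optionally merge with user data, sort
//and remove duplicate dates, reformat for the selected model (BFLOW or
//HYSEP), run the model, and graph the results.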
//If no date range was input, default to the widest possible range of available data
if(beginDate == null || beginDate.equalsIgnoreCase("")){
beginDate = "1900-01-01";
}
if(endDate == null || endDate.equalsIgnoreCase("")){
// Pull current date for upper limit of data search
DateFormat desiredDateFormat = new SimpleDateFormat("yyyy-MM-dd");
Date currentDate = new Date();
endDate = desiredDateFormat.format(currentDate);
}
String[][] sortableData = null;
if(organizationName.equalsIgnoreCase("USGS")){
//Search for USGS flow data
USGS_Data usgs_Data = new USGS_Data();
sortableData = usgs_Data.USGS_read_FDC(stationID, beginDate, endDate);
//If there is minimal flow data try getting WQ-flow data
USGS_Data usgs_data = new USGS_Data();
if(sortableData.length < 10){
//Retrieve WQ data from USGS website
String[][] WQData = usgs_data.USGS_read_LDC(stationID);
//Extract USGS water quality code 00061 for discharge in cfs
String[][] WQFlow1 = usgs_data.minimizeUSGSWQdata(WQData, "00061", beginDate, endDate);
//Extract USGS water quality code 30209 for discharge in m^3/s (cms)
String[][] WQFlow2 = usgs_data.minimizeUSGSWQdata(WQData, "30209", beginDate, endDate);
//Convert m^3/s (cms) to ft^3/s (cfs): 1 m = 3.2808399 ft, so multiply by 3.2808399 cubed
for(int i=0; i<WQFlow2.length; i++){
WQFlow2[i][1] = Double.toString((Double.parseDouble(WQFlow2[i][1]) * (3.2808399 * 3.2808399 * 3.2808399)));
}
//Combine the WQ flows (cfs and converted cms) into a single array to be used with the flow data
String[][] WQDataflows = usgs_data.mergeMinimizedWQdata(WQFlow1, WQFlow2);
//Combine flow data and WQ flow data into a variable of dates and flow values to be sorted
sortableData = usgs_data.mergeMinimizedWQdata(sortableData, WQDataflows);
}
}else if(organizationName.equalsIgnoreCase("UserData")) {
//Read the user-uploaded data file and use it as the flow dataset
User_Data user_Data = new User_Data();
sortableData = user_Data.readUserFile(userData, beginDate, endDate);
}else{
//Search for STORET flow data
STORET_Data storet_Data = new STORET_Data();
String zip_location = storet_Data.downloadSTORET(mainFolder, organizationName, stationID, "flow", beginDate, endDate);
//Unzip results file and extract all flow data
sortableData = storet_Data.Unzip_STORETDownloadFiles(zip_location, "flow", true);
}
//If the user wants the datasets (public and user) merged then retrieve the second dataset (user)
String[][] sortableData_user = new String[0][0];
if(mergeDatasets){
User_Data user_Data = new User_Data();
sortableData_user = user_Data.readUserFile(userData, beginDate, endDate);
if(sortableData_user.length==0){
ArrayList<String> errorMessage = new ArrayList<String>();
errorMessage.add("There is no available uploaded data for station '" + stationID + "' and the specified date range");
writeError(errorMessage);
}
}
//Check if any data exists
if(sortableData.length == 0){
ArrayList<String> errorMessage = new ArrayList<String>();
errorMessage.add("There is no available flow data for station '" + stationID + "' and the specified date range.");
errorMessage.add("Error: Baseflow0001");
writeError(errorMessage);
}
//Sort the Data by date to remove duplicate date entries
DurationCurve durationCurve = new DurationCurve();
String[][] sortedData = durationCurve.removeDuplicateDates(sortableData);
String[][] sortedData_user = durationCurve.removeDuplicateDates(sortableData_user);
//Merge the two datasets using the selected mergeMethod ("user", "public", "max", "average", or "min"); if the user data is empty nothing will be merged
DoubleArray doubleArray = new DoubleArray();
String[][] sortedData_combined = doubleArray.mergeData(sortedData, sortedData_user, mergeMethod);
//To prepare for graphing, sort the user data by flow
Arrays.sort(sortedData_user, new FlowComparator());
if(modelType.equalsIgnoreCase("BFLOW")){
//Reformat the extracted data for BFLOW
String[][] inputFlowData = reformatBFLOWdata(sortedData_combined);
//Write out the BFLOW input files
writeBFLOWsummary(inputFlowData, mainFolder, ndmin, ndmax);
//Call BFLOW model
//Expected Input: "baseflow.txt" and "file.lst"
String binDir = Config.getString("m.bin.dir", "/tmp/csip/bin");
File bflow = BinUtils.unpackResource("/bin/win-x86/bflow.exe", new File(binDir));
ProcessComponent pc = new ProcessComponent();
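//On non-Windows hosts (File.pathSeparatorChar == ':') the Windows bflow.exe
//binary is run through wine; on Windows it is executed directly.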
if (File.pathSeparatorChar == ':') {
pc.exe = Config.getString("wine.path", "/usr/bin/wine");
pc.args = new String[]{bflow.toString()};
} else {
pc.exe = bflow.toString();
pc.args = new String[]{};
}
pc.working_dir = mainFolder;
pc.execute();
//Expected Output: "baseflow.dat" and "baseflow.out"
if (!new File(mainFolder, "baseflow.dat").exists()) {
throw new FileNotFoundException("baseflow.dat");
}
if (!new File(mainFolder, "baseflow.out").exists()) {
throw new FileNotFoundException("baseflow.out");
}
//Call graphing function for the outputs of the BFLOW model
BaseflowResults baseflowResults = new BaseflowResults();
baseflowResults.graphBFLOWresults(mainFolder, organizationName, stationID, stationName, sortedData_user);
}else if(modelType.equalsIgnoreCase("HYSEP")){
//Reformat data for HYSEP
ArrayList<String> inputFlowData = reformatHYSEPdata(sortedData, drainageArea);
//Write out the HYSEP input file
writeHYSEPsummary(inputFlowData, mainFolder);
//Call the HYSEP model (not functional yet)
//Call graphing function for the outputs of the HYSEP model
BaseflowResults baseflowResults = new BaseflowResults();
baseflowResults.graphHYSEPresults(mainFolder, organizationName, stationID, stationName);
}
}
public static void main(String[] args) throws IOException, InterruptedException, Exception {
guiBaseflow_Model baseflow_Model = new guiBaseflow_Model();
//Set inputs
// assert args.length > 0;
// baseflow_Model.setMainFolder(args[0]); //The output location of the graph
// baseflow_Model.setFileName(args[1]); //The name of the output graph and summary text file
// baseflow_Model.setOrganizationName(args[2]);//Supervising organization of the station (only used for STORET stations)
// baseflow_Model.setStationID(args[3]); //The station ID used to retrieve the station's flow data
// baseflow_Model.setDrainageArea(Double.parseDouble(args[4])); //The drainage area of the basin to the site of interest in square miles
// baseflow_Model.setModelType(args[5]); //The baseflow model to be run ("BFLOW" by the SWAT group or "HYSEP" by USGS)
// baseflow_Model.setBeginDate(args[6]); //Begin date of analysis
// baseflow_Model.setEndDate(args[7]); //End date of analysis
//Run model
baseflow_Model.run();
}
}