package m.datadownload;
import AirData.AirData;
import AirData.AirDataInterface;
import WaterData.GroundwaterData;
import WaterData.GroundwaterDataInterface;
import WaterData.WaterData;
import WaterData.WaterDataException;
import WaterData.WaterDataInterface;
import datadownload.SNOTEL_Data;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.zip.Deflater;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
/**
* Last Updated: 19-June-2019
* @author Tyler Wible
* @since 12-April-2012
*/
public class guiDataDownloads {
//Inputs
String directory = "C:/Projects/TylerWible_repos/NetBeans/data/DataDownloads";
String beginDate = "";//user defined begin date
String endDate = "";//user defined end date
boolean USGSflowTF = true;//if true then flow data will be downloaded for USGS stations
boolean USGSwqTF = true;//if true then water quality data will be downloaded for USGS stations
boolean USGSfloodTF = true;//if true then flood data will be downloaded for USGS stations
boolean USGSstageDischargeTF = true;//if true then stage-discharge data will be downloaded for USGS stations
boolean USGSWellsDepthTF = true;//if true then depth-to-watertable data will be downloaded for USGSwell stations
boolean USGSWellsWatertableHeightTF = true;//if true then watertable-height data will be downloaded for USGSwell stations
boolean USGSWellwqTF = true;//if true then water quality data will be downloaded for USGSwell stations
boolean STORETflowTF = false;//if true then flow data will be downloaded for STORET stations
boolean STORETwqTF = true;//if true then water quality data will be downloaded for STORET stations
boolean CDWRflowTF = true;//if true then flow data will be downloaded for CDWR stations
boolean CDWRstageDischargeTF = true;//if true then stage-discharge data will be downloaded for CDWR stations
boolean NADPannualDataTF = false;//if true then annual data will be downloaded for NADP stations
boolean NADPmonthlyDataTF = false;//if true then monthly data will be downloaded for NADP stations
boolean NADPweeklyDataTF = false;//if true then weekly data will be downloaded for NADP stations
boolean CDSNflowTF = true;//if true then flow data will be downloaded for CDSN stations
boolean CDSNwqTF = true;//if true then water quality data will be downloaded for CDSN stations
String attributeFile = "Database\tOrg_ID\tStaID\tStaName\tLatitude\tLongitude\tCounty\tState\tCountry\tHUCNumber\tDrainArea\tElev\tElevUnits\tElevDatum\tBeginDate\tEndDate\tCoop_ID\tWBAN_ID\n" +
"USGS\tn/a\t07369654\tCoon Bayou Tributary near Tillar\t33.74982893\t-91.4115096\tDesha County\tARKANSAS\tUnited States\t8050002\t0.02\t145\tft\tNGVD29\t1995-03-05\t1996-12-16\tn/a\tn/a\n" +
"USGS Wells\tn/a\t332432091031501\tD0016 WASHINGTON\t33.409005\t-91.0542732\tWashington County\tMISSISSIPPI\tUnited States\t8030209\t\t125\tft\tNGVD29\t\t\tn/a\tn/a\n" +
"SNOTEL\tn/a\t330\tBeaver Divide\t40.61666667\t-111.1\tWasatch\tUTAH\tUnited States\t\t\t8280\tfeet\t\t1978-10-01\t\tn/a\tn/a\n" +
"STORET\t11NPSWRD\tCRLA_GSFRA_52\tPothole Creek (No.52 in GS report)\t42.936853\t-121.949314\tKLAMATH\tOREGON\t\t18010201\t--\t0\t\tNAD83\t\t\tn/a\tn/a\n" +
// "NADP\tn/a\tCO21\tManitou\t39.1008\t-105.0933\tTeller\tCOLORADO\tUSA\t\t\t2362\tm\t\t1978-10-17\t\tn/a\tn/a\n" +
// "CDSN\tCITYFTCO_WQX\tPBOXC\tCache La Poudre River above Boxelder Creek, Site C\t40.551282\t-105.008726\tLarimer\tCOLORADO\tUSA\t\t\t\t\t\t\t\tn/a\tn/a\n" +
"CDWR\tn/a\tDEUDITCO\tDEUEL SNYDER CANAL\t40.282657\t-103.848246\t\tColorado\tUnited States\t\t\t\t\t\t2008-03-31\t2014-09-25\tn/a\tn/a";//the contents of a tab-delimited input file
//Outputs
//Gets
public File getResults(){ return new File(directory, "downloadData.zip"); }
//Sets
public void setDirectory(String directory_str){ directory = directory_str; }
public void setAttributeFile(String attributeFile_str){ attributeFile = attributeFile_str; }
public void setBeginDate(String beginDate_str){ beginDate = beginDate_str; }
public void setEndDate(String endDate_str){ endDate = endDate_str; }
public void setUSGSflowTF(boolean USGSflow_TF){ USGSflowTF = USGSflow_TF; }
public void setUSGSwqTF(boolean USGSwq_TF){ USGSwqTF = USGSwq_TF; }
public void setUSGSfloodTF(boolean USGSflood_TF){ USGSfloodTF = USGSflood_TF; }
public void setUSGSstageDischargeTF(boolean USGSstageDischarge_TF){ USGSstageDischargeTF = USGSstageDischarge_TF; }
public void setUSGSWellsDepthTF(boolean USGSWellsDepth_TF){ USGSWellsDepthTF = USGSWellsDepth_TF; }
public void setUSGSWellsWatertableHeightTF(boolean USGSWellsWatertableHeight_TF){ USGSWellsWatertableHeightTF = USGSWellsWatertableHeight_TF; }
public void setUSGSWellwqTF(boolean USGSWellwq_TF){ USGSWellwqTF = USGSWellwq_TF; }
public void setSTORETflowTF(boolean STORETflow_TF){ STORETflowTF = STORETflow_TF; }
public void setSTORETwqTF(boolean STORETwq_TF){ STORETwqTF = STORETwq_TF; }
public void setCDWRflowTF(boolean CDWRflow_TF){ CDWRflowTF = CDWRflow_TF; }
public void setCDWRstageDischargeTF(boolean CDWRstageDischarge_TF){ CDWRstageDischargeTF = CDWRstageDischarge_TF; }
public void setNADPannualDataTF(boolean NADPannualData_TF){ NADPannualDataTF = NADPannualData_TF; }
public void setNADPmonthlyDataTF(boolean NADPmonthlyData_TF){ NADPmonthlyDataTF = NADPmonthlyData_TF; }
public void setNADPweeklyDataTF(boolean NADPweeklyData_TF){ NADPweeklyDataTF = NADPweeklyData_TF; }
public void setCDSNflowTF(boolean CDSNflow_TF){ CDSNflowTF = CDSNflow_TF; }
public void setCDSNwqTF(boolean CDSNwq_TF){ CDSNwqTF = CDSNwq_TF; }
/**
* Combines all of the results files into a single results zip file
* @param fileNameList list of each download result file to be included in the zip file
*/
public void zipResultsFiles(File[] fileNameList){
//Check if the output zip file already exists
File output = this.getResults();
if(output.isFile()){
boolean deleteTrue = output.delete();
if(!deleteTrue){
System.out.println("The zip file " + output.getName() + " already exists and was not properly deleted");
}
}
//Zip up the files in fileNameList into the output zip file
try(FileOutputStream fileOutput = new FileOutputStream(this.getResults());
ZipOutputStream zipOutput = new ZipOutputStream(fileOutput)){
//try-with-resources ensures the output streams are closed even if an exception occurs mid-zip
byte[] buffer = new byte[1024];
zipOutput.setLevel(Deflater.DEFAULT_COMPRESSION);
for(int i=0; i<fileNameList.length; i++){
try(FileInputStream fileInput = new FileInputStream(fileNameList[i])){
zipOutput.putNextEntry(new ZipEntry(fileNameList[i].getName()));
int len;
while((len = fileInput.read(buffer)) > 0){
zipOutput.write(buffer, 0, len);
}
zipOutput.closeEntry();
}
}
}catch(FileNotFoundException ex){
Logger.getLogger(guiDataDownloads.class.getName()).log(Level.SEVERE, null, ex);
}catch (IOException ex) {
Logger.getLogger(guiDataDownloads.class.getName()).log(Level.SEVERE, null, ex);
}
System.out.println("Results files zipped at: " + directory + File.separator + this.getResults().getName());
//Loop through and delete the original files after zipping to save space
for(int i=0; i<fileNameList.length; i++){
File currentFile = fileNameList[i];
boolean deleteTrue = currentFile.delete();
if(!deleteTrue){
System.out.println("The file " + currentFile.getName() + " was not properly deleted after being zipped");
}
}
}
/**
* Assembles the error message lines into a single string and reports them by throwing an IOException
* @param error list of strings to be written as each line of the error message
* @throws IOException always, containing the assembled error message
*/
public void writeError(ArrayList<String> error) throws IOException{
//Join the error message lines into a single newline-delimited string
String errorContents = String.join("\n", error);
throw new IOException("Error encountered. Please see the following message for details: \n" + errorContents);
}
/**
* Writes out the contents of the data query to a text file
* @param textData string array to be written as each line of the text file
* @param fileNameList running list of the output file names created so far
* @param fileName the name of the output file to be written
* @param database the source database name; controls whether LF or CRLF line endings are used
* @return the updated fileNameList with fileName appended
* @throws IOException
*/
public ArrayList<String> writeOutputFile(ArrayList<String> textData, ArrayList<String> fileNameList, String fileName, String database) throws IOException{
String path = directory + "/" + fileName;
FileWriter write = new FileWriter(path, false);
PrintWriter print_line = new PrintWriter(write);
//Output data to text file
for(int i = 0; i < textData.size(); i++) {
if(database.equalsIgnoreCase("STORET")){
print_line.printf("%s" + "\n", textData.get(i));
}else{
print_line.printf("%s" + "\r\n", textData.get(i));
}
}
print_line.close();
System.out.println("Text File located at:\t" + path);
fileNameList.add(fileName);
return fileNameList;
}
public void run() throws IOException, ParseException, InterruptedException, Exception {
long startTime = System.currentTimeMillis();
//If no date input, make it the maximum of available data
if(beginDate == null || beginDate.equalsIgnoreCase("")){
beginDate = "1850-01-01";
}
if(endDate == null || endDate.equalsIgnoreCase("")){
// Pull current date for upper limit of data search
DateFormat desiredDateFormat = new SimpleDateFormat("yyyy-MM-dd");
Date currentDate = new Date();
endDate = desiredDateFormat.format(currentDate);
}
//Pull data out into a string array
String[] inputRows = attributeFile.split("\n");
ArrayList<String> fileNameList = new ArrayList<String>();
int index_database = -1, index_org_id = -1, index_station_id = -1, index_state = -1;
for(int i=0; i<inputRows.length; i++){//Start on 2nd line (first line is a header)
String[] currentAttributes = inputRows[i].split("\t");
if(i==0){
//Determine indices for the data columns
for(int j=0; j<currentAttributes.length; j++){
if(currentAttributes[j].equalsIgnoreCase("database")){
index_database = j;
}else if(currentAttributes[j].equalsIgnoreCase("org_id")){
index_org_id = j;
}else if(currentAttributes[j].equalsIgnoreCase("staid")){
index_station_id = j;
}else if(currentAttributes[j].equalsIgnoreCase("state")){
index_state = j;
}
}
//Check for missing information
if(index_database == -1){
throw new IOException("There is no attribute column 'database' in the station list provided");
}
if(index_org_id == -1){
throw new IOException("There is no attribute column 'org_id' (STORET organization id) in the station list provided");
}
if(index_station_id == -1){
throw new IOException("There is no attribute column 'staid' (station ID) in the station list provided");
}
if(index_state == -1){
throw new IOException("There is no attribute column 'state' in the station list provided");
}
}else{
//Call Download function for current database
if(currentAttributes[index_database].equalsIgnoreCase("USGS Wells")){
GroundwaterDataInterface groundwaterLib = GroundwaterData.getNewGroundwaterDataInterface("USGS", "");
if(USGSWellsDepthTF){
ArrayList<String> rawData = groundwaterLib.extractDepthToWaterTable_raw(currentAttributes[index_station_id], beginDate, endDate);
fileNameList = writeOutputFile(rawData, fileNameList, "USGSwells_" + currentAttributes[index_station_id] + "_DepthData.txt", currentAttributes[index_database]);
}
if(USGSWellsWatertableHeightTF){
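//Note: the water-table height output is currently populated from the same depth-to-water-table query used for the depth output above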
ArrayList<String> rawData = groundwaterLib.extractDepthToWaterTable_raw(currentAttributes[index_station_id], beginDate, endDate);
fileNameList = writeOutputFile(rawData, fileNameList, "USGSwells_" + currentAttributes[index_station_id] + "_HeightData.txt", currentAttributes[index_database]);
}
if(USGSWellwqTF){
ArrayList<String> rawData = groundwaterLib.extractWaterQualityData_raw(currentAttributes[index_station_id], beginDate, endDate, "all");
fileNameList = writeOutputFile(rawData, fileNameList, "USGSwells_" + currentAttributes[index_station_id] + "_WQData.txt", currentAttributes[index_database]);
}
}else if(currentAttributes[index_database].equalsIgnoreCase("NADP")){
String NADPbeginDate = beginDate;
if (Integer.parseInt(NADPbeginDate.substring(0,4))<1900) {
// The NADP website gives an internal server error if the begin year is earlier than 1900,
// so 1900-01-01 is used as the revised default beginDate for NADP. No NADP station recorded data earlier than 1978.
NADPbeginDate = "1900-01-01";
}
AirDataInterface airLib = AirData.getNewAirDataDataInterface("NADP");
if(NADPannualDataTF){
ArrayList<String> rawData = airLib.extractAnnualDepositionData_raw(currentAttributes[index_station_id], NADPbeginDate, endDate, "");
fileNameList = writeOutputFile(rawData, fileNameList, "NADP_" + currentAttributes[index_station_id] + "_AnnualData.csv", currentAttributes[index_database]);
}
if(NADPmonthlyDataTF) {
ArrayList<String> rawData = airLib.extractMonthlyDepositionData_raw(currentAttributes[index_station_id], NADPbeginDate, endDate, "");
fileNameList = writeOutputFile(rawData, fileNameList, "NADP_" + currentAttributes[index_station_id] + "_MonthlyData.csv", currentAttributes[index_database]);
}
if(NADPweeklyDataTF) {
ArrayList<String> rawData = airLib.extractWeeklyDepositionData_raw(currentAttributes[index_station_id], NADPbeginDate, endDate, "");
fileNameList = writeOutputFile(rawData, fileNameList, "NADP_" + currentAttributes[index_station_id] + "_WeeklyData.csv", currentAttributes[index_database]);
}
}else if(currentAttributes[index_database].equalsIgnoreCase("SNOTEL")){
Object[] returnArray = SNOTEL_Data.getSNOTELdata(currentAttributes[index_station_id], currentAttributes[index_state], "snow");
ArrayList<String> textData = (ArrayList<String>) returnArray[0];
//String[][] flowData = (String[][]) returnArray[1];
//String start = (String) returnArray[2];
//String end = (String) returnArray[3];
fileNameList = writeOutputFile(textData, fileNameList, "SNOTEL_" + currentAttributes[index_state] + "_" + currentAttributes[index_station_id] + ".csv", currentAttributes[index_database]);
}else{
WaterDataInterface waterLib = WaterData.getNewWaterDataInterface(currentAttributes[index_database], "");
String stationId_fixed = currentAttributes[index_station_id].replace("/", "");
if((currentAttributes[index_database].equalsIgnoreCase("USGS") && USGSflowTF) ||
(currentAttributes[index_database].equalsIgnoreCase("STORET") && STORETflowTF) ||
(currentAttributes[index_database].equalsIgnoreCase("CDWR") && CDWRflowTF) ||
(currentAttributes[index_database].equalsIgnoreCase("CDSN") && CDSNflowTF) ) {
ArrayList<String> rawData = new ArrayList<>();
try{
rawData = waterLib.extractFlowData_raw(directory, currentAttributes[index_org_id], currentAttributes[index_station_id], beginDate, endDate);
}catch(WaterDataException e){
rawData.add("There is no available flow data in the " + currentAttributes[index_database] + " database for station '" + currentAttributes[index_station_id] + "'.");
}
fileNameList = writeOutputFile(rawData, fileNameList, currentAttributes[index_database] + "_" + stationId_fixed + "_FlowData.txt", currentAttributes[index_database]);
}
if((currentAttributes[index_database].equalsIgnoreCase("USGS") && USGSwqTF) ||
(currentAttributes[index_database].equalsIgnoreCase("STORET") && STORETwqTF) ||
(currentAttributes[index_database].equalsIgnoreCase("CDSN") && CDSNwqTF) ) {
ArrayList<String> rawData = new ArrayList<>();
try{
rawData = waterLib.extractWaterQualityData_raw(directory, currentAttributes[index_org_id], currentAttributes[index_station_id], beginDate, endDate, "all");
}catch(WaterDataException e){
rawData.add("There is no available water quality data in the " + currentAttributes[index_database] + " database for station '" + currentAttributes[index_station_id] + "'.");
}
fileNameList = writeOutputFile(rawData, fileNameList, currentAttributes[index_database] + "_" + stationId_fixed + "_WqData.txt", currentAttributes[index_database]);
}
if((currentAttributes[index_database].equalsIgnoreCase("USGS") && USGSfloodTF) ) {
ArrayList<String> rawData = new ArrayList<>();
try{
rawData = waterLib.extractFloodData_raw(directory, currentAttributes[index_org_id], currentAttributes[index_station_id], beginDate, endDate);
}catch(WaterDataException e){
rawData.add("There is no available flood data in the " + currentAttributes[index_database] + " database for station '" + currentAttributes[index_station_id] + "'.");
}
fileNameList = writeOutputFile(rawData, fileNameList, currentAttributes[index_database] + "_" + stationId_fixed + "_FloodData.txt", currentAttributes[index_database]);
}
if((currentAttributes[index_database].equalsIgnoreCase("USGS") && USGSstageDischargeTF) ||
(currentAttributes[index_database].equalsIgnoreCase("CDWR") && CDWRstageDischargeTF) ) {
ArrayList<String> rawData = new ArrayList<>();
try{
rawData = waterLib.extractStageDischarge_raw(currentAttributes[index_station_id]);
}catch(WaterDataException e){
rawData.add("There is no available stage-discharge (rating curve) data in the " + currentAttributes[index_database] + " database for station '" + currentAttributes[index_station_id] + "'.");
}
fileNameList = writeOutputFile(rawData, fileNameList, currentAttributes[index_database] + "_" + stationId_fixed + "_StageDischargeData.txt", currentAttributes[index_database]);
}
}
}
}
File[] resultFileNames = new File[fileNameList.size()];
for(int i=0; i<fileNameList.size(); i++){
resultFileNames[i] = new File(directory + File.separator + fileNameList.get(i));
}
//Output list of file names
zipResultsFiles(resultFileNames);
//Report how long this download run took
long endTime = System.currentTimeMillis();
System.out.println("Finished. All downloads took: " + (endTime - startTime) +
" milliseconds (" + ((endTime - startTime)/1000) + " seconds)(" + (((endTime - startTime)/1000)/60) + " minutes) to run");
System.out.println("");
//Location of Attributes (just for reference)
// Database = 0
// Org = 1
// StaID = 2
// StaName = 3
// Latitude = 4
// Longitude = 5
// County = 6
// State = 7
// Country = 8
// HUCNumber = 9
// DrainArea = 10
// Elev = 11
// ElevUnits = 12
// ElevDatum = 13
// BeginDate = 14
// EndDate = 15
// Coop_ID = 16
// WBAN_ID = 17
}
public static void main(String[] args) throws IOException, Exception{
//Run model
guiDataDownloads model = new guiDataDownloads();
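//Optional configuration before running; the path and dates below are illustrative placeholders only
//model.setDirectory("C:/path/to/output");
//model.setBeginDate("2000-01-01");
//model.setEndDate("2010-12-31");
//model.setSTORETflowTF(false);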
model.run();
}
}