@@ -34,142 +34,142 @@ |
*/ |
public class FileArchive implements ServiceArchive { |
|
- HashMap<String, ModelArchive> archives = new HashMap<>(); |
- String directory; |
+ HashMap<String, ModelArchive> archives = new HashMap<>(); |
+ String directory; |
|
- public FileArchive(String directory) throws Exception { |
- this.directory = directory; |
- if (!Files.isDirectory(Paths.get(directory))) { |
- throw new Exception("Directory specified does not exist: (" + directory + ")"); |
+ public FileArchive(String directory) throws Exception { |
+ this.directory = directory; |
+ if (!Files.isDirectory(Paths.get(directory))) { |
+ throw new Exception("Directory specified does not exist: (" + directory + ")"); |
+ } |
+ FilenameFilter jsonFilter = (File dir, String name) -> { |
+ String lowercaseName = name.toLowerCase(); |
+ return lowercaseName.endsWith(".json"); |
+ }; |
+ File folder = new File(directory); |
+ File[] fileList = folder.listFiles(jsonFilter); |
+ |
+ for (File file : fileList) { |
+ if (file.isFile() && file.canRead()) { |
+ ModelArchive model = new ModelArchive(readFile(file)); |
+ archives.put(model.getSUID(), model); |
+ } |
+ } |
+ } |
+ |
+ @Override |
+ public ArrayList<ModelArchive> getArchiveByReqIP(String ip, String service) throws Exception { |
+ throw new UnsupportedOperationException("Not supported yet."); |
+ } |
+ |
+ @Override |
+ public ModelArchive getArchiveBySUID(String suid) throws Exception { |
+ return archives.get(suid); |
+ } |
+ |
+ @Override |
+ public long getCount() { |
+ return archives.size(); |
+ } |
+ |
+ @Override |
+ public JSONObject getServiceRequest(String suid) throws Exception { |
+ JSONObject requestData = null; |
+ |
+ InputStream zipFile = new FileInputStream(directory + "/" + suid + ".zip"); |
+ |
+ try (ZipInputStream zin = new ZipInputStream(zipFile)) { |
+ ZipEntry entry; |
+ |
+ while ((entry = zin.getNextEntry()) != null) { |
+ if (entry.getName().contains(".request")) { |
+ BufferedReader bReader = new BufferedReader(new InputStreamReader(zin)); |
+ StringBuilder fileContent = new StringBuilder(); |
+ String inputStr; |
+ while ((inputStr = bReader.readLine()) != null) { |
+ fileContent.append(inputStr); |
+ } |
+ requestData = new JSONObject(fileContent.toString()); |
+ break; |
} |
- FilenameFilter jsonFilter = (File dir, String name) -> { |
- String lowercaseName = name.toLowerCase(); |
- return lowercaseName.endsWith(".json"); |
- }; |
- File folder = new File(directory); |
- File[] fileList = folder.listFiles(jsonFilter); |
- |
- for (File file : fileList) { |
- if (file.isFile() && file.canRead()) { |
- ModelArchive model = new ModelArchive(readFile(file)); |
- archives.put(model.getSUID(), model); |
- } |
- } |
+ } |
} |
|
- @Override |
- public ArrayList<ModelArchive> getArchiveByReqIP(String ip, String service) throws Exception { |
- throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates. |
+ return requestData; |
+ } |
+ |
+ @Override |
+ public JSONObject getServiceResponse(String suid) throws Exception { |
+ JSONObject requestData = null; |
+ |
+ InputStream zipFile = new FileInputStream(directory + "/" + suid + ".zip"); |
+ |
+ try (ZipInputStream zin = new ZipInputStream(zipFile)) { |
+ ZipEntry entry; |
+ |
+ while ((entry = zin.getNextEntry()) != null) { |
+ if (entry.getName().contains(".response")) { |
+ BufferedReader bReader = new BufferedReader(new InputStreamReader(zin)); |
+ StringBuilder fileContent = new StringBuilder(); |
+ String inputStr; |
+ while ((inputStr = bReader.readLine()) != null) { |
+ fileContent.append(inputStr); |
+ } |
+ requestData = new JSONObject(fileContent.toString()); |
+ break; |
+ } |
+ } |
} |
|
- @Override |
- public ModelArchive getArchiveBySUID(String suid) throws Exception { |
- return archives.get(suid); |
+ return requestData; |
+ } |
+ |
+ @Override |
+ public boolean hasArchive(String suid) throws Exception { |
+ return archives.containsKey(suid); |
+ } |
+ |
+ @Override |
+ public Set<String> keys(int skip, int limit, String sortby, boolean sortAsc) { |
+ throw new UnsupportedOperationException("Not supported yet."); |
+ } |
+ |
+ @Override |
+ public void shutdown() throws Exception { |
+ |
+ } |
+ |
+ private JSONObject readFile(File file) throws FileNotFoundException, IOException, JSONException { |
+ JSONObject ret_val; |
+ |
+ try (BufferedReader br = new BufferedReader(new FileReader(file));) { |
+ StringBuilder sb = new StringBuilder(); |
+ String line = br.readLine(); |
+ |
+ while (line != null) { |
+ sb.append(line); |
+ sb.append(System.lineSeparator()); |
+ line = br.readLine(); |
+ } |
+ String jsonData = sb.toString(); |
+ ret_val = new JSONObject(jsonData); |
} |
+ return ret_val; |
+ } |
|
- @Override |
- public long getCount() { |
- return archives.size(); |
- } |
+ @Override |
+ public ArrayList<ModelArchive> getArchivesByFilter(String key, String value, int limit, boolean useBasicArchiveFunctionality) throws Exception { |
+ throw new UnsupportedOperationException("Not supported yet."); |
+ } |
|
- @Override |
- public JSONObject getServiceRequest(String suid) throws Exception { |
- JSONObject requestData = null; |
+ @Override |
+ public ArrayList<String> getSUIDsByFilter(String key, String value) { |
+ throw new UnsupportedOperationException("Not supported yet."); |
+ } |
|
- InputStream zipFile = new FileInputStream(directory + "/" + suid + ".zip"); |
- |
- try (ZipInputStream zin = new ZipInputStream(zipFile)) { |
- ZipEntry entry; |
- |
- while ((entry = zin.getNextEntry()) != null) { |
- if (entry.getName().contains(".request")) { |
- BufferedReader bReader = new BufferedReader(new InputStreamReader(zin)); |
- StringBuilder fileContent = new StringBuilder(); |
- String inputStr; |
- while ((inputStr = bReader.readLine()) != null) { |
- fileContent.append(inputStr); |
- } |
- requestData = new JSONObject(fileContent.toString()); |
- break; |
- } |
- } |
- } |
- |
- return requestData; |
- } |
- |
- @Override |
- public JSONObject getServiceResponse(String suid) throws Exception { |
- JSONObject requestData = null; |
- |
- InputStream zipFile = new FileInputStream(directory + "/" + suid + ".zip"); |
- |
- try (ZipInputStream zin = new ZipInputStream(zipFile)) { |
- ZipEntry entry; |
- |
- while ((entry = zin.getNextEntry()) != null) { |
- if (entry.getName().contains(".response")) { |
- BufferedReader bReader = new BufferedReader(new InputStreamReader(zin)); |
- StringBuilder fileContent = new StringBuilder(); |
- String inputStr; |
- while ((inputStr = bReader.readLine()) != null) { |
- fileContent.append(inputStr); |
- } |
- requestData = new JSONObject(fileContent.toString()); |
- break; |
- } |
- } |
- } |
- |
- return requestData; |
- } |
- |
- @Override |
- public boolean hasArchive(String suid) throws Exception { |
- return archives.containsKey(suid); |
- } |
- |
- @Override |
- public Set<String> keys(int skip, int limit, String sortby, boolean sortAsc) { |
- throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates. |
- } |
- |
- @Override |
- public void shutdown() throws Exception { |
- |
- } |
- |
- private JSONObject readFile(File file) throws FileNotFoundException, IOException, JSONException { |
- JSONObject ret_val; |
- |
- try (BufferedReader br = new BufferedReader(new FileReader(file));) { |
- StringBuilder sb = new StringBuilder(); |
- String line = br.readLine(); |
- |
- while (line != null) { |
- sb.append(line); |
- sb.append(System.lineSeparator()); |
- line = br.readLine(); |
- } |
- String jsonData = sb.toString(); |
- ret_val = new JSONObject(jsonData); |
- } |
- return ret_val; |
- } |
- |
- @Override |
- public ArrayList<ModelArchive> getArchivesByFilter(String key, String value, int limit, boolean useBasicArchiveFunctionality) throws Exception { |
- throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates. |
- } |
- |
- @Override |
- public ArrayList<String> getSUIDsByFilter(String key, String value) { |
- throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates. |
- } |
- |
- @Override |
- public ArrayList<ModelArchive> getArchivesByFilters(Iterable<Bson> filters, int limit) throws Exception { |
- throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates. |
- } |
+ @Override |
+ public ArrayList<ModelArchive> getArchivesByFilters(Iterable<Bson> filters, int limit) throws Exception { |
+ throw new UnsupportedOperationException("Not supported yet."); |
+ } |
|
} |
@@ -46,274 +46,274 @@ |
*/ |
public class MongoArchive implements ServiceArchive { |
|
- static String ARCHIVE_TOO_BIG = "the archive was too big to store."; |
- MongoClient mongo; |
- String dbname; |
- static final String FS_COLL = "fs"; |
+ static String ARCHIVE_TOO_BIG = "the archive was too big to store."; |
+ MongoClient mongo; |
+ String dbname; |
+ static final String FS_COLL = "fs"; |
|
- UpdateOptions opt = new UpdateOptions(); |
+ UpdateOptions opt = new UpdateOptions(); |
|
- public MongoArchive(String uri) throws Exception { |
- // "mongodb://user:pass@host:port/db" |
- if (uri.contains("mongodb://")) { |
- MongoClientURI u = new MongoClientURI(uri); |
- dbname = u.getDatabase(); |
- if (dbname == null) { |
- dbname = "csip"; |
- } |
+ public MongoArchive(String uri) throws Exception { |
+ // "mongodb://user:pass@host:port/db" |
+ if (uri.contains("mongodb://")) { |
+ MongoClientURI u = new MongoClientURI(uri); |
+ dbname = u.getDatabase(); |
+ if (dbname == null) { |
+ dbname = "csip"; |
+ } |
|
- mongo = new MongoClient(u); |
- opt.upsert(true); |
- } else { |
- throw new Exception("No mongodb location specified"); |
- } |
- } // "mongodb://user:pass@host:port/db"// "mongodb://user:pass@host:port/db" |
+ mongo = new MongoClient(u); |
+ opt.upsert(true); |
+ } else { |
+ throw new Exception("No mongodb location specified"); |
+ } |
+ } // URI format: "mongodb://user:pass@host:port/db" |
|
- @Override |
- public synchronized ArrayList<ModelArchive> getArchiveByReqIP(String ip, String service) throws Exception { |
- ArrayList<ModelArchive> ret_val = new ArrayList<>(); |
- MongoDatabase db = mongo.getDatabase(dbname); |
- FindIterable<Document> r = db.getCollection(FS_COLL).find(eq("req_ip", ip)); |
+ @Override |
+ public synchronized ArrayList<ModelArchive> getArchiveByReqIP(String ip, String service) throws Exception { |
+ ArrayList<ModelArchive> ret_val = new ArrayList<>(); |
+ MongoDatabase db = mongo.getDatabase(dbname); |
+ FindIterable<Document> r = db.getCollection(FS_COLL).find(eq("req_ip", ip)); |
|
- r.sort(new Document("ctime", 1)); |
- for (Document doc : r) { |
- if (doc != null) { |
- if (doc.getString("service").equalsIgnoreCase(service)) { |
- ret_val.add(new ModelArchive( |
- doc.getString("_id"), |
- doc.getString("ctime"), |
- doc.getString("etime"), |
- doc.getString("service"), |
- doc.getString("status"), |
- doc.getString("req_ip"), |
- doc.getString("filename") |
- ) |
- ); |
- } |
- } |
- } |
- return ret_val; |
+ r.sort(new Document("ctime", 1)); |
+ for (Document doc : r) { |
+ if (doc != null) { |
+ if (doc.getString("service").equalsIgnoreCase(service)) { |
+ ret_val.add(new ModelArchive( |
+ doc.getString("_id"), |
+ doc.getString("ctime"), |
+ doc.getString("etime"), |
+ doc.getString("service"), |
+ doc.getString("status"), |
+ doc.getString("req_ip"), |
+ doc.getString("filename") |
+ ) |
+ ); |
+ } |
+ } |
+ } |
+ return ret_val; |
+ } |
+ |
+ @Override |
+ public synchronized ModelArchive getArchiveBySUID(String suid) throws Exception { |
+ MongoDatabase db = mongo.getDatabase(dbname); |
+ FindIterable<Document> r = db.getCollection(FS_COLL).find(new Document("_id", suid)); |
+ Document doc = r.first(); |
+ if (doc != null) { |
+ return new ModelArchive( |
+ doc.getString("_id"), |
+ doc.getString("ctime"), |
+ doc.getString("etime"), |
+ doc.getString("service"), |
+ doc.getString("status"), |
+ doc.getString("req_ip"), |
+ doc.getString("filename") |
+ ); |
+ } |
+ return null; |
+ } |
+ |
+ private synchronized byte[] getFile(String suid) throws Exception { |
+ MongoDatabase db = mongo.getDatabase(dbname); |
+ GridFSBucket gridFSBucket = GridFSBuckets.create(db); |
+ try (GridFSDownloadStream stream = gridFSBucket.openDownloadStream(new BsonString(suid))) { |
+ long fileLength = stream.getGridFSFile().getLength(); |
+ // this should not happen, since it is capped at 1GB. |
+ if (fileLength > Integer.MAX_VALUE) { |
+ return ARCHIVE_TOO_BIG.getBytes(); |
+ } |
+ return IOUtils.toByteArray(stream, fileLength); |
+ } |
+ } |
+ |
+ @Override |
+ public synchronized JSONObject getServiceRequest(String suid) throws Exception { |
+ JSONObject requestData = null; |
+ |
+ byte[] zFile = getFile(suid); |
+ |
+ try (ZipInputStream zin = new ZipInputStream(new ByteArrayInputStream(zFile))) { |
+ ZipEntry entry; |
+ |
+ while ((entry = zin.getNextEntry()) != null) { |
+ if (entry.getName().contains(".request")) { |
+ BufferedReader bReader = new BufferedReader(new InputStreamReader(zin)); |
+ StringBuilder fileContent = new StringBuilder(); |
+ String inputStr; |
+ while ((inputStr = bReader.readLine()) != null) { |
+ fileContent.append(inputStr).append(System.lineSeparator()); |
+ } |
+ requestData = new JSONObject(fileContent.toString()); |
+ break; |
+ } |
+ } |
} |
|
- @Override |
- public synchronized ModelArchive getArchiveBySUID(String suid) throws Exception { |
- MongoDatabase db = mongo.getDatabase(dbname); |
- FindIterable<Document> r = db.getCollection(FS_COLL).find(new Document("_id", suid)); |
- Document doc = r.first(); |
- if (doc != null) { |
- return new ModelArchive( |
- doc.getString("_id"), |
- doc.getString("ctime"), |
- doc.getString("etime"), |
- doc.getString("service"), |
- doc.getString("status"), |
- doc.getString("req_ip"), |
- doc.getString("filename") |
- ); |
- } |
- return null; |
+ return requestData; |
+ } |
+ |
+ @Override |
+ public synchronized JSONObject getServiceResponse(String suid) throws Exception { |
+ JSONObject requestData = null; |
+ |
+ byte[] zFile = getFile(suid); |
+ |
+ try (ZipInputStream zin = new ZipInputStream(new ByteArrayInputStream(zFile))) { |
+ ZipEntry entry; |
+ |
+ while ((entry = zin.getNextEntry()) != null) { |
+ if (entry.getName().contains(".response")) { |
+ BufferedReader bReader = new BufferedReader(new InputStreamReader(zin)); |
+ StringBuilder fileContent = new StringBuilder(); |
+ String inputStr; |
+ while ((inputStr = bReader.readLine()) != null) { |
+ fileContent.append(inputStr).append(System.lineSeparator()); |
+ } |
+ requestData = new JSONObject(fileContent.toString()); |
+ break; |
+ } |
+ } |
} |
|
- private synchronized byte[] getFile(String suid) throws Exception { |
- MongoDatabase db = mongo.getDatabase(dbname); |
- GridFSBucket gridFSBucket = GridFSBuckets.create(db); |
- try (GridFSDownloadStream stream = gridFSBucket.openDownloadStream(new BsonString(suid))) { |
- long fileLength = stream.getGridFSFile().getLength(); |
- // this should not happen, since it is capped at 1GB. |
- if (fileLength > Integer.MAX_VALUE) { |
- return ARCHIVE_TOO_BIG.getBytes(); |
- } |
- return IOUtils.toByteArray(stream, fileLength); |
- } |
+ return requestData; |
+ } |
+ |
+ @Override |
+ public synchronized void shutdown() throws Exception { |
+ mongo.close(); |
+ } |
+ |
+ @Override |
+ public synchronized long getCount() { |
+ MongoDatabase db = mongo.getDatabase(dbname); |
+ return db.getCollection(FS_COLL).count(); |
+ } |
+ |
+ // new keys implementation. |
+ @Override |
+ public synchronized Set<String> keys(int skip, int limit, String sortby, boolean sortAsc) { |
+ if (sortby == null) { |
+ sortby = "ctime"; |
+ sortAsc = false; |
+ } |
+ Document sort = new Document(sortby, sortAsc ? 1 : -1); |
+ Set<String> l = new LinkedHashSet<>(); |
+ MongoDatabase db = mongo.getDatabase(dbname); |
+ MongoCollection<Document> c = db.getCollection(FS_COLL); |
+ |
+ for (Document doc : c.find().sort(sort).skip(skip).limit(limit)) { |
+ l.add(doc.get("_id", String.class |
+ )); |
+ } |
+ return l; |
+ } |
+ |
+ @Override |
+ public synchronized boolean hasArchive(String suid) throws Exception { |
+ MongoDatabase db = mongo.getDatabase(dbname); |
+ return db.getCollection(FS_COLL).count(new Document("_id", suid)) == 1; |
+ |
+ } |
+ |
+ @Override |
+ public ArrayList<ModelArchive> getArchivesByFilters(Iterable<Bson> filters, int limit) throws Exception { |
+ ArrayList<ModelArchive> ret_val = new ArrayList<>(); |
+ MongoDatabase db = mongo.getDatabase(dbname); |
+ FindIterable<Document> c = db.getCollection(FS_COLL).find(and(filters)).sort(new Document("ctime", -1)); |
+ |
+ MongoCursor<Document> cursor = c.iterator(); |
+ |
+ int count = 0; |
+ while (cursor.hasNext()) { |
+ Document doc = cursor.next(); |
+ if (null != doc) { |
+ ModelArchive model = null; |
+ byte[] fileData = getFile(doc.getString("_id")); |
+ |
+ if (doc.getString("service").contains("weps/5.0")) { |
+ model = new WEPSModelArchive(doc.getString("_id"), |
+ doc.getString("ctime"), |
+ doc.getString("etime"), |
+ doc.getString("service"), |
+ doc.getString("status"), |
+ doc.getString("req_ip"), |
+ doc.getString("filename"), |
+ fileData |
+ ); |
+ model.saveFileData(fileData); |
+ } |
+ |
+ fileData = null; // Allow garbage collection of the byte array. |
+ |
+ ret_val.add(model); |
+ count++; |
+ |
+ // Reached our limit yet? |
+ if ((-1 != limit) && (count >= limit)) { |
+ break; |
+ } |
+ } |
} |
|
- @Override |
- public synchronized JSONObject getServiceRequest(String suid) throws Exception { |
- JSONObject requestData = null; |
+ System.gc(); // We've read a lot of data and dynamically allocated a lot of space, some of which was returned...force some garbage collection. |
+ return ret_val; |
+ } |
|
- byte[] zFile = getFile(suid); |
+ @Override |
+ public ArrayList<ModelArchive> getArchivesByFilter(String key, String value, int limit, boolean basicArchiveFunctionalaity) throws Exception { |
+ ArrayList<ModelArchive> ret_val = new ArrayList<>(); |
+ MongoDatabase db = mongo.getDatabase(dbname); |
+ FindIterable<Document> c = db.getCollection(FS_COLL).find(and(eq(key, value), eq("status", "Finished"))).sort(new Document("ctime", -1)); |
|
- try (ZipInputStream zin = new ZipInputStream(new ByteArrayInputStream(zFile))) { |
- ZipEntry entry; |
+ MongoCursor<Document> cursor = c.iterator(); |
|
- while ((entry = zin.getNextEntry()) != null) { |
- if (entry.getName().contains(".request")) { |
- BufferedReader bReader = new BufferedReader(new InputStreamReader(zin)); |
- StringBuilder fileContent = new StringBuilder(); |
- String inputStr; |
- while ((inputStr = bReader.readLine()) != null) { |
- fileContent.append(inputStr).append(System.lineSeparator()); |
- } |
- requestData = new JSONObject(fileContent.toString()); |
- break; |
- } |
- } |
- } |
+ int count = 0; |
+ while (cursor.hasNext()) { |
+ Document doc = cursor.next(); |
+ if ((null != doc) && (doc.getString(key).equalsIgnoreCase(value))) { |
+ ModelArchive model = null; |
+ byte[] fileData = null; |
+ String serviceName = doc.getString("service"); |
|
- return requestData; |
+ model = ModelArchiveFactory.getModelArchive(serviceName, doc); |
+ if (null != model) { |
+ fileData = getFile(doc.getString("_id")); |
+ model.setFileDataEx(fileData); |
+ model.saveFileData(fileData); |
+ fileData = null; // Allow garbage collection of the byte array. |
+ |
+ if (null != model) { |
+ ret_val.add(model); |
+ count++; |
+ } |
+ } |
+ // Reached our limit yet? |
+ if ((-1 != limit) && (count >= limit)) { |
+ break; |
+ } |
+ } |
} |
|
- @Override |
- public synchronized JSONObject getServiceResponse(String suid) throws Exception { |
- JSONObject requestData = null; |
+ System.gc(); // We've read a lot of data and dynamically allocated a lot of space, some of which was returned...force some garbage collection. |
+ return ret_val; |
+ } |
|
- byte[] zFile = getFile(suid); |
+ @Override |
+ public ArrayList<String> getSUIDsByFilter(String key, String value |
+ ) { |
+ ArrayList<String> ret_val = new ArrayList<>(); |
+ MongoDatabase db = mongo.getDatabase(dbname); |
+ FindIterable<Document> c = db.getCollection(FS_COLL).find(eq(key, value)).sort(new Document("ctime", 1)); |
|
- try (ZipInputStream zin = new ZipInputStream(new ByteArrayInputStream(zFile))) { |
- ZipEntry entry; |
+ MongoCursor<Document> cursor = c.iterator(); |
|
- while ((entry = zin.getNextEntry()) != null) { |
- if (entry.getName().contains(".response")) { |
- BufferedReader bReader = new BufferedReader(new InputStreamReader(zin)); |
- StringBuilder fileContent = new StringBuilder(); |
- String inputStr; |
- while ((inputStr = bReader.readLine()) != null) { |
- fileContent.append(inputStr).append(System.lineSeparator()); |
- } |
- requestData = new JSONObject(fileContent.toString()); |
- break; |
- } |
- } |
- } |
- |
- return requestData; |
+ while (cursor.hasNext()) { |
+ Document doc = cursor.next(); |
+ if (null != doc) { |
+ ret_val.add(doc.getString("_id")); |
+ } |
} |
- |
- @Override |
- public synchronized void shutdown() throws Exception { |
- mongo.close(); |
- } |
- |
- @Override |
- public synchronized long getCount() { |
- MongoDatabase db = mongo.getDatabase(dbname); |
- return db.getCollection(FS_COLL).count(); |
- } |
- |
- // new keys implementation. |
- @Override |
- public synchronized Set<String> keys(int skip, int limit, String sortby, boolean sortAsc) { |
- if (sortby == null) { |
- sortby = "ctime"; |
- sortAsc = false; |
- } |
- Document sort = new Document(sortby, sortAsc ? 1 : -1); |
- Set<String> l = new LinkedHashSet<>(); |
- MongoDatabase db = mongo.getDatabase(dbname); |
- MongoCollection<Document> c = db.getCollection(FS_COLL); |
- |
- for (Document doc : c.find().sort(sort).skip(skip).limit(limit)) { |
- l.add(doc.get("_id", String.class |
- )); |
- } |
- return l; |
- } |
- |
- @Override |
- public synchronized boolean hasArchive(String suid) throws Exception { |
- MongoDatabase db = mongo.getDatabase(dbname); |
- return db.getCollection(FS_COLL).count(new Document("_id", suid)) == 1; |
- |
- } |
- |
- @Override |
- public ArrayList<ModelArchive> getArchivesByFilters(Iterable<Bson> filters, int limit) throws Exception { |
- ArrayList<ModelArchive> ret_val = new ArrayList<>(); |
- MongoDatabase db = mongo.getDatabase(dbname); |
- FindIterable<Document> c = db.getCollection(FS_COLL).find(and(filters)).sort(new Document("ctime", -1)); |
- |
- MongoCursor<Document> cursor = c.iterator(); |
- |
- int count = 0; |
- while (cursor.hasNext()) { |
- Document doc = cursor.next(); |
- if (null != doc) { |
- ModelArchive model = null; |
- byte[] fileData = getFile(doc.getString("_id")); |
- |
- if (doc.getString("service").contains("weps/5.0")) { |
- model = new WEPSModelArchive(doc.getString("_id"), |
- doc.getString("ctime"), |
- doc.getString("etime"), |
- doc.getString("service"), |
- doc.getString("status"), |
- doc.getString("req_ip"), |
- doc.getString("filename"), |
- fileData |
- ); |
- model.saveFileData(fileData); |
- } |
- |
- fileData = null; // Allow garbage collection of the byte array. |
- |
- ret_val.add(model); |
- count++; |
- |
- // Reached our limit yet? |
- if ((-1 != limit) && (count >= limit)) { |
- break; |
- } |
- } |
- } |
- |
- System.gc(); // We've read a lot of data and dynamically allocated a lot of space, some of which was returned...force some garbage collection. |
- return ret_val; |
- } |
- |
- @Override |
- public ArrayList<ModelArchive> getArchivesByFilter(String key, String value, int limit, boolean basicArchiveFunctionalaity) throws Exception { |
- ArrayList<ModelArchive> ret_val = new ArrayList<>(); |
- MongoDatabase db = mongo.getDatabase(dbname); |
- FindIterable<Document> c = db.getCollection(FS_COLL).find(and(eq(key, value), eq("status", "Finished"))).sort(new Document("ctime", -1)); |
- |
- MongoCursor<Document> cursor = c.iterator(); |
- |
- int count = 0; |
- while (cursor.hasNext()) { |
- Document doc = cursor.next(); |
- if ((null != doc) && (doc.getString(key).equalsIgnoreCase(value))) { |
- ModelArchive model = null; |
- byte[] fileData = null; |
- String serviceName = doc.getString("service"); |
- |
- model = ModelArchiveFactory.getModelArchive(serviceName, doc); |
- if (null != model) { |
- fileData = getFile(doc.getString("_id")); |
- model.setFileDataEx(fileData); |
- model.saveFileData(fileData); |
- fileData = null; // Allow garbage collection of the byte array. |
- |
- if (null != model) { |
- ret_val.add(model); |
- count++; |
- } |
- } |
- // Reached our limit yet? |
- if ((-1 != limit) && (count >= limit)) { |
- break; |
- } |
- } |
- } |
- |
- System.gc(); // We've read a lot of data and dynamically allocated a lot of space, some of which was returned...force some garbage collection. |
- return ret_val; |
- } |
- |
- @Override |
- public ArrayList<String> getSUIDsByFilter(String key, String value |
- ) { |
- ArrayList<String> ret_val = new ArrayList<>(); |
- MongoDatabase db = mongo.getDatabase(dbname); |
- FindIterable<Document> c = db.getCollection(FS_COLL).find(eq(key, value)).sort(new Document("ctime", 1)); |
- |
- MongoCursor<Document> cursor = c.iterator(); |
- |
- while (cursor.hasNext()) { |
- Document doc = cursor.next(); |
- if (null != doc) { |
- ret_val.add(doc.getString("_id")); |
- } |
- } |
- return ret_val; |
- } |
+ return ret_val; |
+ } |
} |
@@ -17,476 +17,476 @@ |
*/ |
public class CligenData { |
|
- public static final int MAX_YEARS = 100; |
- public static final int MONTH_AVERAGE_INDEX = 31; |
- public static final int MAX_YEAR_AVERAGE_INDEX = MAX_YEARS; |
+ public static final int MAX_YEARS = 100; |
+ public static final int MONTH_AVERAGE_INDEX = 31; |
+ public static final int MAX_YEAR_AVERAGE_INDEX = MAX_YEARS; |
|
- public static final int PRCP_INDEX = 0; |
- public static final int DUR_INDEX = 1; |
- public static final int TP_INDEX = 2; |
- public static final int IP_INDEX = 3; |
- public static final int TMAX_INDEX = 4; |
- public static final int TMIN_INDEX = 5; |
- public static final int RAD_INDEX = 6; |
- public static final int W_VL_INDEX = 7; |
- public static final int W_DIR_INDEX = 8; |
- public static final int TDEW_INDEX = 9; |
- public static final int DATA_TYPES_LENGTH = 10; |
+ public static final int PRCP_INDEX = 0; |
+ public static final int DUR_INDEX = 1; |
+ public static final int TP_INDEX = 2; |
+ public static final int IP_INDEX = 3; |
+ public static final int TMAX_INDEX = 4; |
+ public static final int TMIN_INDEX = 5; |
+ public static final int RAD_INDEX = 6; |
+ public static final int W_VL_INDEX = 7; |
+ public static final int W_DIR_INDEX = 8; |
+ public static final int TDEW_INDEX = 9; |
+ public static final int DATA_TYPES_LENGTH = 10; |
|
- public static final int OBS_MONTHLY_TMAX = 0; |
- public static final int OBS_MONTHLY_TMIN = 1; |
- public static final int OBS_MONTHLY_RAD = 2; |
- public static final int OBS_MONTHLY_PRCP = 3; |
- public static final int OBS_MONTHLY_LENGTH = 4; |
+ public static final int OBS_MONTHLY_TMAX = 0; |
+ public static final int OBS_MONTHLY_TMIN = 1; |
+ public static final int OBS_MONTHLY_RAD = 2; |
+ public static final int OBS_MONTHLY_PRCP = 3; |
+ public static final int OBS_MONTHLY_LENGTH = 4; |
|
- private static final int DAILY_VALUE_TOKENS = 13; |
+ private static final int DAILY_VALUE_TOKENS = 13; |
|
- // All monthly data for observeredYears years. (Max out, i.e. cap output, at MAX_YEARS yrs) |
- private double[][][][] dailyData = new double[MAX_YEARS + 1][12][MONTH_AVERAGE_INDEX + 1][DATA_TYPES_LENGTH]; //Daily Averages |
- private double[][] yearlyData = new double[MAX_YEARS][DATA_TYPES_LENGTH]; //Yearly summaries |
+    // All monthly data for observedYears years. (Max out, i.e. cap output, at MAX_YEARS yrs)
+ private double[][][][] dailyData = new double[MAX_YEARS + 1][12][MONTH_AVERAGE_INDEX + 1][DATA_TYPES_LENGTH]; //Daily Averages |
+ private double[][] yearlyData = new double[MAX_YEARS][DATA_TYPES_LENGTH]; //Yearly summaries |
|
- // Montly averages, over entire observed years period, for prcp, dur, tp, ip, tmax, tmin, rad, w-vl, w-dir, tdew. |
- private double[][] monthlyAverage = new double[12][DATA_TYPES_LENGTH]; |
+    // Monthly averages, over entire observed years period, for prcp, dur, tp, ip, tmax, tmin, rad, w-vl, w-dir, tdew.
+ private double[][] monthlyAverage = new double[12][DATA_TYPES_LENGTH]; |
|
- public static final int[] monthDays = {31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31}; |
- private String messages = ""; |
- private boolean badClimateData = false; |
- private String stationName = ""; |
+ public static final int[] monthDays = {31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31}; |
+ private String messages = ""; |
+ private boolean badClimateData = false; |
+ private String stationName = ""; |
|
- // Entire observedYears years averaged by month |
- private double[][] observedMmonthlyAverage = new double[12][OBS_MONTHLY_LENGTH]; |
+ // Entire observedYears years averaged by month |
+ private double[][] observedMmonthlyAverage = new double[12][OBS_MONTHLY_LENGTH]; |
|
- // How many years total in the file |
- private int observedYears = 0; |
+ // How many years total in the file |
+ private int observedYears = 0; |
|
- private int yearCount = 0; |
+ private int yearCount = 0; |
|
- private int elevation = 0; |
- private String climateVersion; |
- private int yearsSimulated; |
- private int beginningYear; |
- private double annualAvgPrecip = Double.NaN; |
+ private int elevation = 0; |
+ private String climateVersion; |
+ private int yearsSimulated; |
+ private int beginningYear; |
+ private double annualAvgPrecip = Double.NaN; |
|
- public CligenData(String cliFileData) { |
- try { |
- readClimateData(cliFileData); |
- } catch (IOException ex) { |
- Logger.getLogger(WindGenData.class.getName()).log(Level.SEVERE, null, ex); |
- } |
+ public CligenData(String cliFileData) { |
+ try { |
+ readClimateData(cliFileData); |
+ } catch (IOException ex) { |
+ Logger.getLogger(WindGenData.class.getName()).log(Level.SEVERE, null, ex); |
+ } |
+ } |
+ |
+ public boolean badClimateData() { |
+ return badClimateData; |
+ } |
+ |
+ public String cligenDataMessages() { |
+ return messages; |
+ } |
+ |
+ /** |
+ * Calculates if not yet calculated, the annual average precipitation based on |
+ * "observed" monthly averages found at the top of the climate file. This does |
+ * not represent the actual yearly or yearsSimulated averages, which can be |
+ * found in other functions. |
+ * |
+ * @return Returns a double value representing a calculated annual average |
+ * precipitation based on "observed" monthly values found in the top of the |
+ * climate file. |
+ */ |
+ public double annualAvgPrecip() { |
+ if (Double.isNaN(annualAvgPrecip)) { |
+ double avg = 0.0; |
+ for (int i = 0; i < 12; i++) { |
+ avg += observedMmonthlyAverage[i][3]; |
+ } |
+ annualAvgPrecip = avg; // / 12.0; |
} |
|
- public boolean badClimateData() { |
- return badClimateData; |
+ return annualAvgPrecip; |
+ } |
+ |
+ public int observedYears() { |
+ return observedYears; |
+ } |
+ |
+ public int yearsSimulated() { |
+ return yearsSimulated; |
+ } |
+ |
+ public int yearsInFile() { |
+ return yearCount; |
+ } |
+ |
+ public int elevation() { |
+ return elevation; |
+ } |
+ |
+ public String climateVersion() { |
+ return climateVersion; |
+ } |
+ |
+ public int beginningYear() { |
+ return beginningYear; |
+ } |
+ |
+ public String stationName() { |
+ return stationName; |
+ } |
+ |
+ public double[][] monthlyAverages() { |
+ return monthlyAverage; |
+ } |
+ |
+ public double observedAnnualAverage(int dataType) { |
+ double ret_val = 0.0; |
+ |
+ for (int i = 0; i < 12; i++) { |
+ ret_val += observedMonthlyAverages(i, dataType); |
} |
|
- public String cligenDataMessages() { |
- return messages; |
+ ret_val /= 12.0; |
+ |
+ return ret_val; |
+ } |
+ |
+ public double[][] observedMonthlyAverages() { |
+ return observedMmonthlyAverage; |
+ } |
+ |
+ public double[] observedMonthlyAverages(int month) { |
+ if ((month >= 0) && (month < 12)) { |
+ return observedMmonthlyAverage[month]; |
} |
|
- /** |
- * Calculates if not yet calculated, the annual average precipitation based |
- * on "observed" monthly averages found at the top of the climate file. This |
- * does not represent the actual yearly or yearsSimulated averages, which |
- * can be found in other functions. |
- * |
- * @return Returns a double value representing a calculated annual average |
- * precipitation based on "observed" monthly values found in the top of the |
- * climate file. |
- */ |
- public double annualAvgPrecip() { |
- if (Double.isNaN(annualAvgPrecip)) { |
- double avg = 0.0; |
- for (int i = 0; i < 12; i++) { |
- avg += observedMmonthlyAverage[i][3]; |
- } |
- annualAvgPrecip = avg; // / 12.0; |
+ return null; |
+ } |
+ |
+ public double observedMonthlyAverages(int month, int dataType) { |
+ if ((month >= 0) && (month < 12) && (dataType >= 0) && (dataType < OBS_MONTHLY_LENGTH)) { |
+ return observedMmonthlyAverage[month][dataType]; |
+ } |
+ return Double.NaN; |
+ } |
+ |
+ public double monthlyAverages(int month, int dataType) { |
+ if ((month >= 0) && (month < 12) && (dataType >= 0) && (dataType < OBS_MONTHLY_LENGTH)) { |
+ return this.monthlyAverage[month][dataType]; |
+ } |
+ return Double.NaN; |
+ } |
+ |
+ public double[] yearlySummary(int yearIndex) { |
+ if ((yearIndex < yearCount) && (yearIndex >= 0)) { |
+ return yearlyData[yearIndex]; |
+ } |
+ |
+ return null; |
+ } |
+ |
+ public double yearlySummary(int yearIndex, int dataType) { |
+ if ((yearIndex < yearCount) && (yearIndex >= 0) && (dataType >= 0) && (dataType < DATA_TYPES_LENGTH)) { |
+ return yearlyData[yearIndex][dataType]; |
+ } |
+ |
+ return Double.NaN; |
+ } |
+ |
+ public double simulationAverage(int dataType) { |
+ if ((dataType >= 0) && (dataType < DATA_TYPES_LENGTH)) { |
+ double total = 0.0; |
+ |
+ for (int i = 0; i < 12; i++) { |
+ total += monthlyAverage[i][dataType]; |
+ } |
+ |
+ return ((dataType == PRCP_INDEX) ? total : (total / 12.0)); |
+ } |
+ |
+ return Double.NaN; |
+ } |
+ |
+ private void readClimateData(String windFileData) throws IOException { |
+ BufferedReader climateData; |
+ climateData = new BufferedReader(new StringReader(windFileData)); |
+ |
+ String climateLine; |
+ int count = 0; |
+ int febCount = 0; |
+ boolean foundData = false; |
+ boolean foundEndOfCycle = false; |
+ |
+ while ((climateLine = climateData.readLine()) != null) { |
+ count++; |
+ if (count == 1) { |
+ climateVersion = climateLine; |
+ continue; |
+ } |
+ |
+ if (!foundData) { |
+ |
+ if (climateLine.contains("Station:")) { |
+ stationName = climateLine.substring(climateLine.indexOf("Station:") + 9, climateLine.indexOf("CLIGEN") - (climateLine.indexOf("Station:") + 9)).trim(); |
+ continue; |
} |
|
- return annualAvgPrecip; |
- } |
+ if (climateLine.contains("Latitude ")) { |
+ climateLine = climateData.readLine(); |
+ if (null != climateLine) { |
+ count++; |
+ String[] tokens = climateLine.trim().split("\\s+"); |
|
- public int observedYears() { |
- return observedYears; |
- } |
+ if (tokens.length >= 7) { |
+ elevation = Integer.parseInt(tokens[2]); |
+ observedYears = Integer.parseInt(tokens[3]); |
+ beginningYear = Integer.parseInt(tokens[4]); |
+ yearsSimulated = Integer.parseInt(tokens[5]); |
+ } else { |
+ messages += "\n#\nClimate file is invalid. Data line that follows the headers on line " + count + " is incomplete."; |
+ badClimateData = true; |
+ break; |
+ } |
|
- public int yearsSimulated() { |
- return yearsSimulated; |
- } |
- |
- public int yearsInFile() { |
- return yearCount; |
- } |
- |
- public int elevation() { |
- return elevation; |
- } |
- |
- public String climateVersion() { |
- return climateVersion; |
- } |
- |
- public int beginningYear() { |
- return beginningYear; |
- } |
- |
- public String stationName() { |
- return stationName; |
- } |
- |
- public double[][] monthlyAverages() { |
- return monthlyAverage; |
- } |
- |
- public double observedAnnualAverage(int dataType) { |
- double ret_val = 0.0; |
- |
- for (int i = 0; i < 12; i++) { |
- ret_val += observedMonthlyAverages(i, dataType); |
+ } else { |
+ messages += "\n#\nClimate file is invalid. No data line follows the headers on line " + count; |
+ badClimateData = true; |
+ break; |
+ } |
+ continue; |
} |
|
- ret_val /= 12.0; |
+ if (climateLine.contains("Observed monthly ave max temperature (C)")) { |
+ climateLine = climateData.readLine(); |
+ if (null != climateLine) { |
+ count++; |
+ String[] tokens = climateLine.trim().split("\\s+"); |
|
- return ret_val; |
- } |
+ if (tokens.length >= 12) { |
+ for (int i = 0; i < 12; i++) { |
+ observedMmonthlyAverage[i][OBS_MONTHLY_TMAX] = Double.parseDouble(tokens[i]); |
+ } |
+ } else { |
+ messages += "\n#\nClimate file is invalid. Data line that follows the observed monthly average max temperatures header on line " + count + " is incomplete."; |
+ badClimateData = true; |
+ break; |
+ } |
|
- public double[][] observedMonthlyAverages() { |
- return observedMmonthlyAverage; |
- } |
- |
- public double[] observedMonthlyAverages(int month) { |
- if ((month >= 0) && (month < 12)) { |
- return observedMmonthlyAverage[month]; |
+ } else { |
+ messages += "\n#\nClimate file is invalid. No data line that follows the observed monthly average max temperatures header on line " + count; |
+ badClimateData = true; |
+ break; |
+ } |
+ continue; |
} |
|
- return null; |
- } |
+ if (climateLine.contains("Observed monthly ave min temperature (C)")) { |
+ climateLine = climateData.readLine(); |
+ if (null != climateLine) { |
+ count++; |
+ String[] tokens = climateLine.trim().split("\\s+"); |
|
- public double observedMonthlyAverages(int month, int dataType) { |
- if ((month >= 0) && (month < 12) && (dataType >= 0) && (dataType < OBS_MONTHLY_LENGTH)) { |
- return observedMmonthlyAverage[month][dataType]; |
- } |
- return Double.NaN; |
- } |
+ if (tokens.length >= 12) { |
+ for (int i = 0; i < 12; i++) { |
+ observedMmonthlyAverage[i][OBS_MONTHLY_TMIN] = Double.parseDouble(tokens[i]); |
+ } |
+ } else { |
+ messages += "\n#\nClimate file is invalid. Data line that follows the observed monthly average min temperatures header on line " + count + " is incomplete."; |
+ badClimateData = true; |
+ break; |
+ } |
|
- public double monthlyAverages(int month, int dataType) { |
- if ((month >= 0) && (month < 12) && (dataType >= 0) && (dataType < OBS_MONTHLY_LENGTH)) { |
- return this.monthlyAverage[month][dataType]; |
- } |
- return Double.NaN; |
- } |
- |
- public double[] yearlySummary(int yearIndex) { |
- if ((yearIndex < yearCount) && (yearIndex >= 0)) { |
- return yearlyData[yearIndex]; |
+ } else { |
+ messages += "\n#\nClimate file is invalid. No data line that follows the observed monthly average min temperatures header on line " + count; |
+ badClimateData = true; |
+ break; |
+ } |
+ continue; |
} |
|
- return null; |
- } |
+ if (climateLine.contains("Observed monthly ave solar radiation (Langleys/day)")) { |
+ climateLine = climateData.readLine(); |
+ if (null != climateLine) { |
+ count++; |
+ String[] tokens = climateLine.trim().split("\\s+"); |
|
- public double yearlySummary(int yearIndex, int dataType) { |
- if ((yearIndex < yearCount) && (yearIndex >= 0) && (dataType >= 0) && (dataType < DATA_TYPES_LENGTH)) { |
- return yearlyData[yearIndex][dataType]; |
+ if (tokens.length >= 12) { |
+ for (int i = 0; i < 12; i++) { |
+ observedMmonthlyAverage[i][OBS_MONTHLY_RAD] = Double.parseDouble(tokens[i]); |
+ } |
+ } else { |
+ messages += "\n#\nClimate file is invalid. Data line that follows the Observed monthly ave solar radiation (Langleys/day) header on line " + count + " is incomplete."; |
+ badClimateData = true; |
+ break; |
+ } |
+ |
+ } else { |
+ messages += "\n#\nClimate file is invalid. No data line that follows the Observed monthly ave solar radiation (Langleys/day) header on line " + count; |
+ badClimateData = true; |
+ break; |
+ } |
+ continue; |
} |
|
- return Double.NaN; |
- } |
+ if (climateLine.contains("Observed monthly ave precipitation (mm)")) { |
+ climateLine = climateData.readLine(); |
+ if (null != climateLine) { |
+ count++; |
+ String[] tokens = climateLine.trim().split("\\s+"); |
|
- public double simulationAverage(int dataType) { |
- if ((dataType >= 0) && (dataType < DATA_TYPES_LENGTH)) { |
- double total = 0.0; |
- |
- for (int i = 0; i < 12; i++) { |
- total += monthlyAverage[i][dataType]; |
+ if (tokens.length >= 12) { |
+ for (int i = 0; i < 12; i++) { |
+ observedMmonthlyAverage[i][OBS_MONTHLY_PRCP] = Double.parseDouble(tokens[i]); |
+ } |
+ } else { |
+ messages += "\n#\nClimate file is invalid. Data line that follows the Observed monthly ave precipitation (mm) header on line " + count + " is incomplete."; |
+ badClimateData = true; |
+ break; |
} |
|
- return ((dataType == PRCP_INDEX) ? total : (total / 12.0)); |
+ } else { |
+ messages += "\n#\nClimate file is invalid. No data line that follows the Observed monthly ave precipitation (mm) header on line " + count; |
+ badClimateData = true; |
+ break; |
+ } |
+ continue; |
} |
|
- return Double.NaN; |
- } |
+ if (climateLine.contains("da mo year prcp")) { |
+ climateLine = climateData.readLine(); |
+ if (null != climateLine) { |
+ count++; |
+ foundData = true; |
+ } |
+ continue; |
+ } |
+ } else { |
+        // If we got to here, then we are now reading the daily data lines...
+ int currentYear = -1; |
+ int currentMonth = -1; |
+ int numDays = 0; |
+ int monthDayCount = 0; |
+ yearCount = 0; |
+ double[] runningTotal = new double[DATA_TYPES_LENGTH]; |
|
- private void readClimateData(String windFileData) throws IOException { |
- BufferedReader climateData; |
- climateData = new BufferedReader(new StringReader(windFileData)); |
+ while (true) { |
+ String[] tokens = climateLine.trim().split("\\s+"); |
|
- String climateLine; |
- int count = 0; |
- int febCount = 0; |
- boolean foundData = false; |
- boolean foundEndOfCycle = false; |
+ if (tokens.length >= DAILY_VALUE_TOKENS) { |
+ int day, month, year; |
|
- while ((climateLine = climateData.readLine()) != null) { |
- count++; |
- if (count == 1) { |
- climateVersion = climateLine; |
- continue; |
+ day = Integer.parseInt(tokens[0]) - 1; |
+ month = Integer.parseInt(tokens[1]) - 1; |
+ year = Integer.parseInt(tokens[2]) - 1; |
+ |
+ if (((year >= (MAX_YEARS - 1)) || (yearCount >= yearsSimulated)) && (month == 11) && (day == 30)) { |
+ foundEndOfCycle = true; |
} |
|
- if (!foundData) { |
+          //Catch month transition and save monthly averages...do this before checking year transition to avoid doing this twice.
+ if (month != currentMonth) { |
+ if (currentMonth == -1) { |
+ currentMonth = month; |
+ monthDayCount = 0; |
+ } else { |
+ for (int i = 0; i < DATA_TYPES_LENGTH; i++) { |
+ dailyData[currentYear][currentMonth][MONTH_AVERAGE_INDEX][i] /= ((i == PRCP_INDEX) ? 1 : monthDayCount); |
+ dailyData[MAX_YEAR_AVERAGE_INDEX][currentMonth][MONTH_AVERAGE_INDEX][i] += dailyData[currentYear][currentMonth][MONTH_AVERAGE_INDEX][i]; |
+ } |
+ monthDayCount = 0; |
+ currentMonth = month; |
+ } |
+ } |
|
- if (climateLine.contains("Station:")) { |
- stationName = climateLine.substring(climateLine.indexOf("Station:") + 9, climateLine.indexOf("CLIGEN") - (climateLine.indexOf("Station:") + 9)).trim(); |
- continue; |
+ //Catch year transition and save yearly averages. |
+ if (year != currentYear) { |
+ if (currentYear == -1) { |
+ currentYear = year; |
+ for (int i = 0; i < DATA_TYPES_LENGTH; i++) { |
+ runningTotal[i] = 0; |
} |
+ } else { |
+ for (int i = 0; i < DATA_TYPES_LENGTH; i++) { |
+ yearlyData[currentYear][i] = runningTotal[i] / ((i == PRCP_INDEX) ? 1 : numDays); |
+ runningTotal[i] = 0; |
+ } |
+ currentYear = year; |
+ } |
+ numDays = 0; |
+ yearCount++; |
+ } |
|
- if (climateLine.contains("Latitude ")) { |
- climateLine = climateData.readLine(); |
- if (null != climateLine) { |
- count++; |
- String[] tokens = climateLine.trim().split("\\s+"); |
+ if (year < MAX_YEARS) { |
+ for (int i = 0; i < DATA_TYPES_LENGTH; i++) { |
+ double dataValue = Double.parseDouble(tokens[i + 3]); |
|
- if (tokens.length >= 7) { |
- elevation = Integer.parseInt(tokens[2]); |
- observedYears = Integer.parseInt(tokens[3]); |
- beginningYear = Integer.parseInt(tokens[4]); |
- yearsSimulated = Integer.parseInt(tokens[5]); |
- } else { |
- messages += "\n#\nClimate file is invalid. Data line that follows the headers on line " + count + " is incomplete."; |
- badClimateData = true; |
- break; |
- } |
+ //Set value in matrix for this day/month/year/dataType |
+ dailyData[year][month][day][i] = dataValue; |
|
- } else { |
- messages += "\n#\nClimate file is invalid. No data line follows the headers on line " + count; |
- badClimateData = true; |
- break; |
- } |
- continue; |
- } |
+ //Add to MAX_YEAR_AVERAGE_INDEX averages for this day/month/year/dataType |
+ dailyData[MAX_YEAR_AVERAGE_INDEX][month][day][i] += dataValue; |
|
- if (climateLine.contains("Observed monthly ave max temperature (C)")) { |
- climateLine = climateData.readLine(); |
- if (null != climateLine) { |
- count++; |
- String[] tokens = climateLine.trim().split("\\s+"); |
+ //Add to yearly averages for this dataTypeIndex |
+ runningTotal[i] += dataValue; |
|
- if (tokens.length >= 12) { |
- for (int i = 0; i < 12; i++) { |
- observedMmonthlyAverage[i][OBS_MONTHLY_TMAX] = Double.parseDouble(tokens[i]); |
- } |
- } else { |
- messages += "\n#\nClimate file is invalid. Data line that follows the observed monthly average max temperatures header on line " + count + " is incomplete."; |
- badClimateData = true; |
- break; |
- } |
+ //Add to monthly averages for this year for this dataTypeIndex |
+ dailyData[year][month][MONTH_AVERAGE_INDEX][i] += dataValue; |
+ } |
|
- } else { |
- messages += "\n#\nClimate file is invalid. No data line that follows the observed monthly average max temperatures header on line " + count; |
- badClimateData = true; |
- break; |
- } |
- continue; |
- } |
+            // If it's February and a leap year, count the number of these that occurred in this data set.
+ if ((month == 1) && (day == 29)) { |
+ febCount++; |
+ } |
|
- if (climateLine.contains("Observed monthly ave min temperature (C)")) { |
- climateLine = climateData.readLine(); |
- if (null != climateLine) { |
- count++; |
- String[] tokens = climateLine.trim().split("\\s+"); |
+ //Increment number of days of data read this year. |
+ numDays++; |
|
- if (tokens.length >= 12) { |
- for (int i = 0; i < 12; i++) { |
- observedMmonthlyAverage[i][OBS_MONTHLY_TMIN] = Double.parseDouble(tokens[i]); |
- } |
- } else { |
- messages += "\n#\nClimate file is invalid. Data line that follows the observed monthly average min temperatures header on line " + count + " is incomplete."; |
- badClimateData = true; |
- break; |
- } |
+ //Increment number of days of data read this month/year |
+ monthDayCount++; |
|
- } else { |
- messages += "\n#\nClimate file is invalid. No data line that follows the observed monthly average min temperatures header on line " + count; |
- badClimateData = true; |
- break; |
- } |
- continue; |
- } |
+ } else { |
+ messages += "\n#\nClimate file was longer than " + MAX_YEARS + " years. Data clipped to " + MAX_YEARS + " years."; |
+ foundEndOfCycle = true; |
+ break; |
+ } |
|
- if (climateLine.contains("Observed monthly ave solar radiation (Langleys/day)")) { |
- climateLine = climateData.readLine(); |
- if (null != climateLine) { |
- count++; |
- String[] tokens = climateLine.trim().split("\\s+"); |
+ } else { |
+ if (!foundEndOfCycle) { |
+ messages += "\n#\nMissing daily climate data on line " + count + "of the cligen data file. Expected 13 fields and found only " + tokens.length + " fields instead."; |
+ badClimateData = true; |
+ break; |
+ } |
+ } |
|
- if (tokens.length >= 12) { |
- for (int i = 0; i < 12; i++) { |
- observedMmonthlyAverage[i][OBS_MONTHLY_RAD] = Double.parseDouble(tokens[i]); |
- } |
- } else { |
- messages += "\n#\nClimate file is invalid. Data line that follows the Observed monthly ave solar radiation (Langleys/day) header on line " + count + " is incomplete."; |
- badClimateData = true; |
- break; |
- } |
+ climateLine = climateData.readLine(); |
+ if (null == climateLine) { |
+ break; |
+ } |
+ count++; |
+ } |
|
- } else { |
- messages += "\n#\nClimate file is invalid. No data line that follows the Observed monthly ave solar radiation (Langleys/day) header on line " + count; |
- badClimateData = true; |
- break; |
- } |
- continue; |
- } |
+ if (foundEndOfCycle) { |
|
- if (climateLine.contains("Observed monthly ave precipitation (mm)")) { |
- climateLine = climateData.readLine(); |
- if (null != climateLine) { |
- count++; |
- String[] tokens = climateLine.trim().split("\\s+"); |
+ //Get final year's averages since while loop will exit right after filling in the last year of data, but not do that year's averages. |
+ for (int i = 0; i < DATA_TYPES_LENGTH; i++) { |
+ yearlyData[currentYear][i] = runningTotal[i] / ((i == PRCP_INDEX) ? 1 : numDays); |
+ dailyData[currentYear][currentMonth][MONTH_AVERAGE_INDEX][i] /= ((i == PRCP_INDEX) ? 1 : monthDayCount); |
+ dailyData[MAX_YEAR_AVERAGE_INDEX][currentMonth][MONTH_AVERAGE_INDEX][i] += dailyData[currentYear][currentMonth][MONTH_AVERAGE_INDEX][i]; |
+ } |
|
- if (tokens.length >= 12) { |
- for (int i = 0; i < 12; i++) { |
- observedMmonthlyAverage[i][OBS_MONTHLY_PRCP] = Double.parseDouble(tokens[i]); |
- } |
- } else { |
- messages += "\n#\nClimate file is invalid. Data line that follows the Observed monthly ave precipitation (mm) header on line " + count + " is incomplete."; |
- badClimateData = true; |
- break; |
- } |
+ //////////////////////////////////////////////////////////////////////// |
+ // Calculates MAX_YEARS year averages...by day and by month for each dataType. |
+ // |
+ // Also calculate daily averages each month over the MAX_YEARS years...saves a |
+ // daily average for the entire MAX_YEARS years, so the MAX_YEAR_AVERAGE_INDEX line of this matrix contains an average for the entire simulation. |
+ ///////////////////////////////////////////////////////////////////////////////////////////////// |
+ for (int monthIndex = 0; monthIndex < 12; monthIndex++) { |
+ double[] monthlyAvg = new double[DATA_TYPES_LENGTH]; |
+ int febDayCounter = 0; |
|
- } else { |
- messages += "\n#\nClimate file is invalid. No data line that follows the Observed monthly ave precipitation (mm) header on line " + count; |
- badClimateData = true; |
- break; |
- } |
- continue; |
- } |
- |
- if (climateLine.contains("da mo year prcp")) { |
- climateLine = climateData.readLine(); |
- if (null != climateLine) { |
- count++; |
- foundData = true; |
- } |
- continue; |
- } |
- } else { |
- // If we got to here, then we are now reading the monthly data lines... |
- int currentYear = -1; |
- int currentMonth = -1; |
- int numDays = 0; |
- int monthDayCount = 0; |
- yearCount = 0; |
- double[] runningTotal = new double[DATA_TYPES_LENGTH]; |
- |
- while (true) { |
- String[] tokens = climateLine.trim().split("\\s+"); |
- |
- if (tokens.length >= DAILY_VALUE_TOKENS) { |
- int day, month, year; |
- |
- day = Integer.parseInt(tokens[0]) - 1; |
- month = Integer.parseInt(tokens[1]) - 1; |
- year = Integer.parseInt(tokens[2]) - 1; |
- |
- if (((year >= (MAX_YEARS - 1)) || (yearCount >= yearsSimulated)) && (month == 11) && (day == 30)) { |
- foundEndOfCycle = true; |
- } |
- |
- //Catch month transition and save yearly averages...do this before checking year transition to avoid doing this twice. |
- if (month != currentMonth) { |
- if (currentMonth == -1) { |
- currentMonth = month; |
- monthDayCount = 0; |
- } else { |
- for (int i = 0; i < DATA_TYPES_LENGTH; i++) { |
- dailyData[currentYear][currentMonth][MONTH_AVERAGE_INDEX][i] /= ((i == PRCP_INDEX) ? 1 : monthDayCount); |
- dailyData[MAX_YEAR_AVERAGE_INDEX][currentMonth][MONTH_AVERAGE_INDEX][i] += dailyData[currentYear][currentMonth][MONTH_AVERAGE_INDEX][i]; |
- } |
- monthDayCount = 0; |
- currentMonth = month; |
- } |
- } |
- |
- //Catch year transition and save yearly averages. |
- if (year != currentYear) { |
- if (currentYear == -1) { |
- currentYear = year; |
- for (int i = 0; i < DATA_TYPES_LENGTH; i++) { |
- runningTotal[i] = 0; |
- } |
- } else { |
- for (int i = 0; i < DATA_TYPES_LENGTH; i++) { |
- yearlyData[currentYear][i] = runningTotal[i] / ((i == PRCP_INDEX) ? 1 : numDays); |
- runningTotal[i] = 0; |
- } |
- currentYear = year; |
- } |
- numDays = 0; |
- yearCount++; |
- } |
- |
- if (year < MAX_YEARS) { |
- for (int i = 0; i < DATA_TYPES_LENGTH; i++) { |
- double dataValue = Double.parseDouble(tokens[i + 3]); |
- |
- //Set value in matrix for this day/month/year/dataType |
- dailyData[year][month][day][i] = dataValue; |
- |
- //Add to MAX_YEAR_AVERAGE_INDEX averages for this day/month/year/dataType |
- dailyData[MAX_YEAR_AVERAGE_INDEX][month][day][i] += dataValue; |
- |
- //Add to yearly averages for this dataTypeIndex |
- runningTotal[i] += dataValue; |
- |
- //Add to monthly averages for this year for this dataTypeIndex |
- dailyData[year][month][MONTH_AVERAGE_INDEX][i] += dataValue; |
- } |
- |
- // If its February and a leap year, count the number of these that occured in this data set. |
- if ((month == 1) && (day == 29)) { |
- febCount++; |
- } |
- |
- //Increment number of days of data read this year. |
- numDays++; |
- |
- //Increment number of days of data read this month/year |
- monthDayCount++; |
- |
- } else { |
- messages += "\n#\nClimate file was longer than " + MAX_YEARS + " years. Data clipped to " + MAX_YEARS + " years."; |
- foundEndOfCycle = true; |
- break; |
- } |
- |
- } else { |
- if (!foundEndOfCycle) { |
- messages += "\n#\nMissing daily climate data on line " + count + "of the cligen data file. Expected 13 fields and found only " + tokens.length + " fields instead."; |
- badClimateData = true; |
- break; |
- } |
- } |
- |
- climateLine = climateData.readLine(); |
- if (null == climateLine) { |
- break; |
- } |
- count++; |
- } |
- |
- if (foundEndOfCycle) { |
- |
- //Get final year's averages since while loop will exit right after filling in the last year of data, but not do that year's averages. |
- for (int i = 0; i < DATA_TYPES_LENGTH; i++) { |
- yearlyData[currentYear][i] = runningTotal[i] / ((i == PRCP_INDEX) ? 1 : numDays); |
- dailyData[currentYear][currentMonth][MONTH_AVERAGE_INDEX][i] /= ((i == PRCP_INDEX) ? 1 : monthDayCount); |
- dailyData[MAX_YEAR_AVERAGE_INDEX][currentMonth][MONTH_AVERAGE_INDEX][i] += dailyData[currentYear][currentMonth][MONTH_AVERAGE_INDEX][i]; |
- } |
- |
- //////////////////////////////////////////////////////////////////////// |
- // Calculates MAX_YEARS year averages...by day and by month for each dataType. |
- // |
- // Also calculate daily averages each month over the MAX_YEARS years...saves a |
- // daily average for the entire MAX_YEARS years, so the MAX_YEAR_AVERAGE_INDEX line of this matrix contains an average for the entire simulation. |
- ///////////////////////////////////////////////////////////////////////////////////////////////// |
- for (int monthIndex = 0; monthIndex < 12; monthIndex++) { |
- double[] monthlyAvg = new double[DATA_TYPES_LENGTH]; |
- int febDayCounter = 0; |
- |
- //Average all month data |
- for (int dataTypeIndex = 0; dataTypeIndex < DATA_TYPES_LENGTH; dataTypeIndex++) { |
- dailyData[MAX_YEAR_AVERAGE_INDEX][monthIndex][MONTH_AVERAGE_INDEX][dataTypeIndex] /= yearsSimulated; |
- monthlyAverage[monthIndex][dataTypeIndex] = dailyData[MAX_YEAR_AVERAGE_INDEX][monthIndex][MONTH_AVERAGE_INDEX][dataTypeIndex]; |
- } |
+ //Average all month data |
+ for (int dataTypeIndex = 0; dataTypeIndex < DATA_TYPES_LENGTH; dataTypeIndex++) { |
+ dailyData[MAX_YEAR_AVERAGE_INDEX][monthIndex][MONTH_AVERAGE_INDEX][dataTypeIndex] /= yearsSimulated; |
+ monthlyAverage[monthIndex][dataTypeIndex] = dailyData[MAX_YEAR_AVERAGE_INDEX][monthIndex][MONTH_AVERAGE_INDEX][dataTypeIndex]; |
+ } |
|
// // For each day of this month. |
// for (int monthDay = 0; monthDay < monthDays[monthIndex]; monthDay++) { |
@@ -520,17 +520,17 @@ |
// // This value should match the one saved just above...check on this... |
// dailyData[MAX_YEAR_AVERAGE_INDEX][monthIndex][MONTH_AVERAGE_INDEX][dataTypeIndex] /= yearsSimulated; |
// } |
- } |
+ } |
|
- //Done with this file... |
- break; |
- } else { |
- messages += "\n#\nDid not find an entire " + MAX_YEARS + " year simulation of climate data."; |
- badClimateData = true; |
- break; |
- } |
- } |
+ //Done with this file... |
+ break; |
+ } else { |
+ messages += "\n#\nDid not find an entire " + MAX_YEARS + " year simulation of climate data."; |
+ badClimateData = true; |
+ break; |
} |
+ } |
+ } |
|
- } |
+ } |
} |
@@ -17,229 +17,229 @@ |
*/ |
public class IFCFile { |
|
- private static final Logger LOGGER = Logger.getLogger(IFCFile.class.getName()); |
- //Logs for errors and warnings. |
+ private static final Logger LOGGER = Logger.getLogger(IFCFile.class.getName()); |
+ //Logs for errors and warnings. |
|
- String errorsInIfcFile = ""; // list of variables adjusted on output |
- String mySavedFilename = null; |
+ String errorsInIfcFile = ""; // list of variables adjusted on output |
+ String mySavedFilename = null; |
|
- public String localPhase = "local phase"; |
- public String taxOrder = "tax order"; |
- public int soilLossTolerance = 0; |
- public double surfaceAlbedo = Double.NaN; |
- public double surfaceSlope = -1; |
- public double surfaceFragmentCover = 0; |
- public double bedrockDepth = 9999; |
- public double impermiableDepth = 9999; |
- public double layerThickness[]; |
- public double fractionSand[]; |
- public double fractionSilt[]; |
- public double fractionClay[]; |
- public double fractionRock[]; |
- public double veryCoarseSandFraction[]; |
- public double coarseSandFraction[]; |
- public double mediumSandFraction[]; |
- public double fineSandFraction[]; |
- public double veryFineSandFraction[]; |
- public double wetBulkDensity[]; |
- public double organicMaterial[]; |
- public double soilpH[]; |
- public double calciumCarbonateEquivalent[]; |
- public double cationExchangeCapacity[]; |
- public double linearExtensibility[]; |
- public double aggregateMeanDiameter[]; |
- public double aggregateStdDeviation[]; |
- public double maxAggregateSize[]; |
- public double minAggregateSize[]; |
- public double aggregateDensity[]; |
- public double aggregateStability[]; |
- public double crustThickness = 0.01; |
- public double crustDensity = Double.NaN; |
- public double crustStability = Double.NaN; |
- public double crustFraction = 0; |
- public double crustLooseMaterialMass = 0; |
- public double crustLooseMaterialFraction = 0; |
- public double randomRoughness = 4.0; |
- public double roughnessOrientation = 0; |
- public double roughnessHeight = 0; |
- public double roughnessSpacing = 10.0; |
- public double roughnessWidth = 10.0; |
- public double initialBulkDensity[]; |
- public double initialSWC[]; |
- public double saturatedSWC[]; |
- public double fieldCapacitySWC[]; |
- public double wiltingPointSWC[]; |
- public double soilCB[]; |
- public double airEntryPotential[]; |
- public double saturatedHydraulicConductivity[]; |
- public int numberOfSoilLayers; |
- public double layerDepthBottom[]; |
- public double layerDepthTop[]; |
- public double dryBulkDensity[]; |
- public double tenthBarSWC[]; |
- public String state; |
- public String county; |
- public String soilSurveyAreaName; |
- public String soilSurveyID; |
- public String mapUnitSymbol; |
- public String componentName; |
- public String componentPercent; |
- public String surfaceTexture; |
+ public String localPhase = "local phase"; |
+ public String taxOrder = "tax order"; |
+ public int soilLossTolerance = 0; |
+ public double surfaceAlbedo = Double.NaN; |
+ public double surfaceSlope = -1; |
+ public double surfaceFragmentCover = 0; |
+ public double bedrockDepth = 9999; |
+ public double impermiableDepth = 9999; |
+ public double layerThickness[]; |
+ public double fractionSand[]; |
+ public double fractionSilt[]; |
+ public double fractionClay[]; |
+ public double fractionRock[]; |
+ public double veryCoarseSandFraction[]; |
+ public double coarseSandFraction[]; |
+ public double mediumSandFraction[]; |
+ public double fineSandFraction[]; |
+ public double veryFineSandFraction[]; |
+ public double wetBulkDensity[]; |
+ public double organicMaterial[]; |
+ public double soilpH[]; |
+ public double calciumCarbonateEquivalent[]; |
+ public double cationExchangeCapacity[]; |
+ public double linearExtensibility[]; |
+ public double aggregateMeanDiameter[]; |
+ public double aggregateStdDeviation[]; |
+ public double maxAggregateSize[]; |
+ public double minAggregateSize[]; |
+ public double aggregateDensity[]; |
+ public double aggregateStability[]; |
+ public double crustThickness = 0.01; |
+ public double crustDensity = Double.NaN; |
+ public double crustStability = Double.NaN; |
+ public double crustFraction = 0; |
+ public double crustLooseMaterialMass = 0; |
+ public double crustLooseMaterialFraction = 0; |
+ public double randomRoughness = 4.0; |
+ public double roughnessOrientation = 0; |
+ public double roughnessHeight = 0; |
+ public double roughnessSpacing = 10.0; |
+ public double roughnessWidth = 10.0; |
+ public double initialBulkDensity[]; |
+ public double initialSWC[]; |
+ public double saturatedSWC[]; |
+ public double fieldCapacitySWC[]; |
+ public double wiltingPointSWC[]; |
+ public double soilCB[]; |
+ public double airEntryPotential[]; |
+ public double saturatedHydraulicConductivity[]; |
+ public int numberOfSoilLayers; |
+ public double layerDepthBottom[]; |
+ public double layerDepthTop[]; |
+ public double dryBulkDensity[]; |
+ public double tenthBarSWC[]; |
+ public String state; |
+ public String county; |
+ public String soilSurveyAreaName; |
+ public String soilSurveyID; |
+ public String mapUnitSymbol; |
+ public String componentName; |
+ public String componentPercent; |
+ public String surfaceTexture; |
|
- public IFCFile(String fileData) throws IOException { |
- readIfc(fileData); |
+ public IFCFile(String fileData) throws IOException { |
+ readIfc(fileData); |
+ } |
+ |
+ /** |
+ * ******************************************************************************************************** |
+ */ |
+ /* reads line from file skipping comments */ |
+ private String readLine(BufferedReader inpf) { |
+ String line; |
+ try { |
+ line = inpf.readLine(); |
+ } catch (IOException e) { |
+ return null; |
} |
+ if (line == null || line.length() == 0) { |
+ return null; |
+ } |
+ if (line.charAt(0) == '#') { |
+ line = readLine(inpf); |
+ } |
+ return line; |
+ } |
|
- /** |
- * ******************************************************************************************************** |
- */ |
- /* reads line from file skipping comments */ |
- private String readLine(BufferedReader inpf) { |
- String line; |
- try { |
- line = inpf.readLine(); |
- } catch (IOException e) { |
- return null; |
- } |
- if (line == null || line.length() == 0) { |
- return null; |
- } |
- if (line.charAt(0) == '#') { |
- line = readLine(inpf); |
- } |
- return line; |
+ /** |
+ * ******************************************************************************************************** |
+ */ |
+ /* reads in a single double */ |
+ private double readDouble(BufferedReader inpf) { |
+ String line = readLine(inpf); |
+ return Double.valueOf(line.trim()).doubleValue(); |
+ } |
+ |
+ /** |
+ * ******************************************************************************************************** |
+ */ |
+ /* reads in a line of doubles */ |
+ private double[] readDoubles(BufferedReader inpf, int numlay) { |
+ double rtnval[] = new double[numlay]; |
+ String line = readLine(inpf); |
+ java.util.StringTokenizer st = new java.util.StringTokenizer(line, " "); |
+ for (int ldx = 0; ldx < numlay; ldx++) { |
+ String dblstr = st.nextToken(); |
+ rtnval[ldx] = Double.valueOf(dblstr.trim()).doubleValue(); |
} |
+ return rtnval; |
|
- /** |
- * ******************************************************************************************************** |
- */ |
- /* reads in a single double */ |
- private double readDouble(BufferedReader inpf) { |
- String line = readLine(inpf); |
- return Double.valueOf(line.trim()).doubleValue(); |
+ } |
+ |
+ /** |
+ * ******************************************************************************************************** |
+ */ |
+ /* parse Soil ID line */ |
+ private void prsSoilID(String inplin, boolean newFlg) { |
+ if (!newFlg) { |
+ if (!inplin.startsWith("Soil ID ")) { |
+ return; |
+ } |
+ inplin = inplin.substring(8); |
} |
+ soilSurveyID = inplin.substring(0, inplin.indexOf('-')); |
+ inplin = inplin.substring(inplin.indexOf('-') + 1); |
+ mapUnitSymbol = inplin.substring(0, inplin.indexOf('-')); |
+ inplin = inplin.substring(inplin.indexOf('-') + 1); |
+ componentName = inplin.substring(0, inplin.indexOf('-')); |
+ inplin = inplin.substring(inplin.indexOf('-') + 1); |
+ componentPercent = inplin.substring(0, inplin.indexOf('-')); |
+ inplin = inplin.substring(inplin.indexOf('-') + 1); |
+ surfaceTexture = inplin.substring(0, inplin.indexOf('-')); |
+ inplin = inplin.substring(inplin.indexOf('-') + 1); |
+ state = inplin.substring(0, inplin.indexOf('-')); |
+ inplin = inplin.substring(inplin.indexOf('-') + 1); |
+ county = inplin.substring(0, inplin.indexOf('-')); |
+ inplin = inplin.substring(inplin.indexOf('-') + 1); |
+ soilSurveyAreaName = inplin.trim(); |
+ } |
|
- /** |
- * ******************************************************************************************************** |
- */ |
- /* reads in a line of doubles */ |
- private double[] readDoubles(BufferedReader inpf, int numlay) { |
- double rtnval[] = new double[numlay]; |
- String line = readLine(inpf); |
- java.util.StringTokenizer st = new java.util.StringTokenizer(line, " "); |
- for (int ldx = 0; ldx < numlay; ldx++) { |
- String dblstr = st.nextToken(); |
- rtnval[ldx] = Double.valueOf(dblstr.trim()).doubleValue(); |
- } |
- return rtnval; |
+ /** |
+ * ******************************************************************************************************** |
+ */ |
+ /* reads ifc file */ |
+ private void readNewIFC(BufferedReader inpf) { |
+ try { |
+ int nsl; |
|
- } |
+ prsSoilID(readLine(inpf), true); |
|
- /** |
- * ******************************************************************************************************** |
- */ |
- /* parse Soil ID line */ |
- private void prsSoilID(String inplin, boolean newFlg) { |
- if (!newFlg) { |
- if (!inplin.startsWith("Soil ID ")) { |
- return; |
- } |
- inplin = inplin.substring(8); |
- } |
- soilSurveyID = inplin.substring(0, inplin.indexOf('-')); |
- inplin = inplin.substring(inplin.indexOf('-') + 1); |
- mapUnitSymbol = inplin.substring(0, inplin.indexOf('-')); |
- inplin = inplin.substring(inplin.indexOf('-') + 1); |
- componentName = inplin.substring(0, inplin.indexOf('-')); |
- inplin = inplin.substring(inplin.indexOf('-') + 1); |
- componentPercent = inplin.substring(0, inplin.indexOf('-')); |
- inplin = inplin.substring(inplin.indexOf('-') + 1); |
- surfaceTexture = inplin.substring(0, inplin.indexOf('-')); |
- inplin = inplin.substring(inplin.indexOf('-') + 1); |
- state = inplin.substring(0, inplin.indexOf('-')); |
- inplin = inplin.substring(inplin.indexOf('-') + 1); |
- county = inplin.substring(0, inplin.indexOf('-')); |
- inplin = inplin.substring(inplin.indexOf('-') + 1); |
- soilSurveyAreaName = inplin.trim(); |
- } |
+ localPhase = readLine(inpf); |
+ taxOrder = readLine(inpf); |
|
- /** |
- * ******************************************************************************************************** |
- */ |
- /* reads ifc file */ |
- private void readNewIFC(BufferedReader inpf) { |
- try { |
- int nsl; |
- |
- prsSoilID(readLine(inpf), true); |
+ // soilLossTolerance = Integer.valueOf(readLine(inpf).trim()).intValue(); |
+ soilLossTolerance = Double.valueOf(readLine(inpf).trim()).intValue(); |
+ // soilLossTolerance = readDouble(inpf); |
+ surfaceAlbedo = readDouble(inpf); |
+ surfaceSlope = readDouble(inpf); |
+ surfaceFragmentCover = readDouble(inpf); |
|
- localPhase = readLine(inpf); |
- taxOrder = readLine(inpf); |
+ bedrockDepth = readDouble(inpf); |
+ impermiableDepth = readDouble(inpf); |
|
- // soilLossTolerance = Integer.valueOf(readLine(inpf).trim()).intValue(); |
- soilLossTolerance = Double.valueOf(readLine(inpf).trim()).intValue(); |
- // soilLossTolerance = readDouble(inpf); |
- surfaceAlbedo = readDouble(inpf); |
- surfaceSlope = readDouble(inpf); |
- surfaceFragmentCover = readDouble(inpf); |
+ nsl = Integer.valueOf(readLine(inpf).trim()).intValue(); |
+ layerThickness = readDoubles(inpf, nsl); |
+ layerDepthTop = new double[nsl]; |
+ layerDepthBottom = new double[nsl]; |
|
- bedrockDepth = readDouble(inpf); |
- impermiableDepth = readDouble(inpf); |
+ fractionSand = readDoubles(inpf, nsl); |
+ fractionSilt = readDoubles(inpf, nsl); |
+ fractionClay = readDoubles(inpf, nsl); |
+ fractionRock = readDoubles(inpf, nsl); |
+ veryCoarseSandFraction = readDoubles(inpf, nsl); |
+ coarseSandFraction = readDoubles(inpf, nsl); |
+ mediumSandFraction = readDoubles(inpf, nsl); |
+ fineSandFraction = readDoubles(inpf, nsl); |
+ veryFineSandFraction = readDoubles(inpf, nsl); |
+ wetBulkDensity = readDoubles(inpf, nsl); |
+ // dryBulkDensity = readDoubles(inpf, nsl); |
|
- nsl = Integer.valueOf(readLine(inpf).trim()).intValue(); |
- layerThickness = readDoubles(inpf, nsl); |
- layerDepthTop = new double[nsl]; |
- layerDepthBottom = new double[nsl]; |
+ organicMaterial = readDoubles(inpf, nsl); |
+ soilpH = readDoubles(inpf, nsl); |
+ calciumCarbonateEquivalent = readDoubles(inpf, nsl); |
+ cationExchangeCapacity = readDoubles(inpf, nsl); |
+ linearExtensibility = readDoubles(inpf, nsl); |
|
- fractionSand = readDoubles(inpf, nsl); |
- fractionSilt = readDoubles(inpf, nsl); |
- fractionClay = readDoubles(inpf, nsl); |
- fractionRock = readDoubles(inpf, nsl); |
- veryCoarseSandFraction = readDoubles(inpf, nsl); |
- coarseSandFraction = readDoubles(inpf, nsl); |
- mediumSandFraction = readDoubles(inpf, nsl); |
- fineSandFraction = readDoubles(inpf, nsl); |
- veryFineSandFraction = readDoubles(inpf, nsl); |
- wetBulkDensity = readDoubles(inpf, nsl); |
- // dryBulkDensity = readDoubles(inpf, nsl); |
+ aggregateMeanDiameter = readDoubles(inpf, nsl); |
+ aggregateStdDeviation = readDoubles(inpf, nsl); |
+ maxAggregateSize = readDoubles(inpf, nsl); |
+ minAggregateSize = readDoubles(inpf, nsl); |
+ aggregateDensity = readDoubles(inpf, nsl); |
+ aggregateStability = readDoubles(inpf, nsl); |
|
- organicMaterial = readDoubles(inpf, nsl); |
- soilpH = readDoubles(inpf, nsl); |
- calciumCarbonateEquivalent = readDoubles(inpf, nsl); |
- cationExchangeCapacity = readDoubles(inpf, nsl); |
- linearExtensibility = readDoubles(inpf, nsl); |
+ crustThickness = readDouble(inpf); |
+ crustDensity = readDouble(inpf); |
+ crustStability = readDouble(inpf); |
+ crustFraction = readDouble(inpf); |
+ crustLooseMaterialMass = readDouble(inpf); |
+ crustLooseMaterialFraction = readDouble(inpf); |
|
- aggregateMeanDiameter = readDoubles(inpf, nsl); |
- aggregateStdDeviation = readDoubles(inpf, nsl); |
- maxAggregateSize = readDoubles(inpf, nsl); |
- minAggregateSize = readDoubles(inpf, nsl); |
- aggregateDensity = readDoubles(inpf, nsl); |
- aggregateStability = readDoubles(inpf, nsl); |
+ randomRoughness = readDouble(inpf); |
+ roughnessOrientation = readDouble(inpf); |
+ roughnessHeight = readDouble(inpf); |
+ roughnessSpacing = readDouble(inpf); |
+ roughnessWidth = readDouble(inpf); |
|
- crustThickness = readDouble(inpf); |
- crustDensity = readDouble(inpf); |
- crustStability = readDouble(inpf); |
- crustFraction = readDouble(inpf); |
- crustLooseMaterialMass = readDouble(inpf); |
- crustLooseMaterialFraction = readDouble(inpf); |
+ initialBulkDensity = readDoubles(inpf, nsl); |
+ initialSWC = readDoubles(inpf, nsl); |
+ saturatedSWC = readDoubles(inpf, nsl); |
+ fieldCapacitySWC = readDoubles(inpf, nsl); |
+ wiltingPointSWC = readDoubles(inpf, nsl); |
|
- randomRoughness = readDouble(inpf); |
- roughnessOrientation = readDouble(inpf); |
- roughnessHeight = readDouble(inpf); |
- roughnessSpacing = readDouble(inpf); |
- roughnessWidth = readDouble(inpf); |
+ soilCB = readDoubles(inpf, nsl); |
+ airEntryPotential = readDoubles(inpf, nsl); |
+ saturatedHydraulicConductivity = readDoubles(inpf, nsl); |
|
- initialBulkDensity = readDoubles(inpf, nsl); |
- initialSWC = readDoubles(inpf, nsl); |
- saturatedSWC = readDoubles(inpf, nsl); |
- fieldCapacitySWC = readDoubles(inpf, nsl); |
- wiltingPointSWC = readDoubles(inpf, nsl); |
- |
- soilCB = readDoubles(inpf, nsl); |
- airEntryPotential = readDoubles(inpf, nsl); |
- saturatedHydraulicConductivity = readDoubles(inpf, nsl); |
- |
- //System.out.println("I_rIF:"); |
+ //System.out.println("I_rIF:"); |
// try { |
// while (true) { |
// String inpLin = inpf.readLine(); |
@@ -253,31 +253,31 @@ |
// } |
// } catch (IOException e) { |
// } |
- } finally { |
- try { |
- inpf.close(); |
- } catch (IOException e) { |
- LOGGER.log(Level.SEVERE, "Unable to close soil input stream.", e); |
- } |
- } |
- //System.out.println(errorsInIfcFile); |
+ } finally { |
+ try { |
+ inpf.close(); |
+ } catch (IOException e) { |
+ LOGGER.log(Level.SEVERE, "Unable to close soil input stream.", e); |
+ } |
} |
+ //System.out.println(errorsInIfcFile); |
+ } |
|
- /** |
- * |
- * @param fileData |
- * @throws java.io.IOException |
- */ |
- /* reads ifc file */ |
- public void readIfc(String fileData) throws IOException { |
- BufferedReader inpf; |
- inpf = new BufferedReader(new StringReader(fileData)); |
+ /** |
+ * |
+ * @param fileData |
+ * @throws java.io.IOException |
+ */ |
+ /* reads ifc file */ |
+ public void readIfc(String fileData) throws IOException { |
+ BufferedReader inpf; |
+ inpf = new BufferedReader(new StringReader(fileData)); |
|
- String firstLine = readLine(inpf); |
- if (firstLine.startsWith("Version")) { |
- readNewIFC(inpf); |
- } else { |
- throw new IOException("Invalid IFC File. No 'Version' tag found"); |
- } |
+ String firstLine = readLine(inpf); |
+ if (firstLine.startsWith("Version")) { |
+ readNewIFC(inpf); |
+ } else { |
+ throw new IOException("Invalid IFC File. No 'Version' tag found"); |
} |
+ } |
} |
@@ -23,194 +23,194 @@ |
*/ |
public class SOLFile { |
|
- private static final Logger LOGGER = Logger.getLogger(IFCFile.class.getName()); |
- //Logs for errors and warnings. |
+ private static final Logger LOGGER = Logger.getLogger(IFCFile.class.getName()); |
+ //Logs for errors and warnings. |
|
- private String errorsInSolFile = ""; // list of variables adjusted on output |
- public String comments = ""; |
+ private String errorsInSolFile = ""; // list of variables adjusted on output |
+ public String comments = ""; |
|
- //File Line 4 values |
- public String soilName; // a) soil name for current OFE or channel - character (slid) |
- public String soilTexture; // b) soil texture for current OFE or channel - character (texid) |
- public int numLayers; // c) number of soil layers for current OFE or channel - integer (nsl) |
- public double salb; // d) albedo of the bare dry surface soil on the current OFE or channel - real (salb) |
- public double sat; // e) initial saturation level of the soil profile porosity (m/m) - real (sat) |
- public double ki; // f) baseline interrill erodibility parameter (kg*s/m4) - real (ki) |
- public double kr; // g) baseline rill erodibility parameter (s/m) - real (kr) |
- public double shcrit; // h) baseline critical shear parameter (N/m2) - real (shcrit) |
- public double avke; // i) effective hydraulic conductivity of surface soil (mm/h) - real (avke) |
+ //File Line 4 values |
+ public String soilName; // a) soil name for current OFE or channel - character (slid) |
+ public String soilTexture; // b) soil texture for current OFE or channel - character (texid) |
+ public int numLayers; // c) number of soil layers for current OFE or channel - integer (nsl) |
+ public double salb; // d) albedo of the bare dry surface soil on the current OFE or channel - real (salb) |
+ public double sat; // e) initial saturation level of the soil profile porosity (m/m) - real (sat) |
+ public double ki; // f) baseline interrill erodibility parameter (kg*s/m4) - real (ki) |
+ public double kr; // g) baseline rill erodibility parameter (s/m) - real (kr) |
+ public double shcrit; // h) baseline critical shear parameter (N/m2) - real (shcrit) |
+ public double avke; // i) effective hydraulic conductivity of surface soil (mm/h) - real (avke) |
|
- //File starting line 5 values for each horizon/layer |
- public double[] solthk; // a) depth from soil surface to bottom of soil layer (mm) - real (solthk) |
- public double[] sand; // b) percentage of sand in the layer (%) - real (sand) |
- public double[] clay; // c) percentage of clay in the layer (%) - real (clay) |
- public double[] orgmat; // d) percentage of organic matter (volume) in the layer (%) - real (orgmat) |
- public double[] cec; // e) cation exchange capacity in the layer (meq/100 g of soil) - real (cec) |
- public double[] rfg; // f) percentage of rock fragments by volume in the layer (%) - real (rfg) |
+ //File starting line 5 values for each horizon/layer |
+ public double[] solthk; // a) depth from soil surface to bottom of soil layer (mm) - real (solthk) |
+ public double[] sand; // b) percentage of sand in the layer (%) - real (sand) |
+ public double[] clay; // c) percentage of clay in the layer (%) - real (clay) |
+ public double[] orgmat; // d) percentage of organic matter (volume) in the layer (%) - real (orgmat) |
+ public double[] cec; // e) cation exchange capacity in the layer (meq/100 g of soil) - real (cec) |
+ public double[] rfg; // f) percentage of rock fragments by volume in the layer (%) - real (rfg) |
|
- //File last line values: Bedrock restricting layer info (Most of the soils are not going to have any bedrock layer defined so this line ends up being all 0's) |
- public int restrictingFlag; // a) flag to indicate if present |
- public int restrictingType; // b) type |
- public double anisotropyRatio; // c) anisotropy ratio |
- public double ksat; // d) ksat |
+ //File last line values: Bedrock restricting layer info (Most of the soils are not going to have any bedrock layer defined so this line ends up being all 0's) |
+ public int restrictingFlag; // a) flag to indicate if present |
+ public int restrictingType; // b) type |
+ public double anisotropyRatio; // c) anisotropy ratio |
+ public double ksat; // d) ksat |
|
- public SOLFile(String fileData) throws IOException { |
- readSol(fileData); |
+ public SOLFile(String fileData) throws IOException { |
+ readSol(fileData); |
+ } |
+ |
+ /** |
+ * ******************************************************************************************************** |
+ */ |
+ /* reads line from file skipping comments */ |
+ private String readLine(BufferedReader inpf) { |
+ String line; |
+ try { |
+ line = inpf.readLine(); |
+ } catch (IOException e) { |
+ return null; |
+ } |
+ if (line == null || line.length() == 0) { |
+ return null; |
} |
|
- /** |
- * ******************************************************************************************************** |
- */ |
- /* reads line from file skipping comments */ |
- private String readLine(BufferedReader inpf) { |
- String line; |
- try { |
- line = inpf.readLine(); |
- } catch (IOException e) { |
- return null; |
- } |
- if (line == null || line.length() == 0) { |
- return null; |
+ return line; |
+ } |
+ |
+ // Sol file description |
+ // Line 1: version control number (95.7) - real (datver) |
+ // Line 2: a) User comment line - character*80, (solcom) |
+ // Line 3: a) number of overland flow elements(OFE's) or channels integer (ntemp) |
+ // b) flag to to use internal hydraulic conductivity adjustments - integer (ksflag) |
+ // 0 - do not use adjustments (conductivity will be held constant) |
+ // 1 - use internal adjustments |
+ // [ALWAYS "1 1" if generated by us... |
+ // Lines 4 & 5 are repeated for the number of OFE's or channels on Line 3a. |
+ // Line 4: a) soil name for current OFE or channel - character (slid) |
+ // b) soil texture for current OFE or channel - character (texid) |
+ // c) number of soil layers for current OFE or channel - integer (nsl) |
+ // d) albedo of the bare dry surface soil on the current OFE or channel - real (salb) |
+ // e) initial saturation level of the soil profile porosity (m/m) - real (sat) |
+ // f) baseline interrill erodibility parameter (kg*s/m4) - real (ki) |
+ // g) baseline rill erodibility parameter (s/m) - real (kr) |
+ // h) baseline critical shear parameter (N/m2) - real (shcrit) |
+ // i) effective hydraulic conductivity of surface soil (mm/h) - real (avke) |
+ // Line 5: (repeated for the number of soil layers indicated on Line 4c.) |
+ // a) depth from soil surface to bottom of soil layer (mm) - real (solthk) |
+ // b) percentage of sand in the layer (%) - real (sand) |
+ // c) percentage of clay in the layer (%) - real (clay) |
+ // d) percentage of organic matter (volume) in the layer (%) - real (orgmat) |
+ // e) cation exchange capacity in the layer (meq/100 g of soil) - real (cec) |
+ // f) percentage of rock fragments by volume in the layer (%) - real (rfg) |
+ // Line 6: Bedrock restricting layer info (Most of the soils are not going to have any bedrock layer defined so this line ends up being all 0's) |
+ // a) flag to indicate if present |
+ // b) type |
+ // c) anisotropy ratio |
+ // d) ksat |
+ private void readNewSol(BufferedReader inpf) throws IOException { |
+ try { |
+ //Should now be starting with line 4: |
+ String line4 = readLine(inpf); |
+ String[] line4Parts = line4.trim().split(" "); |
+ if (line4Parts.length != 9) { |
+ throw new IOException("SOL file does not contain a valid line 4. Expected 9 values on line 4 but found " + line4Parts.length); |
+ } |
+ |
+ // Read initial data about soil |
+ soilName = line4Parts[0]; |
+ soilTexture = line4Parts[1]; |
+ numLayers = Integer.parseInt(line4Parts[2]); |
+ salb = Double.parseDouble(line4Parts[3]); |
+ sat = Double.parseDouble(line4Parts[4]); |
+ ki = Double.parseDouble(line4Parts[5]); |
+ kr = Double.parseDouble(line4Parts[6]); |
+ shcrit = Double.parseDouble(line4Parts[7]); |
+ avke = Double.parseDouble(line4Parts[8]); |
+ |
+ solthk = new double[numLayers]; |
+ sand = new double[numLayers]; |
+ clay = new double[numLayers]; |
+ orgmat = new double[numLayers]; |
+ cec = new double[numLayers]; |
+ rfg = new double[numLayers]; |
+ // Read each soil horizon/layer present in file |
+ for (int i = 0; i < numLayers; i++) { |
+ String horizonLine = readLine(inpf); |
+ |
+ if ((null != horizonLine) && (!horizonLine.isEmpty())) { |
+ String[] lineParts = horizonLine.trim().split(" "); |
+ |
+ if (lineParts.length != 6) { |
+ throw new IOException("Invlalid horizon line found in SOL file on line " + (5 + i) |
+ + ". Line does not contain 6 values as expected, but has " + lineParts.length + " values instead."); |
+ } |
+ |
+ solthk[i] = Double.parseDouble(lineParts[0]); |
+ sand[i] = Double.parseDouble(lineParts[1]); |
+ clay[i] = Double.parseDouble(lineParts[2]); |
+ orgmat[i] = Double.parseDouble(lineParts[3]); |
+ cec[i] = Double.parseDouble(lineParts[4]); |
+ rfg[i] = Double.parseDouble(lineParts[5]); |
+ |
+ } else { |
+ throw new IOException("Invalid horizon line found in SOL file on line " + (5 + i) + "."); |
} |
|
- return line; |
+ } |
+ |
+ // Now read bedrock line |
+ String bedrockLine = readLine(inpf); |
+ if ((null != bedrockLine) && (!bedrockLine.isEmpty())) { |
+ String[] lineParts = bedrockLine.trim().split(" "); |
+ if (lineParts.length != 4) { |
+ throw new IOException("SOL file does not contain a valid bedrock line. Expected 4 values on line 4 but found " + lineParts.length); |
+ } |
+ |
+ restrictingFlag = Integer.parseInt(lineParts[0]); |
+ restrictingType = Integer.parseInt(lineParts[1]); |
+ anisotropyRatio = Double.parseDouble(lineParts[2]); |
+ ksat = Double.parseDouble(lineParts[2]); |
+ |
+ } else { |
+ throw new IOException("No bedrock line found in this SOL file."); |
+ } |
+ |
+ } finally { |
+ try { |
+ inpf.close(); |
+ } catch (IOException e) { |
+ LOGGER.log(Level.SEVERE, "Unable to close soil input stream.", e); |
+ } |
} |
+ //System.out.println(errorsInSolFile); |
+ } |
|
- // Sol file description |
- // Line 1: version control number (95.7) - real (datver) |
- // Line 2: a) User comment line - character*80, (solcom) |
- // Line 3: a) number of overland flow elements(OFE's) or channels integer (ntemp) |
- // b) flag to to use internal hydraulic conductivity adjustments - integer (ksflag) |
- // 0 - do not use adjustments (conductivity will be held constant) |
- // 1 - use internal adjustments |
- // [ALWAYS "1 1" if generated by us... |
- // Lines 4 & 5 are repeated for the number of OFE's or channels on Line 3a. |
- // Line 4: a) soil name for current OFE or channel - character (slid) |
- // b) soil texture for current OFE or channel - character (texid) |
- // c) number of soil layers for current OFE or channel - integer (nsl) |
- // d) albedo of the bare dry surface soil on the current OFE or channel - real (salb) |
- // e) initial saturation level of the soil profile porosity (m/m) - real (sat) |
- // f) baseline interrill erodibility parameter (kg*s/m4) - real (ki) |
- // g) baseline rill erodibility parameter (s/m) - real (kr) |
- // h) baseline critical shear parameter (N/m2) - real (shcrit) |
- // i) effective hydraulic conductivity of surface soil (mm/h) - real (avke) |
- // Line 5: (repeated for the number of soil layers indicated on Line 4c.) |
- // a) depth from soil surface to bottom of soil layer (mm) - real (solthk) |
- // b) percentage of sand in the layer (%) - real (sand) |
- // c) percentage of clay in the layer (%) - real (clay) |
- // d) percentage of organic matter (volume) in the layer (%) - real (orgmat) |
- // e) cation exchange capacity in the layer (meq/100 g of soil) - real (cec) |
- // f) percentage of rock fragments by volume in the layer (%) - real (rfg) |
- // Line 6: Bedrock restricting layer info (Most of the soils are not going to have any bedrock layer defined so this line ends up being all 0's) |
- // a) flag to indicate if present |
- // b) type |
- // c) anisotropy ratio |
- // d) ksat |
- private void readNewSol(BufferedReader inpf) throws IOException { |
- try { |
- //Should now be starting with line 4: |
- String line4 = readLine(inpf); |
- String[] line4Parts = line4.trim().split(" "); |
- if (line4Parts.length != 9) { |
- throw new IOException("SOL file does not contain a valid line 4. Expected 9 values on line 4 but found " + line4Parts.length); |
- } |
+ /** |
+ * |
+ * @param fileData |
+ * @throws java.io.IOException |
+ */ |
+ /* reads ifc file */ |
+ private void readSol(String fileData) throws IOException { |
+ BufferedReader inpf; |
+ inpf = new BufferedReader(new StringReader(fileData)); |
|
- // Read initial data about soil |
- soilName = line4Parts[0]; |
- soilTexture = line4Parts[1]; |
- numLayers = Integer.parseInt(line4Parts[2]); |
- salb = Double.parseDouble(line4Parts[3]); |
- sat = Double.parseDouble(line4Parts[4]); |
- ki = Double.parseDouble(line4Parts[5]); |
- kr = Double.parseDouble(line4Parts[6]); |
- shcrit = Double.parseDouble(line4Parts[7]); |
- avke = Double.parseDouble(line4Parts[8]); |
- |
- solthk = new double[numLayers]; |
- sand = new double[numLayers]; |
- clay = new double[numLayers]; |
- orgmat = new double[numLayers]; |
- cec = new double[numLayers]; |
- rfg = new double[numLayers]; |
- // Read each soil horizon/layer present in file |
- for (int i = 0; i < numLayers; i++) { |
- String horizonLine = readLine(inpf); |
- |
- if ((null != horizonLine) && (!horizonLine.isEmpty())) { |
- String[] lineParts = horizonLine.trim().split(" "); |
- |
- if (lineParts.length != 6) { |
- throw new IOException("Invlalid horizon line found in SOL file on line " + (5 + i) |
- + ". Line does not contain 6 values as expected, but has " + lineParts.length + " values instead."); |
- } |
- |
- solthk[i] = Double.parseDouble(lineParts[0]); |
- sand[i] = Double.parseDouble(lineParts[1]); |
- clay[i] = Double.parseDouble(lineParts[2]); |
- orgmat[i] = Double.parseDouble(lineParts[3]); |
- cec[i] = Double.parseDouble(lineParts[4]); |
- rfg[i] = Double.parseDouble(lineParts[5]); |
- |
- } else { |
- throw new IOException("Invalid horizon line found in SOL file on line " + (5 + i) + "."); |
- } |
- |
- } |
- |
- // Now read bedrock line |
- String bedrockLine = readLine(inpf); |
- if ((null != bedrockLine) && (!bedrockLine.isEmpty())) { |
- String[] lineParts = bedrockLine.trim().split(" "); |
- if (lineParts.length != 4) { |
- throw new IOException("SOL file does not contain a valid bedrock line. Expected 4 values on line 4 but found " + lineParts.length); |
- } |
- |
- restrictingFlag = Integer.parseInt(lineParts[0]); |
- restrictingType = Integer.parseInt(lineParts[1]); |
- anisotropyRatio = Double.parseDouble(lineParts[2]); |
- ksat = Double.parseDouble(lineParts[2]); |
- |
- } else { |
- throw new IOException("No bedrock line found in this SOL file."); |
- } |
- |
- } finally { |
- try { |
- inpf.close(); |
- } catch (IOException e) { |
- LOGGER.log(Level.SEVERE, "Unable to close soil input stream.", e); |
- } |
+ String firstLine = readLine(inpf); |
+ if (firstLine.startsWith("2006.2")) { |
+ comments = readLine(inpf); |
+ if ((null != comments) && (!comments.isEmpty())) { |
+ comments = comments.replace("Comments:", "").trim(); |
+ } |
+ String line3 = readLine(inpf); |
+ if (null != line3) { |
+ line3 = line3.trim(); |
+ if (!line3.equalsIgnoreCase("1 1")) { |
+ throw new IOException("Invalid SOL File. Line 3 shold be '1 1' but is: " + line3); |
} |
- //System.out.println(errorsInSolFile); |
+ } |
+ readNewSol(inpf); |
+ } else { |
+ throw new IOException("Invalid SOL File. Version tag not found or does not match '2006.2'"); |
} |
- |
- /** |
- * |
- * @param fileData |
- * @throws java.io.IOException |
- */ |
- /* reads ifc file */ |
- private void readSol(String fileData) throws IOException { |
- BufferedReader inpf; |
- inpf = new BufferedReader(new StringReader(fileData)); |
- |
- String firstLine = readLine(inpf); |
- if (firstLine.startsWith("2006.2")) { |
- comments = readLine(inpf); |
- if ((null != comments) && (!comments.isEmpty())) { |
- comments = comments.replace("Comments:", "").trim(); |
- } |
- String line3 = readLine(inpf); |
- if (null != line3) { |
- line3 = line3.trim(); |
- if (!line3.equalsIgnoreCase("1 1")) { |
- throw new IOException("Invalid SOL File. Line 3 shold be '1 1' but is: " + line3); |
- } |
- } |
- readNewSol(inpf); |
- } else { |
- throw new IOException("Invalid SOL File. Version tag not found or does not match '2006.2'"); |
- } |
- } |
+ } |
|
} |
@@ -25,310 +25,310 @@ |
*/ |
public class WindGenData { |
|
- public static final int MPS_THRESHOLD = 8; |
- public static final int MAX_YEARS = 100; |
- public static final int MAX_YEAR_AVERAGE_INDEX = MAX_YEARS; |
+ public static final int MPS_THRESHOLD = 8; |
+ public static final int MAX_YEARS = 100; |
+ public static final int MAX_YEAR_AVERAGE_INDEX = MAX_YEARS; |
|
- private double[][][] dailyData = new double[MAX_YEARS][12][31]; //Daily Averages |
- private double[][] monthlyAverage = new double[MAX_YEARS + 1][12]; |
- private double[] yearlyAverage = new double[MAX_YEARS + 1]; |
+ private double[][][] dailyData = new double[MAX_YEARS][12][31]; //Daily Averages |
+ private double[][] monthlyAverage = new double[MAX_YEARS + 1][12]; |
+ private double[] yearlyAverage = new double[MAX_YEARS + 1]; |
|
- private double[][][] dailyEnergyData = new double[MAX_YEARS + 1][12][31]; //Daily Averages |
- private double[][] monthlyEnergyData = new double[MAX_YEARS + 1][12]; |
- private double[] yearlyEnergyData = new double[MAX_YEARS + 1]; |
+ private double[][][] dailyEnergyData = new double[MAX_YEARS + 1][12][31]; //Daily Averages |
+ private double[][] monthlyEnergyData = new double[MAX_YEARS + 1][12]; |
+ private double[] yearlyEnergyData = new double[MAX_YEARS + 1]; |
|
- public static int[] monthDays = {31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31}; |
+ public static int[] monthDays = {31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31}; |
|
- private String messages = ""; |
- private boolean badWindData = false; |
- private String stationName = ""; |
- private String period = ""; |
- private int elevation = 0; |
- private Period windPeriod; |
- private int yearCount = 0; |
- private double avgPeriodWindEnergy = 0.0; |
+ private String messages = ""; |
+ private boolean badWindData = false; |
+ private String stationName = ""; |
+ private String period = ""; |
+ private int elevation = 0; |
+ private Period windPeriod; |
+ private int yearCount = 0; |
+ private double avgPeriodWindEnergy = 0.0; |
|
- public WindGenData(String windFileData) { |
+ public WindGenData(String windFileData) { |
|
- for (int year = 0; year < MAX_YEARS; year++) { |
- for (int month = 0; month < 12; month++) { |
- for (int day = 0; day < 31; day++) { |
- dailyData[year][month][day] = -1.0; |
- } |
- } |
+ for (int year = 0; year < MAX_YEARS; year++) { |
+ for (int month = 0; month < 12; month++) { |
+ for (int day = 0; day < 31; day++) { |
+ dailyData[year][month][day] = -1.0; |
} |
- |
- try { |
- readWindData(windFileData); |
- } catch (IOException ex) { |
- Logger.getLogger(WindGenData.class.getName()).log(Level.SEVERE, null, ex); |
- } |
+ } |
} |
|
- private void readWindData(String windFileData) throws IOException { |
- BufferedReader windData; |
- windData = new BufferedReader(new StringReader(windFileData)); |
+ try { |
+ readWindData(windFileData); |
+ } catch (IOException ex) { |
+ Logger.getLogger(WindGenData.class.getName()).log(Level.SEVERE, null, ex); |
+ } |
+ } |
|
- String windLine; |
- int count = 0; |
- int febCount = 0; |
- boolean foundData = false; |
- int lastYear = -1; |
- double yearTotal = 0; |
- double yearEnergyTotal = 0; |
- double monthEnergyTotal = 0; |
- int numDays = 0; |
- int numMonthDays = 0; |
- int lastMonth = -1; |
- double monthTotal = 0; |
- avgPeriodWindEnergy = 0.0; |
+ private void readWindData(String windFileData) throws IOException { |
+ BufferedReader windData; |
+ windData = new BufferedReader(new StringReader(windFileData)); |
|
- while ((windLine = windData.readLine()) != null) { |
- count++; |
- if (!foundData && windLine.startsWith("#")) { |
- if (windLine.contains("station:")) { |
- stationName = windLine.substring(windLine.indexOf("station: ") + 9); |
- } else { |
- if (windLine.contains("period: ")) { |
- period = windLine.substring(windLine.indexOf("period: ") + 8, windLine.indexOf("el:")).trim(); |
- String elString = windLine.substring(windLine.indexOf("el:") + 3); |
- elevation = Integer.parseInt(elString.replace("m", " ").trim()); |
+ String windLine; |
+ int count = 0; |
+ int febCount = 0; |
+ boolean foundData = false; |
+ int lastYear = -1; |
+ double yearTotal = 0; |
+ double yearEnergyTotal = 0; |
+ double monthEnergyTotal = 0; |
+ int numDays = 0; |
+ int numMonthDays = 0; |
+ int lastMonth = -1; |
+ double monthTotal = 0; |
+ avgPeriodWindEnergy = 0.0; |
+ |
+ while ((windLine = windData.readLine()) != null) { |
+ count++; |
+ if (!foundData && windLine.startsWith("#")) { |
+ if (windLine.contains("station:")) { |
+ stationName = windLine.substring(windLine.indexOf("station: ") + 9); |
+ } else { |
+ if (windLine.contains("period: ")) { |
+ period = windLine.substring(windLine.indexOf("period: ") + 8, windLine.indexOf("el:")).trim(); |
+ String elString = windLine.substring(windLine.indexOf("el:") + 3); |
+ elevation = Integer.parseInt(elString.replace("m", " ").trim()); |
+ } else { |
+ if (windLine.contains("day mo year dir hr1")) { |
+ if ((windLine = windData.readLine()) != null) { |
+ if (windLine.contains("deg m/s m/s")) { |
+ if ((windLine = windData.readLine()) != null) { |
+ if (windLine.contains("# ----------------")) { |
+ foundData = true; |
} else { |
- if (windLine.contains("day mo year dir hr1")) { |
- if ((windLine = windData.readLine()) != null) { |
- if (windLine.contains("deg m/s m/s")) { |
- if ((windLine = windData.readLine()) != null) { |
- if (windLine.contains("# ----------------")) { |
- foundData = true; |
- } else { |
- messages += "\n#\nMissing comment underline line after deg m/s m/s... line"; |
- badWindData = true; |
- break; |
- } |
- } else { |
- messages += "\n#\nMissing next line after deg m/s... line"; |
- badWindData = true; |
- break; |
- } |
- } |
- } else { |
- messages += "\n#\nMissing next line after day mo year... line"; |
- badWindData = true; |
- break; |
- } |
- } |
+ messages += "\n#\nMissing comment underline line after deg m/s m/s... line"; |
+ badWindData = true; |
+ break; |
} |
+ } else { |
+ messages += "\n#\nMissing next line after deg m/s... line"; |
+ badWindData = true; |
+ break; |
+ } |
} |
- continue; |
- } else { |
- if (foundData) { |
- String[] tokens = windLine.trim().split("\\s+"); |
- if (tokens.length >= 28) { |
- int day, month, year; |
- double dayAverage = 0; |
+ } else { |
+ messages += "\n#\nMissing next line after day mo year... line"; |
+ badWindData = true; |
+ break; |
+ } |
+ } |
+ } |
+ } |
+ continue; |
+ } else { |
+ if (foundData) { |
+ String[] tokens = windLine.trim().split("\\s+"); |
+ if (tokens.length >= 28) { |
+ int day, month, year; |
+ double dayAverage = 0; |
|
- day = Integer.parseInt(tokens[0]) - 1; |
- month = Integer.parseInt(tokens[1]) - 1; |
- year = Integer.parseInt(tokens[2]) - 1; |
+ day = Integer.parseInt(tokens[0]) - 1; |
+ month = Integer.parseInt(tokens[1]) - 1; |
+ year = Integer.parseInt(tokens[2]) - 1; |
|
- if (month != lastMonth) { |
- if (lastMonth != -1) { |
- monthlyAverage[lastYear][lastMonth] = monthTotal / numMonthDays; |
- monthlyAverage[MAX_YEARS][lastMonth] += monthlyAverage[lastYear][lastMonth]; |
- monthlyEnergyData[lastYear][lastMonth] = monthEnergyTotal; |
- } |
- monthTotal = 0; |
- numMonthDays = 0; |
- lastMonth = month; |
- monthEnergyTotal = 0; |
- } |
- |
- if (year != lastYear) { |
- if (lastYear != -1) { |
- yearlyAverage[lastYear] = yearTotal / numDays; |
- yearlyAverage[MAX_YEARS] += yearlyAverage[lastYear]; |
- yearlyEnergyData[lastYear] = yearEnergyTotal; |
- } |
- yearCount++; |
- yearTotal = 0; |
- numDays = 0; |
- lastYear = year; |
- yearEnergyTotal = 0; |
- } |
- |
- if ((year < MAX_YEARS)) { |
- double periodWindEnergy = 0.0; |
- |
- for (int i = 4; i < 28; i++) { |
- double dayValue = Double.parseDouble(tokens[i]); |
- dayAverage += dayValue; |
- if ((dayValue > MPS_THRESHOLD) && (((month >= 2) && (month <= 4)) || ((month >= 8) && (month <= 10)))) { |
- periodWindEnergy += (0.6 * (dayValue - MPS_THRESHOLD) * Math.pow(dayValue, 2.0) * 0.01157405741); |
- } |
- } |
- if (periodWindEnergy > 0.0) { |
- avgPeriodWindEnergy += periodWindEnergy; |
- } |
- |
- dayAverage /= 24.0; |
- |
- dailyData[year][month][day] = dayAverage; |
- //dailyData[MAX_YEAR_AVERAGE_INDEX][month][day] += dayAverage; |
- |
- if (dayAverage > MPS_THRESHOLD) { |
- dailyEnergyData[year][month][day] = 0.6 * Math.pow(dayAverage - MPS_THRESHOLD, 2); |
- } else { |
- dailyEnergyData[year][month][day] = 0; |
- } |
- |
- if ((month == 2) && (day == 29)) { |
- febCount++; |
- } |
- |
- dailyEnergyData[MAX_YEARS][month][day] += dailyEnergyData[year][month][day]; |
- |
- yearEnergyTotal += dailyEnergyData[year][month][day]; |
- monthEnergyTotal += dailyEnergyData[year][month][day]; |
- monthTotal += dayAverage; |
- numMonthDays++; |
- yearTotal += dayAverage; |
- numDays++; |
- } else { |
- messages += "\n#\nWind file was longer than 100 years. Data clipped to 100 years."; |
- break; |
- } |
- } else { |
- messages += "\n#\nMissing hourly wind data on line " + count + "of the windgen data file. Expected 28 fields and found only " + tokens.length + " fields instead."; |
- badWindData = true; |
- break; |
- } |
- } else { |
- // Blank line?? Regardless ignore it for now...maybe generate an error if we discover that this cannot/shoud not happen in a .win file. |
- } |
- } |
- } |
- |
- if (foundData && (yearCount > 0)) { |
- avgPeriodWindEnergy /= yearCount; |
- |
- monthlyAverage[lastYear][lastMonth] = monthTotal / numMonthDays; |
- yearlyAverage[lastYear] = yearTotal / numDays; |
- monthlyEnergyData[lastYear][lastMonth] = monthEnergyTotal; |
- yearlyEnergyData[lastYear] = yearEnergyTotal; |
- |
- yearlyAverage[MAX_YEARS] /= yearCount; |
- yearlyEnergyData[MAX_YEARS] /= yearCount; |
- |
- for (int i = 0; i < 12; i++) { |
- monthlyAverage[MAX_YEARS][i] /= yearCount; |
- monthlyEnergyData[MAX_YEARS][i] /= yearCount; |
+ if (month != lastMonth) { |
+ if (lastMonth != -1) { |
+ monthlyAverage[lastYear][lastMonth] = monthTotal / numMonthDays; |
+ monthlyAverage[MAX_YEARS][lastMonth] += monthlyAverage[lastYear][lastMonth]; |
+ monthlyEnergyData[lastYear][lastMonth] = monthEnergyTotal; |
+ } |
+ monthTotal = 0; |
+ numMonthDays = 0; |
+ lastMonth = month; |
+ monthEnergyTotal = 0; |
} |
|
+ if (year != lastYear) { |
+ if (lastYear != -1) { |
+ yearlyAverage[lastYear] = yearTotal / numDays; |
+ yearlyAverage[MAX_YEARS] += yearlyAverage[lastYear]; |
+ yearlyEnergyData[lastYear] = yearEnergyTotal; |
+ } |
+ yearCount++; |
+ yearTotal = 0; |
+ numDays = 0; |
+ lastYear = year; |
+ yearEnergyTotal = 0; |
+ } |
+ |
+ if ((year < MAX_YEARS)) { |
+ double periodWindEnergy = 0.0; |
+ |
+ for (int i = 4; i < 28; i++) { |
+ double dayValue = Double.parseDouble(tokens[i]); |
+ dayAverage += dayValue; |
+ if ((dayValue > MPS_THRESHOLD) && (((month >= 2) && (month <= 4)) || ((month >= 8) && (month <= 10)))) { |
+ periodWindEnergy += (0.6 * (dayValue - MPS_THRESHOLD) * Math.pow(dayValue, 2.0) * 0.01157405741); |
+ } |
+ } |
+ if (periodWindEnergy > 0.0) { |
+ avgPeriodWindEnergy += periodWindEnergy; |
+ } |
+ |
+ dayAverage /= 24.0; |
+ |
+ dailyData[year][month][day] = dayAverage; |
+ //dailyData[MAX_YEAR_AVERAGE_INDEX][month][day] += dayAverage; |
+ |
+ if (dayAverage > MPS_THRESHOLD) { |
+ dailyEnergyData[year][month][day] = 0.6 * Math.pow(dayAverage - MPS_THRESHOLD, 2); |
+ } else { |
+ dailyEnergyData[year][month][day] = 0; |
+ } |
+ |
+ if ((month == 2) && (day == 29)) { |
+ febCount++; |
+ } |
+ |
+ dailyEnergyData[MAX_YEARS][month][day] += dailyEnergyData[year][month][day]; |
+ |
+ yearEnergyTotal += dailyEnergyData[year][month][day]; |
+ monthEnergyTotal += dailyEnergyData[year][month][day]; |
+ monthTotal += dayAverage; |
+ numMonthDays++; |
+ yearTotal += dayAverage; |
+ numDays++; |
+ } else { |
+ messages += "\n#\nWind file was longer than 100 years. Data clipped to 100 years."; |
+ break; |
+ } |
+ } else { |
+            messages += "\n#\nMissing hourly wind data on line " + count + " of the windgen data file. Expected 28 fields and found only " + tokens.length + " fields instead.";
+ badWindData = true; |
+ break; |
+ } |
+ } else { |
+          // Blank line?? Regardless ignore it for now...maybe generate an error if we discover that this cannot/should not happen in a .win file.
} |
+ } |
} |
|
- public int yearsSimulated() { |
- return period().getYears(); |
+ if (foundData && (yearCount > 0)) { |
+ avgPeriodWindEnergy /= yearCount; |
+ |
+ monthlyAverage[lastYear][lastMonth] = monthTotal / numMonthDays; |
+ yearlyAverage[lastYear] = yearTotal / numDays; |
+ monthlyEnergyData[lastYear][lastMonth] = monthEnergyTotal; |
+ yearlyEnergyData[lastYear] = yearEnergyTotal; |
+ |
+ yearlyAverage[MAX_YEARS] /= yearCount; |
+ yearlyEnergyData[MAX_YEARS] /= yearCount; |
+ |
+ for (int i = 0; i < 12; i++) { |
+ monthlyAverage[MAX_YEARS][i] /= yearCount; |
+ monthlyEnergyData[MAX_YEARS][i] /= yearCount; |
+ } |
+ |
+ } |
+ } |
+ |
+ public int yearsSimulated() { |
+ return period().getYears(); |
+ } |
+ |
+ public int yearsInFile() { |
+ return yearCount; |
+ } |
+ |
+ public int elevation() { |
+ return elevation; |
+ } |
+ |
+ public double[][] monthlyAverages() { |
+ return monthlyAverage; |
+ } |
+ |
+ public double monthlyAverage(int year, int month) { |
+ if ((year < yearCount) && ((month >= 0) && (month < 12))) { |
+ return monthlyAverage[year][month]; |
} |
|
- public int yearsInFile() { |
- return yearCount; |
+ return Double.NaN; |
+ } |
+ |
+ public String stationName() { |
+ return stationName; |
+ } |
+ |
+ public double yearlyAverage(int yearIndex) { |
+ if ((yearIndex < yearCount) && (yearIndex >= 0)) { |
+ return yearlyAverage[yearIndex]; |
} |
|
- public int elevation() { |
- return elevation; |
+ return Double.NaN; |
+ } |
+ |
+ public double[] monthlyAverage(int yearIndex) { |
+ if ((yearIndex < yearCount) && (yearIndex >= 0)) { |
+ return monthlyAverage[yearIndex]; |
} |
|
- public double[][] monthlyAverages() { |
- return monthlyAverage; |
+ return null; |
+ } |
+ |
+ public double windEnergy(int month, int day, int period) { |
+ double energyVal = 0; |
+ |
+ if (period > 365) { |
+ energyVal = Double.NaN; |
+ } else { |
+ int currentMonth = month; |
+ int startDay = day; |
+ int dayCount = 0; |
+ |
+ while (dayCount < period) { |
+ for (int tDay = startDay; tDay < monthDays[currentMonth]; tDay++) { |
+ energyVal += (dailyEnergyData[MAX_YEARS][currentMonth][tDay] / yearCount); |
+ dayCount++; |
+ } |
+ startDay = 0; |
+ currentMonth++; |
+ if (currentMonth == 12) { |
+ currentMonth = 0; |
+ } |
+ } |
} |
|
- public double monthlyAverage(int year, int month) { |
- if ((year < yearCount) && ((month >= 0) && (month < 12))) { |
- return monthlyAverage[year][month]; |
- } |
+ return energyVal; |
+ } |
|
- return Double.NaN; |
+ public double simulationAverage() { |
+ //return yearlyAverage[MAX_YEARS]; |
+ return avgPeriodWindEnergy; |
+ } |
+ |
+ public double[] simulationMonthlyAverage() { |
+ return monthlyAverage[MAX_YEARS]; |
+ } |
+ |
+ public Period period() { |
+ if (null == windPeriod) { |
+ String periods[] = period.trim().split("-"); |
+ if (periods.length == 2) { |
+ DateTimeFormatter df = DateTimeFormatter.ofPattern("yyyyMMdd"); |
+ LocalDate startDate = LocalDate.parse(periods[0], df); |
+ LocalDate endDate = LocalDate.parse(periods[1], df); |
+ windPeriod = Period.between(startDate, endDate); |
+ |
+ } else { |
+ this.badWindData = true; |
+ this.messages = "\n#\nPeriod string found in file is not properly formatted."; |
+ } |
} |
|
- public String stationName() { |
- return stationName; |
- } |
+ return windPeriod; |
+ } |
|
- public double yearlyAverage(int yearIndex) { |
- if ((yearIndex < yearCount) && (yearIndex >= 0)) { |
- return yearlyAverage[yearIndex]; |
- } |
+ public boolean badWindData() { |
+ return badWindData; |
+ } |
|
- return Double.NaN; |
- } |
- |
- public double[] monthlyAverage(int yearIndex) { |
- if ((yearIndex < yearCount) && (yearIndex >= 0)) { |
- return monthlyAverage[yearIndex]; |
- } |
- |
- return null; |
- } |
- |
- public double windEnergy(int month, int day, int period) { |
- double energyVal = 0; |
- |
- if (period > 365) { |
- energyVal = Double.NaN; |
- } else { |
- int currentMonth = month; |
- int startDay = day; |
- int dayCount = 0; |
- |
- while (dayCount < period) { |
- for (int tDay = startDay; tDay < monthDays[currentMonth]; tDay++) { |
- energyVal += (dailyEnergyData[MAX_YEARS][currentMonth][tDay] / yearCount); |
- dayCount++; |
- } |
- startDay = 0; |
- currentMonth++; |
- if (currentMonth == 12) { |
- currentMonth = 0; |
- } |
- } |
- } |
- |
- return energyVal; |
- } |
- |
- public double simulationAverage() { |
- //return yearlyAverage[MAX_YEARS]; |
- return avgPeriodWindEnergy; |
- } |
- |
- public double[] simulationMonthlyAverage() { |
- return monthlyAverage[MAX_YEARS]; |
- } |
- |
- public Period period() { |
- if (null == windPeriod) { |
- String periods[] = period.trim().split("-"); |
- if (periods.length == 2) { |
- DateTimeFormatter df = DateTimeFormatter.ofPattern("yyyyMMdd"); |
- LocalDate startDate = LocalDate.parse(periods[0], df); |
- LocalDate endDate = LocalDate.parse(periods[1], df); |
- windPeriod = Period.between(startDate, endDate); |
- |
- } else { |
- this.badWindData = true; |
- this.messages = "\n#\nPeriod string found in file is not properly formatted."; |
- } |
- } |
- |
- return windPeriod; |
- } |
- |
- public boolean badWindData() { |
- return badWindData; |
- } |
- |
- public String windDataMessages() { |
- return messages; |
- } |
+ public String windDataMessages() { |
+ return messages; |
+ } |
} |
@@ -29,194 +29,194 @@ |
*/ |
public class ModelArchive { |
|
- private static final String FAILED_MESSAGE = "FAILED: "; |
+ private static final String FAILED_MESSAGE = "FAILED: "; |
|
- protected String service; |
- protected String status; |
- protected String ctime; // creation time |
- protected String etime; // expiration time |
- protected String req_ip; |
- protected String suid; |
- protected String filename; |
- protected JSONObject originalRequest; |
- protected JSONObject originalResponse; |
- protected String logFile; |
+ protected String service; |
+ protected String status; |
+ protected String ctime; // creation time |
+ protected String etime; // expiration time |
+ protected String req_ip; |
+ protected String suid; |
+ protected String filename; |
+ protected JSONObject originalRequest; |
+ protected JSONObject originalResponse; |
+ protected String logFile; |
|
- public ModelArchive(String suid, String ctime, String etime, String service, String status, String req_ip, String filename) { |
- this.ctime = ctime; |
- this.etime = etime; |
- this.service = service; |
- this.status = status; |
- this.req_ip = req_ip; |
- this.suid = suid; |
- this.filename = filename; |
+ public ModelArchive(String suid, String ctime, String etime, String service, String status, String req_ip, String filename) { |
+ this.ctime = ctime; |
+ this.etime = etime; |
+ this.service = service; |
+ this.status = status; |
+ this.req_ip = req_ip; |
+ this.suid = suid; |
+ this.filename = filename; |
+ } |
+ |
+ public ModelArchive(String suid, String ctime, String etime, String service, String status, String req_ip, String filename, byte[] fileData) throws IOException, JSONException { |
+ this.ctime = ctime; |
+ this.etime = etime; |
+ this.service = service; |
+ this.status = status; |
+ this.req_ip = req_ip; |
+ this.suid = suid; |
+ this.filename = filename; |
+ |
+ setFileData(fileData); |
+ } |
+ |
+ public ModelArchive(JSONObject inputData) throws JSONException { |
+ suid = inputData.getString(csip.ModelDataService.KEY_SUUID); |
+ ctime = inputData.getString(csip.ModelDataService.KEY_TSTAMP); |
+ etime = inputData.getString(csip.ModelDataService.KEY_EXPIRATION_DATE); |
+ service = inputData.getString(csip.ModelDataService.KEY_SERVICE_URL); |
+ status = inputData.getString(csip.ModelDataService.KEY_STATUS); |
+ req_ip = inputData.getString(csip.ModelDataService.KEY_REQ_IP); |
+ filename = suid + ".zip"; |
+ } |
+ |
+ protected final void setFileData(byte[] fileData) throws IOException, JSONException { |
+ originalRequest = getServiceRequest(fileData); |
+ originalResponse = getServiceResponse(fileData); |
+ logFile = getFileContents(fileData, ".log.txt"); |
+ } |
+ |
+ public void setFileDataEx(byte[] fileData) throws IOException, JSONException { |
+ setFileData(fileData); |
+ } |
+ |
+ public final String getFileContents(byte[] fileData, String fileName) throws IOException { |
+ String fileString = null; |
+ |
+ try (ZipInputStream zin = new ZipInputStream(new ByteArrayInputStream(fileData))) { |
+ ZipEntry entry; |
+ |
+ while ((entry = zin.getNextEntry()) != null) { |
+ if (entry.getName().contains(fileName)) { |
+ BufferedReader bReader = new BufferedReader(new InputStreamReader(zin)); |
+ StringBuilder fileContent = new StringBuilder(); |
+ String inputStr; |
+ while ((inputStr = bReader.readLine()) != null) { |
+ fileContent.append(inputStr).append(System.lineSeparator()); |
+ } |
+ fileString = fileContent.toString(); |
+ break; |
+ } |
+ } |
} |
|
- public ModelArchive(String suid, String ctime, String etime, String service, String status, String req_ip, String filename, byte[] fileData) throws IOException, JSONException { |
- this.ctime = ctime; |
- this.etime = etime; |
- this.service = service; |
- this.status = status; |
- this.req_ip = req_ip; |
- this.suid = suid; |
- this.filename = filename; |
+ return fileString; |
+ } |
|
- setFileData(fileData); |
+ protected JSONObject getServiceRequest(byte[] fileData) throws IOException, JSONException { |
+ JSONObject requestData; |
+ requestData = new JSONObject(getFileContents(fileData, ".request")); |
+ return requestData; |
+ } |
+ |
+ public String getOriginalRequest(String lookup) { |
+ String ret_val = ((originalResponseFailed()) ? FAILED_MESSAGE : ""); |
+ |
+ JSONArray results = originalRequest.optJSONArray("parameter"); |
+ |
+ if (null != results) { |
+ for (int i = 0; i < results.length(); i++) { |
+ JSONObject jsonObject = results.optJSONObject(i); |
+ if (null != jsonObject) { |
+ if (jsonObject.optString("name") != null) { |
+ if (jsonObject.optString("name").equalsIgnoreCase(lookup)) { |
+ if (jsonObject.optString("value") != null) { |
+ ret_val = jsonObject.optString("value"); |
+ } |
+ } |
+ } |
+ } |
+ } |
} |
|
- public ModelArchive(JSONObject inputData) throws JSONException { |
- suid = inputData.getString(csip.ModelDataService.KEY_SUUID); |
- ctime = inputData.getString(csip.ModelDataService.KEY_TSTAMP); |
- etime = inputData.getString(csip.ModelDataService.KEY_EXPIRATION_DATE); |
- service = inputData.getString(csip.ModelDataService.KEY_SERVICE_URL); |
- status = inputData.getString(csip.ModelDataService.KEY_STATUS); |
- req_ip = inputData.getString(csip.ModelDataService.KEY_REQ_IP); |
- filename = suid + ".zip"; |
+ return ret_val; |
+ } |
+ |
+ protected JSONObject getServiceResponse(byte[] fileData) throws IOException, JSONException { |
+ JSONObject requestData; |
+ requestData = new JSONObject(getFileContents(fileData, ".response")); |
+ return requestData; |
+ } |
+ |
+ public String getOriginalResponse(String lookup) { |
+ String ret_val = ((originalResponseFailed()) ? FAILED_MESSAGE : ""); |
+ |
+ JSONArray results = originalResponse.optJSONArray("result"); |
+ |
+ if (null != results) { |
+ for (int i = 0; i < results.length(); i++) { |
+ JSONObject jsonObject = results.optJSONObject(i); |
+ if (null != jsonObject) { |
+ if (jsonObject.optString("name") != null) { |
+ if (jsonObject.optString("name").equalsIgnoreCase(lookup)) { |
+ if (jsonObject.optString("value") != null) { |
+ ret_val = jsonObject.optString("value"); |
+ } |
+ } |
+ } |
+ } |
+ } |
} |
|
- protected final void setFileData(byte[] fileData) throws IOException, JSONException { |
- originalRequest = getServiceRequest(fileData); |
- originalResponse = getServiceResponse(fileData); |
- logFile = getFileContents(fileData, ".log.txt"); |
+ return ret_val; |
+ } |
+ |
+ public boolean originalResponseFailed() { |
+ boolean ret_val = true; |
+ JSONObject metaInfo = originalResponse.optJSONObject("metainfo"); |
+ |
+ if (null != metaInfo) { |
+ String status = metaInfo.optString("status"); |
+ |
+ if ((null != status) && (!status.isEmpty())) { |
+ ret_val = !status.equalsIgnoreCase("finished"); |
+ } |
} |
|
- public void setFileDataEx(byte[] fileData) throws IOException, JSONException { |
- setFileData(fileData); |
+ return ret_val; |
+ } |
+ |
+ public JSONObject getOriginalRequest() { |
+ return originalRequest; |
+ } |
+ |
+ public JSONObject getOriginalResponse() { |
+ return originalResponse; |
+ } |
+ |
+ public String getReqIP() { |
+ return req_ip; |
+ } |
+ |
+ public String getStatus() { |
+ return status; |
+ } |
+ |
+ public String getService() { |
+ return service; |
+ } |
+ |
+ public String getSUID() { |
+ return suid; |
+ } |
+ |
+ public String getCtime() { |
+ return ctime; |
+ } |
+ |
+ public String getEtime() { |
+ return etime; |
+ } |
+ |
+ public void saveFileData(byte[] fileData) throws IOException { |
+ if (null != fileData) { |
+ if (fileData.length > 0) { |
+ FileUtils.writeByteArrayToFile(new File(filename), fileData); |
+ } |
} |
- |
- public final String getFileContents(byte[] fileData, String fileName) throws IOException { |
- String fileString = null; |
- |
- try (ZipInputStream zin = new ZipInputStream(new ByteArrayInputStream(fileData))) { |
- ZipEntry entry; |
- |
- while ((entry = zin.getNextEntry()) != null) { |
- if (entry.getName().contains(fileName)) { |
- BufferedReader bReader = new BufferedReader(new InputStreamReader(zin)); |
- StringBuilder fileContent = new StringBuilder(); |
- String inputStr; |
- while ((inputStr = bReader.readLine()) != null) { |
- fileContent.append(inputStr).append(System.lineSeparator()); |
- } |
- fileString = fileContent.toString(); |
- break; |
- } |
- } |
- } |
- |
- return fileString; |
- } |
- |
- protected JSONObject getServiceRequest(byte[] fileData) throws IOException, JSONException { |
- JSONObject requestData; |
- requestData = new JSONObject(getFileContents(fileData, ".request")); |
- return requestData; |
- } |
- |
- public String getOriginalRequest(String lookup) { |
- String ret_val = ((originalResponseFailed()) ? FAILED_MESSAGE : ""); |
- |
- JSONArray results = originalRequest.optJSONArray("parameter"); |
- |
- if (null != results) { |
- for (int i = 0; i < results.length(); i++) { |
- JSONObject jsonObject = results.optJSONObject(i); |
- if (null != jsonObject) { |
- if (jsonObject.optString("name") != null) { |
- if (jsonObject.optString("name").equalsIgnoreCase(lookup)) { |
- if (jsonObject.optString("value") != null) { |
- ret_val = jsonObject.optString("value"); |
- } |
- } |
- } |
- } |
- } |
- } |
- |
- return ret_val; |
- } |
- |
- protected JSONObject getServiceResponse(byte[] fileData) throws IOException, JSONException { |
- JSONObject requestData; |
- requestData = new JSONObject(getFileContents(fileData, ".response")); |
- return requestData; |
- } |
- |
- public String getOriginalResponse(String lookup) { |
- String ret_val = ((originalResponseFailed()) ? FAILED_MESSAGE : ""); |
- |
- JSONArray results = originalResponse.optJSONArray("result"); |
- |
- if (null != results) { |
- for (int i = 0; i < results.length(); i++) { |
- JSONObject jsonObject = results.optJSONObject(i); |
- if (null != jsonObject) { |
- if (jsonObject.optString("name") != null) { |
- if (jsonObject.optString("name").equalsIgnoreCase(lookup)) { |
- if (jsonObject.optString("value") != null) { |
- ret_val = jsonObject.optString("value"); |
- } |
- } |
- } |
- } |
- } |
- } |
- |
- return ret_val; |
- } |
- |
- public boolean originalResponseFailed() { |
- boolean ret_val = true; |
- JSONObject metaInfo = originalResponse.optJSONObject("metainfo"); |
- |
- if (null != metaInfo) { |
- String status = metaInfo.optString("status"); |
- |
- if ((null != status) && (!status.isEmpty())) { |
- ret_val = !status.equalsIgnoreCase("finished"); |
- } |
- } |
- |
- return ret_val; |
- } |
- |
- public JSONObject getOriginalRequest() { |
- return originalRequest; |
- } |
- |
- public JSONObject getOriginalResponse() { |
- return originalResponse; |
- } |
- |
- public String getReqIP() { |
- return req_ip; |
- } |
- |
- public String getStatus() { |
- return status; |
- } |
- |
- public String getService() { |
- return service; |
- } |
- |
- public String getSUID() { |
- return suid; |
- } |
- |
- public String getCtime() { |
- return ctime; |
- } |
- |
- public String getEtime() { |
- return etime; |
- } |
- |
- public void saveFileData(byte[] fileData) throws IOException { |
- if (null != fileData) { |
- if (fileData.length > 0) { |
- FileUtils.writeByteArrayToFile(new File(filename), fileData); |
- } |
- } |
- } |
+ } |
} |
@@ -16,118 +16,118 @@ |
*/ |
public class ModelArchiveFactory { |
|
- public static ModelArchive getModelArchive(String serviceName, Document doc) { |
- ModelArchive model = null; |
+ public static ModelArchive getModelArchive(String serviceName, Document doc) { |
+ ModelArchive model = null; |
|
- model = getModelArchive(serviceName, doc, false); |
+ model = getModelArchive(serviceName, doc, false); |
|
- return model; |
+ return model; |
+ } |
+ |
+ public static ModelArchive getModelArchive(String serviceName, Document doc, boolean returnBasicArchive) { |
+ ModelArchive model = null; |
+ |
+ if (returnBasicArchive) { |
+ model = new ModelArchive(doc.getString("_id"), |
+ doc.getString("ctime"), |
+ doc.getString("etime"), |
+ serviceName, |
+ doc.getString("status"), |
+ doc.getString("req_ip"), |
+ doc.getString("filename") |
+ ); |
+ } else { |
+ if (serviceName.contains("weps/5.2")) { |
+ model = new WEPSModelArchive(doc.getString("_id"), |
+ doc.getString("ctime"), |
+ doc.getString("etime"), |
+ serviceName, |
+ doc.getString("status"), |
+ doc.getString("req_ip"), |
+ doc.getString("filename") |
+ ); |
+ } else { |
+ if (serviceName.contains("wepp/")) { |
+ model = new WEPPModelArchive(doc.getString("_id"), |
+ doc.getString("ctime"), |
+ doc.getString("etime"), |
+ serviceName, |
+ doc.getString("status"), |
+ doc.getString("req_ip"), |
+ doc.getString("filename") |
+ ); |
+ } else { |
+ //Not a service we are interested in... |
+ } |
+ } |
} |
|
- public static ModelArchive getModelArchive(String serviceName, Document doc, boolean returnBasicArchive) { |
- ModelArchive model = null; |
+ return model; |
+ } |
|
- if (returnBasicArchive) { |
- model = new ModelArchive(doc.getString("_id"), |
- doc.getString("ctime"), |
- doc.getString("etime"), |
- serviceName, |
- doc.getString("status"), |
- doc.getString("req_ip"), |
- doc.getString("filename") |
- ); |
- } else { |
- if (serviceName.contains("weps/5.2")) { |
- model = new WEPSModelArchive(doc.getString("_id"), |
- doc.getString("ctime"), |
- doc.getString("etime"), |
- serviceName, |
- doc.getString("status"), |
- doc.getString("req_ip"), |
- doc.getString("filename") |
- ); |
- } else { |
- if (serviceName.contains("wepp/")) { |
- model = new WEPPModelArchive(doc.getString("_id"), |
- doc.getString("ctime"), |
- doc.getString("etime"), |
- serviceName, |
- doc.getString("status"), |
- doc.getString("req_ip"), |
- doc.getString("filename") |
- ); |
- } else { |
- //Not a service we are interested in... |
- } |
- } |
- } |
+ public static ModelArchive getModelArchive(String serviceName, Document doc, byte[] fileData) throws IOException, JSONException { |
+ ModelArchive model = null; |
|
- return model; |
+ if (serviceName.contains("weps/5.2")) { |
+ model = new WEPSModelArchive(doc.getString("_id"), |
+ doc.getString("ctime"), |
+ doc.getString("etime"), |
+ serviceName, |
+ doc.getString("status"), |
+ doc.getString("req_ip"), |
+ doc.getString("filename"), |
+ fileData |
+ ); |
+ } else { |
+ if (serviceName.contains("wepp/")) { |
+ model = new WEPPModelArchive(doc.getString("_id"), |
+ doc.getString("ctime"), |
+ doc.getString("etime"), |
+ serviceName, |
+ doc.getString("status"), |
+ doc.getString("req_ip"), |
+ doc.getString("filename"), |
+ fileData |
+ ); |
+ } else { |
+ //Not a service we are interested in... |
+ } |
} |
|
- public static ModelArchive getModelArchive(String serviceName, Document doc, byte[] fileData) throws IOException, JSONException { |
- ModelArchive model = null; |
+ return model; |
+ } |
|
- if (serviceName.contains("weps/5.2")) { |
- model = new WEPSModelArchive(doc.getString("_id"), |
- doc.getString("ctime"), |
- doc.getString("etime"), |
- serviceName, |
- doc.getString("status"), |
- doc.getString("req_ip"), |
- doc.getString("filename"), |
- fileData |
- ); |
- } else { |
- if (serviceName.contains("wepp/")) { |
- model = new WEPPModelArchive(doc.getString("_id"), |
- doc.getString("ctime"), |
- doc.getString("etime"), |
- serviceName, |
- doc.getString("status"), |
- doc.getString("req_ip"), |
- doc.getString("filename"), |
- fileData |
- ); |
- } else { |
- //Not a service we are interested in... |
- } |
- } |
+ public static ModelArchive getModelArchive(JSONObject metaData) throws JSONException { |
+ ModelArchive model = null; |
+ String serviceName = metaData.getString(csip.ModelDataService.KEY_SERVICE_URL); |
|
- return model; |
+ if (serviceName.contains("weps/5.2")) { |
+ model = new WEPSModelArchive(metaData); |
+ } else { |
+ if (serviceName.contains("wepp/")) { |
+ model = new WEPPModelArchive(metaData); |
+ } else { |
+ //Not a service we are interested in... |
+ } |
} |
|
- public static ModelArchive getModelArchive(JSONObject metaData) throws JSONException { |
- ModelArchive model = null; |
- String serviceName = metaData.getString(csip.ModelDataService.KEY_SERVICE_URL); |
+ return model; |
+ } |
|
- if (serviceName.contains("weps/5.2")) { |
- model = new WEPSModelArchive(metaData); |
- } else { |
- if (serviceName.contains("wepp/")) { |
- model = new WEPPModelArchive(metaData); |
- } else { |
- //Not a service we are interested in... |
- } |
- } |
+ public static ModelArchive getModelArchive(JSONObject metaData, byte[] fileData) throws JSONException, IOException { |
+ ModelArchive model = null; |
+ String serviceName = metaData.getString(csip.ModelDataService.KEY_SERVICE_URL); |
|
- return model; |
+ if (serviceName.contains("weps/5.2")) { |
+ model = new WEPSModelArchive(metaData, fileData); |
+ } else { |
+ if (serviceName.contains("wepp/")) { |
+ model = new WEPPModelArchive(metaData, fileData); |
+ } else { |
+ //Not a service we are interested in... |
+ } |
} |
|
- public static ModelArchive getModelArchive(JSONObject metaData, byte[] fileData) throws JSONException, IOException { |
- ModelArchive model = null; |
- String serviceName = metaData.getString(csip.ModelDataService.KEY_SERVICE_URL); |
- |
- if (serviceName.contains("weps/5.2")) { |
- model = new WEPSModelArchive(metaData, fileData); |
- } else { |
- if (serviceName.contains("wepp/")) { |
- model = new WEPPModelArchive(metaData, fileData); |
- } else { |
- //Not a service we are interested in... |
- } |
- } |
- |
- return model; |
- } |
+ return model; |
+ } |
} |
@@ -33,62 +33,61 @@ |
*/ |
public class WEPPModelArchive extends ModelArchive { |
|
+ private SOLFile solFile; |
+ private WindGenData windData; |
+ private CligenData cligenData; |
+ private String stdErrorFile; |
+ private String stdOutFile; |
+ private WEPSManagement management; |
+ private boolean possibleBadModelRun = false; |
+ private boolean badModelRun = false; |
+ private String badModelMessage = ""; |
+ private WEPPMetaData weppMetaData; |
|
- private SOLFile solFile; |
- private WindGenData windData; |
- private CligenData cligenData; |
- private String stdErrorFile; |
- private String stdOutFile; |
- private WEPSManagement management; |
- private boolean possibleBadModelRun = false; |
- private boolean badModelRun = false; |
- private String badModelMessage = ""; |
- private WEPPMetaData weppMetaData; |
+ public WEPPModelArchive(String suid, String ctime, String etime, String service, String status, String req_ip, String filename) { |
+ super(suid, ctime, etime, service, status, req_ip, filename); |
+ } |
|
- public WEPPModelArchive(String suid, String ctime, String etime, String service, String status, String req_ip, String filename) { |
- super(suid, ctime, etime, service, status, req_ip, filename); |
+ public WEPPModelArchive(String suid, String ctime, String etime, String service, String status, String req_ip, String filename, byte[] fileData) throws IOException, JSONException { |
+ super(suid, ctime, etime, service, status, req_ip, filename, fileData); |
+ setFileDataEx(fileData); |
+ } |
+ |
+ public WEPPModelArchive(JSONObject metaData) throws JSONException { |
+ super(metaData); |
+ } |
+ |
+ public WEPPModelArchive(JSONObject metaData, byte[] fileData) throws JSONException, IOException { |
+ super(metaData); |
+ setFileDataEx(fileData); |
+ } |
+ |
+ @Override |
+ public final void setFileDataEx(byte[] fileData) throws IOException, JSONException { |
+ super.setFileDataEx(fileData); |
+ |
+ try { |
+ stdErrorFile = getStdErrorFile(fileData); |
+ } catch (IOException ex) { |
+ badModelRun = true; |
+ badModelMessage += "##\n"; |
+ badModelMessage += ex.getMessage(); |
+ } |
+ if (null != stdErrorFile) { |
+ possibleBadModelRun = stdErrorFile.contains("IEEE_UNDERFLOW_FLAG") || stdErrorFile.contains("IEEE_DENORMAL"); |
+ |
+ if (possibleBadModelRun) { |
+ badModelMessage += "##\nThe WEPP model executable stderr file contained: "; |
+ badModelMessage += ((stdErrorFile.contains("IEEE_UNDERFLOW_FLAG")) ? "\nIEEE_UNDERFLOW_FLAG, Meaning that some values were rounded to zero because they were too small for the FORTRAN code to interpret." : ""); |
+                if (stdErrorFile.contains("IEEE_UNDERFLOW_FLAG") && stdErrorFile.contains("IEEE_DENORMAL")) {
+ badModelMessage += "\n AND "; |
+ } |
+ badModelMessage += ((stdErrorFile.contains("IEEE_DENORMAL")) ? "\nIEEE_DENORMAL, Meaning that there are denormal numbers generated when running the code." : ""); |
+ badModelMessage += "\n This may be a hint about numerical problems in the model FORTRAN code, but it is not an error per se. Probably, the program finished successfully, but some result values may be suspect."; |
+ } |
} |
|
- public WEPPModelArchive(String suid, String ctime, String etime, String service, String status, String req_ip, String filename, byte[] fileData) throws IOException, JSONException { |
- super(suid, ctime, etime, service, status, req_ip, filename, fileData); |
- setFileDataEx(fileData); |
- } |
- |
- public WEPPModelArchive(JSONObject metaData) throws JSONException { |
- super(metaData); |
- } |
- |
- public WEPPModelArchive(JSONObject metaData, byte[] fileData) throws JSONException, IOException { |
- super(metaData); |
- setFileDataEx(fileData); |
- } |
- |
- @Override |
- public final void setFileDataEx(byte[] fileData) throws IOException, JSONException { |
- super.setFileDataEx(fileData); |
- |
- try { |
- stdErrorFile = getStdErrorFile(fileData); |
- } catch (IOException ex) { |
- badModelRun = true; |
- badModelMessage += "##\n"; |
- badModelMessage += ex.getMessage(); |
- } |
- if (null != stdErrorFile) { |
- possibleBadModelRun = stdErrorFile.contains("IEEE_UNDERFLOW_FLAG") || stdErrorFile.contains("IEEE_DENORMAL"); |
- |
- if (possibleBadModelRun) { |
- badModelMessage += "##\nThe WEPP model executable stderr file contained: "; |
- badModelMessage += ((stdErrorFile.contains("IEEE_UNDERFLOW_FLAG")) ? "\nIEEE_UNDERFLOW_FLAG, Meaning that some values were rounded to zero because they were too small for the FORTRAN code to interpret." : ""); |
- if (badModelMessage.contains("IEEE")) { |
- badModelMessage += "\n AND "; |
- } |
- badModelMessage += ((stdErrorFile.contains("IEEE_DENORMAL")) ? "\nIEEE_DENORMAL, Meaning that there are denormal numbers generated when running the code." : ""); |
- badModelMessage += "\n This may be a hint about numerical problems in the model FORTRAN code, but it is not an error per se. Probably, the program finished successfully, but some result values may be suspect."; |
- } |
- } |
- |
- // TODO: Change to getting the ROT file. |
+ // TODO: Change to getting the ROT file. |
// try { |
// management = getManagementFile(fileData); |
// } catch (IOException ex) { |
@@ -96,409 +95,406 @@ |
// badModelMessage += "##\n"; |
// badModelMessage += ex.getMessage(); |
// } |
- try { |
- windData = getWindDataFile(fileData); |
- } catch (IOException ex) { |
- badModelRun = true; |
- badModelMessage += "##\n"; |
- badModelMessage += ex.getMessage(); |
- } |
- |
- if (windData.badWindData()) { |
- badModelMessage += "##\nThe Wind data associated with this model run has some qualifying messages: \n" + windData.windDataMessages(); |
- } |
- |
- try { |
- cligenData = getClimateDataFile(fileData); |
- } catch (IOException ex) { |
- badModelRun = true; |
- badModelMessage += "##\n"; |
- badModelMessage += ex.getMessage(); |
- } |
- |
- if (cligenData.badClimateData()) { |
- badModelMessage += "##\nThe Cligen data associated with this model run has some qualifying messages: \n" + cligenData.cligenDataMessages(); |
- } |
- |
- try { |
- stdOutFile = getStdOutFile(fileData); |
- } catch (IOException ex) { |
- badModelRun = true; |
- badModelMessage += "##\n"; |
- badModelMessage += ex.getMessage(); |
- } |
- |
- try { |
- solFile = getSolFile(fileData); |
- } catch (IOException ex) { |
- badModelRun = true; |
- badModelMessage += "##\n"; |
- badModelMessage += ex.getMessage(); |
- } |
- |
- if (!badModelRun && badModelMessage.isEmpty()) { |
- calcWEPPMetaData(); |
- } |
+ try { |
+ windData = getWindDataFile(fileData); |
+ } catch (IOException ex) { |
+ badModelRun = true; |
+ badModelMessage += "##\n"; |
+ badModelMessage += ex.getMessage(); |
} |
|
- public boolean questionableResults() { |
- return possibleBadModelRun; |
+        if (windData != null && windData.badWindData()) {
+ badModelMessage += "##\nThe Wind data associated with this model run has some qualifying messages: \n" + windData.windDataMessages(); |
} |
|
- public boolean badModelRun() { |
- return badModelRun; |
+ try { |
+ cligenData = getClimateDataFile(fileData); |
+ } catch (IOException ex) { |
+ badModelRun = true; |
+ badModelMessage += "##\n"; |
+ badModelMessage += ex.getMessage(); |
} |
|
- public String badModelMessage() { |
- return badModelMessage; |
+        if (cligenData != null && cligenData.badClimateData()) {
+ badModelMessage += "##\nThe Cligen data associated with this model run has some qualifying messages: \n" + cligenData.cligenDataMessages(); |
} |
|
- public SOLFile solFile() { |
- return solFile; |
+ try { |
+ stdOutFile = getStdOutFile(fileData); |
+ } catch (IOException ex) { |
+ badModelRun = true; |
+ badModelMessage += "##\n"; |
+ badModelMessage += ex.getMessage(); |
} |
|
- public WindGenData windData() { |
- return windData; |
+ try { |
+ solFile = getSolFile(fileData); |
+ } catch (IOException ex) { |
+ badModelRun = true; |
+ badModelMessage += "##\n"; |
+ badModelMessage += ex.getMessage(); |
} |
|
- public CligenData cligenData() { |
- return cligenData; |
+ if (!badModelRun && badModelMessage.isEmpty()) { |
+ calcWEPPMetaData(); |
+ } |
+ } |
+ |
+ public boolean questionableResults() { |
+ return possibleBadModelRun; |
+ } |
+ |
+ public boolean badModelRun() { |
+ return badModelRun; |
+ } |
+ |
+ public String badModelMessage() { |
+ return badModelMessage; |
+ } |
+ |
+ public SOLFile solFile() { |
+ return solFile; |
+ } |
+ |
+ public WindGenData windData() { |
+ return windData; |
+ } |
+ |
+ public CligenData cligenData() { |
+ return cligenData; |
+ } |
+ |
+ public String stdErrorFile() { |
+ return stdErrorFile; |
+ } |
+ |
+ public String stdOutFile() { |
+ return stdOutFile; |
+ } |
+ |
+ public WEPPMetaData getWEPPMetaData() { |
+ return weppMetaData; |
+ } |
+ |
+ public void calcWEPPMetaData() { |
+ if (null == weppMetaData) { |
+ weppMetaData = new WEPPMetaData(); |
+ } |
+// |
+ weppMetaData.suid(this.suid); |
+ weppMetaData.cokey(getOriginalRequest("soil")); |
+ weppMetaData.longitude(Double.parseDouble(getOriginalRequest("longitude"))); |
+ weppMetaData.latitude(Double.parseDouble(getOriginalRequest("latitude"))); |
+ weppMetaData.annualPrecip(cligenData.annualAvgPrecip()); |
+ weppMetaData.windEnergy(windData.simulationAverage()); |
+ weppMetaData.componentName(solFile.soilName); |
+ |
+ weppMetaData.num_layers(solFile.numLayers); |
+ weppMetaData.surface_thickness(((int) solFile.solthk[0])); |
+ |
+ String biomass_avg = getOriginalResponse("average_biomass"); |
+ if (!biomass_avg.isEmpty()) { |
+ weppMetaData.rotWeightResBiomass(Double.parseDouble(biomass_avg) * 2000); |
+ } else { |
+ badModelRun = true; |
+ badModelMessage += "##\n"; |
+ badModelMessage += "Missing biomass_avg result in WEPP output result file."; |
} |
|
- public String stdErrorFile() { |
- return stdErrorFile; |
+ String stir = getOriginalResponse("STIR"); |
+ if (!stir.isEmpty()) { |
+ weppMetaData.rotWeightSoilTillIntensity(Double.parseDouble(stir)); |
+ } else { |
+ badModelRun = true; |
+ badModelMessage += "##\n"; |
+ badModelMessage += "Missing avg_all_stir result in WEPP output result file."; |
} |
|
- public String stdOutFile() { |
- return stdOutFile; |
+ String soilLoss = getOriginalResponse("SoilLoss"); |
+ if (!soilLoss.isEmpty()) { |
+ weppMetaData.erosionRate(Double.parseDouble(soilLoss)); |
+ } else { |
+ badModelRun = true; |
+ badModelMessage += "##\n"; |
+ badModelMessage += "Missing wind_eros result in WEPP output result file."; |
} |
|
- public WEPPMetaData getWEPPMetaData() { |
- return weppMetaData; |
+ String respAvgAnnualPrecip = getOriginalResponse("Precipitation"); |
+ if (!respAvgAnnualPrecip.isEmpty()) { |
+ weppMetaData.respAvgAnnualPrecip(Double.parseDouble(respAvgAnnualPrecip)); |
+ } else { |
+ badModelRun = true; |
+ badModelMessage += "##\n"; |
+            badModelMessage += "Missing Precipitation result in WEPP output result file.";
} |
|
- public void calcWEPPMetaData() { |
- if (null == weppMetaData) { |
- weppMetaData = new WEPPMetaData(); |
- } |
-// |
- weppMetaData.suid(this.suid); |
- weppMetaData.cokey(getOriginalRequest("soil")); |
- weppMetaData.longitude(Double.parseDouble(getOriginalRequest("longitude"))); |
- weppMetaData.latitude(Double.parseDouble(getOriginalRequest("latitude"))); |
- weppMetaData.annualPrecip(cligenData.annualAvgPrecip()); |
- weppMetaData.windEnergy(windData.simulationAverage()); |
- weppMetaData.componentName(solFile.soilName); |
+ weppMetaData.errorMessages(badModelMessage); |
|
- weppMetaData.num_layers(solFile.numLayers); |
- weppMetaData.surface_thickness(((int) solFile.solthk[0])); |
+ } |
|
+ private WEPSManagement getManagementFile(byte[] fileData) throws IOException { |
+ WEPSManagement weppManagement = new WEPSManagement(new Urls()); |
|
- String biomass_avg = getOriginalResponse("average_biomass"); |
- if (!biomass_avg.isEmpty()) { |
- weppMetaData.rotWeightResBiomass(Double.parseDouble(biomass_avg) * 2000); |
- } else { |
- badModelRun = true; |
- badModelMessage += "##\n"; |
- badModelMessage += "Missing biomass_avg result in WEPP output result file."; |
- } |
+ File managementFile = new File("management.rot"); |
+ BufferedWriter bufferWriter = Files.newBufferedWriter(managementFile.toPath()); |
+ String data = getFileContents(fileData, ".rot"); |
+ bufferWriter.write(data); |
+ bufferWriter.flush(); |
+ bufferWriter.close(); |
|
- String stir = getOriginalResponse("STIR"); |
- if (!stir.isEmpty()) { |
- weppMetaData.rotWeightSoilTillIntensity(Double.parseDouble(stir)); |
- } else { |
- badModelRun = true; |
- badModelMessage += "##\n"; |
- badModelMessage += "Missing avg_all_stir result in WEPP output result file."; |
- } |
- |
- String soilLoss = getOriginalResponse("SoilLoss"); |
- if (!soilLoss.isEmpty()) { |
- weppMetaData.erosionRate(Double.parseDouble(soilLoss)); |
- } else { |
- badModelRun = true; |
- badModelMessage += "##\n"; |
- badModelMessage += "Missing wind_eros result in WEPP output result file."; |
- } |
- |
- String respAvgAnnualPrecip = getOriginalResponse("Precipitation"); |
- if (!respAvgAnnualPrecip.isEmpty()) { |
- weppMetaData.respAvgAnnualPrecip(Double.parseDouble(respAvgAnnualPrecip)); |
- } else { |
- badModelRun = true; |
- badModelMessage += "##\n"; |
- badModelMessage += "Missing wind_eros result in WEPP output result file."; |
- } |
- |
- |
- weppMetaData.errorMessages(badModelMessage); |
- |
+ try { |
+ weppManagement.readManData(managementFile); |
+ } catch (ParserException | ServiceException ex) { |
+ Logger.getLogger(WEPPModelArchive.class.getName()).log(Level.SEVERE, null, ex); |
+ badModelRun = true; |
+ badModelMessage += "##\n"; |
+ badModelMessage += "Error parsing the WEPP management file: " + ex.getMessage(); |
} |
|
- private WEPSManagement getManagementFile(byte[] fileData) throws IOException { |
- WEPSManagement weppManagement = new WEPSManagement(new Urls()); |
+ return weppManagement; |
+ } |
|
- File managementFile = new File("management.rot"); |
- BufferedWriter bufferWriter = Files.newBufferedWriter(managementFile.toPath()); |
- String data = getFileContents(fileData, ".rot"); |
- bufferWriter.write(data); |
- bufferWriter.flush(); |
- bufferWriter.close(); |
+ private WindGenData getWindDataFile(byte[] fileData) throws IOException { |
+ WindGenData wind; |
|
- try { |
- weppManagement.readManData(managementFile); |
- } catch (ParserException | ServiceException ex) { |
- Logger.getLogger(WEPPModelArchive.class.getName()).log(Level.SEVERE, null, ex); |
- badModelRun = true; |
- badModelMessage += "##\n"; |
- badModelMessage += "Error parsing the WEPP management file: " + ex.getMessage(); |
- } |
+ wind = new WindGenData(getFileContents(fileData, ".win")); |
|
- return weppManagement; |
+ return wind; |
+ } |
+ |
+ private CligenData getClimateDataFile(byte[] fileData) throws IOException { |
+ CligenData climate; |
+ |
+ climate = new CligenData(getFileContents(fileData, ".cli")); |
+ |
+ return climate; |
+ } |
+ |
+ private SOLFile getSolFile(byte[] fileData) throws IOException { |
+ SOLFile requestData = null; |
+ |
+ requestData = new SOLFile(getFileContents(fileData, ".sol")); |
+ |
+ return requestData; |
+ } |
+ |
+ private String getStdErrorFile(byte[] fileData) throws IOException { |
+ String fileString = null; |
+ |
+ try (ZipInputStream zin = new ZipInputStream(new ByteArrayInputStream(fileData))) { |
+ ZipEntry entry; |
+ |
+ while ((entry = zin.getNextEntry()) != null) { |
+ if (entry.getName().contains("wepp.") && entry.getName().contains("stderr.txt")) { |
+ BufferedReader bReader = new BufferedReader(new InputStreamReader(zin)); |
+ StringBuilder fileContent = new StringBuilder(); |
+ String inputStr; |
+ while ((inputStr = bReader.readLine()) != null) { |
+ fileContent.append(inputStr).append(System.lineSeparator()); |
+ } |
+ fileString = fileContent.toString(); |
+ break; |
+ } |
+ } |
} |
|
- private WindGenData getWindDataFile(byte[] fileData) throws IOException { |
- WindGenData wind; |
+ return fileString; |
+ } |
|
- wind = new WindGenData(getFileContents(fileData, ".win")); |
+ private String getStdOutFile(byte[] fileData) throws IOException { |
+ String fileString = null; |
|
- return wind; |
+ try (ZipInputStream zin = new ZipInputStream(new ByteArrayInputStream(fileData))) { |
+ ZipEntry entry; |
+ |
+ while ((entry = zin.getNextEntry()) != null) { |
+ if (entry.getName().contains("wepp.exe") && entry.getName().contains("stdout.txt")) { |
+ BufferedReader bReader = new BufferedReader(new InputStreamReader(zin)); |
+ StringBuilder fileContent = new StringBuilder(); |
+ String inputStr; |
+ while ((inputStr = bReader.readLine()) != null) { |
+ fileContent.append(inputStr).append(System.lineSeparator()); |
+ } |
+ fileString = fileContent.toString(); |
+ break; |
+ } |
+ } |
} |
|
- private CligenData getClimateDataFile(byte[] fileData) throws IOException { |
- CligenData climate; |
+ return fileString; |
+ } |
|
- climate = new CligenData(getFileContents(fileData, ".cli")); |
+ public class WEPPMetaData { |
|
- return climate; |
+ private double latitude; |
+ private double longitude; |
+ private String cokey; |
+ private double windEnergy; |
+ private double annualPrecip; |
+ private double respAvgAnnualPrecip; |
+ private double avgAnnualPrecipEventDuration; |
+ private double rillErodability; |
+ private double wAvgSoilSandFraction; |
+ private double wAvgSoilClayFraction; |
+ private double effectiveSurfaceConductivity; |
+ private int contouring; |
+ private double lsFactor; |
+ private double erosionRate; |
+ private double rotIrrEffect; |
+ private double rotWeightCoverCropEffect; |
+ private double rotWeightResBiomass; |
+ private double rotWeightSoilTillIntensity; |
+ private double rotWeightResAddition; |
+ private String componentName; |
+ private int surface_thickness; |
+ private int num_layers; |
+ private String suid; |
+ private String errorMessages = ""; |
+ |
+ public String errorMessages() { |
+ return errorMessages; |
} |
|
- private SOLFile getSolFile(byte[] fileData) throws IOException { |
- SOLFile requestData = null; |
- |
- requestData = new SOLFile(getFileContents(fileData, ".sol")); |
- |
- return requestData; |
+ public void errorMessages(String value) { |
+ errorMessages = value; |
} |
|
- private String getStdErrorFile(byte[] fileData) throws IOException { |
- String fileString = null; |
- |
- try (ZipInputStream zin = new ZipInputStream(new ByteArrayInputStream(fileData))) { |
- ZipEntry entry; |
- |
- while ((entry = zin.getNextEntry()) != null) { |
- if (entry.getName().contains("wepp.") && entry.getName().contains("stderr.txt")) { |
- BufferedReader bReader = new BufferedReader(new InputStreamReader(zin)); |
- StringBuilder fileContent = new StringBuilder(); |
- String inputStr; |
- while ((inputStr = bReader.readLine()) != null) { |
- fileContent.append(inputStr).append(System.lineSeparator()); |
- } |
- fileString = fileContent.toString(); |
- break; |
- } |
- } |
- } |
- |
- return fileString; |
+ public String suid() { |
+ return suid; |
} |
|
- private String getStdOutFile(byte[] fileData) throws IOException { |
- String fileString = null; |
- |
- try (ZipInputStream zin = new ZipInputStream(new ByteArrayInputStream(fileData))) { |
- ZipEntry entry; |
- |
- while ((entry = zin.getNextEntry()) != null) { |
- if (entry.getName().contains("wepp.exe") && entry.getName().contains("stdout.txt")) { |
- BufferedReader bReader = new BufferedReader(new InputStreamReader(zin)); |
- StringBuilder fileContent = new StringBuilder(); |
- String inputStr; |
- while ((inputStr = bReader.readLine()) != null) { |
- fileContent.append(inputStr).append(System.lineSeparator()); |
- } |
- fileString = fileContent.toString(); |
- break; |
- } |
- } |
- } |
- |
- return fileString; |
+ public void suid(String value) { |
+ suid = value; |
} |
|
- public class WEPPMetaData { |
+ public double windEnergy() { |
+ return windEnergy; |
+ } |
|
- private double latitude; |
- private double longitude; |
- private String cokey; |
- private double windEnergy; |
- private double annualPrecip; |
- private double respAvgAnnualPrecip; |
- private double avgAnnualPrecipEventDuration; |
- private double rillErodability; |
- private double wAvgSoilSandFraction; |
- private double wAvgSoilClayFraction; |
- private double effectiveSurfaceConductivity; |
- private int contouring; |
- private double lsFactor; |
- private double erosionRate; |
- private double rotIrrEffect; |
- private double rotWeightCoverCropEffect; |
- private double rotWeightResBiomass; |
- private double rotWeightSoilTillIntensity; |
- private double rotWeightResAddition; |
- private String componentName; |
- private int surface_thickness; |
- private int num_layers; |
- private String suid; |
- private String errorMessages = ""; |
+ public double annualPrecip() { |
+ return annualPrecip; |
+ } |
|
- public String errorMessages() { |
- return errorMessages; |
- } |
+ public double respAvgAnnualPrecip() { |
+ return respAvgAnnualPrecip; |
+ } |
|
- public void errorMessages(String value) { |
- errorMessages = value; |
- } |
+ public void respAvgAnnualPrecip(double value) { |
+ respAvgAnnualPrecip = value; |
+ } |
|
- public String suid() { |
- return suid; |
- } |
+ public double rotIrrEffect() { |
+ return rotIrrEffect; |
+ } |
|
- public void suid(String value) { |
- suid = value; |
- } |
+ public double rotWeightResBiomass() { |
+ return rotWeightResBiomass; |
+ } |
|
- public double windEnergy() { |
- return windEnergy; |
- } |
+ public double rotWeightSoilTillIntensity() { |
+ return rotWeightSoilTillIntensity; |
+ } |
|
- public double annualPrecip() { |
- return annualPrecip; |
- } |
- |
- public double respAvgAnnualPrecip(){ |
- return respAvgAnnualPrecip; |
- } |
+ public double rotWeightResAddition() { |
+ return rotWeightResAddition; |
+ } |
|
- public void respAvgAnnualPrecip( double value){ |
- respAvgAnnualPrecip = value; |
- } |
- |
- public double rotIrrEffect() { |
- return rotIrrEffect; |
- } |
+ public double erosionRate() { |
+ return erosionRate; |
+ } |
|
- public double rotWeightResBiomass() { |
- return rotWeightResBiomass; |
- } |
+ public void erosionRate(double value) { |
+ erosionRate = value; |
+ } |
|
- public double rotWeightSoilTillIntensity() { |
- return rotWeightSoilTillIntensity; |
- } |
+ public void windEnergy(double value) { |
+ windEnergy = value; |
+ } |
|
- public double rotWeightResAddition() { |
- return rotWeightResAddition; |
- } |
+ public void annualPrecip(double value) { |
+ annualPrecip = value; |
+ } |
|
- public double erosionRate() { |
- return erosionRate; |
- } |
+ public void rotIrrEffect(double value) { |
+ rotIrrEffect = value; |
+ } |
|
- public void erosionRate(double value) { |
- erosionRate = value; |
- } |
- |
- public void windEnergy(double value) { |
- windEnergy = value; |
- } |
+ public void rotWeightResBiomass(double value) { |
+ rotWeightResBiomass = value; |
+ } |
|
- public void annualPrecip(double value) { |
- annualPrecip = value; |
- } |
+ public void rotWeightSoilTillIntensity(double value) { |
+ rotWeightSoilTillIntensity = value; |
+ } |
|
- public void rotIrrEffect(double value) { |
- rotIrrEffect = value; |
- } |
+ public void rotWeightResAddition(double value) { |
+ rotWeightResAddition = value; |
+ } |
|
- public void rotWeightResBiomass(double value) { |
- rotWeightResBiomass = value; |
- } |
+ public String componentName() { |
+ return componentName; |
+ } |
|
- public void rotWeightSoilTillIntensity(double value) { |
- rotWeightSoilTillIntensity = value; |
- } |
+ public void componentName(String value) { |
+ componentName = value; |
+ } |
|
- public void rotWeightResAddition(double value) { |
- rotWeightResAddition = value; |
- } |
+ public void surface_thickness(int value) { |
+ surface_thickness = value; |
+ } |
|
- public String componentName() { |
- return componentName; |
- } |
+ public void num_layers(int value) { |
+ num_layers = value; |
+ } |
|
- public void componentName(String value) { |
- componentName = value; |
- } |
+ public int surface_thickness() { |
+ return surface_thickness; |
+ } |
|
- public void surface_thickness(int value) { |
- surface_thickness = value; |
- } |
+ public int num_layers() { |
+ return num_layers; |
+ } |
|
- public void num_layers(int value) { |
- num_layers = value; |
- } |
+ public double latitude() { |
+ return latitude; |
+ } |
|
- public int surface_thickness() { |
- return surface_thickness; |
- } |
+ public double longitude() { |
+ return longitude; |
+ } |
|
- public int num_layers() { |
- return num_layers; |
- } |
+ public String cokey() { |
+ return cokey; |
+ } |
|
- public double latitude() { |
- return latitude; |
- } |
+ public void latitude(double value) { |
+ latitude = value; |
+ } |
|
- public double longitude() { |
- return longitude; |
- } |
+ public void longitude(double value) { |
+ longitude = value; |
+ } |
|
- public String cokey() { |
- return cokey; |
- } |
+ public void cokey(String value) { |
+ cokey = value; |
+ } |
|
- public void latitude(double value) { |
- latitude = value; |
- } |
+ public JSONArray toJSON() throws JSONException { |
+ JSONArray ret_val = new JSONArray(); |
+ JSONObject results = new JSONObject(); |
|
- public void longitude(double value) { |
- longitude = value; |
- } |
+ results.put("wepp_archive_suid", weppMetaData.suid()); |
+ results.put("annual_precipitation", weppMetaData.annualPrecip()); |
+ results.put("wind_energy", weppMetaData.windEnergy()); |
|
- public void cokey(String value) { |
- cokey = value; |
- } |
+ results.put("rotation_weighted_biomass", weppMetaData.rotWeightResBiomass()); |
+ results.put("annual_precipitation", weppMetaData.annualPrecip()); |
|
- public JSONArray toJSON() throws JSONException { |
- JSONArray ret_val = new JSONArray(); |
- JSONObject results = new JSONObject(); |
+ results.put("rotation_weighted_soil_tillage_intensity", weppMetaData.rotWeightSoilTillIntensity()); |
+ results.put("erosion_rate", weppMetaData.erosionRate()); |
|
- results.put("wepp_archive_suid", weppMetaData.suid()); |
- results.put("annual_precipitation", weppMetaData.annualPrecip()); |
- results.put("wind_energy", weppMetaData.windEnergy()); |
- |
- results.put("rotation_weighted_biomass", weppMetaData.rotWeightResBiomass()); |
- results.put("annual_precipitation", weppMetaData.annualPrecip()); |
- |
- results.put("rotation_weighted_soil_tillage_intensity", weppMetaData.rotWeightSoilTillIntensity()); |
- results.put("erosion_rate", weppMetaData.erosionRate()); |
- |
- ret_val.put(results); |
- return ret_val; |
- } |
+ ret_val.put(results); |
+ return ret_val; |
} |
+ } |
} |
- |
@@ -33,60 +33,60 @@ |
*/ |
public class WEPSModelArchive extends ModelArchive { |
|
- private IFCFile ifcFile; |
- private WindGenData windData; |
- private CligenData cligenData; |
- private String stdErrorFile; |
- private String stdOutFile; |
- private WEPSManagement management; |
- private boolean possibleBadModelRun = false; |
- private boolean badModelRun = false; |
- private String badModelMessage = ""; |
- private WEPSMetaData wepsMetaData; |
+ private IFCFile ifcFile; |
+ private WindGenData windData; |
+ private CligenData cligenData; |
+ private String stdErrorFile; |
+ private String stdOutFile; |
+ private WEPSManagement management; |
+ private boolean possibleBadModelRun = false; |
+ private boolean badModelRun = false; |
+ private String badModelMessage = ""; |
+ private WEPSMetaData wepsMetaData; |
|
- public WEPSModelArchive(String suid, String ctime, String etime, String service, String status, String req_ip, String filename) { |
- super(suid, ctime, etime, service, status, req_ip, filename); |
+ public WEPSModelArchive(String suid, String ctime, String etime, String service, String status, String req_ip, String filename) { |
+ super(suid, ctime, etime, service, status, req_ip, filename); |
+ } |
+ |
+ public WEPSModelArchive(String suid, String ctime, String etime, String service, String status, String req_ip, String filename, byte[] fileData) throws IOException, JSONException { |
+ super(suid, ctime, etime, service, status, req_ip, filename, fileData); |
+ setFileDataEx(fileData); |
+ } |
+ |
+ public WEPSModelArchive(JSONObject metaData) throws JSONException { |
+ super(metaData); |
+ } |
+ |
+ public WEPSModelArchive(JSONObject metaData, byte[] fileData) throws JSONException, IOException { |
+ super(metaData); |
+ setFileDataEx(fileData); |
+ } |
+ |
+ @Override |
+ public final void setFileDataEx(byte[] fileData) throws IOException, JSONException { |
+ super.setFileDataEx(fileData); |
+ |
+ try { |
+ stdErrorFile = getStdErrorFile(fileData); |
+ } catch (IOException ex) { |
+ badModelRun = true; |
+ badModelMessage += "##\n"; |
+ badModelMessage += ex.getMessage(); |
} |
+ if (null != stdErrorFile) { |
+ possibleBadModelRun = stdErrorFile.contains("IEEE_UNDERFLOW_FLAG") || stdErrorFile.contains("IEEE_DENORMAL"); |
|
- public WEPSModelArchive(String suid, String ctime, String etime, String service, String status, String req_ip, String filename, byte[] fileData) throws IOException, JSONException { |
- super(suid, ctime, etime, service, status, req_ip, filename, fileData); |
- setFileDataEx(fileData); |
+ if (possibleBadModelRun) { |
+ badModelMessage += "##\nThe WEPS model executable stderr file contained: "; |
+ badModelMessage += ((stdErrorFile.contains("IEEE_UNDERFLOW_FLAG")) ? "\nIEEE_UNDERFLOW_FLAG, Meaning that some values were rounded to zero because they were too small for the FORTRAN code to interpret." : ""); |
+                if (stdErrorFile.contains("IEEE_UNDERFLOW_FLAG") && stdErrorFile.contains("IEEE_DENORMAL")) {
+ badModelMessage += "\n AND "; |
+ } |
+ badModelMessage += ((stdErrorFile.contains("IEEE_DENORMAL")) ? "\nIEEE_DENORMAL, Meaning that there are denormal numbers generated when running the code." : ""); |
+ badModelMessage += "\n This may be a hint about numerical problems in the model FORTRAN code, but it is not an error per se. Probably, the program finished successfully, but some result values may be suspect."; |
+ } |
} |
|
- public WEPSModelArchive(JSONObject metaData) throws JSONException { |
- super(metaData); |
- } |
- |
- public WEPSModelArchive(JSONObject metaData, byte[] fileData) throws JSONException, IOException { |
- super(metaData); |
- setFileDataEx(fileData); |
- } |
- |
- @Override |
- public final void setFileDataEx(byte[] fileData) throws IOException, JSONException { |
- super.setFileDataEx(fileData); |
- |
- try { |
- stdErrorFile = getStdErrorFile(fileData); |
- } catch (IOException ex) { |
- badModelRun = true; |
- badModelMessage += "##\n"; |
- badModelMessage += ex.getMessage(); |
- } |
- if (null != stdErrorFile) { |
- possibleBadModelRun = stdErrorFile.contains("IEEE_UNDERFLOW_FLAG") || stdErrorFile.contains("IEEE_DENORMAL"); |
- |
- if (possibleBadModelRun) { |
- badModelMessage += "##\nThe WEPS model executable stderr file contained: "; |
- badModelMessage += ((stdErrorFile.contains("IEEE_UNDERFLOW_FLAG")) ? "\nIEEE_UNDERFLOW_FLAG, Meaning that some values were rounded to zero because they were too small for the FORTRAN code to interpret." : ""); |
- if (badModelMessage.contains("IEEE")) { |
- badModelMessage += "\n AND "; |
- } |
- badModelMessage += ((stdErrorFile.contains("IEEE_DENORMAL")) ? "\nIEEE_DENORMAL, Meaning that there are denormal numbers generated when running the code." : ""); |
- badModelMessage += "\n This may be a hint about numerical problems in the model FORTRAN code, but it is not an error per se. Probably, the program finished successfully, but some result values may be suspect."; |
- } |
- } |
- |
// try { |
// management = getManagementFile(fileData); |
// } catch (IOException ex) { |
@@ -94,493 +94,493 @@ |
// badModelMessage += "##\n"; |
// badModelMessage += ex.getMessage(); |
// } |
- try { |
- windData = getWindDataFile(fileData); |
- } catch (IOException ex) { |
- badModelRun = true; |
- badModelMessage += "##\n"; |
- badModelMessage += ex.getMessage(); |
- } |
- |
- if (windData.badWindData()) { |
- badModelMessage += "##\nThe Wind data associated with this model run has some qualifying messages: \n" + windData.windDataMessages(); |
- } |
- |
- try { |
- cligenData = getClimateDataFile(fileData); |
- } catch (IOException ex) { |
- badModelRun = true; |
- badModelMessage += "##\n"; |
- badModelMessage += ex.getMessage(); |
- } |
- |
- if (cligenData.badClimateData()) { |
- badModelMessage += "##\nThe Cligen data associated with this model run has some qualifying messages: \n" + cligenData.cligenDataMessages(); |
- } |
- |
- try { |
- stdOutFile = getStdOutFile(fileData); |
- } catch (IOException ex) { |
- badModelRun = true; |
- badModelMessage += "##\n"; |
- badModelMessage += ex.getMessage(); |
- } |
- |
- try { |
- ifcFile = getIFCFile(fileData); |
- } catch (IOException ex) { |
- badModelRun = true; |
- badModelMessage += "##\n"; |
- badModelMessage += ex.getMessage(); |
- } |
- |
- if (!badModelRun && badModelMessage.isEmpty()) { |
- calcWEPSMetaData(); |
- } |
+ try { |
+ windData = getWindDataFile(fileData); |
+ } catch (IOException ex) { |
+ badModelRun = true; |
+ badModelMessage += "##\n"; |
+ badModelMessage += ex.getMessage(); |
} |
|
- public boolean questionableResults() { |
- return possibleBadModelRun; |
+ if (windData.badWindData()) { |
+ badModelMessage += "##\nThe Wind data associated with this model run has some qualifying messages: \n" + windData.windDataMessages(); |
} |
|
- public boolean badModelRun() { |
- return badModelRun; |
+ try { |
+ cligenData = getClimateDataFile(fileData); |
+ } catch (IOException ex) { |
+ badModelRun = true; |
+ badModelMessage += "##\n"; |
+ badModelMessage += ex.getMessage(); |
} |
|
- public String badModelMessage() { |
- return badModelMessage; |
+ if (cligenData.badClimateData()) { |
+ badModelMessage += "##\nThe Cligen data associated with this model run has some qualifying messages: \n" + cligenData.cligenDataMessages(); |
} |
|
- public IFCFile iFCFile() { |
- return ifcFile; |
+ try { |
+ stdOutFile = getStdOutFile(fileData); |
+ } catch (IOException ex) { |
+ badModelRun = true; |
+ badModelMessage += "##\n"; |
+ badModelMessage += ex.getMessage(); |
} |
|
- public WindGenData windData() { |
- return windData; |
+ try { |
+ ifcFile = getIFCFile(fileData); |
+ } catch (IOException ex) { |
+ badModelRun = true; |
+ badModelMessage += "##\n"; |
+ badModelMessage += ex.getMessage(); |
} |
|
- public CligenData cligenData() { |
- return cligenData; |
+ if (!badModelRun && badModelMessage.isEmpty()) { |
+ calcWEPSMetaData(); |
+ } |
+ } |
+ |
+ public boolean questionableResults() { |
+ return possibleBadModelRun; |
+ } |
+ |
+ public boolean badModelRun() { |
+ return badModelRun; |
+ } |
+ |
+ public String badModelMessage() { |
+ return badModelMessage; |
+ } |
+ |
+ public IFCFile iFCFile() { |
+ return ifcFile; |
+ } |
+ |
+ public WindGenData windData() { |
+ return windData; |
+ } |
+ |
+ public CligenData cligenData() { |
+ return cligenData; |
+ } |
+ |
+ public String stdErrorFile() { |
+ return stdErrorFile; |
+ } |
+ |
+ public String stdOutFile() { |
+ return stdOutFile; |
+ } |
+ |
+ public WEPSMetaData getWEPSMetaData() { |
+ return wepsMetaData; |
+ } |
+ |
+ public void calcWEPSMetaData() { |
+ if (null == wepsMetaData) { |
+ wepsMetaData = new WEPSMetaData(); |
} |
|
- public String stdErrorFile() { |
- return stdErrorFile; |
+ wepsMetaData.suid(this.suid); |
+ wepsMetaData.cokey(getOriginalRequest("soil")); |
+ wepsMetaData.longitude(Double.parseDouble(getOriginalRequest("longitude"))); |
+ wepsMetaData.latitude(Double.parseDouble(getOriginalRequest("latitude"))); |
+ wepsMetaData.annualPrecip(cligenData.annualAvgPrecip()); |
+ wepsMetaData.windEnergy(windData.simulationAverage()); |
+ wepsMetaData.componentName(ifcFile.componentName); |
+ wepsMetaData.fractionSand(ifcFile.fractionSand); |
+ wepsMetaData.fractionSilt(ifcFile.fractionSilt); |
+ wepsMetaData.fractionClay(ifcFile.fractionClay); |
+ wepsMetaData.crustStability(ifcFile.crustStability); |
+ wepsMetaData.surfRockFrag(ifcFile.surfaceFragmentCover); |
+ wepsMetaData.albedo(ifcFile.surfaceAlbedo); |
+ wepsMetaData.num_layers(ifcFile.layerThickness.length); |
+ wepsMetaData.surface_thickness(((int) ifcFile.layerThickness[0])); |
+ wepsMetaData.slope_gradient(ifcFile.surfaceSlope); |
+ |
+ //TODO: Do we need to measure the layers before setting these?? WEPSSoilInput already filters/sorts layers, etc... |
+ wepsMetaData.aggStability(ifcFile.aggregateStability[0]); |
+ wepsMetaData.soilWiltPoint(ifcFile.wiltingPointSWC[0]); |
+ wepsMetaData.aggGeomDiam(ifcFile.aggregateMeanDiameter[0]); |
+ |
+ String biomass_avg = getOriginalResponse("average_biomass"); |
+ if (!biomass_avg.isEmpty()) { |
+ wepsMetaData.rotWeightResBiomass(Double.parseDouble(biomass_avg) * 2000); |
+ } else { |
+ badModelRun = true; |
+ badModelMessage += "##\n"; |
+ badModelMessage += "Missing biomass_avg result in WEPS output result file."; |
} |
|
- public String stdOutFile() { |
- return stdOutFile; |
+ String stir_avg = getOriginalResponse("avg_all_stir"); |
+ if (!stir_avg.isEmpty()) { |
+ wepsMetaData.rotWeightSoilTillIntensity(Double.parseDouble(stir_avg)); |
+ } else { |
+ badModelRun = true; |
+ badModelMessage += "##\n"; |
+ badModelMessage += "Missing avg_all_stir result in WEPS output result file."; |
} |
|
- public WEPSMetaData getWEPSMetaData() { |
- return wepsMetaData; |
+ String wind_eros = getOriginalResponse("wind_eros"); |
+ if (!wind_eros.isEmpty()) { |
+ wepsMetaData.erosionRate(Double.parseDouble(wind_eros)); |
+ } else { |
+ badModelRun = true; |
+ badModelMessage += "##\n"; |
+ badModelMessage += "Missing wind_eros result in WEPS output result file."; |
} |
|
- public void calcWEPSMetaData() { |
- if (null == wepsMetaData) { |
- wepsMetaData = new WEPSMetaData(); |
- } |
+ wepsMetaData.errorMessages(badModelMessage); |
|
- wepsMetaData.suid(this.suid); |
- wepsMetaData.cokey(getOriginalRequest("soil")); |
- wepsMetaData.longitude(Double.parseDouble(getOriginalRequest("longitude"))); |
- wepsMetaData.latitude(Double.parseDouble(getOriginalRequest("latitude"))); |
- wepsMetaData.annualPrecip(cligenData.annualAvgPrecip()); |
- wepsMetaData.windEnergy(windData.simulationAverage()); |
- wepsMetaData.componentName(ifcFile.componentName); |
- wepsMetaData.fractionSand(ifcFile.fractionSand); |
- wepsMetaData.fractionSilt(ifcFile.fractionSilt); |
- wepsMetaData.fractionClay(ifcFile.fractionClay); |
- wepsMetaData.crustStability(ifcFile.crustStability); |
- wepsMetaData.surfRockFrag(ifcFile.surfaceFragmentCover); |
- wepsMetaData.albedo(ifcFile.surfaceAlbedo); |
- wepsMetaData.num_layers(ifcFile.layerThickness.length); |
- wepsMetaData.surface_thickness(((int) ifcFile.layerThickness[0])); |
- wepsMetaData.slope_gradient(ifcFile.surfaceSlope); |
+ } |
|
- //TODO: Do we need to measure the layers before setting these?? WEPSSoilInput already filters/sorts layers, etc... |
- wepsMetaData.aggStability(ifcFile.aggregateStability[0]); |
- wepsMetaData.soilWiltPoint(ifcFile.wiltingPointSWC[0]); |
- wepsMetaData.aggGeomDiam(ifcFile.aggregateMeanDiameter[0]); |
+ private WEPSManagement getManagementFile(byte[] fileData) throws IOException { |
+ WEPSManagement wepsManagement = new WEPSManagement(new Urls()); |
|
- String biomass_avg = getOriginalResponse("average_biomass"); |
- if (!biomass_avg.isEmpty()) { |
- wepsMetaData.rotWeightResBiomass(Double.parseDouble(biomass_avg) * 2000); |
- } else { |
- badModelRun = true; |
- badModelMessage += "##\n"; |
- badModelMessage += "Missing biomass_avg result in WEPS output result file."; |
- } |
+ File managementFile = new File("management.man"); |
+ BufferedWriter bufferWriter = Files.newBufferedWriter(managementFile.toPath()); |
+ String data = getFileContents(fileData, ".man"); |
+ bufferWriter.write(data); |
+ bufferWriter.flush(); |
+ bufferWriter.close(); |
|
- String stir_avg = getOriginalResponse("avg_all_stir"); |
- if (!stir_avg.isEmpty()) { |
- wepsMetaData.rotWeightSoilTillIntensity(Double.parseDouble(stir_avg)); |
- } else { |
- badModelRun = true; |
- badModelMessage += "##\n"; |
- badModelMessage += "Missing avg_all_stir result in WEPS output result file."; |
- } |
- |
- String wind_eros = getOriginalResponse("wind_eros"); |
- if (!wind_eros.isEmpty()) { |
- wepsMetaData.erosionRate(Double.parseDouble(wind_eros)); |
- } else { |
- badModelRun = true; |
- badModelMessage += "##\n"; |
- badModelMessage += "Missing wind_eros result in WEPS output result file."; |
- } |
- |
- wepsMetaData.errorMessages(badModelMessage); |
- |
+ try { |
+ wepsManagement.readManData(managementFile); |
+ } catch (ParserException | ServiceException ex) { |
+ Logger.getLogger(WEPSModelArchive.class.getName()).log(Level.SEVERE, null, ex); |
+ badModelRun = true; |
+ badModelMessage += "##\n"; |
+ badModelMessage += "Error parsing the WEPS management file: " + ex.getMessage(); |
} |
|
- private WEPSManagement getManagementFile(byte[] fileData) throws IOException { |
- WEPSManagement wepsManagement = new WEPSManagement(new Urls()); |
+ return wepsManagement; |
+ } |
|
- File managementFile = new File("management.man"); |
- BufferedWriter bufferWriter = Files.newBufferedWriter(managementFile.toPath()); |
- String data = getFileContents(fileData, ".man"); |
- bufferWriter.write(data); |
- bufferWriter.flush(); |
- bufferWriter.close(); |
+ private WindGenData getWindDataFile(byte[] fileData) throws IOException { |
+ WindGenData wind; |
|
- try { |
- wepsManagement.readManData(managementFile); |
- } catch (ParserException | ServiceException ex) { |
- Logger.getLogger(WEPSModelArchive.class.getName()).log(Level.SEVERE, null, ex); |
- badModelRun = true; |
- badModelMessage += "##\n"; |
- badModelMessage += "Error parsing the WEPS management file: " + ex.getMessage(); |
+ wind = new WindGenData(getFileContents(fileData, ".win")); |
+ |
+ return wind; |
+ } |
+ |
+ private CligenData getClimateDataFile(byte[] fileData) throws IOException { |
+ CligenData climate; |
+ |
+ climate = new CligenData(getFileContents(fileData, ".cli")); |
+ |
+ return climate; |
+ } |
+ |
+ private IFCFile getIFCFile(byte[] fileData) throws IOException { |
+ IFCFile requestData; |
+ |
+ requestData = new IFCFile(getFileContents(fileData, ".ifc")); |
+ |
+ return requestData; |
+ } |
+ |
+ private String getStdErrorFile(byte[] fileData) throws IOException { |
+ String fileString = null; |
+ |
+ try (ZipInputStream zin = new ZipInputStream(new ByteArrayInputStream(fileData))) { |
+ ZipEntry entry; |
+ |
+ while ((entry = zin.getNextEntry()) != null) { |
+ if (entry.getName().contains("weps.") && entry.getName().contains("stderr.txt")) { |
+ BufferedReader bReader = new BufferedReader(new InputStreamReader(zin)); |
+ StringBuilder fileContent = new StringBuilder(); |
+ String inputStr; |
+ while ((inputStr = bReader.readLine()) != null) { |
+ fileContent.append(inputStr).append(System.lineSeparator()); |
+ } |
+ fileString = fileContent.toString(); |
+ break; |
} |
- |
- return wepsManagement; |
+ } |
} |
|
- private WindGenData getWindDataFile(byte[] fileData) throws IOException { |
- WindGenData wind; |
+ return fileString; |
+ } |
|
- wind = new WindGenData(getFileContents(fileData, ".win")); |
+ private String getStdOutFile(byte[] fileData) throws IOException { |
+ String fileString = null; |
|
- return wind; |
+ try (ZipInputStream zin = new ZipInputStream(new ByteArrayInputStream(fileData))) { |
+ ZipEntry entry; |
+ |
+ while ((entry = zin.getNextEntry()) != null) { |
+ if (entry.getName().contains("weps.exe") && entry.getName().contains("stdout.txt")) { |
+ BufferedReader bReader = new BufferedReader(new InputStreamReader(zin)); |
+ StringBuilder fileContent = new StringBuilder(); |
+ String inputStr; |
+ while ((inputStr = bReader.readLine()) != null) { |
+ fileContent.append(inputStr).append(System.lineSeparator()); |
+ } |
+ fileString = fileContent.toString(); |
+ break; |
+ } |
+ } |
} |
|
- private CligenData getClimateDataFile(byte[] fileData) throws IOException { |
- CligenData climate; |
+ return fileString; |
+ } |
|
- climate = new CligenData(getFileContents(fileData, ".cli")); |
+ public class WEPSMetaData { |
|
- return climate; |
+ private double latitude; |
+ private double longitude; |
+ private String cokey; |
+ private double windEnergy; |
+ private double annualPrecip; |
+ private double rotIrrEffect; |
+ private double soilWiltPoint; |
+ private double rotWeightResBiomass; |
+ private double rotWeightSoilTillIntensity; |
+ private double rotWeightResAddition; |
+ private double aggStability; |
+ private double aggGeomDiam; |
+ private double crustStability; |
+ private double surfRockFrag; |
+ private double erosionRate; |
+ private String componentName; |
+ private double[] fractionSand; |
+ private double[] fractionSilt; |
+ private double[] fractionClay; |
+ private double albedo; |
+ private double slope_gradient; |
+ private int surface_thickness; |
+ private int num_layers; |
+ private String suid; |
+ private String errorMessages = ""; |
+ |
+ public String errorMessages() { |
+ return errorMessages; |
} |
|
- private IFCFile getIFCFile(byte[] fileData) throws IOException { |
- IFCFile requestData; |
- |
- requestData = new IFCFile(getFileContents(fileData, ".ifc")); |
- |
- return requestData; |
+ public void errorMessages(String value) { |
+ errorMessages = value; |
} |
|
- private String getStdErrorFile(byte[] fileData) throws IOException { |
- String fileString = null; |
- |
- try (ZipInputStream zin = new ZipInputStream(new ByteArrayInputStream(fileData))) { |
- ZipEntry entry; |
- |
- while ((entry = zin.getNextEntry()) != null) { |
- if (entry.getName().contains("weps.") && entry.getName().contains("stderr.txt")) { |
- BufferedReader bReader = new BufferedReader(new InputStreamReader(zin)); |
- StringBuilder fileContent = new StringBuilder(); |
- String inputStr; |
- while ((inputStr = bReader.readLine()) != null) { |
- fileContent.append(inputStr).append(System.lineSeparator()); |
- } |
- fileString = fileContent.toString(); |
- break; |
- } |
- } |
- } |
- |
- return fileString; |
+ public String suid() { |
+ return suid; |
} |
|
- private String getStdOutFile(byte[] fileData) throws IOException { |
- String fileString = null; |
- |
- try (ZipInputStream zin = new ZipInputStream(new ByteArrayInputStream(fileData))) { |
- ZipEntry entry; |
- |
- while ((entry = zin.getNextEntry()) != null) { |
- if (entry.getName().contains("weps.exe") && entry.getName().contains("stdout.txt")) { |
- BufferedReader bReader = new BufferedReader(new InputStreamReader(zin)); |
- StringBuilder fileContent = new StringBuilder(); |
- String inputStr; |
- while ((inputStr = bReader.readLine()) != null) { |
- fileContent.append(inputStr).append(System.lineSeparator()); |
- } |
- fileString = fileContent.toString(); |
- break; |
- } |
- } |
- } |
- |
- return fileString; |
+ public void suid(String value) { |
+ suid = value; |
} |
|
- public class WEPSMetaData { |
+ public double windEnergy() { |
+ return windEnergy; |
+ } |
|
- private double latitude; |
- private double longitude; |
- private String cokey; |
- private double windEnergy; |
- private double annualPrecip; |
- private double rotIrrEffect; |
- private double soilWiltPoint; |
- private double rotWeightResBiomass; |
- private double rotWeightSoilTillIntensity; |
- private double rotWeightResAddition; |
- private double aggStability; |
- private double aggGeomDiam; |
- private double crustStability; |
- private double surfRockFrag; |
- private double erosionRate; |
- private String componentName; |
- private double[] fractionSand; |
- private double[] fractionSilt; |
- private double[] fractionClay; |
- private double albedo; |
- private double slope_gradient; |
- private int surface_thickness; |
- private int num_layers; |
- private String suid; |
- private String errorMessages = ""; |
+ public double annualPrecip() { |
+ return annualPrecip; |
+ } |
|
- public String errorMessages() { |
- return errorMessages; |
- } |
+ public double rotIrrEffect() { |
+ return rotIrrEffect; |
+ } |
|
- public void errorMessages(String value) { |
- errorMessages = value; |
- } |
+ public double soilWiltPoint() { |
+ return soilWiltPoint; |
+ } |
|
- public String suid() { |
- return suid; |
- } |
+ public double rotWeightResBiomass() { |
+ return rotWeightResBiomass; |
+ } |
|
- public void suid(String value) { |
- suid = value; |
- } |
+ public double rotWeightSoilTillIntensity() { |
+ return rotWeightSoilTillIntensity; |
+ } |
|
- public double windEnergy() { |
- return windEnergy; |
- } |
+ public double rotWeightResAddition() { |
+ return rotWeightResAddition; |
+ } |
|
- public double annualPrecip() { |
- return annualPrecip; |
- } |
+ public double aggStability() { |
+ return aggStability; |
+ } |
|
- public double rotIrrEffect() { |
- return rotIrrEffect; |
- } |
+ public double aggGeomDiam() { |
+ return aggGeomDiam; |
+ } |
|
- public double soilWiltPoint() { |
- return soilWiltPoint; |
- } |
+ public double crustStability() { |
+ return crustStability; |
+ } |
|
- public double rotWeightResBiomass() { |
- return rotWeightResBiomass; |
- } |
+ public double surfRockFrag() { |
+ return surfRockFrag; |
+ } |
|
- public double rotWeightSoilTillIntensity() { |
- return rotWeightSoilTillIntensity; |
- } |
+ public double erosionRate() { |
+ return erosionRate; |
+ } |
|
- public double rotWeightResAddition() { |
- return rotWeightResAddition; |
- } |
+ public void windEnergy(double value) { |
+ windEnergy = value; |
+ } |
|
- public double aggStability() { |
- return aggStability; |
- } |
+ public void annualPrecip(double value) { |
+ annualPrecip = value; |
+ } |
|
- public double aggGeomDiam() { |
- return aggGeomDiam; |
- } |
+ public void rotIrrEffect(double value) { |
+ rotIrrEffect = value; |
+ } |
|
- public double crustStability() { |
- return crustStability; |
- } |
+ public void soilWiltPoint(double value) { |
+ soilWiltPoint = value; |
+ } |
|
- public double surfRockFrag() { |
- return surfRockFrag; |
- } |
+ public void rotWeightResBiomass(double value) { |
+ rotWeightResBiomass = value; |
+ } |
|
- public double erosionRate() { |
- return erosionRate; |
- } |
+ public void rotWeightSoilTillIntensity(double value) { |
+ rotWeightSoilTillIntensity = value; |
+ } |
|
- public void windEnergy(double value) { |
- windEnergy = value; |
- } |
+ public void rotWeightResAddition(double value) { |
+ rotWeightResAddition = value; |
+ } |
|
- public void annualPrecip(double value) { |
- annualPrecip = value; |
- } |
+ public void aggStability(double value) { |
+ aggStability = value; |
+ } |
|
- public void rotIrrEffect(double value) { |
- rotIrrEffect = value; |
- } |
+ public void aggGeomDiam(double value) { |
+ aggGeomDiam = value; |
+ } |
|
- public void soilWiltPoint(double value) { |
- soilWiltPoint = value; |
- } |
+ public void crustStability(double value) { |
+ crustStability = value; |
+ } |
|
- public void rotWeightResBiomass(double value) { |
- rotWeightResBiomass = value; |
- } |
+ public void surfRockFrag(double value) { |
+ surfRockFrag = value; |
+ } |
|
- public void rotWeightSoilTillIntensity(double value) { |
- rotWeightSoilTillIntensity = value; |
- } |
+ public void erosionRate(double value) { |
+ erosionRate = value; |
+ } |
|
- public void rotWeightResAddition(double value) { |
- rotWeightResAddition = value; |
- } |
+ public String componentName() { |
+ return componentName; |
+ } |
|
- public void aggStability(double value) { |
- aggStability = value; |
- } |
+ public double fractionSand(int index) { |
+ if ((null != fractionSand) && (index < fractionSand.length)) { |
+ return fractionSand[index]; |
+ } |
+ return Double.NaN; |
+ } |
|
- public void aggGeomDiam(double value) { |
- aggGeomDiam = value; |
- } |
+ public double fractionSilt(int index) { |
+ if ((null != fractionSilt) && (index < fractionSilt.length)) {
+ return fractionSilt[index]; |
+ } |
+ return Double.NaN; |
+ } |
|
- public void crustStability(double value) { |
- crustStability = value; |
- } |
+ public double fractionClay(int index) { |
+ if ((null != fractionClay) && (index < fractionClay.length)) {
+ return fractionClay[index]; |
+ } |
+ return Double.NaN; |
+ } |
|
- public void surfRockFrag(double value) { |
- surfRockFrag = value; |
- } |
+ public void componentName(String value) { |
+ componentName = value; |
+ } |
|
- public void erosionRate(double value) { |
- erosionRate = value; |
- } |
+ public void fractionSand(double[] value) { |
+ fractionSand = value; |
+ } |
|
- public String componentName() { |
- return componentName; |
- } |
+ public void fractionSilt(double[] value) { |
+ fractionSilt = value; |
+ } |
|
- public double fractionSand(int index) { |
- if ((null != fractionSand) && (index < fractionSand.length)) { |
- return fractionSand[index]; |
- } |
- return Double.NaN; |
- } |
+ public void fractionClay(double[] value) { |
+ fractionClay = value; |
+ } |
|
- public double fractionSilt(int index) { |
- if ((null != fractionSand) && (index < fractionSilt.length)) { |
- return fractionSilt[index]; |
- } |
- return Double.NaN; |
- } |
+ public void albedo(double value) { |
+ albedo = value; |
+ } |
|
- public double fractionClay(int index) { |
- if ((null != fractionSand) && (index < fractionClay.length)) { |
- return fractionClay[index]; |
- } |
- return Double.NaN; |
- } |
+ public void slope_gradient(double value) { |
+ slope_gradient = value; |
+ } |
|
- public void componentName(String value) { |
- componentName = value; |
- } |
+ public void surface_thickness(int value) { |
+ surface_thickness = value; |
+ } |
|
- public void fractionSand(double[] value) { |
- fractionSand = value; |
- } |
+ public void num_layers(int value) { |
+ num_layers = value; |
+ } |
|
- public void fractionSilt(double[] value) { |
- fractionSilt = value; |
- } |
+ public double albedo() { |
+ return albedo; |
+ } |
|
- public void fractionClay(double[] value) { |
- fractionClay = value; |
- } |
+ public double slope_gradient() { |
+ return slope_gradient; |
+ } |
|
- public void albedo(double value) { |
- albedo = value; |
- } |
+ public int surface_thickness() { |
+ return surface_thickness; |
+ } |
|
- public void slope_gradient(double value) { |
- slope_gradient = value; |
- } |
+ public int num_layers() { |
+ return num_layers; |
+ } |
|
- public void surface_thickness(int value) { |
- surface_thickness = value; |
- } |
+ public double latitude() { |
+ return latitude; |
+ } |
|
- public void num_layers(int value) { |
- num_layers = value; |
- } |
+ public double longitude() { |
+ return longitude; |
+ } |
|
- public double albedo() { |
- return albedo; |
- } |
+ public String cokey() { |
+ return cokey; |
+ } |
|
- public double slope_gradient() { |
- return slope_gradient; |
- } |
+ public void latitude(double value) { |
+ latitude = value; |
+ } |
|
- public int surface_thickness() { |
- return surface_thickness; |
- } |
+ public void longitude(double value) { |
+ longitude = value; |
+ } |
|
- public int num_layers() { |
- return num_layers; |
- } |
+ public void cokey(String value) { |
+ cokey = value; |
+ } |
|
- public double latitude() { |
- return latitude; |
- } |
+ public JSONArray toJSON() throws JSONException { |
+ JSONArray ret_val = new JSONArray(); |
+ JSONObject results = new JSONObject(); |
|
- public double longitude() { |
- return longitude; |
- } |
+ results.put("weps_archive_suid", wepsMetaData.suid()); |
+ results.put("annual_precipitation", wepsMetaData.annualPrecip()); |
+ results.put("wind_energy", wepsMetaData.windEnergy()); |
+ results.put("crust_stability", wepsMetaData.crustStability()); |
+ results.put("surface_rock_fragments", wepsMetaData.surfRockFrag()); |
+ results.put("aggregate_stability", wepsMetaData.aggStability()); |
+ results.put("soil_wilting_poiont", wepsMetaData.soilWiltPoint()); |
+ results.put("rotation_weighted_biomass", wepsMetaData.rotWeightResBiomass()); |
+ results.put("annual_precipitation", wepsMetaData.annualPrecip()); |
|
- public String cokey() { |
- return cokey; |
- } |
+ results.put("rotation_weighted_soil_tillage_intensity", wepsMetaData.rotWeightSoilTillIntensity()); |
+ results.put("erosion_rate", wepsMetaData.erosionRate()); |
|
- public void latitude(double value) { |
- latitude = value; |
- } |
- |
- public void longitude(double value) { |
- longitude = value; |
- } |
- |
- public void cokey(String value) { |
- cokey = value; |
- } |
- |
- public JSONArray toJSON() throws JSONException { |
- JSONArray ret_val = new JSONArray(); |
- JSONObject results = new JSONObject(); |
- |
- results.put("weps_archive_suid", wepsMetaData.suid()); |
- results.put("annual_precipitation", wepsMetaData.annualPrecip()); |
- results.put("wind_energy", wepsMetaData.windEnergy()); |
- results.put("crust_stability", wepsMetaData.crustStability()); |
- results.put("surface_rock_fragments", wepsMetaData.surfRockFrag()); |
- results.put("aggregate_stability", wepsMetaData.aggStability()); |
- results.put("soil_wilting_poiont", wepsMetaData.soilWiltPoint()); |
- results.put("rotation_weighted_biomass", wepsMetaData.rotWeightResBiomass()); |
- results.put("annual_precipitation", wepsMetaData.annualPrecip()); |
- |
- results.put("rotation_weighted_soil_tillage_intensity", wepsMetaData.rotWeightSoilTillIntensity()); |
- results.put("erosion_rate", wepsMetaData.erosionRate()); |
- |
- ret_val.put(results); |
- return ret_val; |
- } |
+ ret_val.put(results); |
+ return ret_val; |
} |
+ } |
} |