@@ -33,93 +33,85 @@ |
@Path("m/delete/1.0") |
public class V1_0 extends ModelDataService { |
|
- MongoClient mongo; |
- MongoDatabase db; |
- JSONObject results = new JSONObject(); |
+ MongoClient mongo; |
+ MongoDatabase db; |
+ JSONObject results = new JSONObject(); |
|
+ protected void open(String mongoclienturi) { |
+ MongoClientURI u = new MongoClientURI(mongoclienturi); |
+ mongo = new MongoClient(u); |
+ if (u.getDatabase() == null) { |
+ throw new RuntimeException("Missing database in mongo uri: " + mongoclienturi); |
+ } |
+ if (!u.getDatabase().equals("csip_timeseries")) { |
+ throw new RuntimeException("Can only modify csip_timeseries collection"); |
+ } |
+ db = mongo.getDatabase(u.getDatabase()); |
+ } |
|
- protected void open(String mongoclienturi) { |
- MongoClientURI u = new MongoClientURI(mongoclienturi); |
- mongo = new MongoClient(u); |
- if (u.getDatabase() == null) { |
- throw new RuntimeException("Missing database in mongo uri: " + mongoclienturi); |
- } |
- if (!u.getDatabase().equals("csip_timeseries")) { |
- throw new RuntimeException("Can only modify csip_timeseries collection"); |
- } |
- db = mongo.getDatabase(u.getDatabase()); |
+ protected void close() { |
+ mongo.close(); |
+ } |
+ |
+ @Override |
+ public void doProcess() throws Exception { |
+ String mongo_uri = getStringParam("mongo_uri"); |
+ String mongo_collection = getStringParam("mongo_collection"); |
+ JSONArray ids = getJSONArrayParam("ids", null); |
+ |
+ if (ids != null) { |
+ deleteIds(mongo_uri, mongo_collection, ids); |
+ } else { |
+ // If no ids specified, then delete the collection. |
+ deleteCollection(mongo_uri, mongo_collection); |
+ } |
+ } |
+ |
+ public void deleteCollection(String mongo_uri, String mongo_collection) throws JSONException { |
+ open(mongo_uri); |
+ MongoCollection<Document> collection = db.getCollection(mongo_collection); |
+ collection.drop(); |
+ results.put("deleted_collection", mongo_collection); |
+ close(); |
+ } |
+ |
+ public void deleteIds(String mongo_uri, String mongo_collection, JSONArray ids) throws JSONException { |
+ open(mongo_uri); |
+ MongoCollection<Document> collection = db.getCollection(mongo_collection); |
+ |
+        List<String> ids_list = new ArrayList<>(); |
+ for (int i = 0; i < ids.length(); i++) { |
+ ids_list.add(ids.getString(i)); |
} |
|
+        List<String> deleted_ids = new ArrayList<>(); |
+ FindIterable<Document> c = collection.find(); |
+ for (Document finddoc : c) { |
+ String find_id = (String) finddoc.get("_id"); |
+ if (ids_list.contains(find_id)) { |
+ LOG.info("Deleting existing doc"); |
+ collection.deleteOne(finddoc); |
+ deleted_ids.add(find_id); |
+ } |
+ } |
+ results.put("deleted_document_ids", new JSONArray(deleted_ids)); |
+ close(); |
+ } |
|
- protected void close() { |
- mongo.close(); |
- } |
+ @Override |
+ protected void postProcess() throws Exception { |
+ putResult("result", results); |
+ } |
|
+ public static void main(String[] args) throws JSONException { |
+ V1_0 timeseries = new V1_0(); |
+ JSONArray delete_ids = new JSONArray(); |
+ delete_ids.put("test_ref"); |
+ timeseries.deleteIds("mongodb://eds0.engr.colostate.edu:27017/csip_timeseries", "test_coll", delete_ids); |
+ timeseries.deleteCollection("mongodb://eds0.engr.colostate.edu:27017/csip_timeseries", "test_coll"); |
+ } |
|
- @Override |
- public void doProcess() throws Exception { |
- String mongo_uri = getStringParam("mongo_uri"); |
- String mongo_collection = getStringParam("mongo_collection"); |
- JSONArray ids = getJSONArrayParam("ids", null); |
- |
- if (ids != null) { |
- deleteIds(mongo_uri, mongo_collection, ids); |
- } else { |
- // If no ids specified, then delete the collection. |
- deleteCollection(mongo_uri, mongo_collection); |
- } |
- } |
- |
- |
- public void deleteCollection(String mongo_uri, String mongo_collection) throws JSONException { |
- open(mongo_uri); |
- MongoCollection<Document> collection = db.getCollection(mongo_collection); |
- collection.drop(); |
- results.put("deleted_collection", mongo_collection); |
- close(); |
- } |
- |
- |
- public void deleteIds(String mongo_uri, String mongo_collection, JSONArray ids) throws JSONException { |
- open(mongo_uri); |
- MongoCollection<Document> collection = db.getCollection(mongo_collection); |
- |
- List<String> ids_list = new ArrayList(); |
- for (int i = 0; i < ids.length(); i++) { |
- ids_list.add(ids.getString(i)); |
- } |
- |
- List<String> deleted_ids = new ArrayList(); |
- FindIterable<Document> c = collection.find(); |
- for (Document finddoc : c) { |
- String find_id = (String) finddoc.get("_id"); |
- if (ids_list.contains(find_id)) { |
- LOG.info("Deleting existing doc"); |
- collection.deleteOne(finddoc); |
- deleted_ids.add(find_id); |
- } |
- } |
- results.put("deleted_document_ids", new JSONArray(deleted_ids)); |
- close(); |
- } |
- |
- |
- @Override |
- protected void postProcess() throws Exception { |
- putResult("result", results); |
- } |
- |
- |
- public static void main(String[] args) throws JSONException { |
- V1_0 timeseries = new V1_0(); |
- JSONArray delete_ids = new JSONArray(); |
- delete_ids.put("test_ref"); |
- timeseries.deleteIds("mongodb://eds0.engr.colostate.edu:27017/csip_timeseries", "test_coll", delete_ids); |
- timeseries.deleteCollection("mongodb://eds0.engr.colostate.edu:27017/csip_timeseries", "test_coll"); |
- } |
- |
- |
- public static String[] toStringArray(List<String> data) { |
- return (String[]) data.stream().map(String::toString).toArray(); |
- } |
+ public static String[] toStringArray(List<String> data) { |
+        return data.toArray(new String[0]); |
+ } |
} |
@@ -29,7 +29,7 @@ |
import org.codehaus.jettison.json.JSONObject; |
import utils.TimeseriesTable; |
import csip.annotations.*; |
- |
+ |
/** |
* Mongodb timeseries data insertion |
* |
@@ -37,141 +37,137 @@ |
*/ |
@Name("Timeseries insert") |
@Description("Update a timeseries collection in mongo.") |
-@Author(org="CSU") |
+@Author(org = "CSU") |
@License(License.MIT) |
|
@Path("m/insert/1.0") |
public class V1_0 extends ModelDataService { |
|
- MongoClient mongo; |
- MongoDatabase db; |
- JSONObject results; |
+ MongoClient mongo; |
+ MongoDatabase db; |
+ JSONObject results; |
|
- protected void open(String mongoclienturi) { |
- MongoClientURI u = new MongoClientURI(mongoclienturi); |
- mongo = new MongoClient(u); |
- if (u.getDatabase() == null) { |
- throw new RuntimeException("Missing database in mongo uri: " + mongoclienturi); |
- } |
- if (!u.getDatabase().equals("csip_timeseries")) { |
- throw new RuntimeException("Can only modify csip_timeseries collection"); |
- } |
- db = mongo.getDatabase(u.getDatabase()); |
+ protected void open(String mongoclienturi) { |
+ MongoClientURI u = new MongoClientURI(mongoclienturi); |
+ mongo = new MongoClient(u); |
+ if (u.getDatabase() == null) { |
+ throw new RuntimeException("Missing database in mongo uri: " + mongoclienturi); |
+ } |
+ if (!u.getDatabase().equals("csip_timeseries")) { |
+ throw new RuntimeException("Can only modify csip_timeseries collection"); |
+ } |
+ db = mongo.getDatabase(u.getDatabase()); |
+ } |
+ |
+ protected void close() { |
+ mongo.close(); |
+ } |
+ |
+ @Override |
+ public void doProcess() throws ServiceException, JSONException, IOException, FileNotFoundException, ParseException { |
+ String mongoUri = getStringParam("mongo_uri"); |
+ String mongoCollection = getStringParam("mongo_collection"); |
+ String docId = getStringParam("id", null); |
+ String data = getStringParam("data"); |
+ File dataFile = getWorkspaceFile(data); |
+ String dataCol = getStringParam("data_column", null); |
+ JSONObject metadata = getJSONParam("metadata"); |
+ String dataFmt = getStringParam("date_format"); |
+ JSONArray locationArray = getJSONArrayParam("location", null); |
+ List<Double> location = null; |
+ if (locationArray != null) { |
+ location = Arrays.asList(Double.parseDouble(locationArray.get(0).toString()), Double.parseDouble(locationArray.get(1).toString())); |
+ } |
+ String chunkStrategy = getStringParam("chunk_strategy", TimeseriesTable.CHUNK_YEAR); |
+ run(mongoUri, mongoCollection, docId, dataFile, dataCol, metadata, dataFmt, location, chunkStrategy, LOG); |
+ } |
+ |
+ /** |
+ * |
+ * @param mongoUri |
+ * @param mongoCollection |
+ * @param docId |
+ * @param dataFile |
+ * @param dataCol |
+ * @param metadata |
+ * @param dataFmt |
+ * @param location |
+ * @param LOG |
+ * @throws FileNotFoundException |
+ * @throws IOException |
+ * @throws JSONException |
+ * @throws ParseException |
+ * @throws ServiceException |
+ */ |
+ void run(String mongoUri, String mongoCollection, String docId, File dataFile, String dataCol, JSONObject metadata, |
+ String dataFmt, List<Double> location, String chunkStrategy, SessionLogger LOG) |
+ throws FileNotFoundException, IOException, JSONException, ParseException, ServiceException { |
+ |
+ open(mongoUri); |
+ MongoCollection<Document> collection = db.getCollection(mongoCollection); |
+ |
+ SimpleDateFormat ft = new SimpleDateFormat(dataFmt); |
+ ft.setTimeZone(TimeZone.getTimeZone("UTC")); |
+ |
+ TimeseriesTable tbl = new TimeseriesTable(); |
+ |
+ // Insert data |
+ tbl.readCSV(dataFile, ft, dataCol, chunkStrategy); |
+ |
+ // Create document with location metadata |
+ Document locationDoc = new Document(); |
+ if (docId != null) { |
+ locationDoc.append("_id", docId); |
} |
|
- |
- protected void close() { |
- mongo.close(); |
+ // Use a subdocument for the metadata. |
+ Document metadataDoc = new Document(); |
+        Iterator<?> itr = metadata.keys(); |
+ while (itr.hasNext()) { |
+ String key = (String) itr.next(); |
+ metadataDoc.put(key, metadata.get(key)); |
+ } |
+ locationDoc.append("metadata", metadataDoc); |
+ locationDoc.append("header", tbl.header); |
+ locationDoc.append("date_fmt", dataFmt); |
+ if (location != null) { |
+ locationDoc.append("location", new Document().append("type", "Point").append("coordinates", location)); |
} |
|
+ if (docId == null) { |
+ collection.insertOne(locationDoc); |
+ } else { |
+ Document query = new Document().append("_id", docId); |
+ UpdateOptions options = new UpdateOptions().upsert(true); |
+ collection.replaceOne(query, locationDoc, options); |
+ } |
+ results = new JSONObject(); |
+ results.put("updated_document_id", locationDoc.get("_id").toString()); |
|
- @Override |
- public void doProcess() throws ServiceException, JSONException, IOException, FileNotFoundException, ParseException { |
- String mongoUri = getStringParam("mongo_uri"); |
- String mongoCollection = getStringParam("mongo_collection"); |
- String docId = getStringParam("id", null); |
- String data = getStringParam("data"); |
- File dataFile = getWorkspaceFile(data); |
- String dataCol = getStringParam("data_column", null); |
- JSONObject metadata = getJSONParam("metadata"); |
- String dataFmt = getStringParam("date_format"); |
- JSONArray locationArray = getJSONArrayParam("location", null); |
- List<Double> location = null; |
- if (locationArray != null) { |
- location = Arrays.asList(Double.parseDouble(locationArray.get(0).toString()), Double.parseDouble(locationArray.get(1).toString())); |
- } |
- String chunkStrategy = getStringParam("chunk_strategy", TimeseriesTable.CHUNK_YEAR); |
- run(mongoUri, mongoCollection, docId, dataFile, dataCol, metadata, dataFmt, location, chunkStrategy, LOG); |
- } |
- |
- /** |
- * |
- * @param mongoUri |
- * @param mongoCollection |
- * @param docId |
- * @param dataFile |
- * @param dataCol |
- * @param metadata |
- * @param dataFmt |
- * @param location |
- * @param LOG |
- * @throws FileNotFoundException |
- * @throws IOException |
- * @throws JSONException |
- * @throws ParseException |
- * @throws ServiceException |
- */ |
- void run(String mongoUri, String mongoCollection, String docId, File dataFile, String dataCol, JSONObject metadata, |
- String dataFmt, List<Double> location, String chunkStrategy, SessionLogger LOG) |
- throws FileNotFoundException, IOException, JSONException, ParseException, ServiceException { |
+ tbl.insertData(collection, locationDoc); |
|
- open(mongoUri); |
- MongoCollection<Document> collection = db.getCollection(mongoCollection); |
+ close(); |
+ } |
|
- SimpleDateFormat ft = new SimpleDateFormat(dataFmt); |
- ft.setTimeZone(TimeZone.getTimeZone("UTC")); |
+ @Override |
+ protected void postProcess() { |
+ putResult("result", results); |
+ } |
|
- TimeseriesTable tbl = new TimeseriesTable(); |
- |
- // Insert data |
- tbl.readCSV(dataFile, ft, dataCol, chunkStrategy); |
- |
- // Create document with location metadata |
- Document locationDoc = new Document(); |
- if (docId != null) { |
- locationDoc.append("_id", docId); |
- } |
+ public static void main(String[] args) throws FileNotFoundException, IOException, JSONException, ParseException, ServiceException { |
+ V1_0 timeseries = new V1_0(); |
+ JSONObject metadata = new JSONObject(); |
+ metadata.put("title", "main test"); |
+ metadata.put("author", "Dave"); |
+ File data_file = new File("/home/dave/work/csip-timeseries/test/service_tests/m/insert/V1_0/insert-req/data_append.csv"); |
+ SessionLogger LOG = new SessionLogger(); |
+ List<Double> location = Arrays.asList(-105.0, 40.5); |
+ timeseries.run("mongodb://eds0.engr.colostate.edu:27017/csip_timeseries", "test_coll", "test_ref", |
+ data_file, "Total Precip", metadata, "yyyy-MM-dd", location, TimeseriesTable.CHUNK_YEAR, LOG); |
|
- // Use a subdocument for the metadata. |
- Document metadataDoc = new Document(); |
- Iterator itr = metadata.keys(); |
- while (itr.hasNext()) { |
- String key = (String)itr.next(); |
- metadataDoc.put(key, metadata.get(key)); |
- } |
- locationDoc.append("metadata", metadataDoc); |
- locationDoc.append("header", tbl.header); |
- locationDoc.append("date_fmt", dataFmt); |
- if (location != null) { |
- locationDoc.append("location", new Document().append("type", "Point").append("coordinates", location)); |
- } |
- |
- if (docId == null) { |
- collection.insertOne(locationDoc); |
- } |
- else { |
- Document query = new Document().append("_id", docId); |
- UpdateOptions options = new UpdateOptions().upsert(true); |
- collection.replaceOne(query, locationDoc, options); |
- } |
- results = new JSONObject(); |
- results.put("updated_document_id", locationDoc.get("_id").toString()); |
- |
- tbl.insertData(collection, locationDoc); |
- |
- close(); |
- } |
- |
- |
- @Override |
- protected void postProcess() { |
- putResult("result", results); |
- } |
- |
- public static void main(String [] args) throws FileNotFoundException, IOException, JSONException, ParseException, ServiceException { |
- V1_0 timeseries = new V1_0(); |
- JSONObject metadata = new JSONObject(); |
- metadata.put("title", "main test"); |
- metadata.put("author", "Dave"); |
- File data_file = new File("/home/dave/work/csip-timeseries/test/service_tests/m/insert/V1_0/insert-req/data_append.csv"); |
- SessionLogger LOG = new SessionLogger(); |
- List<Double> location = Arrays.asList(-105.0, 40.5); |
- timeseries.run("mongodb://eds0.engr.colostate.edu:27017/csip_timeseries", "test_coll", "test_ref", |
- data_file, "Total Precip", metadata, "yyyy-MM-dd", location, TimeseriesTable.CHUNK_YEAR, LOG); |
- |
- // Append some data. New data start at index 942. |
- data_file = new File("/home/dave/work/csip-timeseries/test/service_tests/m/insert/V1_0/insert-req/data.csv"); |
- timeseries.run("mongodb://eds0.engr.colostate.edu:27017/csip_timeseries", "test_coll", "test_ref", |
- data_file, "Total Precip", metadata, "yyyy-MM-dd", location, TimeseriesTable.CHUNK_YEAR, LOG); |
- } |
+ // Append some data. New data start at index 942. |
+ data_file = new File("/home/dave/work/csip-timeseries/test/service_tests/m/insert/V1_0/insert-req/data.csv"); |
+ timeseries.run("mongodb://eds0.engr.colostate.edu:27017/csip_timeseries", "test_coll", "test_ref", |
+ data_file, "Total Precip", metadata, "yyyy-MM-dd", location, TimeseriesTable.CHUNK_YEAR, LOG); |
+ } |
} |
@@ -45,308 +45,308 @@ |
@Path("m/query/1.0") |
public class V1_0 extends ModelDataService { |
|
- MongoClient mongo; |
- MongoDatabase db; |
+ MongoClient mongo; |
+ MongoDatabase db; |
|
- JSONArray results = new JSONArray(); |
+ JSONArray results = new JSONArray(); |
|
+ protected void open(String mongoclienturi) { |
+ MongoClientURI u = new MongoClientURI(mongoclienturi); |
+ mongo = new MongoClient(u); |
+ if (u.getDatabase() == null) { |
+ throw new RuntimeException("Missing database in mongo uri: " + mongoclienturi); |
+ } |
+ db = mongo.getDatabase(u.getDatabase()); |
+ } |
|
- protected void open(String mongoclienturi) { |
- MongoClientURI u = new MongoClientURI(mongoclienturi); |
- mongo = new MongoClient(u); |
- if (u.getDatabase() == null) { |
- throw new RuntimeException("Missing database in mongo uri: " + mongoclienturi); |
- } |
- db = mongo.getDatabase(u.getDatabase()); |
+ protected void close() { |
+ mongo.close(); |
+ } |
+ |
+ @Override |
+ public void doProcess() throws Exception { |
+ String mongo_uri = getStringParam("mongo_uri"); |
+ String mongo_collection = getStringParam("mongo_collection"); |
+ String start_date = getStringParam("start_date", null); |
+ String end_date = getStringParam("end_date", null); |
+ boolean metadata_only = getBooleanParam("metadata_only", true); |
+ boolean compute_stats = getBooleanParam("compute_stats", false); |
+ boolean compute_stats_local = getBooleanParam("compute_stats_local", false); |
+ String search_id = getStringParam("search_id", null); |
+ JSONObject search_feature = getJSONParam("search_feature", null); |
+ |
+ run(mongo_uri, mongo_collection, search_feature, search_id, start_date, end_date, metadata_only, compute_stats, compute_stats_local); |
+ } |
+ |
+ @Override |
+ protected void postProcess() throws Exception { |
+ putResult("result", results); |
+ } |
+ |
+ void run(String mongo_uri, String mongo_collection, JSONObject search_feature, String search_id, |
+ String start_date_str, String end_date_str, boolean metadata_only, boolean compute_stats, boolean compute_stats_local) throws JSONException, ServiceException { |
+ |
+ open(mongo_uri); |
+ MongoCollection<Document> collection = db.getCollection(mongo_collection); |
+ |
+ SimpleDateFormat ft = new SimpleDateFormat("yyyy-MM-dd"); |
+ ft.setTimeZone(TimeZone.getTimeZone("UTC")); |
+ Date start_date = null; |
+ Date end_date = null; |
+ if (start_date_str != null) { |
+ try { |
+ start_date = ft.parse(start_date_str); |
+                end_date = (end_date_str == null) ? null : ft.parse(end_date_str); |
+ } catch (ParseException ex) { |
+ throw new ServiceException(ex); |
+ } |
} |
|
+ Document locationsOnly = new Document().append("location_id", new Document().append("$exists", false)); |
+ if (search_feature == null) { |
+ FindIterable<Document> c; |
+ if (search_id == null) { |
+ // No search and no location filter, so get all docs in collection |
+ c = collection.find(locationsOnly); |
+ } else { |
+ // Search by document ID. |
+                // Check for object ID (will be a hexadecimal string) |
+ boolean id_test = search_id.matches("\\p{XDigit}+"); |
+ if (id_test) { |
+ ObjectId search_objid = new ObjectId(search_id); |
+ c = collection.find(Filters.eq("_id", search_objid)); |
+ } else { |
+ // Simple search using string |
+ c = collection.find(Filters.eq("_id", search_id)); |
+ } |
+ } |
+ for (Document locationDoc : c) { |
+ JSONObject res = getResult(collection, locationDoc, start_date, end_date, metadata_only, compute_stats, compute_stats_local); |
+ results.put(res); |
+ } |
+ } else { |
+ JSONArray features = search_feature.getJSONArray("features"); |
+ for (int i = 0; i < features.length(); i++) { |
+ JSONObject feat = features.getJSONObject(i); |
+ List<Bson> agg_match = null; |
+ try { |
+ agg_match = getMatch(feat, search_id); |
+ } catch (IOException ex) { |
+ throw new ServiceException(ex); |
+ } |
+ AggregateIterable<Document> c = collection.aggregate(agg_match); |
+ for (Document locationDoc : c) { |
+ JSONObject res = getResult(collection, locationDoc, start_date, end_date, metadata_only, compute_stats, compute_stats_local); |
+ results.put(res); |
+ } |
+ } |
+ } |
+ close(); |
+ } |
|
- protected void close() { |
- mongo.close(); |
+ private JSONObject getResult(MongoCollection<Document> c, Document locationDoc, Date start_date, Date end_date, |
+ boolean metadata_only, boolean compute_stats, boolean compute_stats_local) throws JSONException { |
+ String date_fmt = (String) locationDoc.get("date_fmt"); |
+ if (date_fmt == null) { |
+ date_fmt = "yyyy-MM-dd"; |
+ } |
+ SimpleDateFormat ft = new SimpleDateFormat(date_fmt); |
+ ft.setTimeZone(TimeZone.getTimeZone("UTC")); |
+ |
+ List<String> header = (ArrayList) locationDoc.get("header"); |
+ List<List<Object>> data = (ArrayList) locationDoc.get("data"); |
+        List<List<Object>> filtered = new ArrayList<>(); |
+ JSONObject statResult = new JSONObject(); |
+ JSONObject statResultLocal = new JSONObject(); |
+ |
+ if (!metadata_only) { |
+ Document datesQuery = new Document(); |
+ if ((start_date != null || end_date != null)) { |
+ datesQuery = new Document(); |
+ if (start_date != null) { |
+ datesQuery.append("$gte", start_date); |
+ } |
+ if (end_date != null) { |
+ datesQuery.append("$lte", end_date); |
+ } |
+ datesQuery = new Document("data.0", datesQuery); |
+ } |
+ |
+ Document project = new Document( |
+ "date", new Document("$arrayElemAt", Arrays.asList("$data", 0)) |
+ ); |
+ for (int i = 1; i < header.size(); i++) { |
+ project.append(header.get(i), new Document("$arrayElemAt", Arrays.asList("$data", i))); |
+ } |
+ |
+ Document group = new Document("_id", "big").append("data", new Document("$push", "$data")); |
+ |
+ // Export for robomongo |
+ List<Bson> aggList = Arrays.asList( |
+ Aggregates.match(new Document("location_id", locationDoc.getString("_id"))), |
+ Aggregates.unwind("$data"), |
+ //Aggregates.project(project) |
+ Aggregates.match(datesQuery), |
+ // Wind array back |
+ new Document("$group", group) |
+ ); |
+ |
+ AggregateIterable<Document> iter = c.aggregate(aggList).allowDiskUse(true); |
+ for (Document d : iter) { |
+ filtered = (List<List<Object>>) d.get("data"); |
+ // Fix date formatting |
+ for (List<Object> objList : filtered) { |
+ Date dt = (Date) objList.get(0); |
+ objList.set(0, ft.format(dt)); |
+ } |
+ } |
+ |
+ if (compute_stats || compute_stats_local) { |
+ Document statsDoc = new Document("_id", null); |
+ List<String> stats = Arrays.asList("min", "max", "avg", "stdDevPop"); |
+ for (String stat : stats) { |
+ for (int i = 1; i < header.size(); i++) { |
+ statsDoc.append(header.get(i) + "_" + stat, new Document("$" + stat, "$" + header.get(i))); |
+ } |
+ } |
+ |
+ aggList = Arrays.asList( |
+ Aggregates.match(new Document("_id", locationDoc.getString("_id"))), |
+ Aggregates.unwind("$data"), |
+ Aggregates.match(datesQuery), |
+ Aggregates.project(project), |
+ new Document("$group", statsDoc) |
+ ); |
+ if (compute_stats) { |
+ iter = c.aggregate(aggList).allowDiskUse(true); |
+ for (Document d : iter) { |
+ for (String stat : stats) { |
+ for (int i = 1; i < header.size(); i++) { |
+ String key = header.get(i) + "_" + stat; |
+ statResult.put(key, d.get(key)); |
+ } |
+ } |
+ } |
+ } |
+ |
+ // Add locally computed stats |
+ if (compute_stats_local) { |
+ for (List<Object> objList : filtered) { |
+ for (int i = 1; i < header.size(); i++) { |
+ double[] vals = new double[filtered.size()]; |
+ for (int j = 0; j < filtered.size(); j++) { |
+ vals[j] = (double) filtered.get(j).get(i); |
+ } |
+ for (String stat : stats) { |
+ String key = header.get(i) + "_" + stat; |
+ if ("min".equals(stat)) { |
+ statResultLocal.put(key, oms3.util.Statistics.min(vals)); |
+ } |
+ if ("max".equals(stat)) { |
+ statResultLocal.put(key, oms3.util.Statistics.max(vals)); |
+ } |
+ if ("avg".equals(stat)) { |
+ statResultLocal.put(key, oms3.util.Statistics.mean(vals)); |
+ } |
+ if ("stdDevPop".equals(stat)) { |
+ statResultLocal.put(key, oms3.util.Statistics.stddev(vals)); |
+ } |
+ } |
+ } |
+ } |
+ } |
+ } |
} |
|
+ JSONObject res = new JSONObject(); |
+ res.put("_id", locationDoc.get("_id")); |
+ Document location = (Document) locationDoc.get("location"); |
+ if (location != null) { |
+ List<Double> coords = (List<Double>) location.get("coordinates"); |
+ res.put("location", coords); |
+ } |
+ res.put("header", header); |
+ if (!metadata_only) { |
+ res.put("data", filtered); |
+ } |
+ Document metadata = (Document) locationDoc.get("metadata"); |
+ res.put("metadata", new JSONObject(metadata.toJson())); |
|
- @Override |
- public void doProcess() throws Exception { |
- String mongo_uri = getStringParam("mongo_uri"); |
- String mongo_collection = getStringParam("mongo_collection"); |
- String start_date = getStringParam("start_date", null); |
- String end_date = getStringParam("end_date", null); |
- boolean metadata_only = getBooleanParam("metadata_only", true); |
- boolean compute_stats = getBooleanParam("compute_stats", false); |
- boolean compute_stats_local = getBooleanParam("compute_stats_local", false); |
- String search_id = getStringParam("search_id", null); |
- JSONObject search_feature = getJSONParam("search_feature", null); |
- |
- run(mongo_uri, mongo_collection, search_feature, search_id, start_date, end_date, metadata_only, compute_stats, compute_stats_local); |
+ if (compute_stats) { |
+ res.put("statistics", statResult); |
+ res.put("statistics_local", statResultLocal); |
} |
|
+ return res; |
+ } |
|
- @Override |
- protected void postProcess() throws Exception { |
- putResult("result", results); |
+ public List<Bson> getMatch(JSONObject feat, String search_id) throws JSONException, IOException { |
+ String geom = feat.getJSONObject("geometry").toString(); |
+ Document match_arg = new Document("location", |
+ new Document("$geoWithin", |
+ new Document("$geometry", Document.parse(geom)) |
+ ) |
+ ); |
+ if (search_id != null) { |
+            // Check for object ID (will be a hexadecimal string) |
+ boolean id_test = search_id.matches("\\p{XDigit}+"); |
+ if (id_test) { |
+ ObjectId search_objid = new ObjectId(search_id); |
+ match_arg.put("_id", search_objid); |
+ } else { |
+ match_arg.put("_id", search_id); |
+ } |
} |
+ Document new_match = new Document("$match", match_arg); |
+ List<Bson> agg_match = new ArrayList<>(); |
+ agg_match.add(new_match); |
+ return agg_match; |
+ } |
|
- |
- void run(String mongo_uri, String mongo_collection, JSONObject search_feature, String search_id, |
- String start_date_str, String end_date_str, boolean metadata_only, boolean compute_stats, boolean compute_stats_local) throws JSONException, ServiceException { |
- |
- open(mongo_uri); |
- MongoCollection<Document> collection = db.getCollection(mongo_collection); |
- |
- SimpleDateFormat ft = new SimpleDateFormat("yyyy-MM-dd"); |
- ft.setTimeZone(TimeZone.getTimeZone("UTC")); |
- Date start_date = null; |
- Date end_date = null; |
- if (start_date_str != null) { |
- try { |
- start_date = ft.parse(start_date_str); |
- end_date = ft.parse(end_date_str); |
- } catch (ParseException ex) { |
- throw new ServiceException(ex); |
- } |
- } |
- |
- Document locationsOnly = new Document().append("location_id", new Document().append("$exists", false)); |
- if (search_feature == null) { |
- FindIterable<Document> c; |
- if (search_id == null) { |
- // No search and no location filter, so get all docs in collection |
- c = collection.find(locationsOnly); |
- } else { |
- // Search by document ID. |
- // Check for object ID (will be a hexidecimal string) |
- boolean id_test = search_id.matches("\\p{XDigit}+"); |
- if (id_test) { |
- ObjectId search_objid = new ObjectId(search_id); |
- c = collection.find(Filters.eq("_id", search_objid)); |
- } else { |
- // Simple search using string |
- c = collection.find(Filters.eq("_id", search_id)); |
- } |
- } |
- for (Document locationDoc : c) { |
- JSONObject res = getResult(collection, locationDoc, start_date, end_date, metadata_only, compute_stats, compute_stats_local); |
- results.put(res); |
- } |
- } else { |
- JSONArray features = search_feature.getJSONArray("features"); |
- for (int i = 0; i < features.length(); i++) { |
- JSONObject feat = features.getJSONObject(i); |
- List<Bson> agg_match = null; |
- try { |
- agg_match = getMatch(feat, search_id); |
- } catch (IOException ex) { |
- throw new ServiceException(ex); |
- } |
- AggregateIterable<Document> c = collection.aggregate(agg_match); |
- for (Document locationDoc : c) { |
- JSONObject res = getResult(collection, locationDoc, start_date, end_date, metadata_only, compute_stats, compute_stats_local); |
- results.put(res); |
- } |
- } |
- } |
- close(); |
- } |
- |
- |
- private JSONObject getResult(MongoCollection<Document> c, Document locationDoc, Date start_date, Date end_date, |
- boolean metadata_only, boolean compute_stats, boolean compute_stats_local) throws JSONException { |
- String date_fmt = (String) locationDoc.get("date_fmt"); |
- if (date_fmt == null) { |
- date_fmt = "yyyy-MM-dd"; |
- } |
- SimpleDateFormat ft = new SimpleDateFormat(date_fmt); |
- ft.setTimeZone(TimeZone.getTimeZone("UTC")); |
- |
- List<String> header = (ArrayList) locationDoc.get("header"); |
- List<List<Object>> data = (ArrayList) locationDoc.get("data"); |
- List<List<Object>> filtered = new ArrayList(); |
- JSONObject statResult = new JSONObject(); |
- JSONObject statResultLocal = new JSONObject(); |
- |
- if (!metadata_only) { |
- Document datesQuery = new Document(); |
- if ((start_date != null || end_date != null)) { |
- datesQuery = new Document(); |
- if (start_date != null) { |
- datesQuery.append("$gte", start_date); |
- } |
- if (end_date != null) { |
- datesQuery.append("$lte", end_date); |
- } |
- datesQuery = new Document("data.0", datesQuery); |
- } |
- |
- Document project = new Document( |
- "date", new Document("$arrayElemAt", Arrays.asList("$data", 0)) |
- ); |
- for (int i=1; i<header.size(); i++) { |
- project.append(header.get(i), new Document("$arrayElemAt", Arrays.asList("$data", i))); |
- } |
- |
- Document group = new Document("_id", "big").append("data", new Document("$push", "$data")); |
- |
- // Export for robomongo |
- List<Bson> aggList = Arrays.asList( |
- Aggregates.match(new Document("location_id", locationDoc.getString("_id"))), |
- Aggregates.unwind("$data"), |
- //Aggregates.project(project) |
- Aggregates.match(datesQuery), |
- // Wind array back |
- new Document("$group", group) |
- ); |
- |
- AggregateIterable<Document> iter = c.aggregate(aggList).allowDiskUse(true); |
- for (Document d: iter) { |
- filtered = (List<List<Object>>) d.get("data"); |
- // Fix date formatting |
- for (List<Object> objList: filtered) { |
- Date dt = (Date)objList.get(0); |
- objList.set(0, ft.format(dt)); |
- } |
- } |
- |
- if (compute_stats || compute_stats_local) { |
- Document statsDoc = new Document("_id", null); |
- List<String> stats = Arrays.asList("min", "max", "avg", "stdDevPop"); |
- for (String stat: stats) { |
- for (int i=1; i<header.size(); i++) { |
- statsDoc.append(header.get(i) + "_" + stat, new Document("$" + stat, "$" + header.get(i))); |
- } |
- } |
- |
- aggList = Arrays.asList( |
- Aggregates.match(new Document("_id", locationDoc.getString("_id"))), |
- Aggregates.unwind("$data"), |
- Aggregates.match(datesQuery), |
- Aggregates.project(project), |
- new Document("$group", statsDoc) |
- ); |
- if (compute_stats) { |
- iter = c.aggregate(aggList).allowDiskUse(true); |
- for (Document d: iter) { |
- for (String stat: stats) { |
- for (int i=1; i<header.size(); i++) { |
- String key = header.get(i) + "_" + stat; |
- statResult.put(key, d.get(key)); |
- } |
- } |
- } |
- } |
- |
- // Add locally computed stats |
- if (compute_stats_local) { |
- for (List<Object> objList: filtered) { |
- for (int i=1; i<header.size(); i++) { |
- double[] vals = new double[filtered.size()]; |
- for (int j=0; j<filtered.size(); j++) { |
- vals[j] = (double) filtered.get(j).get(i); |
- } |
- for (String stat : stats) { |
- String key = header.get(i) + "_" + stat; |
- if ("min".equals(stat)) statResultLocal.put(key, oms3.util.Statistics.min(vals)); |
- if ("max".equals(stat)) statResultLocal.put(key, oms3.util.Statistics.max(vals)); |
- if ("avg".equals(stat)) statResultLocal.put(key, oms3.util.Statistics.mean(vals)); |
- if ("stdDevPop".equals(stat)) statResultLocal.put(key, oms3.util.Statistics.stddev(vals)); |
- } |
- } |
- } |
- } |
- } |
- } |
- |
- JSONObject res = new JSONObject(); |
- res.put("_id", locationDoc.get("_id")); |
- Document location = (Document) locationDoc.get("location"); |
- if (location != null) { |
- List<Double> coords = (List<Double>) location.get("coordinates"); |
- res.put("location", coords); |
- } |
- res.put("header", header); |
- if (!metadata_only) { |
- res.put("data", filtered); |
- } |
- Document metadata = (Document) locationDoc.get("metadata"); |
- res.put("metadata", new JSONObject(metadata.toJson())); |
- |
- if (compute_stats) { |
- res.put("statistics", statResult); |
- res.put("statistics_local", statResultLocal); |
- } |
- |
- return res; |
- } |
- |
- |
- public List<Bson> getMatch(JSONObject feat, String search_id) throws JSONException, IOException { |
- String geom = feat.getJSONObject("geometry").toString(); |
- Document match_arg = new Document("location", |
- new Document("$geoWithin", |
- new Document("$geometry", Document.parse(geom)) |
- ) |
- ); |
- if (search_id != null) { |
- // Check for object ID (will be a hexidecimal string) |
- boolean id_test = search_id.matches("\\p{XDigit}+"); |
- if (id_test) { |
- ObjectId search_objid = new ObjectId(search_id); |
- match_arg.put("_id", search_objid); |
- } else { |
- match_arg.put("_id", search_id); |
- } |
- } |
- Document new_match = new Document("$match", match_arg); |
- List<Bson> agg_match = new ArrayList<>(); |
- agg_match.add(new_match); |
- return agg_match; |
- } |
- |
- |
- public static void main(String[] args) throws FileNotFoundException, IOException, JSONException, ParseException, ServiceException { |
- V1_0 timeseries = new V1_0(); |
- JSONObject searchFeature = new JSONObject("{\n" |
- + " \"type\": \"FeatureCollection\",\n" |
- + " \"features\": [\n" |
- + " {\n" |
- + " \"geometry\": {\n" |
- + " \"type\": \"MultiPolygon\",\n" |
- + " \"coordinates\": [\n" |
- + " [\n" |
- + " [\n" |
- + " [\n" |
- + " -105.09928580027,\n" |
- + " 40.701406829918\n" |
- + " ],\n" |
- + " [\n" |
- + " -105.09447928172,\n" |
- + " 40.365324016903\n" |
- + " ],\n" |
- + " [\n" |
- + " -104.72849722611,\n" |
- + " 40.358522384503\n" |
- + " ],\n" |
- + " [\n" |
- + " -104.72437735306,\n" |
- + " 40.701927386425\n" |
- + " ],\n" |
- + " [\n" |
- + " -105.09928580027,\n" |
- + " 40.701406829918\n" |
- + " ]\n" |
- + " ]\n" |
- + " ]\n" |
- + " ]\n" |
- + " },\n" |
- + " \"type\": \"Feature\",\n" |
- + " \"properties\": {\n" |
- + " \"gid\": 1,\n" |
- + " \"name\": \"gid 1\"\n" |
- + " }\n" |
- + " }\n" |
- + " ]\n" |
- + " }"); |
- String search_id = null; |
- timeseries.run("mongodb://eds0.engr.colostate.edu:27017/csip_timeseries", "test_coll", searchFeature, search_id, "2014-04-04", "2014-05-08", false, true, false); |
- } |
+ public static void main(String[] args) throws FileNotFoundException, IOException, JSONException, ParseException, ServiceException { |
+ V1_0 timeseries = new V1_0(); |
+ JSONObject searchFeature = new JSONObject("{\n" |
+ + " \"type\": \"FeatureCollection\",\n" |
+ + " \"features\": [\n" |
+ + " {\n" |
+ + " \"geometry\": {\n" |
+ + " \"type\": \"MultiPolygon\",\n" |
+ + " \"coordinates\": [\n" |
+ + " [\n" |
+ + " [\n" |
+ + " [\n" |
+ + " -105.09928580027,\n" |
+ + " 40.701406829918\n" |
+ + " ],\n" |
+ + " [\n" |
+ + " -105.09447928172,\n" |
+ + " 40.365324016903\n" |
+ + " ],\n" |
+ + " [\n" |
+ + " -104.72849722611,\n" |
+ + " 40.358522384503\n" |
+ + " ],\n" |
+ + " [\n" |
+ + " -104.72437735306,\n" |
+ + " 40.701927386425\n" |
+ + " ],\n" |
+ + " [\n" |
+ + " -105.09928580027,\n" |
+ + " 40.701406829918\n" |
+ + " ]\n" |
+ + " ]\n" |
+ + " ]\n" |
+ + " ]\n" |
+ + " },\n" |
+ + " \"type\": \"Feature\",\n" |
+ + " \"properties\": {\n" |
+ + " \"gid\": 1,\n" |
+ + " \"name\": \"gid 1\"\n" |
+ + " }\n" |
+ + " }\n" |
+ + " ]\n" |
+ + " }"); |
+ String search_id = null; |
+ timeseries.run("mongodb://eds0.engr.colostate.edu:27017/csip_timeseries", "test_coll", searchFeature, search_id, "2014-04-04", "2014-05-08", false, true, false); |
+ } |
} |
@@ -3,7 +3,6 @@ |
* To change this template file, choose Tools | Templates |
* and open the template in the editor. |
*/ |
- |
package utils; |
|
import com.mongodb.client.MongoCollection; |
@@ -29,151 +28,150 @@ |
* |
* @author dave |
*/ |
+public class TimeseriesTable { |
|
+ public static String CHUNK_ALL = "CHUNK_ALL"; |
+ public static String CHUNK_YEAR = "CHUNK_YEAR"; |
+ public static String CHUNK_NONE = "CHUNK_NONE"; |
|
-public class TimeseriesTable { |
- public static String CHUNK_ALL = "CHUNK_ALL"; |
- public static String CHUNK_YEAR = "CHUNK_YEAR"; |
- public static String CHUNK_NONE = "CHUNK_NONE"; |
+    // Header is a list of strings (date, colname1, colname2, ...)
+ public List<String> header = new ArrayList(); |
+ // Data is a pair of [year, rows], where rows is an array of [dt, val1, val2...] |
+ public List<Pair<Integer, List<List<Object>>>> data = new ArrayList<>(); |
+ Comparator timeseriesCompare = (Comparator<List<Object>>) (List<Object> lhs, List<Object> rhs) -> { |
+ Date lhsDate = (Date) lhs.get(0); |
+ Date rhsDate = (Date) rhs.get(0); |
+ return lhsDate.compareTo(rhsDate); |
+ }; |
|
- // Header is a list of strings (date, colname1, colname2... |
- public List<String> header = new ArrayList(); |
- // Data is a pair of [year, rows], where rows is an array of [dt, val1, val2...] |
- public List<Pair<Integer, List<List<Object>>>> data = new ArrayList<>(); |
- Comparator timeseriesCompare = (Comparator<List<Object>>) (List<Object> lhs, List<Object> rhs) -> { |
- Date lhsDate = (Date)lhs.get(0); |
- Date rhsDate = (Date)rhs.get(0); |
- return lhsDate.compareTo(rhsDate); |
- }; |
- |
- public TimeseriesTable() { |
+ public TimeseriesTable() { |
+ } |
+ |
+ public TimeseriesTable(Document timeseriesDoc) throws ServiceException { |
+ List<String> doc_header = (List<String>) timeseriesDoc.get("header"); |
+ if (doc_header != null) { |
+ header.addAll(doc_header); |
+ //data.addAll((List<List<Object>>)timeseriesDoc.get("data")); |
+ throw new ServiceException("This version of the constructor is not implemented"); |
} |
- |
- public TimeseriesTable(Document timeseriesDoc) throws ServiceException { |
- List<String> doc_header = (List<String>)timeseriesDoc.get("header"); |
- if (doc_header != null) { |
- header.addAll(doc_header); |
- //data.addAll((List<List<Object>>)timeseriesDoc.get("data")); |
- throw new ServiceException("This version of the constructor is not implemented"); |
+ } |
+ |
+ public void readCSV(File dataFile, SimpleDateFormat ft, String dataCol) |
+ throws FileNotFoundException, IOException, ParseException, ServiceException { |
+ readCSV(dataFile, ft, dataCol, CHUNK_NONE); |
+ } |
+ |
+ /** |
+ * |
+ * @param dataFile |
+ * @param ft |
+ * @param dataCol If non-null, then only add data from this column |
+ * @param chunkStrat |
+ * @throws FileNotFoundException |
+ * @throws IOException |
+ * @throws ParseException |
+ */ |
+ public void readCSV(File dataFile, SimpleDateFormat ft, String dataCol, String chunkStrat) |
+ throws FileNotFoundException, IOException, ParseException, ServiceException { |
+ CSVReader reader = new CSVReader(new FileReader(dataFile.getAbsolutePath())); |
+ String[] nextLine; |
+ |
+ // Data is stored as array of arrays of [date, val1, val2] |
+ List<List<Object>> csvData = new ArrayList(); |
+ List<String> csvHeader = new ArrayList(); |
+ int idatacol = 0; |
+ while ((nextLine = reader.readNext()) != null) { |
+ if (csvHeader.isEmpty()) { |
+ csvHeader.add(nextLine[0]); |
+ for (int i = 1; i < nextLine.length; i++) { |
+ String headerItem = nextLine[i].trim(); |
+ if (dataCol == null) { |
+ csvHeader.add(headerItem); |
+ } else if (headerItem.equals(dataCol)) { |
+ idatacol = i; |
+ csvHeader.add(headerItem); |
+ } |
} |
- } |
- |
- public void readCSV(File dataFile, SimpleDateFormat ft, String dataCol) |
- throws FileNotFoundException, IOException, ParseException, ServiceException { |
- readCSV(dataFile, ft, dataCol, CHUNK_NONE); |
- } |
- |
- /** |
- * |
- * @param dataFile |
- * @param ft |
- * @param dataCol If non-null, then only add data from this column |
- * @param chunkStrat |
- * @throws FileNotFoundException |
- * @throws IOException |
- * @throws ParseException |
- */ |
- public void readCSV(File dataFile, SimpleDateFormat ft, String dataCol, String chunkStrat) |
- throws FileNotFoundException, IOException, ParseException, ServiceException { |
- CSVReader reader = new CSVReader(new FileReader(dataFile.getAbsolutePath())); |
- String[] nextLine; |
- |
- // Data is stored as array of arrays of [date, val1, val2] |
- List<List<Object>> csvData = new ArrayList(); |
- List<String> csvHeader = new ArrayList(); |
- int idatacol = 0; |
- while ((nextLine = reader.readNext()) != null) { |
- if (csvHeader.isEmpty()) { |
- csvHeader.add(nextLine[0]); |
- for (int i=1; i<nextLine.length; i++) { |
- String headerItem = nextLine[i].trim(); |
- if (dataCol == null) { |
- csvHeader.add(headerItem); |
- } else if (headerItem.equals(dataCol)) { |
- idatacol = i; |
- csvHeader.add(headerItem); |
- } |
- } |
- } |
- else { |
- List<Object> vals = new ArrayList(); |
- Date dt = ft.parse(nextLine[0]); |
- vals.add(dt); |
- for (int i=1; i<nextLine.length; i++) { |
- if (i == idatacol || idatacol == 0) { |
- vals.add(Double.parseDouble(nextLine[i])); |
- } |
- } |
- csvData.add(vals); |
- } |
+ } else { |
+ List<Object> vals = new ArrayList(); |
+ Date dt = ft.parse(nextLine[0]); |
+ vals.add(dt); |
+ for (int i = 1; i < nextLine.length; i++) { |
+ if (i == idatacol || idatacol == 0) { |
+ vals.add(Double.parseDouble(nextLine[i])); |
+ } |
} |
- |
- header = csvHeader; |
- add(csvData, chunkStrat); |
+ csvData.add(vals); |
+ } |
} |
|
- /** |
- * |
- * @param newHeader |
- * @param newData |
- * @param chunkStrat |
- * @throws csip.ServiceException |
- */ |
- public void add(List<List<Object>> newData, String chunkStrat) throws ServiceException { |
- if (chunkStrat.equals(CHUNK_ALL)) { |
- // Single document mode |
- addYearData(0, newData); |
- } else if (chunkStrat.equals(CHUNK_NONE)) { |
- // Every timeseries item is a document. Index is from 0..n-1 |
- for (int i=0; i<newData.size(); i++) { |
- List<List<Object>> rows = new ArrayList<>(); |
- rows.add(newData.get(i)); |
- addYearData(i, rows); |
- } |
- } else if (chunkStrat.equals(CHUNK_YEAR)) { |
- // Group data array into years |
- int year = 0; |
- List<List<Object>> yearRows = null; |
- for (List<Object> row : newData) { |
- Date date = (Date)row.get(0); |
- LocalDate localDate = date.toInstant().atZone(ZoneId.systemDefault()).toLocalDate(); |
- int this_year = localDate.getYear(); |
- if (this_year != year) { |
- // Year changed, add the pair of year and current list of rows. |
- addYearData(year, yearRows); |
- year = this_year; |
- yearRows = new ArrayList<>(); |
- } |
- yearRows.add(row); |
- } |
- // Add last group of years |
- addYearData(year, yearRows); |
- } else { |
- throw new ServiceException("Invalid chunk strategy " + chunkStrat); |
+ header = csvHeader; |
+ add(csvData, chunkStrat); |
+ } |
+ |
+ /** |
+   * Adds rows of timeseries data, grouped according to the chunking strategy.
+   *
+   * @param newData rows to add, each of the form [date, val1, val2, ...]
+   * @param chunkStrat one of CHUNK_ALL, CHUNK_YEAR or CHUNK_NONE
+   * @throws csip.ServiceException if chunkStrat is not a known strategy
+ */ |
+ public void add(List<List<Object>> newData, String chunkStrat) throws ServiceException { |
+ if (chunkStrat.equals(CHUNK_ALL)) { |
+ // Single document mode |
+ addYearData(0, newData); |
+ } else if (chunkStrat.equals(CHUNK_NONE)) { |
+ // Every timeseries item is a document. Index is from 0..n-1 |
+ for (int i = 0; i < newData.size(); i++) { |
+ List<List<Object>> rows = new ArrayList<>(); |
+ rows.add(newData.get(i)); |
+ addYearData(i, rows); |
+ } |
+ } else if (chunkStrat.equals(CHUNK_YEAR)) { |
+ // Group data array into years |
+ int year = 0; |
+ List<List<Object>> yearRows = null; |
+ for (List<Object> row : newData) { |
+ Date date = (Date) row.get(0); |
+ LocalDate localDate = date.toInstant().atZone(ZoneId.of("UTC")).toLocalDate(); |
+ int this_year = localDate.getYear(); |
+ if (this_year != year) { |
+ // Year changed, add the pair of year and current list of rows. |
+ addYearData(year, yearRows); |
+ year = this_year; |
+ yearRows = new ArrayList<>(); |
} |
+ yearRows.add(row); |
+ } |
+ // Add last group of years |
+ addYearData(year, yearRows); |
+ } else { |
+ throw new ServiceException("Invalid chunk strategy " + chunkStrat); |
} |
- |
- public void addYearData(Integer year, List<List<Object>> rows) { |
- if (rows != null && rows.size() > 0) { |
- MutablePair<Integer, List<List<Object>>> pair = new MutablePair<>(year, rows); |
- data.add(pair); |
- } |
+ } |
+ |
+ public void addYearData(Integer year, List<List<Object>> rows) { |
+ if (rows != null && rows.size() > 0) { |
+ MutablePair<Integer, List<List<Object>>> pair = new MutablePair<>(year, rows); |
+ data.add(pair); |
} |
- |
- /** |
- * Insert the data into mongodb. |
- * @param locationDoc The organizing document with location info |
- */ |
- public void insertData(MongoCollection collection, Document locationDoc) { |
- // Create a document with each chunk |
- for (Pair<Integer, List<List<Object>>> chunk : data) { |
- int year = chunk.getLeft(); |
- List<List<Object>> rows = chunk.getRight(); |
- Document chunkDoc = new Document() |
- .append("location_id", locationDoc.get("_id")) |
- .append("year", year) |
- .append("data", rows); |
- collection.insertOne(chunkDoc); |
- } |
+ } |
+ |
+ /** |
+ * Insert the data into mongodb. |
+ * |
+ * @param locationDoc The organizing document with location info |
+ */ |
+ public void insertData(MongoCollection collection, Document locationDoc) { |
+ // Create a document with each chunk |
+ for (Pair<Integer, List<List<Object>>> chunk : data) { |
+ int year = chunk.getLeft(); |
+ List<List<Object>> rows = chunk.getRight(); |
+ Document chunkDoc = new Document() |
+ .append("location_id", locationDoc.get("_id")) |
+ .append("year", year) |
+ .append("data", rows); |
+ collection.insertOne(chunkDoc); |
} |
+ } |
} |