// V1_0.java [src/java/m/lamps] Revision: default Date:
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package m.lamps;
import com.vividsolutions.jts.geom.Geometry;
import csip.Config;
import csip.ModelDataService;
import csip.annotations.Description;
import csip.annotations.Name;
import csip.annotations.Options;
import csip.annotations.Resource;
import csip.api.client.ModelDataServiceCall;
import csip.api.server.ServiceException;
import csip.utils.Parallel;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.net.URL;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.logging.Level;
import javax.ws.rs.Path;
import lamps.CSIP_Const;
import lamps.io.FormattedOutput;
import lamps.io.GeometryInput;
import lamps.utils.Partition;
import methods.CSIP_NASS_GeoTIFF;
import methods.CSIP_NASS_LMOD;
import methods.CSIP_NASS_LMOD_Matching;
import methods.CSIP_Read_AND_SPLIT;
import methods.objects.ERU;
import methods.objects.Nass_results;
import org.apache.commons.io.FileUtils;
import static csip.annotations.ResourceType.OUTPUT;
/**
* LAMPS service.
*
* C:\>curl -X POST -H "Accept:application/json"
* "http://localhost:8080/csip-lamps/m/lamps/1.0" -F param=@c:/scott-req.json -F
* file1=@c:/scott.kmz
*
* @author od
*/
@Name("LAMPS")
@Description("LAMPS")
@Path("m/lamps/1.0")
@Resource(file = "*.tif *.csv *.kml *.kmz", type = OUTPUT)
@Options(timeout = "PT24H")
public class V1_0 extends ModelDataService {
static final String KEY_GEOM = "geometry";
static final String KEY_DELTA = "delta";
static final String KEY_STARTYEAR = "start_year";
static final String KEY_ENDYEAR = "end_year";
static final String KEY_CMZs = "all_CMZs";
static final String KEY_AGESFILES = "ages_files";
static final String KEY_GEOTIFFFILES = "geotiffs";
static final String KEY_CUSTOM = "custom";
static final String KEY_MULTIRUN = "multi_run";
static final String KEY_CDLSTARTYEAR = "cdl_start";
static final String KEY_CDLENDYEAR = "cdl_end";
static final int SERVICE_PEERS = Config.getInt("csip.service.peers");
static final File DATA_DIR = new File(Config.getString("csip.data.dir"));
double difference;
double area_stats[];
String total_time;
String stats[];
List<Double> polyCount = new ArrayList<>();
List<Double> areaChunk = new ArrayList<>();
List<String> CDL_years = new ArrayList<>();
int start_year = 2008;
int end_year = 2023;
int cdl_start_year = 2008;
int cdl_end_year = CSIP_Const.currentNASSyear;
int multi_run = -1;
int chunks;
boolean custom;
boolean allcmzs;
double delta;
@Override
protected void doProcess() throws Exception {
if (parameter().has(KEY_STARTYEAR) && parameter().has(KEY_ENDYEAR)) {
start_year = parameter().getInt(KEY_STARTYEAR);
end_year = parameter().getInt(KEY_ENDYEAR);
if (end_year <= start_year) {
throw new ServiceException("Invalid start_year/end_year");
}
if (start_year < 1900) {
throw new ServiceException("Invalid start_year.");
}
if (end_year > 2101) {
throw new ServiceException("Invalid end_year.");
}
}
delta = parameter().getDouble(KEY_DELTA, CSIP_Const.Accuracy_Delta);
custom = parameter().getBoolean(KEY_CUSTOM, false);
multi_run = parameter().getInt(KEY_MULTIRUN, -1);
allcmzs = parameter().getBoolean(KEY_CMZs, false);
boolean ages_files = parameter().getBoolean(KEY_AGESFILES, false);
boolean geotiffs = parameter().getBoolean(KEY_GEOTIFFFILES, false);
if (parameter().has(KEY_CDLSTARTYEAR)) {
CDL_years.add(parameter().getString(KEY_CDLSTARTYEAR));
}
if (parameter().has(KEY_CDLENDYEAR)) {
cdl_end_year = Integer.parseInt(parameter().getString(KEY_CDLENDYEAR));
}
String file = parameter().getString(KEY_GEOM);
run(file, workspace().getDir(), DATA_DIR, workspace().getDir(), ages_files, geotiffs, multi_run, allcmzs);
}
/**
 * Run LAMPS for the given geometry file, either directly (multi_run == -1)
 * or by splitting the AOI into chunks and dispatching parallel service calls.
 *
 * @param geometry name of the geometry file inside the workspace
 * @param outputDir directory receiving the generated output files
 * @param dataDir base directory holding the LAMPS/CDL data layers
 * @param workspace the service workspace directory
 * @param ages_files if true, also write AgES-W input files
 * @param geotiffs if true, keep the clipped GeoTIFF outputs
 * @param multi_run number of split chunks, or -1 for a single direct run
 * @param cmzs if true, process all CMZs
 * @throws Exception on any processing failure
 */
private void run(String geometry, File outputDir, File dataDir, File workspace,
boolean ages_files, boolean geotiffs, int multi_run, boolean cmzs) throws Exception {
difference = 0;
long start_time = System.currentTimeMillis();
if (multi_run == -1) {
File CDLdataDirlocation = new File(dataDir, Config.getString("lamps.cdl_layer_dir", CSIP_Const.CDL_LAYER_DIR)); // change for linux!!!!!!!!!!!!!
List<Geometry> Input_Geometries_pre = new ArrayList<>();
List<Geometry> envelope = new ArrayList<>();
List<ERU> hrus_pre = new ArrayList<>();
List<String> unique_states = new ArrayList<>();
List<String> unique_cmzs = new ArrayList<>();
List<ERU> hrus_all = new ArrayList<>();
HashMap<Integer, ArrayList<Nass_results>> nass_data_result_all = new HashMap<>();
//PrintWriter w_log = new PrintWriter(new OutputStreamWriter(System.out));
LOG.info("===> Processing entities.");
URL geomUrl = new File(workspace, geometry).toURI().toURL();
LOG.info("===> Reading AOI");
hrus_pre = GeometryInput.read(dataDir, geomUrl, Input_Geometries_pre, workspace, unique_states, unique_cmzs, envelope, cmzs, LOG);
ArrayList<ArrayList<String>> LMOD_CMZ_Data = new ArrayList<>();
HashMap<String, List<ArrayList<String>>> DB_LMOD_map = new HashMap<>();
CSIP_NASS_LMOD_Matching.query_LAMPS_DB_Rotations_For_CMZ(dataDir, unique_cmzs, LMOD_CMZ_Data, DB_LMOD_map, LOG);
//start here for the sequential run based on 10000 HRUs
List<List<ERU>> tenk_lists = new ArrayList<>();
List<List<Geometry>> tenk_geo_lists = new ArrayList<>();
int hrus_pre_count = hrus_pre.size();
boolean standalone = false;
if (hrus_pre_count > 2000 && standalone) {
int end = hrus_pre.size() - 1;
int ten_times = (end + 1) / 1000;
int rest = (end + 1) - ten_times * 1000;
if (rest > 0) {
ten_times++;
}
LOG.info("===> times : " + ten_times);
tenk_lists = Partition.chopIntoParts(hrus_pre, ten_times);
tenk_geo_lists = Partition.chopIntoParts(Input_Geometries_pre, ten_times);
} else {
tenk_lists.add(new ArrayList<>(hrus_pre));
tenk_geo_lists.add(new ArrayList<>(Input_Geometries_pre));
}
hrus_pre.clear();
Input_Geometries_pre.clear();
LOG.info("===> Check available NASS years.");
if (CDL_years.isEmpty()) {
CDL_years = CSIP_NASS_GeoTIFF.NASS_CDL_YEARS_CHECK(dataDir, unique_states);
} else {
if (Integer.parseInt(CDL_years.get(0)) >= 1997
&& Integer.parseInt(CDL_years.get(0)) <= CSIP_Const.currentNASSyear) {
cdl_start_year = Integer.parseInt(CDL_years.get(0));
CDL_years.clear();
//while (counter <= CSIP_Const.currentNASSyear) {
int counter_year = cdl_start_year;
while (counter_year <= cdl_end_year) {
CDL_years.add("" + counter_year++);
}
} else {
for (int i = 0; i < CSIP_Const.ACC_years.length; i++) {
CDL_years.add(CSIP_Const.ACC_years[i]);
}
}
}
Collections.sort(CDL_years);
if (LOG.isLoggable(Level.INFO)) {
LOG.info(" Available NASS CDL year(s) for State with longest time period : ");
for (String str : CDL_years) {
LOG.info(str + ", ");
}
LOG.info(" ");
LOG.info("===> Irrigation clipping.");
}
HashMap<Integer, ArrayList<HashMap<Integer, Double>>> irrigation_map = new HashMap<>();
int helper = 0;
for (List<Geometry> tenk_geo_list : tenk_geo_lists) {
List<ERU> hrus = tenk_lists.get(helper);
HashMap<Integer, ArrayList<HashMap<Integer, Double>>> irrigation_map_in = CSIP_NASS_GeoTIFF.ConcurrentIrrigation_Overlay(dataDir, tenk_geo_list, hrus.get(0).ID, CDL_years.get(0), LOG);
irrigation_map.putAll(irrigation_map_in);
irrigation_map_in.clear();
helper++;
}
LOG.info("===> Irrigation clipping done.");
LOG.info(" ");
LOG.info("===> number of list parts : " + tenk_lists.size());
int number_of_chopParts = 0;
for (int numberLists = 0; numberLists < tenk_lists.size(); numberLists++) {
number_of_chopParts += tenk_lists.get(numberLists).size();
LOG.info(" ** ");
LOG.info("===> total number of geometries in loop : " + number_of_chopParts);
LOG.info(" ** ");
List<ERU> hrus = tenk_lists.get(numberLists);
List<Geometry> input_Geometries = tenk_geo_lists.get(numberLists);
LOG.info("===> NASS clipping.");
HashMap<Integer, ArrayList<Nass_results>> nass_data_result
= CSIP_NASS_GeoTIFF.NASSGeoTIFF_clip_analysis(dataDir, CDLdataDirlocation, outputDir, CDL_years, input_Geometries, hrus, unique_states, geotiffs, LOG);
for (int i = 0; i < hrus.size(); i++) {
int HRUID = hrus.get(i).ID;
hrus.get(i).nass_list_data = nass_data_result.get(i);
double irri_area = 0;
if (irrigation_map.get(HRUID) != null) {
for (int j = 0; j < irrigation_map.get(HRUID).size(); j++) {
for (Integer keys : irrigation_map.get(HRUID).get(j).keySet()) {
if (2017 == keys && irrigation_map.get(HRUID).get(j).get(keys) > 40) {
irri_area = irrigation_map.get(HRUID).get(j).get(keys);
hrus.get(i).irrigated = true;
hrus.get(i).irrigated_area = irri_area;
//LOG.info(hrus.get(i).ID + " 2017 irrigated ");
break;
}
}
}
}
}
LOG.info("===> Done NASS clipping main file.");
nass_data_result_all.putAll(nass_data_result);
nass_data_result.clear();
LOG.info("===> Going to the Confidence.");
CSIP_NASS_LMOD.NASS_data_CropSeq(dataDir, hrus, delta, LOG);
LOG.info("===> Done Confidence.");
LOG.info("===> Going to the Matching.");
HashMap<Integer, ArrayList<String[]>> map
= CSIP_NASS_LMOD_Matching.ConcurrentMatching(dataDir, hrus, CDL_years, LMOD_CMZ_Data, DB_LMOD_map, start_year, end_year, LOG);
LOG.info("===> Done with the Matching.");
map.clear();
hrus_all.addAll(hrus);
hrus.clear();
}
LMOD_CMZ_Data.clear();
LOG.info("===> Output generation.");
CSIP_NASS_LMOD.NASS_data_check(outputDir, hrus_all, nass_data_result_all, LOG);
nass_data_result_all.clear();
FormattedOutput.writeNASS(outputDir, hrus_all, LOG);
CSIP_NASS_LMOD_Matching.AOI_NASS_LMOD_generic_match_data_writer(outputDir, hrus_all, irrigation_map, delta, LOG);
irrigation_map.clear();
ArrayList<String> Unique_LMOD_OPKEYs = new ArrayList<>();
ArrayList<String> Unique_LMOD_ManKEYs = new ArrayList<>();
HashMap<String, HashMap<Integer, ArrayList<String[]>>> managements = new HashMap<>();
HashMap<String, ArrayList<String[]>> DB_map_managements = new HashMap<>();
HashMap<String, String> OP_map_costum = new HashMap<>();
HashMap<String, String> LMOD_AGES_CID_map = new HashMap<>();
HashMap<String, String> LMOD_AGES_MID_map = new HashMap<>();
HashMap<String, String> only_veg_map = new HashMap<>();
HashMap<String, String> only_landuse_map = new HashMap<>();
HashMap<String, Integer> OP_Till_Man = new HashMap<>();
HashMap<String, String[]> OP_map_tillage = new HashMap<>();
HashMap<String, String[]> DB_map_managements_acr = new HashMap<>();
CSIP_NASS_LMOD_Matching.AOI_NASS_DETECTED_LMODDATA_PREPARATION(dataDir, hrus_all,
DB_map_managements, Unique_LMOD_OPKEYs, Unique_LMOD_ManKEYs, managements,
OP_map_costum, DB_map_managements_acr, OP_map_tillage, LMOD_AGES_CID_map,
only_veg_map, only_landuse_map, LOG);
if (custom) {
CSIP_NASS_LMOD_Matching.AOI_NASS_LMODDATA_costum_generic_writer(outputDir,
hrus_all, DB_map_managements, managements, OP_map_costum,
DB_map_managements_acr, LMOD_AGES_MID_map, only_veg_map,
start_year, end_year, LOG);
}
managements.clear();
OP_map_costum.clear();
if (ages_files) {
LOG.info("===> Output generation. AgES-W input files");
FormattedOutput.writeAGESTillageFile(outputDir, OP_map_tillage, OP_Till_Man);
FormattedOutput.writeAGESManagementFile(outputDir, hrus_all, OP_Till_Man,
DB_map_managements, DB_map_managements_acr, LMOD_AGES_CID_map,
LMOD_AGES_MID_map, only_veg_map, only_landuse_map);
}
} else {
LOG.info("===> Processing entities for splitting.");
URL geomUrl = new File(workspace, geometry).toURI().toURL();
LOG.info("===> Reading AOI");
chunks = CSIP_Read_AND_SPLIT.URL_Read_Geometry_File_new(geomUrl, workspace, multi_run, polyCount, areaChunk, LOG);
LOG.info("slice file into " + chunks + " chunks");
List<Long> cpu_time = Collections.synchronizedList(new ArrayList<>(chunks));
long timestart = System.currentTimeMillis();
File orgFileName = new File(geomUrl.toURI());
String fileName_pre = orgFileName.getName();
fileName_pre = fileName_pre.substring(0, fileName_pre.lastIndexOf(".") + 1);
fileName_pre = fileName_pre.replace(".", "");
final String fileName = fileName_pre;
Parallel.run(SERVICE_PEERS, chunks, (Integer i) -> () -> {
long timestart_i = System.currentTimeMillis();
ModelDataServiceCall mds = new ModelDataServiceCall()
.put(KEY_GEOM, "SplitKML_" + i + "_" + fileName + ".kmz")
.put(KEY_DELTA, "" + delta)
.put(KEY_CDLSTARTYEAR, "" + cdl_start_year)
.put(KEY_CDLENDYEAR, "" + cdl_end_year)
.put(KEY_MULTIRUN, "-1")
.url(request().getURL())
.withTimeout(14400)
.asAsync() // call async
.attach(new File(outputDir, "SplitKML_" + i + "_" + fileName + ".kmz"))
.call();
if (mds.serviceFinished()) {
mds.download(CSIP_Const.AOI_NASS_dominant_result, workspace().getFile(CSIP_Const.AOI_NASS_dominant_result + "_" + i + ".csv"));
mds.download(CSIP_Const.AOI_NASS_LMOD_generic_result, workspace().getFile(CSIP_Const.AOI_NASS_LMOD_generic_result + "_" + i + ".csv"));
mds.download(CSIP_Const.AOI_NASS_Plain_result, workspace().getFile(CSIP_Const.AOI_NASS_Plain_result + "_" + i + ".csv"));
mds.download(CSIP_Const.AOI_LAMPS_CONF_result, workspace().getFile(CSIP_Const.AOI_LAMPS_CONF_result + "_" + i + ".csv"));
if (custom) {
mds.download(CSIP_Const.AOI_NASS_LMOD_result_custom, workspace().getFile(CSIP_Const.AOI_NASS_LMOD_result_custom + "_" + i + ".csv"));
}
} else {
throw new ServiceException("service error: " + mds.getError());
}
long timeend_i = System.currentTimeMillis();
cpu_time.add(timeend_i - timestart_i);
LOG.info("DONE !! run #" + i + " of " + chunks);
});
long timeend = System.currentTimeMillis();
total_time = Duration.ofMillis(timeend - timestart).toString();
stats = statsTime(cpu_time);
area_stats = stats(areaChunk);
}
LOG.info("Finished.");
long end_time = System.currentTimeMillis();
difference = (end_time - start_time) / 1000;
LOG.info("===> Total LAMPS application time : " + difference + " s");
}
@Override
protected void postProcess() throws IOException {
results().put("application time", " " + difference + " s");
if (multi_run > -1) {
// assemble the files back again only if we have to reduce.
combineFiles(workspace().getFile(CSIP_Const.AOI_NASS_dominant_result), chunks);
if (custom) {
combineFiles(workspace().getFile(CSIP_Const.AOI_NASS_LMOD_result_custom), chunks);
}
combineFiles(workspace().getFile(CSIP_Const.AOI_NASS_LMOD_generic_result), chunks);
combineFiles(workspace().getFile(CSIP_Const.AOI_NASS_Plain_result), chunks);
combineFiles(workspace().getFile(CSIP_Const.AOI_LAMPS_CONF_result), chunks);
results().put("multi_run", multi_run);
results().put("chunks", chunks);
results().put("real_time", total_time);
results().put("min_time", stats[0]);
results().put("max_time", stats[1]);
results().put("avg_time", stats[2]);
results().put("total_time", stats[3]);
results().put("min_area", area_stats[0]);
results().put("max_area", area_stats[1]);
results().put("total_area", area_stats[3]);
for (int i = 0; i < polyCount.size(); i++) {
results().put("#ofPoly" + i, polyCount.get(i));
results().put("avgChunkArea" + i, (areaChunk.get(i) / polyCount.get(i)));
results().put("ChunkArea" + i, areaChunk.get(i));
}
}
results().put(workspace().getFile(CSIP_Const.AOI_NASS_dominant_result), "output file1");
if (custom) {
results().put(workspace().getFile(CSIP_Const.AOI_NASS_LMOD_result_custom), "output file2");
}
results().put(workspace().getFile(CSIP_Const.AOI_NASS_LMOD_generic_result), "output file3");
results().put(workspace().getFile(CSIP_Const.AOI_NASS_Plain_result), "output file4");
results().put(workspace().getFile(CSIP_Const.AOI_LAMPS_CONF_result), "output file5");
//delFiles(workspace().getFile(CSIP_Const.AOI_NASS_dominant_result), chunks);
// if (custom) {
// delFiles(workspace().getFile(CSIP_Const.AOI_NASS_LMOD_result_custom), chunks);
// }
// delFiles(workspace().getFile(CSIP_Const.AOI_NASS_LMOD_generic_result), chunks);
// delFiles(workspace().getFile(CSIP_Const.AOI_NASS_Plain_result), chunks);
// delFiles(workspace().getFile(CSIP_Const.AOI_LAMPS_CONF_result), chunks);
}
private String[] statsTime(List<Long> list) {
if (list.isEmpty()) {
return new String[]{
Duration.ofMillis(0).toString(),
Duration.ofMillis(0).toString(),
Duration.ofMillis(0).toString(),
Duration.ofMillis(0).toString()
};
}
long max = Long.MIN_VALUE;
long min = Long.MAX_VALUE;
long sum = 0;
for (long l : list) {
sum += l;
if (l > max) {
max = l;
}
if (l < min) {
min = l;
}
}
return new String[]{
Duration.ofMillis(min).toString(),
Duration.ofMillis(max).toString(),
Duration.ofMillis(sum / list.size()).toString(),
Duration.ofMillis(sum).toString()
};
}
private double[] stats(List<Double> list) {
if (list.isEmpty()) {
return new double[]{0, 0, 0, 0};
}
double max = Double.MIN_VALUE;
double min = Double.MAX_VALUE;
double sum = 0;
for (double l : list) {
sum += l;
if (l > max) {
max = l;
}
if (l < min) {
min = l;
}
}
return new double[]{min, max, (sum / list.size()), sum};
}
private static void combineFiles(File out, int chunks) throws IOException {
for (int i = 0; i < chunks; i++) {
File of = new File(out.getParentFile(), out.getName() + "_" + i + ".csv");
FileUtils.copyFile(of, FileUtils.openOutputStream(out, true));
}
}
// private void delFiles(File out, int chunks) throws IOException {
// for (int i = 0; i < chunks; i++) {
// //File of1 = new File(workspace().getDir(), CSIP_Const.AOI_NASS_dominant_result + "-" + i);//
// File of = new File(workspace().getDir(), out.getName() + "_" + i + ".csv");
// LOG.info(" Deleting ." + i + " " + of.getAbsolutePath());
// of.delete();
// if (of.exists()) {
// LOG.info(" OH NO : " + of.getName());
// }
// }
// }
}