Displaying differences for changeset

src/java/lamps/utils/Partition.java

@@ -16,13 +16,14 @@
 public class Partition {
 
     public static int getThreadCount(int size) {
-        if (Config.hasProperty("lamps.threads")) {
-            return Config.getInt("lamps.threads");
+        if (Config.hasProperty("csip.lamps.threads")) {
+            return Config.getInt("csip.lamps.threads");
         }
         int threadnr = 1;
-        if (size > 30) {
-            threadnr = Runtime.getRuntime().availableProcessors() + 1;
-        } else if (size < 6) {
+        //if (size > 30) {
+        //    threadnr = Runtime.getRuntime().availableProcessors() + 1;
+        //} else 
+        if (size < 6) {
             threadnr = 1;
         } else {
             threadnr = 2;
@@ -30,7 +31,6 @@
         return threadnr;
     }
 
-
     public static <T> List<List<T>> chopIntoParts(final List<T> ls, final int iParts) {
         final List<List<T>> lsParts = new ArrayList<>();
         final int iChunkSize = ls.size() / iParts;

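Note on the Partition.java change: the thread-count override now reads the namespaced property csip.lamps.threads, and the branch that scaled to availableProcessors() + 1 for sizes above 30 is commented out rather than deleted. Without the property set, getThreadCount() therefore returns at most two threads. A minimal sketch of the resulting behavior (condensed for illustration, not part of the changeset; Config is the CSIP configuration API used above):

    // Sketch: post-change behavior of getThreadCount(), condensed.
    static int getThreadCount(int size) {
        if (Config.hasProperty("csip.lamps.threads")) {
            return Config.getInt("csip.lamps.threads"); // explicit override wins
        }
        // processor-scaling branch is disabled, so the fallback caps at 2
        return size < 6 ? 1 : 2;
    }
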
src/java/m/lamps/V1_0.java

@@ -53,367 +53,364 @@
 @Options(timeout = "PT12H")
 public class V1_0 extends ModelDataService {
 
-  static final String KEY_GEOM = "geometry";
-  static final String KEY_DELTA = "delta";
-  static final String KEY_STARTYEAR = "start_year";
-  static final String KEY_ENDYEAR = "end_year";
-  static final String KEY_CMZs = "all_CMZs";
-  static final String KEY_AGESFILES = "ages_files";
-  static final String KEY_GEOTIFFFILES = "geotiffs";
-  static final String KEY_CUSTOM = "custom";
-  static final String KEY_MULTIRUN = "multi_run";
-  static final String KEY_CDLSTARTYEAR = "cdl_start";
+    static final String KEY_GEOM = "geometry";
+    static final String KEY_DELTA = "delta";
+    static final String KEY_STARTYEAR = "start_year";
+    static final String KEY_ENDYEAR = "end_year";
+    static final String KEY_CMZs = "all_CMZs";
+    static final String KEY_AGESFILES = "ages_files";
+    static final String KEY_GEOTIFFFILES = "geotiffs";
+    static final String KEY_CUSTOM = "custom";
+    static final String KEY_MULTIRUN = "multi_run";
+    static final String KEY_CDLSTARTYEAR = "cdl_start";
 
-  File dataDir = new File(Config.getString("csip.data.dir"));
-  static final Integer parallelThreadsForChunkFeature = Integer.parseInt(Config.getString("csip.service.peers"));
+    File dataDir = new File(Config.getString("csip.data.dir"));
+    static final Integer parallelThreadsForChunkFeature = Integer.parseInt(Config.getString("csip.service.peers"));
 
-  double difference;
-  double area_stats[];
+    double difference;
+    double area_stats[];
 
-  String total_time;
-  String stats[];
+    String total_time;
+    String stats[];
 
-  List<Double> polyCount = new ArrayList<>();
-  List<Double> areaChunk = new ArrayList<>();
-  List<String> CDL_years = new ArrayList<>();
+    List<Double> polyCount = new ArrayList<>();
+    List<Double> areaChunk = new ArrayList<>();
+    List<String> CDL_years = new ArrayList<>();
 
-  int start_year = 2008;
-  int end_year = 2020;
+    int start_year = 2008;
+    int end_year = 2020;
 
-  int multi_run = -1;
-  int chunks;
-  boolean custom = false;
-  boolean allcmzs = false;
+    int multi_run = -1;
+    int chunks;
+    boolean custom = false;
+    boolean allcmzs = false;
 
-  double delta = CSIP_Const.Accuracy_Delta;
+    double delta = CSIP_Const.Accuracy_Delta;
 
+    @Override
+    protected void doProcess() throws Exception {
+        String file = parameter().getString(KEY_GEOM);
 
-  @Override
-  protected void doProcess() throws Exception {
-    String file = parameter().getString(KEY_GEOM);
+        boolean ages_files = false;
+        boolean geotiffs = false;
 
-    boolean ages_files = false;
-    boolean geotiffs = false;
+        if (parameter().has(KEY_DELTA)) {
+            delta = parameter().getDouble(KEY_DELTA);
+        }
+        if (parameter().has(KEY_STARTYEAR) && parameter().has(KEY_ENDYEAR)) {
+            start_year = parameter().getInt(KEY_STARTYEAR);
+            end_year = parameter().getInt(KEY_ENDYEAR);
+            if (end_year <= start_year) {
+                throw new ServiceException("Invalid start_year/end_year");
+            }
+            if (start_year < 1900) {
+                throw new ServiceException("Invalid start_year.");
+            }
+            if (end_year > 2101) {
+                throw new ServiceException("Invalid end_year.");
+            }
+        }
+        if (parameter().has(KEY_AGESFILES)) {
+            ages_files = parameter().getBoolean(KEY_AGESFILES);
+        }
+        if (parameter().has(KEY_GEOTIFFFILES)) {
+            geotiffs = parameter().getBoolean(KEY_GEOTIFFFILES);
+        }
+        if (parameter().has(KEY_CUSTOM)) {
+            custom = parameter().getBoolean(KEY_CUSTOM);
+        }
+        if (parameter().has(KEY_MULTIRUN)) {
+            multi_run = parameter().getInt(KEY_MULTIRUN);
+        }
+        if (parameter().has(KEY_CMZs)) {
+            allcmzs = parameter().getBoolean(KEY_CMZs);
+        }
+        if (parameter().has(KEY_CDLSTARTYEAR)) {
+            CDL_years.add(parameter().getString(KEY_CDLSTARTYEAR));
+        }
 
-    if (parameter().has(KEY_DELTA)) {
-      delta = parameter().getDouble(KEY_DELTA);
-    }
-    if (parameter().has(KEY_STARTYEAR) && parameter().has(KEY_ENDYEAR)) {
-      start_year = parameter().getInt(KEY_STARTYEAR);
-      end_year = parameter().getInt(KEY_ENDYEAR);
-      if (end_year <= start_year) {
-        throw new ServiceException("Invalid start_year/end_year");
-      }
-      if (start_year < 1900) {
-        throw new ServiceException("Invalid start_year.");
-      }
-      if (end_year > 2101) {
-        throw new ServiceException("Invalid end_year.");
-      }
-    }
-    if (parameter().has(KEY_AGESFILES)) {
-      ages_files = parameter().getBoolean(KEY_AGESFILES);
-    }
-    if (parameter().has(KEY_GEOTIFFFILES)) {
-      geotiffs = parameter().getBoolean(KEY_GEOTIFFFILES);
-    }
-    if (parameter().has(KEY_CUSTOM)) {
-      custom = parameter().getBoolean(KEY_CUSTOM);
-    }
-    if (parameter().has(KEY_MULTIRUN)) {
-      multi_run = parameter().getInt(KEY_MULTIRUN);
-    }
-    if (parameter().has(KEY_CMZs)) {
-      allcmzs = parameter().getBoolean(KEY_CMZs);
-    }
-    if (parameter().has(KEY_CDLSTARTYEAR)) {
-      CDL_years.add(parameter().getString(KEY_CDLSTARTYEAR));
+        run(file, workspace().getDir(), dataDir, workspace().getDir(), ages_files, geotiffs, multi_run, allcmzs);
     }
 
-    run(file, workspace().getDir(), dataDir, workspace().getDir(), ages_files, geotiffs, multi_run, allcmzs);
-  }
+    /**
+     * Run LAMPS.
+     *
+     * @param geometry the AOI geometry file name in the workspace
+     * @param outputDir the output directory
+     * @param dataDir the data directory
+     * @param workspace the workspace directory
+     * @param ages_files whether to write AgES-W input files
+     * @param geotiffs whether to write GeoTIFF output
+     * @param multi_run number of chunks for a split run, -1 for a single run
+     * @param cmzs whether to process all CMZs
+     * @throws Exception
+     */
+    private void run(String geometry, File outputDir, File dataDir, File workspace,
+            boolean ages_files, boolean geotiffs, Integer multi_run, boolean cmzs) throws Exception {
 
+        difference = 0;
+        long start_time = System.currentTimeMillis();
 
-  /**
-   * run lamps.
-   *
-   * @param geometry
-   * @param outputDir
-   * @param dataDir
-   * @param workspace
-   * @param delta
-   * @param start_year
-   * @param end_year
-   * @param ages_switch
-   * @throws Exception
-   */
-  private void run(String geometry, File outputDir, File dataDir, File workspace,
-      boolean ages_files, boolean geotiffs, Integer multi_run, boolean cmzs) throws Exception {
+        if (multi_run == -1) {
 
-    difference = 0;
-    long start_time = System.currentTimeMillis();
+            File CDLdataDirlocation = new File(dataDir, Config.getString("lamps.cdl_layer_dir", CSIP_Const.CDL_LAYER_DIR)); // TODO: adjust this path for Linux
 
-    if (multi_run == -1) {
+            List<Geometry> Input_Geometries_pre = new ArrayList<>();
+            List<Geometry> envelope = new ArrayList<>();
+            List<ERU> hrus_pre = new ArrayList<>();
+            List<String> unique_states = new ArrayList<>();
+            List<String> unique_cmzs = new ArrayList<>();
+            List<ERU> hrus_all = new ArrayList<>();
+            HashMap<Integer, ArrayList<Nass_results>> nass_data_result_all = new HashMap<>();
 
-      File CDLdataDirlocation = new File(dataDir, Config.getString("lamps.cdl_layer_dir", CSIP_Const.CDL_LAYER_DIR)); // change for linux!!!!!!!!!!!!!
+            //PrintWriter w_log = new PrintWriter(new OutputStreamWriter(System.out));
+            LOG.info("===>  Processing entities.");
 
-      List<Geometry> Input_Geometries_pre = new ArrayList<>();
-      List<Geometry> envelope = new ArrayList<>();
-      List<ERU> hrus_pre = new ArrayList<>();
-      List<String> unique_states = new ArrayList<>();
-      List<String> unique_cmzs = new ArrayList<>();
-      List<ERU> hrus_all = new ArrayList<>();
-      HashMap<Integer, ArrayList<Nass_results>> nass_data_result_all = new HashMap<>();
+            URL geomUrl = new File(workspace, geometry).toURI().toURL();
 
-      //PrintWriter w_log = new PrintWriter(new OutputStreamWriter(System.out));
-      LOG.info("===>  Processing entities.");
+            LOG.info("===>  Reading AOI");
 
-      URL geomUrl = new File(workspace, geometry).toURI().toURL();
+            hrus_pre = GeometryInput.read(dataDir, geomUrl, Input_Geometries_pre, workspace, unique_states, unique_cmzs, envelope, cmzs, LOG);
 
-      LOG.info("===>  Reading AOI");
+            ArrayList<ArrayList<String>> LMOD_CMZ_Data = new ArrayList<>();
+            HashMap<String, List<ArrayList<String>>> DB_LMOD_map = new HashMap<>();
 
-      hrus_pre = GeometryInput.read(dataDir, geomUrl, Input_Geometries_pre, workspace, unique_states, unique_cmzs, envelope, cmzs, LOG);
+            CSIP_NASS_LMOD_Matching.query_LAMPS_DB_Rotations_For_CMZ(dataDir, unique_cmzs, LMOD_CMZ_Data, DB_LMOD_map, LOG);
 
-      ArrayList<ArrayList<String>> LMOD_CMZ_Data = new ArrayList<>();
-      HashMap<String, List<ArrayList<String>>> DB_LMOD_map = new HashMap<>();
+            //start here for the sequential run based on 10000 HRUs
+            List<List<ERU>> tenk_lists = new ArrayList<>();
+            List<List<Geometry>> tenk_geo_lists = new ArrayList<>();
+            int hrus_pre_count = hrus_pre.size();
 
-      CSIP_NASS_LMOD_Matching.query_LAMPS_DB_Rotations_For_CMZ(dataDir, unique_cmzs, LMOD_CMZ_Data, DB_LMOD_map, LOG);
+            boolean standalone = false;
+            if (hrus_pre_count > 2000 && standalone) {
+                int end = hrus_pre.size() - 1;
+                int ten_times = (end + 1) / 1000;
+                int rest = (end + 1) - ten_times * 1000;
+                if (rest > 0) {
+                    ten_times++;
+                }
+                LOG.info("===> times : " + ten_times);
 
-      //start here for the sequential run based on 10000 HRUs
-      List<List<ERU>> tenk_lists = new ArrayList<>();
-      List<List<Geometry>> tenk_geo_lists = new ArrayList<>();
-      int hrus_pre_count = hrus_pre.size();
+                tenk_lists = Partition.chopIntoParts(hrus_pre, ten_times);
+                tenk_geo_lists = Partition.chopIntoParts(Input_Geometries_pre, ten_times);
+            } else {
+                tenk_lists.add(new ArrayList<>(hrus_pre));
+                tenk_geo_lists.add(new ArrayList<>(Input_Geometries_pre));
+            }
+            hrus_pre.clear();
+            Input_Geometries_pre.clear();
 
-      boolean standalone = false;
-      if (hrus_pre_count > 2000 && standalone) {
-        int end = hrus_pre.size() - 1;
-        int ten_times = (end + 1) / 1000;
-        int rest = (end + 1) - ten_times * 1000;
-        if (rest > 0) {
-          ten_times++;
-        }
-        LOG.info("===> times : " + ten_times);
+            LOG.info("===>  Check available NASS years.");
 
-        tenk_lists = Partition.chopIntoParts(hrus_pre, ten_times);
-        tenk_geo_lists = Partition.chopIntoParts(Input_Geometries_pre, ten_times);
-      } else {
-        tenk_lists.add(new ArrayList<>(hrus_pre));
-        tenk_geo_lists.add(new ArrayList<>(Input_Geometries_pre));
-      }
-      hrus_pre.clear();
-      Input_Geometries_pre.clear();
+            if (CDL_years.isEmpty()) {
+                CDL_years = CSIP_NASS_GeoTIFF.NASS_CDL_YEARS_CHECK(dataDir, unique_states);
+            } else {
+                if (Integer.parseInt(CDL_years.get(0)) >= 1997 && Integer.parseInt(CDL_years.get(0)) <= CSIP_Const.currentNASSyear) {
+                    int counter = Integer.parseInt(CDL_years.get(0));
+                    CDL_years.clear();
+                    while (counter <= CSIP_Const.currentNASSyear) {
+                        CDL_years.add("" + counter++);
+                    }
+                } else {
+                    CDL_years.add("2008");
+                    CDL_years.add("2009");
+                    CDL_years.add("2010");
+                    CDL_years.add("2011");
+                    CDL_years.add("2012");
+                    CDL_years.add("2013");
+                    CDL_years.add("2014");
+                    CDL_years.add("2015");
+                    CDL_years.add("2016");
+                    CDL_years.add("2017");
+                    CDL_years.add("2018");
+                    CDL_years.add("2019");
+                    CDL_years.add("2020");
+                }
+            }
+            Collections.sort(CDL_years);
+            LOG.info(" Available NASS CDL year(s) for State with longest time period  : ");
+            for (String str : CDL_years) {
+                LOG.info(str + ", ");
+            }
+            LOG.info(" ");
+            LOG.info("===>  Irrigation clipping.");
 
-      LOG.info("===>  Check available NASS years.");
+            HashMap<Integer, ArrayList<HashMap<Integer, Double>>> irrigation_map = new HashMap<>();
+            int helper = 0;
+            for (List<Geometry> tenk_geo_list : tenk_geo_lists) {
+                List<ERU> hrus = tenk_lists.get(helper);
+                HashMap<Integer, ArrayList<HashMap<Integer, Double>>> irrigation_map_in = CSIP_NASS_GeoTIFF.ConcurrentIrrigation_Overlay(dataDir, tenk_geo_list, hrus.get(0).ID, CDL_years.get(0), LOG);
+                irrigation_map.putAll(irrigation_map_in);
+                irrigation_map_in.clear();
+                helper++;
+            }
 
-      if (CDL_years.isEmpty()) {
-        CDL_years = CSIP_NASS_GeoTIFF.NASS_CDL_YEARS_CHECK(dataDir, unique_states);
-      } else {
-        if (Integer.parseInt(CDL_years.get(0)) >= 1997 && Integer.parseInt(CDL_years.get(0)) <= CSIP_Const.currentNASSyear) {
-          int counter = Integer.parseInt(CDL_years.get(0));
-          CDL_years.clear();
-          while (counter <= CSIP_Const.currentNASSyear) {
-            CDL_years.add("" + counter++);
-          }
+            LOG.info("===>  Irrigation clipping done.");
+            LOG.info(" ");
+            LOG.info("===> number of list parts : " + tenk_lists.size());
+
+            int number_of_chopParts = 0;
+            for (int numberLists = 0; numberLists < tenk_lists.size(); numberLists++) {
+
+                number_of_chopParts += tenk_lists.get(numberLists).size();
+
+                LOG.info(" ** ");
+                LOG.info("===> total number of geometries in loop : " + number_of_chopParts);
+                LOG.info(" ** ");
+
+                List<ERU> hrus = tenk_lists.get(numberLists);
+                List<Geometry> input_Geometries = tenk_geo_lists.get(numberLists);
+
+                LOG.info("===>  NASS clipping.");
+                HashMap<Integer, ArrayList<Nass_results>> nass_data_result = CSIP_NASS_GeoTIFF.NASSGeoTIFF_clip_analysis(dataDir, CDLdataDirlocation, outputDir, CDL_years, input_Geometries, hrus, unique_states, geotiffs, LOG);
+
+                for (int i = 0; i < hrus.size(); i++) {
+                    int HRUID = hrus.get(i).ID;
+                    hrus.get(i).nass_list_data = nass_data_result.get(i);
+                    double irri_area = 0;
+                    if (irrigation_map.get(HRUID) != null) {
+                        for (int j = 0; j < irrigation_map.get(HRUID).size(); j++) {
+                            for (Integer keys : irrigation_map.get(HRUID).get(j).keySet()) {
+                                if (2017 == keys && irrigation_map.get(HRUID).get(j).get(keys) > 40) {
+                                    irri_area = irrigation_map.get(HRUID).get(j).get(keys);
+                                    hrus.get(i).irrigated = true;
+                                    hrus.get(i).irrigated_area = irri_area;
+                                    //LOG.info(hrus.get(i).ID + "  2017 irrigated ");
+                                    break;
+                                }
+                            }
+                        }
+                    }
+                }
+                LOG.info("===> Done NASS clipping main file.");
+
+                nass_data_result_all.putAll(nass_data_result);
+                nass_data_result.clear();
+
+                LOG.info("===> Going to the Confidence.");
+
+                CSIP_NASS_LMOD.NASS_data_CropSeq(dataDir, hrus, delta, LOG);
+
+                LOG.info("===> Done Confidence.");
+                LOG.info("===> Going to the Matching.");
+
+                HashMap<Integer, ArrayList<String[]>> map
+                        = CSIP_NASS_LMOD_Matching.ConcurrentMatching(dataDir, hrus, CDL_years, LMOD_CMZ_Data, DB_LMOD_map, start_year, end_year, LOG);
+
+                LOG.info("===> Done with the Matching.");
+                map.clear();
+
+                hrus_all.addAll(hrus);
+                hrus.clear();
+            }
+            LMOD_CMZ_Data.clear();
+
+            LOG.info("===>  Output generation.");
+
+            CSIP_NASS_LMOD.NASS_data_check(outputDir, hrus_all, nass_data_result_all, LOG);
+            nass_data_result_all.clear();
+
+            FormattedOutput.writeNASS(outputDir, hrus_all, LOG);
+            CSIP_NASS_LMOD_Matching.AOI_NASS_LMOD_generic_match_data_writer(outputDir, hrus_all, irrigation_map, delta, LOG);
+
+            irrigation_map.clear();
+
+            ArrayList<String> Unique_LMOD_OPKEYs = new ArrayList<>();
+            ArrayList<String> Unique_LMOD_ManKEYs = new ArrayList<>();
+
+            HashMap<String, HashMap<Integer, ArrayList<String[]>>> managements = new HashMap<>();
+            HashMap<String, ArrayList<String[]>> DB_map_managements = new HashMap<>();
+            HashMap<String, String> OP_map_costum = new HashMap<>();
+            HashMap<String, String> LMOD_AGES_CID_map = new HashMap<>();
+            HashMap<String, String> LMOD_AGES_MID_map = new HashMap<>();
+            HashMap<String, String> only_veg_map = new HashMap<>();
+            HashMap<String, String> only_landuse_map = new HashMap<>();
+            HashMap<String, Integer> OP_Till_Man = new HashMap<>();
+            HashMap<String, String[]> OP_map_tillage = new HashMap<>();
+            HashMap<String, String[]> DB_map_managements_acr = new HashMap<>();
+
+            CSIP_NASS_LMOD_Matching.AOI_NASS_DETECTED_LMODDATA_PREPARATION(dataDir, hrus_all, DB_map_managements, Unique_LMOD_OPKEYs, Unique_LMOD_ManKEYs, managements, OP_map_costum, DB_map_managements_acr, OP_map_tillage, LMOD_AGES_CID_map, only_veg_map, only_landuse_map, LOG);
+
+            if (custom) {
+                CSIP_NASS_LMOD_Matching.AOI_NASS_LMODDATA_costum_generic_writer(outputDir, hrus_all, DB_map_managements, managements, OP_map_costum, DB_map_managements_acr, LMOD_AGES_MID_map, only_veg_map, start_year, end_year, LOG);
+            }
+
+            managements.clear();
+            OP_map_costum.clear();
+
+            if (ages_files) {
+                LOG.info("===>    Output generation.  AgES-W input files");
+
+                FormattedOutput.writeAGESTillageFile(outputDir, OP_map_tillage, OP_Till_Man);
+                FormattedOutput.writeAGESManagementFile(outputDir, hrus_all, OP_Till_Man, DB_map_managements, DB_map_managements_acr, LMOD_AGES_CID_map, LMOD_AGES_MID_map, only_veg_map, only_landuse_map);
+            }
         } else {
-          CDL_years.add("2008");
-          CDL_years.add("2009");
-          CDL_years.add("2010");
-          CDL_years.add("2011");
-          CDL_years.add("2012");
-          CDL_years.add("2013");
-          CDL_years.add("2014");
-          CDL_years.add("2015");
-          CDL_years.add("2016");
-          CDL_years.add("2017");
-          CDL_years.add("2018");
-          CDL_years.add("2019");
-          CDL_years.add("2020");
-        }
-      }
-      Collections.sort(CDL_years);
-      LOG.info(" Available NASS CDL year(s) for State with longest time period  : ");
-      for (String str : CDL_years) {
-        LOG.info(str + ", ");
-      }
-      LOG.info(" ");
-      LOG.info("===>  Irrigation clipping.");
 
-      HashMap<Integer, ArrayList<HashMap<Integer, Double>>> irrigation_map = new HashMap<>();
-      int helper = 0;
-      for (List<Geometry> tenk_geo_list : tenk_geo_lists) {
-        List<ERU> hrus = tenk_lists.get(helper);
-        HashMap<Integer, ArrayList<HashMap<Integer, Double>>> irrigation_map_in = CSIP_NASS_GeoTIFF.ConcurrentIrrigation_Overlay(dataDir, tenk_geo_list, hrus.get(0).ID, CDL_years.get(0), LOG);
-        irrigation_map.putAll(irrigation_map_in);
-        irrigation_map_in.clear();
-        helper++;
-      }
+            LOG.info("===>  Processing entities for splitting.");
 
-      LOG.info("===>  Irrigation clipping done.");
-      LOG.info(" ");
-      LOG.info("===> number of list parts : " + tenk_lists.size());
+            URL geomUrl = new File(workspace, geometry).toURI().toURL();
 
-      int number_of_chopParts = 0;
-      for (int numberLists = 0; numberLists < tenk_lists.size(); numberLists++) {
+            LOG.info("===>  Reading AOI");
+            chunks = CSIP_Read_AND_SPLIT.URL_Read_Geometry_File_new(geomUrl, workspace, multi_run, polyCount, areaChunk, LOG);
 
-        number_of_chopParts += tenk_lists.get(numberLists).size();
+            LOG.info("slice file into " + chunks + " chunks");
+            List<Long> cpu_time = Collections.synchronizedList(new ArrayList<>(chunks));
 
-        LOG.info(" ** ");
-        LOG.info("===> total number of geometries in loop : " + number_of_chopParts);
-        LOG.info(" ** ");
+            long timestart = System.currentTimeMillis();
 
-        List<ERU> hrus = tenk_lists.get(numberLists);
-        List<Geometry> input_Geometries = tenk_geo_lists.get(numberLists);
+            File orgFileName = new File(geomUrl.toURI());
 
-        LOG.info("===>  NASS clipping.");
-        HashMap<Integer, ArrayList<Nass_results>> nass_data_result = CSIP_NASS_GeoTIFF.NASSGeoTIFF_clip_analysis(dataDir, CDLdataDirlocation, outputDir, CDL_years, input_Geometries, hrus, unique_states, geotiffs, LOG);
+            String fileName_pre = orgFileName.getName();
+            fileName_pre = fileName_pre.substring(0, fileName_pre.lastIndexOf(".") + 1);
+            fileName_pre = fileName_pre.replace(".", "");
+            final String fileName = fileName_pre;
 
-        for (int i = 0; i < hrus.size(); i++) {
-          int HRUID = hrus.get(i).ID;
-          hrus.get(i).nass_list_data = nass_data_result.get(i);
-          double irri_area = 0;
-          if (irrigation_map.get(HRUID) != null) {
-            for (int j = 0; j < irrigation_map.get(HRUID).size(); j++) {
-              for (Integer keys : irrigation_map.get(HRUID).get(j).keySet()) {
-                if (2017 == keys && irrigation_map.get(HRUID).get(j).get(keys) > 40) {
-                  irri_area = irrigation_map.get(HRUID).get(j).get(keys);
-                  hrus.get(i).irrigated = true;
-                  hrus.get(i).irrigated_area = irri_area;
-                  //LOG.info(hrus.get(i).ID + "  2017 irrigated ");
-                  break;
+            Parallel.run(parallelThreadsForChunkFeature, new Parallel.RunFactory() {
+
+                @Override
+                public Run create(final int i) {
+
+                    return new Run() {
+                        @Override
+                        public void run() throws Exception {
+                            long timestart_i = System.currentTimeMillis();
+                            ModelDataServiceCall mds = new ModelDataServiceCall()
+                                    .put(KEY_GEOM, "SplitKML_" + i + "_" + fileName + ".kmz")
+                                    .put(KEY_DELTA, "" + delta)
+                                    .put(KEY_CDLSTARTYEAR, "" + start_year)
+                                    .put(KEY_MULTIRUN, "-1")
+                                    .url(request().getURL())
+                                    .asAsync() // call async
+                                    .attach(
+                                            new File[]{
+                                                new File(outputDir, "SplitKML_" + i + "_" + fileName + ".kmz")
+                                            })
+                                    .call();
+
+                            if (mds.serviceFinished()) {
+                                mds.download(CSIP_Const.AOI_NASS_dominant_result, workspace().getFile(CSIP_Const.AOI_NASS_dominant_result + "_" + i + ".csv"));
+                                mds.download(CSIP_Const.AOI_NASS_LMOD_generic_result, workspace().getFile(CSIP_Const.AOI_NASS_LMOD_generic_result + "_" + i + ".csv"));
+                                mds.download(CSIP_Const.AOI_NASS_Plain_result, workspace().getFile(CSIP_Const.AOI_NASS_Plain_result + "_" + i + ".csv"));
+                                mds.download(CSIP_Const.AOI_LAMPS_CONF_result, workspace().getFile(CSIP_Const.AOI_LAMPS_CONF_result + "_" + i + ".csv"));
+                                if (custom) {
+                                    mds.download(CSIP_Const.AOI_NASS_LMOD_result_custom, workspace().getFile(CSIP_Const.AOI_NASS_LMOD_result_custom + "_" + i + ".csv"));
+                                }
+                            } else {
+                                throw new ServiceException("service error: " + mds.getError());
+                            }
+                            long timeend_i = System.currentTimeMillis();
+                            cpu_time.add(timeend_i - timestart_i);
+                            LOG.info("DONE !! run #" + i + " of " + chunks);
+                        }
+                    };
                 }
-              }
-            }
-          }
-        }
-        LOG.info("===> Done NASS clipping main file.");
 
-        nass_data_result_all.putAll(nass_data_result);
-        nass_data_result.clear();
-
-        LOG.info("===> Going to the Confidence.");
-
-        CSIP_NASS_LMOD.NASS_data_CropSeq(dataDir, hrus, delta, LOG);
-
-        LOG.info("===> Done Confidence.");
-        LOG.info("===> Going to the Matching.");
-
-        HashMap<Integer, ArrayList<String[]>> map
-            = CSIP_NASS_LMOD_Matching.ConcurrentMatching(dataDir, hrus, CDL_years, LMOD_CMZ_Data, DB_LMOD_map, start_year, end_year, LOG);
-
-        LOG.info("===> Done with the Matching.");
-        map.clear();
-
-        hrus_all.addAll(hrus);
-        hrus.clear();
-      }
-      LMOD_CMZ_Data.clear();
-
-      LOG.info("===>  Output generation.");
-
-      CSIP_NASS_LMOD.NASS_data_check(outputDir, hrus_all, nass_data_result_all, LOG);
-      nass_data_result_all.clear();
-
-      FormattedOutput.writeNASS(outputDir, hrus_all, LOG);
-      CSIP_NASS_LMOD_Matching.AOI_NASS_LMOD_generic_match_data_writer(outputDir, hrus_all, irrigation_map, delta, LOG);
-
-      irrigation_map.clear();
-
-      ArrayList<String> Unique_LMOD_OPKEYs = new ArrayList<>();
-      ArrayList<String> Unique_LMOD_ManKEYs = new ArrayList<>();
-
-      HashMap<String, HashMap<Integer, ArrayList<String[]>>> managements = new HashMap<>();
-      HashMap<String, ArrayList<String[]>> DB_map_managements = new HashMap<>();
-      HashMap<String, String> OP_map_costum = new HashMap<>();
-      HashMap<String, String> LMOD_AGES_CID_map = new HashMap<>();
-      HashMap<String, String> LMOD_AGES_MID_map = new HashMap<>();
-      HashMap<String, String> only_veg_map = new HashMap<>();
-      HashMap<String, String> only_landuse_map = new HashMap<>();
-      HashMap<String, Integer> OP_Till_Man = new HashMap<>();
-      HashMap<String, String[]> OP_map_tillage = new HashMap<>();
-      HashMap<String, String[]> DB_map_managements_acr = new HashMap<>();
-
-      CSIP_NASS_LMOD_Matching.AOI_NASS_DETECTED_LMODDATA_PREPARATION(dataDir, hrus_all, DB_map_managements, Unique_LMOD_OPKEYs, Unique_LMOD_ManKEYs, managements, OP_map_costum, DB_map_managements_acr, OP_map_tillage, LMOD_AGES_CID_map, only_veg_map, only_landuse_map, LOG);
-
-      if (custom) {
-        CSIP_NASS_LMOD_Matching.AOI_NASS_LMODDATA_costum_generic_writer(outputDir, hrus_all, DB_map_managements, managements, OP_map_costum, DB_map_managements_acr, LMOD_AGES_MID_map, only_veg_map, start_year, end_year, LOG);
-      }
-
-      managements.clear();
-      OP_map_costum.clear();
-
-      if (ages_files) {
-        LOG.info("===>    Output generation.  AgES-W input files");
-
-        FormattedOutput.writeAGESTillageFile(outputDir, OP_map_tillage, OP_Till_Man);
-        FormattedOutput.writeAGESManagementFile(outputDir, hrus_all, OP_Till_Man, DB_map_managements, DB_map_managements_acr, LMOD_AGES_CID_map, LMOD_AGES_MID_map, only_veg_map, only_landuse_map);
-      }
-    } else {
-
-      LOG.info("===>  Processing entities for splitting.");
-
-      URL geomUrl = new File(workspace, geometry).toURI().toURL();
-
-      LOG.info("===>  Reading AOI");
-      chunks = CSIP_Read_AND_SPLIT.URL_Read_Geometry_File_new(geomUrl, workspace, multi_run, polyCount, areaChunk, LOG);
-
-      LOG.info("slice file into " + chunks + " chunks");
-      List<Long> cpu_time = Collections.synchronizedList(new ArrayList<>(chunks));
-
-      long timestart = System.currentTimeMillis();
-
-      File orgFileName = new File(geomUrl.toURI());
-
-      String fileName_pre = orgFileName.getName();
-      fileName_pre = fileName_pre.substring(0, fileName_pre.lastIndexOf(".") + 1);
-      fileName_pre = fileName_pre.replace(".", "");
-      final String fileName = fileName_pre;
-
-      Parallel.run(parallelThreadsForChunkFeature, new Parallel.RunFactory() {
-
-        @Override
-        public Run create(final int i) {
-
-          return new Run() {
-            @Override
-            public void run() throws Exception {
-              long timestart_i = System.currentTimeMillis();
-              ModelDataServiceCall mds = new ModelDataServiceCall()
-                  .put(KEY_GEOM, "SplitKML_" + i + "_" + fileName + ".kmz")
-                  .put(KEY_DELTA, "" + delta)
-                  .put(KEY_CDLSTARTYEAR, "" + start_year)
-                  .put(KEY_MULTIRUN, "-1")
-                  .url(request().getURL())
-                  .withTimeout(10800)
-                  .attach(
-                      new File[]{
-                        new File(outputDir, "SplitKML_" + i + "_" + fileName + ".kmz")
-                      })
-                  .call();
-
-              if (mds.serviceFinished()) {
-                mds.download(CSIP_Const.AOI_NASS_dominant_result, workspace().getFile(CSIP_Const.AOI_NASS_dominant_result + "_" + i + ".csv"));
-                mds.download(CSIP_Const.AOI_NASS_LMOD_generic_result, workspace().getFile(CSIP_Const.AOI_NASS_LMOD_generic_result + "_" + i + ".csv"));
-                mds.download(CSIP_Const.AOI_NASS_Plain_result, workspace().getFile(CSIP_Const.AOI_NASS_Plain_result + "_" + i + ".csv"));
-                mds.download(CSIP_Const.AOI_LAMPS_CONF_result, workspace().getFile(CSIP_Const.AOI_LAMPS_CONF_result + "_" + i + ".csv"));
-                if (custom) {
-                  mds.download(CSIP_Const.AOI_NASS_LMOD_result_custom, workspace().getFile(CSIP_Const.AOI_NASS_LMOD_result_custom + "_" + i + ".csv"));
+                @Override
+                public int count() {
+                    return chunks;
                 }
-              } else {
-                throw new ServiceException("service error: " + mds.getError());
-              }
-              long timeend_i = System.currentTimeMillis();
-              cpu_time.add(timeend_i - timestart_i);
-              LOG.info("DONE !! run #" + i + " of " + chunks);
-            }
-          };
-        }
-
-
-        @Override
-        public int count() {
-          return chunks;
-        }
-      });
+            });
 
 //            Services.runParallel(chunks, (final int i) -> (Callable) () -> {
 //                long timestart_i = System.currentTimeMillis();
@@ -452,135 +449,130 @@
 //                cpu_time.add(timeend_i - timestart_i);
 //                return null;
 //            });
-      long timeend = System.currentTimeMillis();
-      total_time = Duration.ofMillis(timeend - timestart).toString();
-      stats = statsTime(cpu_time);
-      area_stats = stats(areaChunk);
+            long timeend = System.currentTimeMillis();
+            total_time = Duration.ofMillis(timeend - timestart).toString();
+            stats = statsTime(cpu_time);
+            area_stats = stats(areaChunk);
+        }
+
+        LOG.info("Finished.");
+
+        long end_time = System.currentTimeMillis();
+        difference = (end_time - start_time) / 1000;
+        LOG.info("===>  Total LAMPS application time : " + difference + " s");
     }
 
-    LOG.info("Finished.");
+    @Override
+    protected void postProcess() throws IOException {
+        results().put("application time", " " + difference + " s");
+        if (multi_run > -1) {
+            // assemble the files back again only if we have to reduce.
+            combineFiles(workspace().getFile(CSIP_Const.AOI_NASS_dominant_result), chunks);
+            if (custom) {
+                combineFiles(workspace().getFile(CSIP_Const.AOI_NASS_LMOD_result_custom), chunks);
+            }
+            combineFiles(workspace().getFile(CSIP_Const.AOI_NASS_LMOD_generic_result), chunks);
+            combineFiles(workspace().getFile(CSIP_Const.AOI_NASS_Plain_result), chunks);
+            combineFiles(workspace().getFile(CSIP_Const.AOI_LAMPS_CONF_result), chunks);
 
-    long end_time = System.currentTimeMillis();
-    difference = (end_time - start_time) / 1000;
-    LOG.info("===>  Total LAMPS application time : " + difference + " s");
-  }
+            results().put("multi_run", multi_run);
+            results().put("chunks", chunks);
+            results().put("real_time", total_time);
+            results().put("min_time", stats[0]);
+            results().put("max_time", stats[1]);
+            results().put("avg_time", stats[2]);
+            results().put("total_time", stats[3]);
+            results().put("min_area", area_stats[0]);
+            results().put("max_area", area_stats[1]);
+            results().put("total_area", area_stats[3]);
+            for (int i = 0; i < polyCount.size(); i++) {
+                results().put("#ofPoly" + i, polyCount.get(i));
+                results().put("avgChunkArea" + i, (areaChunk.get(i) / polyCount.get(i)));
+                results().put("ChunkArea" + i, areaChunk.get(i));
+            }
+        }
+        results().put(workspace().getFile(CSIP_Const.AOI_NASS_dominant_result), "output file1");
+        if (custom) {
+            results().put(workspace().getFile(CSIP_Const.AOI_NASS_LMOD_result_custom), "output file2");
+        }
+        results().put(workspace().getFile(CSIP_Const.AOI_NASS_LMOD_generic_result), "output file3");
+        results().put(workspace().getFile(CSIP_Const.AOI_NASS_Plain_result), "output file4");
+        results().put(workspace().getFile(CSIP_Const.AOI_LAMPS_CONF_result), "output file5");
 
-
-  @Override
-  protected void postProcess() throws IOException {
-    results().put("application time", " " + difference + " s");
-    if (multi_run > -1) {
-      // assemble the files back again only if we have to reduce.
-      combineFiles(workspace().getFile(CSIP_Const.AOI_NASS_dominant_result), chunks);
-      if (custom) {
-        combineFiles(workspace().getFile(CSIP_Const.AOI_NASS_LMOD_result_custom), chunks);
-      }
-      combineFiles(workspace().getFile(CSIP_Const.AOI_NASS_LMOD_generic_result), chunks);
-      combineFiles(workspace().getFile(CSIP_Const.AOI_NASS_Plain_result), chunks);
-      combineFiles(workspace().getFile(CSIP_Const.AOI_LAMPS_CONF_result), chunks);
-
-      results().put("multi_run", multi_run);
-      results().put("chunks", chunks);
-      results().put("real_time", total_time);
-      results().put("min_time", stats[0]);
-      results().put("max_time", stats[1]);
-      results().put("avg_time", stats[2]);
-      results().put("total_time", stats[3]);
-      results().put("min_area", area_stats[0]);
-      results().put("max_area", area_stats[1]);
-      results().put("total_area", area_stats[3]);
-      for (int i = 0; i < polyCount.size(); i++) {
-        results().put("#ofPoly" + i, polyCount.get(i));
-        results().put("avgChunkArea" + i, (areaChunk.get(i) / polyCount.get(i)));
-        results().put("ChunkArea" + i, areaChunk.get(i));
-      }
-    }
-    results().put(workspace().getFile(CSIP_Const.AOI_NASS_dominant_result), "output file1");
-    if (custom) {
-      results().put(workspace().getFile(CSIP_Const.AOI_NASS_LMOD_result_custom), "output file2");
-    }
-    results().put(workspace().getFile(CSIP_Const.AOI_NASS_LMOD_generic_result), "output file3");
-    results().put(workspace().getFile(CSIP_Const.AOI_NASS_Plain_result), "output file4");
-    results().put(workspace().getFile(CSIP_Const.AOI_LAMPS_CONF_result), "output file5");
-
-    //delFiles(workspace().getFile(CSIP_Const.AOI_NASS_dominant_result), chunks);
+        //delFiles(workspace().getFile(CSIP_Const.AOI_NASS_dominant_result), chunks);
 //        if (custom) {
 //            delFiles(workspace().getFile(CSIP_Const.AOI_NASS_LMOD_result_custom), chunks);
 //        }
 //        delFiles(workspace().getFile(CSIP_Const.AOI_NASS_LMOD_generic_result), chunks);
 //        delFiles(workspace().getFile(CSIP_Const.AOI_NASS_Plain_result), chunks);
 //        delFiles(workspace().getFile(CSIP_Const.AOI_LAMPS_CONF_result), chunks);
-  }
-
-
-  private String[] statsTime(List<Long> list) {
-    if (list.isEmpty()) {
-      return new String[]{
-        Duration.ofMillis(0).toString(),
-        Duration.ofMillis(0).toString(),
-        Duration.ofMillis(0).toString(),
-        Duration.ofMillis(0).toString()
-      };
-    }
-    long max = Long.MIN_VALUE;
-    long min = Long.MAX_VALUE;
-    long sum = 0;
-    for (long l : list) {
-      sum += l;
-      if (l > max) {
-        max = l;
-      }
-      if (l < min) {
-        min = l;
-      }
-    }
-    return new String[]{
-      Duration.ofMillis(min).toString(),
-      Duration.ofMillis(max).toString(),
-      Duration.ofMillis(sum / list.size()).toString(),
-      Duration.ofMillis(sum).toString()
-    };
-  }
-
-
-  private double[] stats(List<Double> list) {
-    if (list.isEmpty()) {
-      return new double[]{0, 0, 0, 0};
-    }
-    double max = Double.MIN_VALUE;
-    double min = Double.MAX_VALUE;
-    double sum = 0;
-    for (double l : list) {
-      sum += l;
-      if (l > max) {
-        max = l;
-      }
-      if (l < min) {
-        min = l;
-      }
-    }
-    return new double[]{min, max, (sum / list.size()), sum};
-  }
-
-
-  private static void combineFiles(File out, int chunks) throws IOException {
-    for (int i = 0; i < chunks; i++) {
-      File of = new File(out.getParentFile(), out.getName() + "_" + i + ".csv");
-      FileUtils.copyFile(of, FileUtils.openOutputStream(out, true));
     }
 
-  }
+    private String[] statsTime(List<Long> list) {
+        if (list.isEmpty()) {
+            return new String[]{
+                Duration.ofMillis(0).toString(),
+                Duration.ofMillis(0).toString(),
+                Duration.ofMillis(0).toString(),
+                Duration.ofMillis(0).toString()
+            };
+        }
+        long max = Long.MIN_VALUE;
+        long min = Long.MAX_VALUE;
+        long sum = 0;
+        for (long l : list) {
+            sum += l;
+            if (l > max) {
+                max = l;
+            }
+            if (l < min) {
+                min = l;
+            }
+        }
+        return new String[]{
+            Duration.ofMillis(min).toString(),
+            Duration.ofMillis(max).toString(),
+            Duration.ofMillis(sum / list.size()).toString(),
+            Duration.ofMillis(sum).toString()
+        };
+    }
 
+    private double[] stats(List<Double> list) {
+        if (list.isEmpty()) {
+            return new double[]{0, 0, 0, 0};
+        }
+        double max = Double.MIN_VALUE;
+        double min = Double.MAX_VALUE;
+        double sum = 0;
+        for (double l : list) {
+            sum += l;
+            if (l > max) {
+                max = l;
+            }
+            if (l < min) {
+                min = l;
+            }
+        }
+        return new double[]{min, max, (sum / list.size()), sum};
+    }
 
-  private void delFiles(File out, int chunks) throws IOException {
-    for (int i = 0; i < chunks; i++) {
-      //File of1 = new File(workspace().getDir(), CSIP_Const.AOI_NASS_dominant_result + "-" + i);//                 
-      File of = new File(workspace().getDir(), out.getName() + "_" + i + ".csv");
-      LOG.info(" Deleting ." + i + " " + of.getAbsolutePath());
-      of.delete();
-      if (of.exists()) {
-        LOG.info(" OH NO : " + of.getName());
-      }
+    private static void combineFiles(File out, int chunks) throws IOException {
+        for (int i = 0; i < chunks; i++) {
+            File of = new File(out.getParentFile(), out.getName() + "_" + i + ".csv");
+            FileUtils.copyFile(of, FileUtils.openOutputStream(out, true));
+        }
+
     }
-  }
+
+    private void delFiles(File out, int chunks) throws IOException {
+        for (int i = 0; i < chunks; i++) {
+            //File of1 = new File(workspace().getDir(), CSIP_Const.AOI_NASS_dominant_result + "-" + i);//                 
+            File of = new File(workspace().getDir(), out.getName() + "_" + i + ".csv");
+            LOG.info(" Deleting ." + i + " " + of.getAbsolutePath());
+            of.delete();
+            if (of.exists()) {
+                LOG.info(" OH NO : " + of.getName());
+            }
+        }
+    }
 }

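Notes on the V1_0.java change: most of this hunk is a reindentation from two-space to four-space style; the substantive edit is in the chunked fan-out, where the fixed .withTimeout(10800) on the per-chunk ModelDataServiceCall is replaced by .asAsync(), so each SplitKML chunk is submitted asynchronously instead of being bounded by a three-hour timeout. The reduce step in postProcess() then stitches the per-chunk CSVs back together via combineFiles(). A standalone equivalent of that concatenation using only java.nio is sketched below, as an illustration only; the changeset itself keeps the commons-io FileUtils version:

    import java.io.IOException;
    import java.io.OutputStream;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.StandardOpenOption;

    // Sketch: appends <out>_<i>.csv for i = 0..chunks-1 onto 'out',
    // mirroring combineFiles() above without the commons-io dependency.
    static void combineParts(Path out, int chunks) throws IOException {
        try (OutputStream sink = Files.newOutputStream(out,
                StandardOpenOption.CREATE, StandardOpenOption.APPEND)) {
            for (int i = 0; i < chunks; i++) {
                Path part = out.resolveSibling(out.getFileName() + "_" + i + ".csv");
                Files.copy(part, sink); // raw byte append, like FileUtils.copyFile
            }
        }
    }
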
src/java/methods/CSIP_NASS_LMOD_Matching.java

@@ -221,7 +221,7 @@
     public static HashMap<Integer, ArrayList<String[]>> ConcurrentMatching(File dataDir, List<ERU> hrus, List<String> CDL_years, List<ArrayList<String>> LMOD_CMZ_Data, HashMap<String, List<ArrayList<String>>> DB_LMOD_map, Integer start_year, Integer end_year, SessionLogger LOG)
             throws InterruptedException, ExecutionException {
 
-        final int threadNum = Partition.getThreadCount(hrus.size());;
+        final int threadNum = Partition.getThreadCount(hrus.size());
         final int first = 0;
         final int last = hrus.size() - 1;
         final List<String> years = CDL_years;
@@ -1366,7 +1366,7 @@
 
         records.add("FieldID / HRU, Date,Operation,Vegetation,Tillage-Intensity,Tillage-Depth-Max,Tillage-Depth-Min,Depth-Unit,Description \r\n");
 
-        final int threadNum = Partition.getThreadCount(hrus.size());;
+        final int threadNum = Partition.getThreadCount(hrus.size());
         final int first = 0;
         final int last = hrus.size() - 1;
         final List<ERU> fhrus = hrus;
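Note on the CSIP_NASS_LMOD_Matching.java change: both hunks drop a stray second semicolon. The extra ";" parses as an empty statement in Java, so the cleanup is purely cosmetic and has no effect on behavior.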