/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package m.ghg.daycent;

import csip.Config;
import csip.api.server.Executable;
import csip.api.client.ModelDataServiceCall;
import csip.api.server.PayloadParameter;
import csip.api.server.ServiceException;
import csip.SessionLogger;
import csip.annotations.Description;
import csip.annotations.Name;
import csip.annotations.Options;
import csip.annotations.Resource;
import csip.cosu.ObjFunc;
import csip.utils.Client;
import csip.utils.Parallel;
import gisobjects.GISObject;
import gisobjects.GISObjectFactory;
import gisobjects.db.GISEngineFactory;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.nio.file.StandardOpenOption;
import java.sql.Connection;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Scanner;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.stream.Collectors;
import java.util.stream.DoubleStream;
import java.util.stream.IntStream;
import javax.ws.rs.Path;
import m.ghg.ApplicationResources;
import static m.ghg.ApplicationResources.*;
import static m.ghg.daycent.V1_0.ppdf3Def;
import static m.ghg.daycent.V1_0.ppdf4Def;
import static m.ghg.daycent.V1_0.pramxDef;
import oms3.ObjectiveFunction;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import util.DateConversion;
import util.ServiceUtils;
import util.ObjFuncs.*;
import static util.ServiceUtils.getStringColumnData;

/**
 *
 * @author sidereus, od
 */
@Name("Daycent simulation execution")
@Description("Daycent CSIP service")
@Path("m/daycent/2.3")
@Options(timeout = "PT10M")
@Resource(from = ApplicationResources.class)
public class V2_3 extends V1_0 {

  static final Map<String, Double> yieldConv = new HashMap<>();

  // Post-processing slices the model output by time (start and end) and converts it with these factors:
  /*
    Corn:  grams/square meter C to corn bushels per acre:  cgracc value * 0.41652
    Factor:  8.92179 / 0.45 / (56 * 0.85) = 0.41652

    Soybean:  cgracc value * 0.37982 bu/acre
    Factor:  8.92179 / 0.45 / (60 * 0.87) = 0.37982

    Wheat:  cgracc value * 0.38200  bu/acre
    Factor:  8.92179 / 0.45 / (60 * 0.865) = 0.38200

    Oats:  cgracc value * 0.72043 bu/acre
    Factor:  8.92179 / 0.45 / (32 * 0.86) = 0.72043

    Sorghum:  cgracc value * 0.40694 bu/ac
    Factor:  8.92179 / 0.45 / (56 * 0.87) = 0.40694

    Sorghum Silage: cgracc value * 0.0124903 tons/acre
    Factor: 0.00404686 Mg/ac / 0.45 C fraction / 0.72 dry fraction = 0.0124903 Mg/ac

    Alfalfa Hay:  cgracc value * 0.011394 tons/acre
    Factor:  0.0044609 / 0.45 / 0.87 = 0.011394

    Corn Silage:  cgracc value * 0.026435 tons/acre
    Factor:  0.0044609 / 0.45 / 0.375 = 0.026435

    Sorghum Forage: cgracc value * 0.0124903 tons/acre

    Barley:  cgracc value * 0.48594  bu/acre

    Rice: cgracc value * 0.5507
    Factor: 8.92179 / 0.45 / (45 * 0.8) = 0.5507

    Sugar beet: cgracc value * 0.04956
    Factor: 0.0044609 / 0.45 / 0.2 = 0.04956
    https://catalog.extension.oregonstate.edu/em9324/html

   */
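  // Example: for corn (C1), a simulated cgracc of 500 g C/m2 converts to 500 * 0.41652 = 208.3 bu/ac.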
  static {
    yieldConv.put("C1", 0.41652);
    yieldConv.put("C3", 0.41652);
    yieldConv.put("C5", 0.41652);
    yieldConv.put("C7", 0.41652);
    yieldConv.put("C9", 0.41652);
    yieldConv.put("C10", 0.41652);
    yieldConv.put("C11", 0.41652);
    yieldConv.put("C12", 0.41652);
    yieldConv.put("C13", 0.41652);

    yieldConv.put("PC13", 22.98690); // lbs/A 13.75% moisture

    yieldConv.put("SC13", 79.30471); // lbs/A 75% moisture

    yieldConv.put("SYBN1", 0.37982);
    yieldConv.put("SYBN2", 0.37982);
    yieldConv.put("SYBN3", 0.37982);
    yieldConv.put("SYBN4", 0.37982);

    yieldConv.put("BKW", 0.4539); // bu/ac 16% moisture

    yieldConv.put("OAT1", 0.72043); // bu/ac 14% moisture
    yieldConv.put("OAT2", 0.72043); // bu/ac 14% moisture
    yieldConv.put("OAT3", 0.72043); // bu/ac 14% moisture

    yieldConv.put("W1", 0.38200); // bu/ac 13.5% moisture
    yieldConv.put("W2", 0.38200); // bu/ac 13.5% moisture
    yieldConv.put("W3", 0.38200); // bu/ac 13.5% moisture
    yieldConv.put("W4", 0.38200); // bu/ac 13.5% moisture
    yieldConv.put("WSL4", 0.026435); // tons/ac 62.5% moisture
    yieldConv.put("SW3", 0.38200); // bu/ac 13.5% moisture
    yieldConv.put("SWSL4", 0.026435); // tons/ac 62.5% moisture
    yieldConv.put("WBL4", 0.028323); // tons/ac 40% moisture    
    yieldConv.put("SW3P", 0.38200); // bu/ac 13.5% moisture
    yieldConv.put("SW4", 0.38200); // bu/ac 13.5% moisture

    yieldConv.put("SORG3", 0.40694); // bu/ac 13% moisture

    yieldConv.put("FSORG", 0.012089); // hay short tons/ac 18% moisture
    yieldConv.put("BSORG", 0.019826); // tons/ac 50% moisture
    yieldConv.put("SSORG", 0.028323); // tons/ac 65% moisture

    yieldConv.put("ALF1", 0.011394); // short tons/ac 13% moisture
    yieldConv.put("ALF2", 0.011394); // short tons/ac 13% moisture
    yieldConv.put("ALF3", 0.011394); // short tons/ac 13% moisture
    yieldConv.put("ALF4", 0.011394); // short tons/ac 13% moisture

    yieldConv.put("ALFB", 0.022029); // short tons/ac 55% moisture    

    yieldConv.put("G3CPI", 0.011394); // tons/ac

    yieldConv.put("CSL9", 0.026435); // tons/ac 62.5% moisture
    yieldConv.put("CSL10", 0.026435); // tons/ac 62.5% moisture
    yieldConv.put("CSL11", 0.026435); // tons/ac 62.5% moisture
    yieldConv.put("CSL12", 0.026435); // tons/ac 62.5% moisture
    yieldConv.put("CSL13", 0.026435); // tons/ac 62.5% moisture

    yieldConv.put("JTOM", 1.770197);
    yieldConv.put("SUN", 21.9074); // lbs/ac 9.5% moisture
    yieldConv.put("COT", 8.1812); // lbs/ac 7.5% moisture * 0.3817 since reported cotton yields in lbs/ac fiber from Truterra
    yieldConv.put("COT2", 8.1812); // lbs/ac 7.5% moisture * 0.3817 since reported cotton yields in lbs/ac fiber from Truterra
    yieldConv.put("BAR3", 0.46937); // bu/ac 12% moisture
    yieldConv.put("TRIT", 0.42445); // triticale bu/ac 13.5% moisture
    yieldConv.put("RYE", 0.40929); // rye bu/ac 13% moisture
    yieldConv.put("RSIL", 0.02832);// ton/ac 65% moisture for silage
    yieldConv.put("RHAY", 0.01983);// currently ton/ac 50% moisture for hay
    yieldConv.put("BSIL", 0.02832);// ton/ac 65% moisture for silage
    yieldConv.put("DBEAN", 23.603); // bu/ac 16% moisture
    yieldConv.put("MBEAN", 22.5298); // lbs/ac 12% moisture
    yieldConv.put("PEA", 23.60262); // lbs/ac 16% moisture content
    yieldConv.put("PNUT", 22.15225); // 
//    yieldConv.put("CLV1", 23.325);
    yieldConv.put("CLV1", 0.0116625); // 15% moisture clover in tons/A
    yieldConv.put("LENT", 23.05372); // lbs/ac 14% moisture
    yieldConv.put("SAFF", 22.65851); // lbs/ac canola
    yieldConv.put("RICL", 0.50642); // bu/ac 13% moisture
    yieldConv.put("SUGB", 0.033044); // short tons/ac 30% moisture content
    yieldConv.put("CHAY", 0.01208916); // tons/ac 18% moisture content
    yieldConv.put("MS", 21.9074); // lbs/ac 9.5% moisture content
  }


  protected JSONObject rotation;
  protected List<Integer> crop_id;
  protected Collection<String> JSONparams;
  protected Integer cokey;
  protected Double area;
  protected Integer yearPractChange;
  protected Integer staticYearPractChange = 2011;
  protected Integer endPractChange;
  protected Integer projectionYear;
  protected Double nodata;
  protected Boolean calibonly;

  protected int colGracc = 0;
  protected int colAgcacc = 0;
  protected int colAbove = 0;
  protected int colBelowFineRoots = 0;
  protected int colBelowMatureRoots = 0;
  protected int colSOM = 0;

  protected String lrr = null;
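
  // Keys in sitepar.in and fix.100 that may carry stage-prefixed overrides in the payload
  // (e.g. "pa_dmpflux" for the pre-ag pass, "ag_dmpflux" for the ag pass).
  static final String[] SITEPAR_KEYS = {"dmpflux", "MaxNitAmt", "wfpsdnitadj", "n2n2oadj"};
  static final String[] FIX_KEYS = {
    "dec52", "dec11", "dec12", "dec21", "dec22", "dec31", "dec51", "dec4", "dec32",
    "teff1", "teff2", "teff3", "teff4",
    "ps1s31", "ps1s32", "ps2s31", "ps2s32",
    "fleach3", "pmco22", "p2co22", "ps1co22", "p1co2a2", "p1co2b2", "p3co2"
  };

  // Copies prefix+key entries onto their base keys (e.g. "pa_dec52" -> "dec52"), consolidating the
  // per-key override blocks in doProcess(). Assumes the inner sitepars/fixes maps are
  // Map<String, Double>, as their usage in doProcess() implies.
  static void applyPrefixedOverrides(Map<String, Double> map, String prefix, String[] keys) {
    for (String key : keys) {
      String prefixed = prefix + key;
      if (map.containsKey(prefixed)) {
        double val = map.get(prefixed);
        map.put(key, val);
      }
    }
  }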

  @Override
  protected void preProcess() throws ServiceException {
    schedule = parameter().getString(SCHEDULE_FILE);
    schedule_IC = parameter().getString(SCHEDULE_FILE_IC, "");
    useClimateWind = parameter().getBoolean(USE_CLIMATE_WIND, false);
    fileAsJSON = parameter().getBoolean(STREAM_FILE, false);
    weatherDuration = parameter().getInt(WEATHER_DURATION_TXT);
    startingYear = parameter().getInt(STARTING_YEAR_TXT);

    schedule = ServiceUtils.checkRemoveExtension(schedule);
    aoa_geometry = parameter().getJSON(soils.AoA.AOA_GEOMETRY);
    cokey = parameter().getInt("cokey");
    area = parameter().getDouble("area");
    yearPractChange = parameter().getInt("yearPractChange");
    endPractChange = parameter().getInt("endPractChange", 2020);
    projectionYear = parameter().getInt("projectionYear", 2030);
    calibonly = parameter().getBoolean("calibration");
    nodata = parameter().getDouble("nodata", -9999.0);
    JSONparams = parameter().getNames();
  }


  private void createCopy(String original, String copy) {
    String sourcePath = workspace().getFile(original).getAbsolutePath();
    String destPath = workspace().getDir().getAbsolutePath()
        .concat(File.separator).concat(copy);
    File sourceTemplate = new File(sourcePath);
    File destTemplate = new File(destPath);

    try {
      Files.copy(sourceTemplate.toPath(), destTemplate.toPath());
    } catch (IOException ex) {
      throw new RuntimeException(ex);
    }
  }


  protected void fetchClimate(int startYear, int duration, String outputfile, Integer counter) throws JSONException, Exception {
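    // Call the CLIGEN weather service for the AoA geometry and download the result to outputfile.
    // If the call does not finish, retry recursively (tracked by counter) up to 10 times before failing.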
    ModelDataServiceCall mds = new ModelDataServiceCall()
        .put(STARTING_YEAR_TXT, startYear)
        .put(WEATHER_DURATION_TXT, duration) // 10 years for 2030 projection
        .put(STREAM_FILE, fileAsJSON)
        .put(USE_CLIMATE_WIND, useClimateWind)
        .put(soils.AoA.AOA_GEOMETRY, new JSONObject()
            .put(TYPE, aoa_geometry.get(TYPE))
            .put(COORDINATES, aoa_geometry.get(COORDINATES)))
        .url(Config.getString(WEATHER_URL_KEY,
            request().getCodebase() + "d/weather/cligen/1.0"))
        .withRetryPause(1000)
        .withRetries(10) // 10 retries for climate to avoid failures
        .call();

    if (mds.serviceFinished()) {
      mds.download(DAYCENT_WEATHER, workspace().getFile(outputfile));
    } else if (counter != null && counter.equals(10)) {
      throw new ServiceException("GHG Weather service error: " + mds.getError());
    } else {
      if (counter == null) {
        counter = 1;
      } else {
        counter++;
      }
      LOG.info("Climate service call Counter: " + counter);
      fetchClimate(startYear, duration, outputfile, counter);
    }
  }


  private String getLRR(JSONArray centroid) throws Exception {
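    // Look up the Land Resource Region (LRR) symbol for the given centroid via the LRR data service.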
    ModelDataServiceCall res = new ModelDataServiceCall()
        .put("assessment_location", "Point", centroid)
        .url(LRR_URL)
        .withDefaultLogger()
        .call();
    String zone;
    if (res.serviceFinished()) {
      zone = res.getString("lrrsym");
    } else {
      String msg = res.getError();
      throw new RuntimeException(msg);
    }

    return zone;
  }


  @Override
  protected void doProcess() throws Exception {
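    // Overall flow: resolve the AoA centroid and its LRR, fetch climate and soil data in parallel,
    // run Daycent with the pre-agriculture (pa_) schedule and parameters, rename those outputs to *_pa,
    // then run the agricultural (ag_) schedule starting from the Truterra_pa.bin state.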

    //  Get centroid of shape, if it is not a point already.  (If it's a point, the centroid is the point)
    try (Connection conn = resources().getJDBC(GISDB_SQLSVR)) {
      GISObject shape = GISObjectFactory.createGISObject(aoa_geometry, GISEngineFactory.createGISEngine(conn));

      //  getLat/Lon functions get point value or the centroid of the shape.
      latlon[0] = shape.getLatitude();
      latlon[1] = shape.getLongitude();
    }

    JSONArray centroid = new JSONArray();
    centroid.put(latlon[1]);
    centroid.put(latlon[0]);

    lrr = getLRR(centroid);

    createCopy("fix.100", "fix_2.100");
    createCopy("sitepar.in", "sitepar_2.in");

//    if (lrr.toLowerCase().equals("c") ||
//        lrr.toLowerCase().equals("r")) {
//      createCopy("fix.100", "fix_3.100");
//      createCopy("sitepar.in", "sitepar_3.in");
//    }

    getJSONParams();

    Parallel.run(Config.getBoolean("ghg.serial.datafetch", false),
        () -> {
          int duration = endPractChange - 1979;
          fetchClimate(1979, duration, "dayCentWeather.IN", null);
          computeWeatherStats();
        },
        () -> {
          fetchSoil(calibonly, null);
          countLayers(); // testing if we need the fix structure in Black et al. 2017
        },
        () -> {
          int duration = endPractChange - 2010;
          fetchClimate(2010, duration, "dayCentW2030.IN", null);
        }
    );

    // Apply the pre-ag (pa_) parameter overrides before the first Daycent pass.
    if (!sitepars.isEmpty()) {
      applyPrefixedOverrides(sitepars.get("site"), "pa_", SITEPAR_KEYS);
    }

    if (!fixes.isEmpty()) {
      applyPrefixedOverrides(fixes.get("fixes"), "pa_", FIX_KEYS);
    }

    // RENAME PRE-AG SCH
    java.nio.file.Path source = workspace().getFile("pa_Truterra.sch").toPath();
    Files.move(source, source.resolveSibling("Truterra.sch"), StandardCopyOption.REPLACE_EXISTING);
    createConfig("crop.100");
    createConfig(getSite100());
    createConfig("fix.100"); // values from payload _pa
    createConfig("sitepar.in");
    createConfig("soils.in");
    createConfig("tree.100");
    runDaycent();
    runDaycentList();
    source = workspace().getFile("Truterra.sch").toPath();
    Files.move(source, source.resolveSibling("Truterra_pa.sch"), StandardCopyOption.REPLACE_EXISTING);

    java.nio.file.Path sourceBin = workspace().getFile("Truterra.bin").toPath();
    Files.move(sourceBin, sourceBin.resolveSibling("Truterra_pa.bin"), StandardCopyOption.REPLACE_EXISTING);

    java.nio.file.Path sourceFix = workspace().getFile("fix.100").toPath();
    Files.move(sourceFix, sourceFix.resolveSibling("fix_pa.100"), StandardCopyOption.REPLACE_EXISTING);

    sourceFix = workspace().getFile("fix_2.100").toPath();
    Files.move(sourceFix, sourceFix.resolveSibling("fix.100"), StandardCopyOption.REPLACE_EXISTING);

    java.nio.file.Path sourceSite = workspace().getFile("sitepar.in").toPath();
    Files.move(sourceSite, sourceSite.resolveSibling("sitepar_pa.in"), StandardCopyOption.REPLACE_EXISTING);

    sourceSite = workspace().getFile("sitepar_2.in").toPath();
    Files.move(sourceSite, sourceSite.resolveSibling("sitepar.in"), StandardCopyOption.REPLACE_EXISTING);

    java.nio.file.Path sourceLis = workspace().getFile("Truterra.lis").toPath();
    Files.move(sourceLis, sourceLis.resolveSibling("Truterra_pa.lis"), StandardCopyOption.REPLACE_EXISTING);

    java.nio.file.Path outfiles = workspace().getFile("outfiles.in").toPath();
    Files.move(outfiles, outfiles.resolveSibling("outfiles_pa.in"), StandardCopyOption.REPLACE_EXISTING);
    outfiles = workspace().getFile("outfiles_ag.in").toPath();
    Files.move(outfiles, outfiles.resolveSibling("outfiles.in"), StandardCopyOption.REPLACE_EXISTING);

    Files.delete(workspace().getFile("year_summary.out").toPath());

    // Apply the ag_ parameter overrides before the second Daycent pass.
    if (!sitepars.isEmpty()) {
      applyPrefixedOverrides(sitepars.get("site"), "ag_", SITEPAR_KEYS);
    }

    if (!fixes.isEmpty()) {
      applyPrefixedOverrides(fixes.get("fixes"), "ag_", FIX_KEYS);
    }

    // RENAME AG SCH
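    // Second pass: regenerate fix.100 and sitepar.in with the ag_ overrides and run the ag schedule,
    // using the pre-ag state (Truterra_pa.bin) as the initial condition.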
    source = workspace().getFile("ag_Truterra.sch").toPath();
    Files.move(source, source.resolveSibling("Truterra.sch"), StandardCopyOption.REPLACE_EXISTING);
    createConfig("fix.100"); // values from payload _ag
    createConfig("sitepar.in");
    schedule_IC = "Truterra_pa.bin";
    runDaycent();
    runDaycentList();
//    if (lrr.toLowerCase().equals("c") ||
//        lrr.toLowerCase().equals("r")) {
//      source = workspace().getFile("Truterra.sch").toPath();
//      Files.move(source, source.resolveSibling("Truterra_ag.sch"), StandardCopyOption.REPLACE_EXISTING);
//
//      sourceBin = workspace().getFile("Truterra.bin").toPath();
//      Files.move(sourceBin, sourceBin.resolveSibling("Truterra_ag.bin"), StandardCopyOption.REPLACE_EXISTING);
//
//      sourceFix = workspace().getFile("fix.100").toPath();
//      Files.move(sourceFix, sourceFix.resolveSibling("fix_ag.100"), StandardCopyOption.REPLACE_EXISTING);
//
//      sourceFix = workspace().getFile("fix_3.100").toPath();
//      Files.move(sourceFix, sourceFix.resolveSibling("fix.100"), StandardCopyOption.REPLACE_EXISTING);
//
//      sourceSite = workspace().getFile("sitepar.in").toPath();
//      Files.move(sourceSite, sourceSite.resolveSibling("sitepar_ag.in"), StandardCopyOption.REPLACE_EXISTING);
//
//      sourceSite = workspace().getFile("sitepar_3.in").toPath();
//      Files.move(sourceSite, sourceSite.resolveSibling("sitepar.in"), StandardCopyOption.REPLACE_EXISTING);
//
//      sourceLis = workspace().getFile("Truterra.lis").toPath();
//      Files.move(sourceLis, sourceLis.resolveSibling("Truterra_ag.lis"), StandardCopyOption.REPLACE_EXISTING);
////
////    outfiles = workspace().getFile("outfiles.in").toPath();
////    Files.move(outfiles, outfiles.resolveSibling("outfiles_pa.in"), StandardCopyOption.REPLACE_EXISTING);
////    outfiles = workspace().getFile("outfiles_ag.in").toPath();
////    Files.move(outfiles, outfiles.resolveSibling("outfiles.in"), StandardCopyOption.REPLACE_EXISTING);
//
//      Files.delete(workspace().getFile("year_summary.out").toPath());
//
//      if (!sitepars.isEmpty()) {
//        if (sitepars.get("site").containsKey("tt_dmpflux")) {
//          double val = sitepars.get("site").get("tt_dmpflux");
//          sitepars.get("site").put("dmpflux", val);
//        }
//        if (sitepars.get("site").containsKey("tt_MaxNitAmt")) {
//          double val = sitepars.get("site").get("tt_MaxNitAmt");
//          sitepars.get("site").put("MaxNitAmt", val);
//        }
//        if (sitepars.get("site").containsKey("tt_wfpsdnitadj")) {
//          double val = sitepars.get("site").get("tt_wfpsdnitadj");
//          sitepars.get("site").put("wfpsdnitadj", val);
//        }
//        if (sitepars.get("site").containsKey("tt_n2n2oadj")) {
//          double val = sitepars.get("site").get("tt_n2n2oadj");
//          sitepars.get("site").put("n2n2oadj", val);
//        }
//      }
//
//      if (!fixes.isEmpty()) {
//        if (fixes.get("fixes").containsKey("tt_dec52")) {
//          double val = fixes.get("fixes").get("tt_dec52");
//          fixes.get("fixes").put("dec52", val);
//        }
//        if (fixes.get("fixes").containsKey("tt_dec11")) {
//          double val = fixes.get("fixes").get("tt_dec11");
//          fixes.get("fixes").put("dec11", val);
//        }
//        if (fixes.get("fixes").containsKey("tt_dec12")) {
//          double val = fixes.get("fixes").get("tt_dec12");
//          fixes.get("fixes").put("dec12", val);
//        }
//        if (fixes.get("fixes").containsKey("tt_dec22")) {
//          double val = fixes.get("fixes").get("tt_dec22");
//          fixes.get("fixes").put("dec22", val);
//        }
//        if (fixes.get("fixes").containsKey("tt_dec21")) {
//          double val = fixes.get("fixes").get("tt_dec21");
//          fixes.get("fixes").put("dec21", val);
//        }
//        if (fixes.get("fixes").containsKey("tt_dec31")) {
//          double val = fixes.get("fixes").get("tt_dec31");
//          fixes.get("fixes").put("dec31", val);
//        }
//        if (fixes.get("fixes").containsKey("tt_dec51")) {
//          double val = fixes.get("fixes").get("tt_dec51");
//          fixes.get("fixes").put("dec51", val);
//        }
//        if (fixes.get("fixes").containsKey("tt_dec4")) {
//          double val = fixes.get("fixes").get("tt_dec4");
//          fixes.get("fixes").put("dec4", val);
//        }
//        if (fixes.get("fixes").containsKey("tt_dec32")) {
//          double val = fixes.get("fixes").get("tt_dec32");
//          fixes.get("fixes").put("dec32", val);
//        }
//        if (fixes.get("fixes").containsKey("tt_teff1")) {
//          double val = fixes.get("fixes").get("tt_teff1");
//          fixes.get("fixes").put("teff1", val);
//        }
//        if (fixes.get("fixes").containsKey("tt_teff2")) {
//          double val = fixes.get("fixes").get("tt_teff2");
//          fixes.get("fixes").put("teff2", val);
//        }
//        if (fixes.get("fixes").containsKey("tt_teff3")) {
//          double val = fixes.get("fixes").get("tt_teff3");
//          fixes.get("fixes").put("teff3", val);
//        }
//        if (fixes.get("fixes").containsKey("tt_teff4")) {
//          double val = fixes.get("fixes").get("tt_teff4");
//          fixes.get("fixes").put("teff4", val);
//        }
//        if (fixes.get("fixes").containsKey("tt_ps1s31")) {
//          double val = fixes.get("fixes").get("tt_ps1s31");
//          fixes.get("fixes").put("ps1s31", val);
//        }
//        if (fixes.get("fixes").containsKey("tt_ps1s32")) {
//          double val = fixes.get("fixes").get("tt_ps1s32");
//          fixes.get("fixes").put("ps1s32", val);
//        }
//        if (fixes.get("fixes").containsKey("tt_ps2s31")) {
//          double val = fixes.get("fixes").get("tt_ps2s31");
//          fixes.get("fixes").put("ps2s31", val);
//        }
//        if (fixes.get("fixes").containsKey("tt_ps2s32")) {
//          double val = fixes.get("fixes").get("tt_ps2s32");
//          fixes.get("fixes").put("ps2s32", val);
//        }
//        if (fixes.get("fixes").containsKey("tt_fleach3")) {
//          double val = fixes.get("fixes").get("tt_fleach3");
//          fixes.get("fixes").put("fleach3", val);
//        }
//        if (fixes.get("fixes").containsKey("tt_pmco22")) {
//          double val = fixes.get("fixes").get("tt_pmco22");
//          fixes.get("fixes").put("pmco22", val);
//        }
//        if (fixes.get("fixes").containsKey("tt_p2co22")) {
//          double val = fixes.get("fixes").get("tt_p2co22");
//          fixes.get("fixes").put("p2co22", val);
//        }
//        if (fixes.get("fixes").containsKey("tt_ps1co22")) {
//          double val = fixes.get("fixes").get("tt_ps1co22");
//          fixes.get("fixes").put("ps1co22", val);
//        }
//        if (fixes.get("fixes").containsKey("tt_p1co2a2")) {
//          double val = fixes.get("fixes").get("tt_p1co2a2");
//          fixes.get("fixes").put("p1co2a2", val);
//        }
//        if (fixes.get("fixes").containsKey("tt_p1co2b2")) {
//          double val = fixes.get("fixes").get("tt_p1co2b2");
//          fixes.get("fixes").put("p1co2b2", val);
//        }
//        if (fixes.get("fixes").containsKey("tt_p3co2")) {
//          double val = fixes.get("fixes").get("tt_p3co2");
//          fixes.get("fixes").put("p3co2", val);
//        }
//      }
//
//      // RENAME AG SCH
//      source = workspace().getFile("tt_Truterra.sch").toPath();
//      Files.move(source, source.resolveSibling("Truterra.sch"), StandardCopyOption.REPLACE_EXISTING);
//      createConfig("fix.100"); // values from payload _ag
//      createConfig("sitepar.in");
//      schedule_IC = "Truterra_ag.bin";
//      runDaycent();
//      runDaycentList();
//
//      source = workspace().getFile("Truterra.sch").toPath();
//      Files.move(source, source.resolveSibling("Truterra_tt.sch"), StandardCopyOption.REPLACE_EXISTING);
//
//      File f = workspace().getFile("Truterra.sch");
//      f.createNewFile();
//      java.nio.file.Path p = workspace().getFile("Truterra.sch").toPath();
//      java.nio.file.Path ag = workspace().getFile("Truterra_ag.sch").toPath();
//      java.nio.file.Path tt = workspace().getFile("Truterra_tt.sch").toPath();
//      List<String> ff = Files.readAllLines(tt);
//      ff.remove(0);
//      ff.remove(0);
//      ff.remove(0);
//      ff.remove(0);
//      ff.remove(0);
//      ff.remove(0);
//      ff.remove(0);
//      ff.remove(0);
//      ff.remove(0);
//      ff.remove(0);
//      ff.remove(0);
//      ff.remove(0);
//      ff.remove(0);
//      ff.remove(0);
//      ff.remove(0);
//      ff.remove(0);
//      ff.remove(0);
//      try (FileWriter fileWriter = new FileWriter(workspace().getFile("tt.sch"))) {
//        for (String s : ff) {
//          fileWriter.write(s + System.lineSeparator());
//        }
//      }
//      java.nio.file.Path agg = workspace().getFile("tt.sch").toPath();
//      Files.write(p, Files.readAllBytes(ag), StandardOpenOption.APPEND);
//      Files.write(p, Files.readAllBytes(agg), StandardOpenOption.APPEND);
//
//    }

//    checkHarvest();
  }


  public static Map<Integer, Double> getMapFor(File wsFile, String column) throws IOException {
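    // Read the "time" column and the requested column from a Daycent .out file and build a year -> value map.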
    String[] time = getStringColumnData(wsFile, "time");
    String[] col = getStringColumnData(wsFile, column);
    return createlookup(time, col);
  }


  static Map<Integer, Double> createlookup(String[] key, String[] val) {
    Map<Integer, Double> m = new LinkedHashMap<>();
    for (int i = 0; i < val.length; i++) {
      if (val[i].equals("-nan")) {
        val[i] = "0.0"; // when CH4 is -nan
      }
      m.put(Double.valueOf(key[i]).intValue() - MODEL_OFFSET, Double.parseDouble(val[i]));
    }
    return m;
  }


  /* slice by time
   */
  static Map<Integer, Double> slize(int start, int end, Map<Integer, Double> orig) {
    return orig.entrySet()
        .stream()
        .filter(map -> map.getKey() >= start && map.getKey() <= end)
        .collect(Collectors.toMap(map -> map.getKey(), map -> map.getValue()));
  }


  /* convert the value
   */
  static Map<Integer, Double> convert(double factor, Map<Integer, Double> orig) {
    return orig.entrySet()
        .stream()
        .collect(Collectors.toMap(map -> map.getKey(), map -> map.getValue() * factor));
  }


  void parseYearSummary() throws IOException {
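    // Sum annual N2O and CH4 fluxes over the practice-change window, converted to Mg/ac CO2e,
    // and put the totals into the result payload.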
    File ys_out = workspace().getFile("year_summary.out");

    Map<Integer, Double> N2Oflux = getMapFor(ys_out, "N2Oflux");
    Map<Integer, Double> pcChangeN2Oflux = slize(yearPractChange, endPractChange, N2Oflux);
    // N2O conversion: 1 g/m2 N2O-N = 1.895087 Mg/ac CO2e
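    // (consistent with 10 kg N/ha per g/m2 * 44/28 N-to-N2O * GWP 298, divided by 2.47105 ac/ha and 1000 kg/Mg)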
    Map<Integer, Double> conv_pcChangeN2Oflux = convert(1.895087, pcChangeN2Oflux);
    double sumN2Oflux = conv_pcChangeN2Oflux.values().stream().mapToDouble(Double::doubleValue).sum();

    Map<Integer, Double> CH4flux = getMapFor(ys_out, "CH4");
    Map<Integer, Double> pcChangeCH4flux = slize(yearPractChange, endPractChange, CH4flux);
    // CH4 conversion: 1 g/m2 CH4-C = 0.453248 Mg/ac CO2e
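    // (consistent with 10 kg C/ha per g/m2 * 16/12 C-to-CH4 * a GWP near 84, divided by 2.47105 ac/ha and 1000 kg/Mg)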
    Map<Integer, Double> conv_pcChangeCH4flux = convert(0.453248, pcChangeCH4flux);
    double sumCH4flux = conv_pcChangeCH4flux.values().stream().mapToDouble(Double::doubleValue).sum();

    // put it into the result payload
    results().put("N2O", sumN2Oflux);
    results().put("CH4", sumCH4flux);
  }


  @Override
  protected void postProcess() throws Exception {
//    List<String> l = getRequestedObjfunc();
    File ys_out = workspace().getFile("year_summary.out");
    Map<Integer, Double> N2Oflux = getMapFor(ys_out, "N2Oflux");
    Map<Integer, Double> CH4flux = getMapFor(ys_out, "CH4");

    if (cosu().isRequested()) {
      calibrate();
    } else {
      parseLis(N2Oflux, CH4flux);
//      parseYearSummary();
    }
  }


  //////////// Calibration
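  // For each requested calibration entry (cosu), compute the selected objective function from the
  // Daycent outputs and report the value back via cosu().setValue().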
  void calibrate() throws Exception {
    for (String name : cosu().getNames()) {
      String[] data = cosu().getData(name);
      if (data.length != 2) {
        throw new ServiceException("2 elements expected");
      }

      // this returns null if the OF is unknown.
      ObjFunc of = cosu().getObjFunc(name);

      // data[0] : calibration target (e.g. yield)
      // data[1] : calibration details (startyear/endyear)
      double v = calcObjFunc(of, LOG,
          workspace().getFile("Truterra.sch"),
          workspace().getFile("Truterra.lis"),
          workspace().getFile("vswc.out"),
          workspace().getFile("year_summary.out"),
          data[0], data[1], nodata);

      cosu().setValue(name, v);
    }
  }
//
//  //////////// Calibration
//  void calibrate(List<String> l) throws Exception {
//    for (String ofName : l) {
//
//      // 2 values
//      String[] data = parameter().getStringArray(ofName);
//      if (data.length != 2) {
//        throw new ServiceException("Invalid of content, 2 elements expected");
//      }
//
//      // data[0] : calibration target (e.g. yield)
//      // data[1] : calibration details (startyear/endyear)
//      double v = calcObjFunc(LOG,
//          workspace().getFile("Truterra.sch"),
//          workspace().getFile("Truterra.lis"),
//          data[0], data[1]);
//      results().put(ofName, v);
//    }
//  }


  static double[] getSimulatedOUT(SessionLogger LOG, File outfile, int column,
      LocalDate starty, LocalDate endy, Double nodata) throws IOException, ServiceException {
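    // Build a weekly series of depth-weighted (0-100 cm) soil water content values between starty and endy
    // from the ten layer columns of vswc.out.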

    // get the raw values from the .out file: one map per soil layer (time -> value)
    Map<String, String> c0_2 = ServiceUtils.getMapFor(outfile, column); // layer 1 0-2 cm
    Map<String, String> c2_5 = ServiceUtils.getMapFor(outfile, column + 1); // layer 2 2-5 cm
    Map<String, String> c5_10 = ServiceUtils.getMapFor(outfile, column + 2); // layer 3 5-10 cm
    Map<String, String> c10_20 = ServiceUtils.getMapFor(outfile, column + 3); // layer 4 10-20 cm
    Map<String, String> c20_30 = ServiceUtils.getMapFor(outfile, column + 4); // layer 5 20-30 cm
    Map<String, String> c30_45 = ServiceUtils.getMapFor(outfile, column + 5); // layer 6 30-45 cm
    Map<String, String> c45_60 = ServiceUtils.getMapFor(outfile, column + 6); // layer 7 45-60 cm
    Map<String, String> c60_75 = ServiceUtils.getMapFor(outfile, column + 7); // layer 8 60-75 cm
    Map<String, String> c75_90 = ServiceUtils.getMapFor(outfile, column + 8); // layer 9 75-90 cm
    Map<String, String> c90_105 = ServiceUtils.getMapFor(outfile, column + 9); // layer 10 90-105 cm

    if (LOG.isLoggable(Level.INFO)) {
      LOG.info("vswc 0-2 : " + c0_2);
      LOG.info("vswc 2-5 : " + c2_5);
    }

    // sample weekly values between starty and endy and collect the depth-weighted averages into columnVals
    List<String> columnVals = new ArrayList<>();
    for (LocalDate date = starty; date.isBefore(endy); date = date.plusDays(7)) {
      String tmp = date.format(DateTimeFormatter.ISO_ORDINAL_DATE);
      String val0_2 = c0_2.get(tmp);
      String val2_5 = c2_5.get(tmp);
      String val5_10 = c5_10.get(tmp);
      String val10_20 = c10_20.get(tmp);
      String val20_30 = c20_30.get(tmp);
      String val30_45 = c30_45.get(tmp);
      String val45_60 = c45_60.get(tmp);
      String val60_75 = c60_75.get(tmp);
      String val75_90 = c75_90.get(tmp);
      String val90_105 = c90_105.get(tmp);
      if (val0_2 == null || val2_5 == null || val5_10 == null || val10_20 == null
          || val20_30 == null || val30_45 == null || val45_60 == null || val60_75 == null
          || val75_90 == null || val90_105 == null) {
        throw new ServiceException("No value for date: " + date.format(DateTimeFormatter.ISO_DATE));
      }
      Double weightedAvg = (Double.parseDouble(val0_2) * 2
          + Double.parseDouble(val2_5) * 3
          + Double.parseDouble(val5_10) * 5
          + Double.parseDouble(val10_20) * 10
          + Double.parseDouble(val20_30) * 10
          + Double.parseDouble(val30_45) * 15
          + Double.parseDouble(val45_60) * 15
          + Double.parseDouble(val60_75) * 15
          + Double.parseDouble(val75_90) * 15
          + Double.parseDouble(val90_105) * 10) / 100; // the 90-105 cm layer is weighted as 10 cm since observations cover 0-100 cm total
      if (weightedAvg > 1.0) {
        String msg = "Soil moisture bigger than 1";
        throw new IllegalArgumentException(msg);
      }
      String val = String.valueOf(weightedAvg);
      columnVals.add(val);
    }

    if (LOG.isLoggable(Level.INFO)) {
      LOG.info("columnVals " + columnVals);
    }

//    // create the average of those values
//    double avg = columnVals.stream().mapToDouble(Double::parseDouble).average().getAsDouble();
//
//    if (LOG.isLoggable(Level.INFO)) {
//      LOG.info("avg: " + avg);
//    }
    return columnVals.stream().mapToDouble(Double::parseDouble).toArray();
  }


  static double[] getSimulatedYearSummaryOUT(SessionLogger LOG, File outfile, String column,
      int starty, int endy, Double nodata) throws IOException, ServiceException {
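    // Extract one value per year (starty..endy, shifted by MODEL_OFFSET) from the given year_summary.out column.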

    // get the raw values from year_summary.out as a map (time -> column value, e.g. N2Oflux)
    Map<String, String> val = ServiceUtils.getMapFor(outfile, column);

    if (LOG.isLoggable(Level.INFO)) {
      LOG.info("N2O emissions : " + val);
    }

    // extract one value per year in starty..endy into columnVals
    List<String> columnVals = new ArrayList<>();
    for (int i = starty + MODEL_OFFSET; i <= endy + MODEL_OFFSET; i++) {
      String v = val.get(String.valueOf(i) + ".00");
      if (v == null) {
        throw new ServiceException("No value for year: " + i);
      }
      columnVals.add(String.valueOf(v));
    }

    if (LOG.isLoggable(Level.INFO)) {
      LOG.info("columnVals " + columnVals);
    }

//    // create the average of those values
//    double avg = columnVals.stream().mapToDouble(Double::parseDouble).average().getAsDouble();
//
//    if (LOG.isLoggable(Level.INFO)) {
//      LOG.info("avg: " + avg);
//    }
    return columnVals.stream().mapToDouble(Double::parseDouble).toArray();
  }


  static double[] getSimulatedValues(SessionLogger LOG, File schFile, File lisFile, String column,
      int starty, int endy, String croptype, String termination, Double nodata) throws Exception {
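    // Extract the harvest-date value of the given .lis column for each crop year in starty..endy,
    // filling years without a matching harvest with the nodata marker.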

    int starty_list = starty + MODEL_OFFSET; // Truterra .sch
    int endy_list = endy + MODEL_OFFSET; // Truterra .sch
    List<Integer> cropYear = IntStream.rangeClosed(starty_list, endy_list).boxed().collect(Collectors.toList());

    // get the raw values from lis file: as map
    // e.g. time -> cgracc
    Map<String, String> c = ServiceUtils.getMapFor(lisFile, column);

    if (LOG.isLoggable(Level.INFO)) {
      LOG.info("caggr : " + c);
    }

    // lisDates of harvest from schedule file
    List<String> harvestDates = getDates(LOG, starty, endy, schFile, croptype, termination);

    // extract the values based on keys (harvest dates) into columnVals
    List<String> columnVals = new ArrayList<>();
    for (Integer y : cropYear) {
//    for (String harvestDate : harvestDates) {
      String hy = harvestDatesHasYear(harvestDates, y);
      if (!hy.isEmpty()) {
        String caggr = c.get(hy);
        if (caggr == null) {
          throw new ServiceException("No caggr value for harvestDate: " + y);
        }
        columnVals.add(c.get(hy));
      } else {
        columnVals.add(nodata.toString());
      }

    }

    if (LOG.isLoggable(Level.INFO)) {
      LOG.info("columnVals " + columnVals);
    }

//    // create the average of those values
//    double avg = columnVals.stream().mapToDouble(Double::parseDouble).average().getAsDouble();
//
//    if (LOG.isLoggable(Level.INFO)) {
//      LOG.info("avg: " + avg);
//    }
    return columnVals.stream().mapToDouble(Double::parseDouble).toArray();
  }


  private static String harvestDatesHasYear(List<String> harvestDates, Integer y) {
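    // Return the .lis harvest date belonging to crop year y; dates ending in ".00" are attributed to the previous year.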
    for (String date : harvestDates) {
      String val = date.split("\\.")[0];
      String mm = date.split("\\.")[1];
      if (mm.contains("00")) {
        if ((Integer.parseInt(val) - 1) == y) {
          return date;
        }
      } else if (Integer.parseInt(val) == y) {
        return date;
      }
    }
    return "";
  }


  public static boolean isLeapYear(int year) {
    Calendar cal = Calendar.getInstance();
    cal.set(Calendar.YEAR, year);
    return cal.getActualMaximum(Calendar.DAY_OF_YEAR) > 365;
  }


  static double getVal(SessionLogger LOG, File schFile, File lisFile, String column,
      int year) throws Exception {
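    // Read the end-of-year (day 365, or 366 in leap years) value of the given column, e.g. somsc, from the .lis output.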

    // get the raw values from lis file: as map
    // e.g. time -> cgracc
    Map<String, String> c = ServiceUtils.getMapFor(lisFile, column);

    if (LOG.isLoggable(Level.INFO)) {
      LOG.info("caggr : " + c);
    }
    String tdate;
    if (isLeapYear(year)) {
      tdate = String.valueOf(year) + ".366";
    } else {
      tdate = String.valueOf(year) + ".365";
    }
    String date = DateConversion.date2daycentLis(tdate);

    // extract the values based on keys (harvest dates) into columnVals
    String somsc = c.get(date);
    if (somsc == null) {
      throw new ServiceException("No somsc value for year: " + year);
    }

    if (LOG.isLoggable(Level.INFO)) {
      LOG.info("somsc " + somsc);
    }

    return Double.parseDouble(somsc);
  }


  static List<String> getDates(SessionLogger LOG, int starty, int endy,
      File schFile, String croptype, String termination) throws IOException {
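    // Collect harvest dates for the crop type from the schedule file, keep those within starty..endy,
    // shift LAST-termination dates one month earlier, and convert them to .lis timestamps.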

    List<String> schDates = ServiceUtils.getCropHarvest(schFile, croptype, termination);

    if (LOG.isLoggable(Level.INFO)) {
      LOG.info("schDates: " + schDates);
    }

    List<String> filtered_schDates = new ArrayList<>();

    // filter by date range (starty - endy)
    for (String schDate : schDates) {
      String[] sd = schDate.split("\\.");
      int y = Integer.parseInt(sd[0]);
      if (y >= starty && y <= endy) {
        if (termination.equals("LAST")) {
          int day = Integer.parseInt(sd[1]) - 30; // one month earlier
          schDate = y + "." + day;
        }
        filtered_schDates.add(schDate);
      }
    }

    if (LOG.isLoggable(Level.INFO)) {
      LOG.info("Filtered dates: " + filtered_schDates);
    }

    // convert all schDates to lisDates
    List<String> lisDates = new ArrayList<>();
    for (String schDate : filtered_schDates) {
      lisDates.add(DateConversion.date2daycentLis(schDate));
    }
    if (LOG.isLoggable(Level.INFO)) {
      LOG.info("lisDates: " + lisDates);
    }
    return lisDates;
  }

//  public static void main(String[] args) {
//    Integer a = new Double("2077.00").intValue();
//    System.out.println(a);
//  }

  public static void main(String[] args) throws Exception {

    ObjFunc of = ObjFunc.of(ObjFunc.NS);
    String path = "/tmp/csip/work/21/14/0a6c933f-b232-11ed-86f8-bd73f95f4079/";
//    calcObjFunc(new SessionLogger(),
//        new File("/tmp/csip/work/22/15/db0e4255-1be8-11ec-b04a-738330b2f29e/Truterra.sch"),
//        new File("/tmp/csip/work/22/15/db0e4255-1be8-11ec-b04a-738330b2f29e/Truterra.lis"),
//        "yield", "60, 2011, 2020, W4");
//
//    calcObjFunc(new SessionLogger(),
//        new File("/home/sidereus/documents/daycentsim/wheat/check/Truterra.sch"),
//        new File("/home/sidereus/documents/daycentsim/wheat/check/Truterra.lis"),
//        "yield", "180, 2011, 2020, C13");
//    calcObjFunc(new SessionLogger(),
//        new File("/tmp/csip/work/22/15/db0e4255-1be8-11ec-b04a-738330b2f29e/Truterra.sch"),
//        new File("/tmp/csip/work/22/15/db0e4255-1be8-11ec-b04a-738330b2f29e/Truterra.lis"),
//        "yield", "111.67, 2011, 2020, SORG3");
//    calcObjFunc(of, new SessionLogger(),
//        new File("/home/sidereus/Downloads/arr/tmp/Truterra.sch"),
//        new File("/home/sidereus/Downloads/arr/tmp/Truterra.lis"),
//        new File("/home/sidereus/Downloads/arr/tmp/vswc.out"),
//        new File("/home/sidereus/Downloads/arr/tmp/year_summary.out"),
//        "n2o", "[0.0301, 0.0301, 0.0282], 2013, 2015, n2o", -9999.0);
//
//    calcObjFunc(new SessionLogger(),
//        new File("/tmp/csip/work/09/12/16f716fa-e0e0-11eb-ad10-23d87c956bee/Truterra.sch"),
//        new File("/tmp/csip/work/09/12/16f716fa-e0e0-11eb-ad10-23d87c956bee/Truterra.lis"),
//        "biomass", "120, 2011, 2020, CC2");
    calcObjFunc(of, new SessionLogger(),
        new File(path + "Truterra.sch"),
        new File(path + "Truterra.lis"),
        new File(path + "vswc.out"),
        new File(path + "year_summary.out"),
        "n2o", "[2.073252116447484, 2.073252116447484, 2.073252116447484], 2018, 2020, n2o", -9999.0);
//    calcObjFunc(of, new SessionLogger(),
//        new File("/home/sidereus/Downloads/archive/Truterra.sch"),
//        new File("/home/sidereus/Downloads/archive/Truterra.lis"),
//        new File("/home/sidereus/Downloads/archive/vswc.out"),
//        "vswc", "[0.3354247957468033, 0.3432268388569355, 0.34121252596378326, 0.3451493978500366, 0.33685942739248276], 2015-04-01, 2015-04-30, vswc", -9999.0);
  }


  static double[] parseObsDouble(String obsValStr) {
    String[] vals = obsValStr.split("\\s*,\\s*");
    return Arrays.stream(vals).mapToDouble(Double::parseDouble).toArray();
  }


  static double calcObjFunc(ObjFunc objfunc, SessionLogger LOG, File schFile, File lisFile, File vswc, File n2o,
      String target, String targetInfo, Double nodata) throws Exception {
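    // Dispatches on the calibration target. For yield/biomass/root the targetInfo format is
    // "[obs1, obs2, ...], startYear, endYear, croptype"; for soc it is "obsValue, year".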

    // note: the soc branch below bypasses objfunc and uses an absolute difference instead
    if (target.equals("yield")) {
      String[] ti = targetInfo.split("\\s*]\\s*");
      if (ti.length != 2) {
        throw new ServiceException("Invalid targetinfo for yield.");
      }

      String obsValStr = ti[0].replaceAll("\\[", "");    // obs data value for period
      String[] vals = ti[1].split("\\s*,\\s*");

      if (vals.length != 4) {
        throw new ServiceException("Invalid targetinfo for yeild crop and years.");
      }

      String start = vals[1];  // start 
      String end = vals[2];    // end
      String ct = vals[3];     // croptype  (e.g. C5)

      if (LOG.isLoggable(Level.INFO)) {
        LOG.info(" objfunc info: " + Arrays.toString(ti));
      }

      double[] obsVal = parseObsDouble(obsValStr);

      // simulated yield 
      double[] simVal = getSimulatedValues(LOG, schFile, lisFile,
          "cgracc", Integer.parseInt(start), Integer.parseInt(end), ct, "HARV", nodata);

      if (obsVal.length != simVal.length) {
        String msg = "Variable " + ct + " length of observed " + obsVal.length + " differs from length of simulated " + simVal.length;
        throw new IllegalArgumentException(msg);
      }

      Double conv = yieldConv.get(ct);
      if (conv == null) {
        throw new ServiceException("No yield conversion factor for: " + ct);
      }

      simVal = DoubleStream.of(simVal).map(v -> (v != nodata) ? v * conv : v).toArray();

      if (LOG.isLoggable(Level.INFO)) {
        LOG.info("sim yield: " + Arrays.toString(simVal));
      }

      List<Double> vSim = Arrays.stream(simVal).boxed().collect(Collectors.toList());
      List<Double> vObs = Arrays.stream(obsVal).boxed().collect(Collectors.toList());

      List<Integer> index = new ArrayList<>();
      for (int i = 0; i < vSim.size(); i++) {
        if (vSim.get(i).equals(nodata)) {
          index.add(i);
        }
      }

      if (index.size() >= vSim.size()) {
        String msg = vSim.size() + " simulated values vs " + index.size() + " values to remove variable: " + ct;
        throw new IllegalArgumentException(msg);
      }

      if (index.size() >= vObs.size()) {
        String msg = vObs.size() + " observed values vs " + index.size() + " values to remove variable: " + ct;
        throw new IllegalArgumentException(msg);
      }

      Collections.reverse(index);
      for (int v : index) {
        vSim.remove(v);
        vObs.remove(v);
      }

      if (vSim.isEmpty()) {
        String msg = "Simulated " + ct + " is empty";
        throw new NullPointerException(msg);
      }

      if (vObs.isEmpty()) {
        String msg = "Observed " + ct + " is empty";
        throw new NullPointerException(msg);
      }

      if (vSim.size() != vObs.size()) {
        String msg = "Simulated and observed have different size for variable " + ct;
        throw new IllegalArgumentException(msg);
      }

      simVal = vSim.stream().mapToDouble(Double::doubleValue).toArray();
      obsVal = vObs.stream().mapToDouble(Double::doubleValue).toArray();

      return objfunc.eval(obsVal, simVal, nodata); // double check the NA
    } else if (target.equals("biomass")) {
      String[] ti = targetInfo.split("\\s*]\\s*");
      if (ti.length != 2) {
        throw new ServiceException("Invalid targetinfo for biomass.");
      }

      String obsValStr = ti[0].replaceAll("\\[", "");    // obs data value for period
      String[] vals = ti[1].split("\\s*,\\s*");

      if (vals.length != 4) {
        throw new ServiceException("Invalid targetinfo for biomass crop and years.");
      }

      String start = vals[1];  // start 
      String end = vals[2];    // end
      String ct = vals[3];     // croptype  (e.g. C5)

      if (LOG.isLoggable(Level.INFO)) {
        LOG.info(" objfunc info: " + Arrays.toString(ti));
      }

      double[] obsVal = parseObsDouble(obsValStr);

      // simulated biomass (crmvst)
      double[] simVal = getSimulatedValues(LOG, schFile, lisFile,
          "crmvst", Integer.parseInt(start), Integer.parseInt(end), ct, "HARV", nodata);

      if (obsVal.length != simVal.length) {
        String msg = "Variable " + ct + " length of observed " + obsVal.length + " differs from length of simulated " + simVal.length;
        throw new IllegalArgumentException(msg);
      }

      Double conv = yieldConv.get(ct);
      if (conv == null) {
        throw new ServiceException("No biomass conversion factor for: " + ct);
      }
      simVal = DoubleStream.of(simVal).map(v -> (v != nodata) ? v * conv : v).toArray();

      if (LOG.isLoggable(Level.INFO)) {
        LOG.info("sim biomass: " + Arrays.toString(simVal));
      }

      List<Double> vSim = Arrays.stream(simVal).boxed().collect(Collectors.toList());
      List<Double> vObs = Arrays.stream(obsVal).boxed().collect(Collectors.toList());

      List<Integer> index = new ArrayList<>();
      for (int i = 0; i < vSim.size(); i++) {
        if (vSim.get(i).equals(nodata)) {
          index.add(i);
        }
      }

      if (index.size() >= vSim.size()) {
        String msg = vSim.size() + " simulated values vs " + index.size() + " values to remove variable: " + ct;
        throw new IllegalArgumentException(msg);
      }

      if (index.size() >= vObs.size()) {
        String msg = vObs.size() + " observed values vs " + index.size() + " values to remove variable: " + ct;
        throw new IllegalArgumentException(msg);
      }

      Collections.reverse(index);
      for (int v : index) {
        vSim.remove(v);
        vObs.remove(v);
      }

      if (vSim.isEmpty()) {
        String msg = "Simulated " + ct + " is empty";
        throw new NullPointerException(msg);
      }

      if (vObs.isEmpty()) {
        String msg = "Observed " + ct + " is empty";
        throw new NullPointerException(msg);
      }

      if (vSim.size() != vObs.size()) {
        String msg = "Simulated and observed have different size for variable " + ct;
        throw new IllegalArgumentException(msg);
      }

      simVal = vSim.stream().mapToDouble(Double::doubleValue).toArray();
      obsVal = vObs.stream().mapToDouble(Double::doubleValue).toArray();

      return objfunc.eval(obsVal, simVal, nodata); // double check the NA
    } else if (target.equals("root")) {
      String[] ti = targetInfo.split("\\s*]\\s*");
      if (ti.length != 2) {
        throw new ServiceException("Invalid targetinfo for root.");
      }

      String obsValStr = ti[0].replaceAll("\\[", "");    // obs data value for period
      String[] vals = ti[1].split("\\s*,\\s*");

      if (vals.length != 4) {
        throw new ServiceException("Invalid targetinfo for root crop and years.");
      }

      String start = vals[1];  // start 
      String end = vals[2];    // end
      String ct = vals[3];     // croptype  (e.g. C5)

      if (LOG.isLoggable(Level.INFO)) {
        LOG.info(" objfunc info: " + Arrays.toString(ti));
      }

      double[] obsVal = parseObsDouble(obsValStr);

      // simulated root carbon (bgcmacc)
      double[] simVal = getSimulatedValues(LOG, schFile, lisFile,
          "bgcmacc", Integer.parseInt(start), Integer.parseInt(end), ct, "HARV", nodata);

      if (obsVal.length != simVal.length) {
        String msg = "Variable " + ct + " length of observed " + obsVal.length + " differs from length of simulated " + simVal.length;
        throw new IllegalArgumentException(msg);
      }

      Double conv = yieldConv.get(ct);
      if (conv == null) {
        throw new ServiceException("No root conversion factor for: " + ct);
      }
      simVal = DoubleStream.of(simVal).map(v -> (v != nodata) ? v * conv : v).toArray();

      if (LOG.isLoggable(Level.INFO)) {
        LOG.info("sim root: " + Arrays.toString(simVal));
      }

      List<Double> vSim = Arrays.stream(simVal).boxed().collect(Collectors.toList());
      List<Double> vObs = Arrays.stream(obsVal).boxed().collect(Collectors.toList());

      List<Integer> index = new ArrayList<>();
      for (int i = 0; i < vSim.size(); i++) {
        if (vSim.get(i).equals(nodata)) {
          index.add(i);
        }
      }

      if (index.size() >= vSim.size()) {
        String msg = vSim.size() + " simulated values vs " + index.size() + " values to remove variable: " + ct;
        throw new IllegalArgumentException(msg);
      }

      if (index.size() >= vObs.size()) {
        String msg = vObs.size() + " observed values vs " + index.size() + " values to remove variable: " + ct;
        throw new IllegalArgumentException(msg);
      }

      Collections.reverse(index);
      for (int v : index) {
        vSim.remove(v);
        vObs.remove(v);
      }

      if (vSim.isEmpty()) {
        String msg = "Simulated " + ct + " is empty";
        throw new NullPointerException(msg);
      }

      if (vObs.isEmpty()) {
        String msg = "Observed " + ct + " is empty";
        throw new NullPointerException(msg);
      }

      if (vSim.size() != vObs.size()) {
        String msg = "Simulated and observed have different size for variable " + ct;
        throw new IllegalArgumentException(msg);
      }

      simVal = vSim.stream().mapToDouble(Double::doubleValue).toArray();
      obsVal = vObs.stream().mapToDouble(Double::doubleValue).toArray();

      return objfunc.eval(obsVal, simVal, nodata); // double check the NA
    } else if (target.equals("soc")) {

      String[] ti = targetInfo.split("\\s*,\\s*");
      if (ti.length != 2) {
        throw new ServiceException("Invalid targetinfo for yield.");
      }

      String obsValStr = ti[0];    // obs data value for period
      String year = ti[1];  // year

      if (LOG.isLoggable(Level.INFO)) {
        LOG.info(" objfunc info: " + Arrays.toString(ti));
      }

      double simVal = getVal(LOG, schFile, lisFile,
          "somsc", Integer.parseInt(year));

      double obsVal = Double.parseDouble(obsValStr);

      // soc is scored as the absolute difference of a single somsc value,
      // not through the generic objective function
      double v = Math.abs(obsVal - simVal);

      if (LOG.isLoggable(Level.INFO)) {
        LOG.info(" soc abs(obs - sim): " + v);
      }

      return v;
    } else if (target.equals("vswc")) {
      String[] ti = targetInfo.split("\\s*]\\s*");
      if (ti.length != 2) {
        throw new ServiceException("Invalid targetinfo for volumetric soil water content.");
      }

      String obsValStr = ti[0].replaceAll("\\[", "");    // obs data value for period
      String[] vals = ti[1].split("\\s*,\\s*");

      if (vals.length != 4) {
        throw new ServiceException("Invalid targetinfo for volumetric soil water content and years.");
      }

      LocalDate start = LocalDate.parse(vals[1]);  // start format YYYY-MM-DD
      LocalDate end = LocalDate.parse(vals[2]);    // end format YYYY-MM-DD
      String ct = vals[3];     // croptype  (e.g. C5)
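      // Expected targetInfo layout (illustrative example, assumed):
      //   "[0.21 0.23 0.19], 2001-04-01, 2001-09-30, C5"
      // with ISO start/end dates matching the daily vswc output records.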

      if (LOG.isLoggable(Level.INFO)) {
        LOG.info(" objfunc info: " + Arrays.toString(ti));
      }

      double[] obsVal = parseObsDouble(obsValStr);

      int column = 2;
      // simulated volumetric soil water content
      double[] simVal = getSimulatedOUT(LOG, vswc, column, start, end, nodata);

      if (LOG.isLoggable(Level.INFO)) {
        LOG.info("sim yield: " + Arrays.toString(simVal));
      }

      List<Double> vSim = Arrays.stream(simVal).boxed().collect(Collectors.toList());
      List<Double> vObs = Arrays.stream(obsVal).boxed().collect(Collectors.toList());

      List<Integer> index = new ArrayList<>();
      for (int i = 0; i < vSim.size(); i++) {
        if (vSim.get(i).equals(nodata)) {
          index.add(i);
        }
      }

      Collections.reverse(index);
      for (int v : index) {
        vSim.remove(v);
        vObs.remove(v);
      }

      if (vSim.isEmpty()) {
        String msg = "Simulated " + ct + " is empty";
        throw new NullPointerException(msg);
      }

      if (vObs.isEmpty()) {
        String msg = "Observed " + ct + " is empty";
        throw new NullPointerException(msg);
      }

      if (vSim.size() != vObs.size()) {
        String msg = "Simulated and observed have different size for variable " + ct;
        throw new IllegalArgumentException(msg);
      }

      simVal = vSim.stream().mapToDouble(Double::doubleValue).toArray();
      obsVal = vObs.stream().mapToDouble(Double::doubleValue).toArray();

      return objfunc.eval(obsVal, simVal, -9999.0); // double check the NA
    } else if (target.equals("n2o")) {
      String[] ti = targetInfo.split("\\s*]\\s*");
      if (ti.length != 2) {
        throw new ServiceException("Invalid targetinfo for N2O emissions.");
      }

      String obsValStr = ti[0].replaceAll("\\[", "");    // obs data value for period
      String[] vals = ti[1].split("\\s*,\\s*");

      if (vals.length != 4) {
        throw new ServiceException("Invalid targetinfo for N2O emissions and years.");
      }

      int start = Integer.parseInt(vals[1]);
      int end = Integer.parseInt(vals[2]);
      String ct = vals[3];     // croptype  (e.g. C5)

      if (LOG.isLoggable(Level.INFO)) {
        LOG.info(" objfunc info: " + Arrays.toString(ti));
      }

      double[] obsVal = parseObsDouble(obsValStr);

      String column = "N2Oflux";
      // simulated annual N2O flux (N2Oflux column)
      double[] simVal = getSimulatedYearSummaryOUT(LOG, n2o, column, start, end, nodata);

      if (LOG.isLoggable(Level.INFO)) {
        LOG.info("sim yield: " + Arrays.toString(simVal));
      }

      List<Double> vSim = Arrays.stream(simVal).boxed().collect(Collectors.toList());
      List<Double> vObs = Arrays.stream(obsVal).boxed().collect(Collectors.toList());

      List<Integer> index = new ArrayList<>();
      for (int i = 0; i < vSim.size(); i++) {
        if (vSim.get(i).equals(nodata)) {
          index.add(i);
        }
      }

      Collections.reverse(index);
      for (int v : index) {
        vSim.remove(v);
        vObs.remove(v);
      }

      if (vSim.isEmpty()) {
        String msg = "Simulated " + ct + " is empty";
        throw new NullPointerException(msg);
      }

      if (vObs.isEmpty()) {
        String msg = "Observed " + ct + " is empty";
        throw new NullPointerException(msg);
      }

      if (vSim.size() != vObs.size()) {
        String msg = "Simulated and observed have different size for variable " + ct;
        throw new IllegalArgumentException(msg);
      }

      simVal = vSim.stream().mapToDouble(Double::doubleValue).toArray();
      obsVal = vObs.stream().mapToDouble(Double::doubleValue).toArray();

      return objfunc.eval(obsVal, simVal, -9999.0); // double check the NA
    } else {
      throw new Exception("calibration target not supported: " + target);
    }
  }

//  static final Map<String, ObjectiveFunction> OF = new HashMap<>();
//  static {
//    OF.put("kge", new KGE());
//    OF.put("ns", new NS());
//    OF.put("nslog", new NS2LOG());
//    OF.put("nslog1p", new NSLOG1P());
//    OF.put("nslog2", new NSLOG2());
//    OF.put("rmse", new RMSE());
//    OF.put("trmse", new TRMSE());
//    OF.put("pbias", new PBIAS());
//  }
//
//  private List<String> getRequestedObjfunc() {
//    List<String> l = new ArrayList<>();
//    for (String p : parameter().getNames()) {
//      if (p.toLowerCase().startsWith("pbias.")) {
//        l.add(p);
//      }
//    }
//    return l;
//  }
  /////////////////////

  protected Map<String, List<String>> getHarvValues(SessionLogger LOG, String harvFile, String column,
      int starty, int endy) throws Exception {

    // get the raw values from the harvest file as a map, e.g. time -> crmvst;
    // note: starty/endy are currently not used to filter here, the full map is returned
    return ServiceUtils.getMapList(workspace().getFile(harvFile), column);
  }


  protected void parseLis(Map<Integer, Double> N2Oflux, Map<Integer, Double> CH4flux) throws FileNotFoundException, JSONException, Exception {
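    // Walks the Daycent .lis output together with harvest.csv and, for every year in
    // the practice-change window, assembles the per-year result record (yield, biomass,
    // residue C, soil C stock and gain, N2O and CH4 emissions). Column positions are
    // taken from the .lis header line; the line for simulation year Y+1 carries the
    // accumulated values for crop year Y (see the comment on the year match below),
    // which is why results are emitted when time == yearNewMgt + MODEL_OFFSET + 1.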
    List<Integer> practiceChange = new LinkedList<>();
    // years covered by the practice-change window, inclusive
    for (int i = staticYearPractChange; i <= projectionYear; i++) {
      practiceChange.add(i);
    }
    String lisFile = schedule + ".lis";
    String harvFile = "harvest.csv";

    Map<String, List<String>> harvVals = getHarvValues(LOG, harvFile, "crmvst", staticYearPractChange, projectionYear);
    Map<String, List<String>> cgraccVals = getHarvValues(LOG, harvFile, "cgrain", staticYearPractChange, projectionYear);

    int lineNumber = 1;
    int colTime = 0;
    boolean skiplastline = false;
    try (Scanner scanner = new Scanner(workspace().getFile(lisFile))) {
      String[] prevData = null;
      Integer yearNewMgt = practiceChange.remove(0); // first element always
      while (scanner.hasNextLine()) {
        String line = scanner.nextLine();
        double prevCgain = 0;
        if (lineNumber == 1) {
          // read header
          String[] data = line.split("\\s+");
          for (int col = 0; col < data.length; col++) {
            if (data[col].equals("cgracc")) {
              colGracc = col;
            } else if (data[col].equals("agcacc")) {
              colAbove = col;
            } else if (data[col].equals("bgcjacc")) {
              colBelowFineRoots = col;
            } else if (data[col].equals("bgcmacc")) {
              colBelowMatureRoots = col;
            } else if (data[col].equals("somsc")) {
              colSOM = col;
            } else if (data[col].equals("time")) {
              colTime = col;
            }
          }
        } else if (lineNumber == 2) {
          // skip line because empty
        } else {
          // check the line
          String[] data = line.split("\\s+");
          double year = Double.parseDouble(data[colTime]);
          if (prevData == null && year == (yearNewMgt + MODEL_OFFSET)) {
            prevData = data;
          }

          if (!skiplastline) {
            // +2000 because 4000 years of simulation
            // +1 because of extra year - December 2012 = 2013.00 from Daycent .lis file
            if (year == (yearNewMgt + MODEL_OFFSET + 1)) {
              String sYear = String.valueOf(yearNewMgt + MODEL_OFFSET) + ".00";
              List<Double> s = null;
              List<Double> cgr = null;
              if (harvVals.containsKey(sYear)) {
                s = harvVals.get(sYear).stream().map(Double::valueOf).collect(Collectors.toList());
              } else {
                s = new ArrayList<>();
                s.add(0.0);
              }
              if (cgraccVals.containsKey(sYear)) {
                cgr = cgraccVals.get(sYear).stream().map(Double::valueOf).collect(Collectors.toList());
              } else {
                cgr = new ArrayList<>();
                cgr.add(0.0);
              }
              double som = Double.parseDouble(prevData[colSOM]);
              prevCgain = som * 0.00404686 * area;
              Integer actYear = yearNewMgt;
              results().put(actYear.toString(), getValues(data, prevCgain, N2Oflux.get(actYear), CH4flux.get(actYear), s, cgr));
              prevData = data;
              // the last year is duplicated and needs to be skipped
              if (!practiceChange.isEmpty()) {
                yearNewMgt = practiceChange.remove(0);
              } else {
                skiplastline = true;
              }
            }
          }
        }
        lineNumber++;
      }
    }
  }


  private JSONArray getValues(String[] data, double prevCgain, double comp_n2o_emission, double comp_ch4_emission, List<Double> biomass, List<Double> cgracc) throws JSONException {
//    double comp_crop_yld_c = Double.parseDouble(data[colGracc]) * 0.00404686 * area;
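    // 0.00404686 converts g C/m2 to Mg C/ac (1 g/m2 = 0.01 Mg/ha; 1 ha = 2.47105 ac);
    // multiplying by area (in acres, per the emission comments below) gives total
    // metric tons for the soil component.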
    List<Double> comp_crop_yld_c = new ArrayList<>();
    for (Double v : cgracc) {
      double tmp = v * 0.00404686 * area;
      comp_crop_yld_c.add(tmp);
    }
    List<Double> comp_crop_biom_c = new ArrayList<>();
    for (Double v : biomass) {
      double tmp = v * 0.00404686 * area;
      comp_crop_biom_c.add(tmp);
    }
    double comp_crop_res_c = (Double.parseDouble(data[colAbove])
        + Double.parseDouble(data[colBelowFineRoots])
        + Double.parseDouble(data[colBelowMatureRoots])) * 0.00404686 * area;
    double comp_soil_c_stock = Double.parseDouble(data[colSOM]) * 0.00404686 * area;
    double comp_soil_c_stock_unit_area = Double.parseDouble(data[colSOM]);
    double comp_soil_c_gain = comp_soil_c_stock - prevCgain;
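    // Conversion factors below (derivation assumed, not confirmed in this source):
    //   1.895087 ~ 0.00404686 Mg/ac per g/m2 * 44/28 (N2O-N -> N2O) * 298 (N2O GWP)
    //   0.453248 ~ 0.00404686 * 16/12 (CH4-C -> CH4) * 84 (CH4 GWP)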
    comp_n2o_emission = comp_n2o_emission * 1.895087; // g/m2 of N2O-N to Mg/ac CO2e
    comp_n2o_emission = comp_n2o_emission * area; // Mg/ac CO2e to Mg (metric tons) CO2e

    comp_ch4_emission = comp_ch4_emission * 0.453248; // g/m2 of CH4-C to Mg/ac CO2e
    comp_ch4_emission = comp_ch4_emission * area; // Mg/ac CO2e to Mg (metric tons) CO2e

    JSONObject jy = new JSONObject()
        .put("name", "comp_crop_yld_c")
        .put("description", "Crop yield carbon of intersected mapunit soil component leaving field")
        .put("units", "Metric tons")
        .put("value", comp_crop_yld_c);
    JSONObject jb = new JSONObject()
        .put("name", "comp_crop_biom_c")
        .put("description", "Crop biomass carbon of intersected mapunit soil component leaving field")
        .put("units", "Metric tons")
        .put("value", comp_crop_biom_c);
    JSONObject jc = new JSONObject()
        .put("name", "comp_crop_res_c")
        .put("description", "Crop residue carbon above and below ground of intersected mapunit soil component remaining in field")
        .put("units", "Metric tons")
        .put("value", comp_crop_res_c);
    JSONObject js = new JSONObject()
        .put("name", "comp_soil_c_stock")
        .put("description", "Soil carbon stock of intersected mapunit soil component at the end of crop")
        .put("units", "Metric tons")
        .put("value", comp_soil_c_stock);
    JSONObject jg = new JSONObject()
        .put("name", "comp_soil_c_gain")
        .put("description", "Soil carbon stock gain or loss for intersected mapunit soil component through crop year")
        .put("units", "Metric tons")
        .put("value", comp_soil_c_gain);
    JSONObject jsua = new JSONObject()
        .put("name", "comp_soil_c_stock_unit_area")
        .put("description", "Soil carbon stock per unit area of intersected mapunit soil component at the end of crop")
        .put("units", "g per meter squared")
        .put("value", comp_soil_c_stock_unit_area);
    JSONObject n2o_emission = new JSONObject()
        .put("name", "comp_n2o_emission")
        .put("description", "Nitrous oxide emissions from farm field in carbon dioxide (CO2) equivalents")
        .put("units", "Metric tons")
        .put("value", comp_n2o_emission);
    JSONObject ch4_emission = new JSONObject()
        .put("name", "comp_ch4_emission")
        .put("description", "Methane emissions from farm field in carbon dioxide (CO2) equivalents")
        .put("units", "Metric tons")
        .put("value", comp_ch4_emission);
//      JSONObject net_co2_removed = new JSONObject()
//          .put("name", "comp_net_co2_removed_unit_area")
//          .put("description", "Soil carbon CO2 equivalents minus CO2 equivalents for nitrous oxide and methans")
//          .put("units", "Metric tons")
//          .put("value", "");
    JSONArray ja = new JSONArray()
        .put(jy)
        .put(jb)
        .put(jc)
        .put(js)
        .put(jg)
        .put(jsua)
        .put(n2o_emission)
        .put(ch4_emission);
    return ja;
  }


  public void checkHarvest() throws FileNotFoundException {
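    // Sanity check: fail if the accumulated grain harvest (cgracc) reported in the
    // .lis output is zero for any simulation year strictly between 4010 and 4021.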
    String filename = schedule + ".lis";
    int lineNumber = 1;
    int colGracc = 0;
    int colTime = 0;
    try (Scanner scanner = new Scanner(workspace().getFile(filename))) {
      while (scanner.hasNextLine()) {
        String line = scanner.nextLine();
        if (lineNumber == 1) {
          // read header
          String[] data = line.split("\\s+");
          for (int col = 0; col < data.length; col++) {
            if (data[col].equals("cgracc")) {
              colGracc = col;
            } else if (data[col].equals("time")) {
              colTime = col;
            }
          }
        } else if (lineNumber == 2) {
          // skip line because empty
        } else {
          // check the line
          String[] data = line.split("\\s+");
          double year = Double.parseDouble(data[colTime]);
          if (year > 4010 && year < 4021) {
            double grainHarvest = Double.parseDouble(data[colGracc]);
            if (grainHarvest == 0.0) {
              String msg = "cgracc for year " + year + " is " + grainHarvest;
              throw new UnsupportedOperationException(msg);
            }
          }
        }
        lineNumber++;
      }
    }
  }


  static final Map<String, Map<String, String>> defMap = new HashMap() {
    {
      put("prdx", prdxDef);
      put("ppdf1", ppdf1Def);
      put("himax", himaxDef);
      put("hiwsf", hiwsfDef);
      put("ppdf2", ppdf2Def);
      put("ppdf3", ppdf3Def);
      put("ppdf4", ppdf4Def);
      put("wscoeff1", wscoeff1Def);
      put("wscoeff2", wscoeff2Def);
      put("pramn", pramnDef);
      put("pramx", pramxDef);
      put("sfavail", sfavailDef);
    }
  };

  static final Map<String, Map<String, String>> defTreeMap = new HashMap() {
    {
      put("prdx", prdxTree);
      put("ppdf1", ppdf1Tree);
      put("ppdf2", ppdf2Tree);
    }
  };

  protected void countLayers() {
    try {
      profileDepths = new LinkedList<>();
      List<Double> profiles;
      // Files.lines keeps the file open; close the stream via try-with-resources
      try (java.util.stream.Stream<String> lines = Files.lines(workspace().getFile("soils.in").toPath())) {
        profiles = lines
            .map(str -> str.split("\\s+"))
            .map(str -> Double.parseDouble(str[1]))
            .collect(Collectors.toList());
      }
      double[] defaultProfile = new double[]{10, 20, 15, 15, 30, 30, 30, 30, 30, 30};
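      // Worked example: if the deepest layer bottom read from soils.in is 40, the loop
      // yields profileDepths = [10, 20, 10] (the third default layer of 15 is truncated
      // so the cumulative depth matches the profile bottom).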
      double val = 0;
      double bottom = profiles.get(profiles.size() - 1);
      for (int i = 0; i < defaultProfile.length; i++) {
        double prof = defaultProfile[i];
        val += prof;
        if (val == bottom) {
          profileDepths.add(prof);
          break;
        } else if (val > bottom) {
          double tmpval = val - prof;
          profileDepths.add(bottom - tmpval);
          break;
        } else {
          profileDepths.add(prof);
        }
      }
      nlayer = profileDepths.size(); // ("- 2" adjustment disabled; the first three layers are meshed into one)
    } catch (IOException ex) {
      throw new RuntimeException(ex.getMessage(), ex);
    }
  }


  protected Map<String, Map<String, Double>> dictPopulation(Map<String, Map<String, Double>> hashmap, String s) throws ServiceException {
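    // Splits a dotted parameter name of the form "<group>.<name>.<parameter>"
    // (e.g. "crop.C5.prdx" -- illustrative) and stores its value under
    // hashmap.get(name).put(parameter, value).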
    String[] parts = s.split("\\.");
    String cropName = parts[1];
    String parameterName = parts[2];
    Double parameterValue = parameter().getDouble(s);
    if (hashmap.containsKey(cropName)) {
      hashmap.get(cropName).put(parameterName, parameterValue);
    } else {
      Map<String, Double> val1 = new HashMap<>();
      val1.put(parameterName, parameterValue);
      hashmap.put(cropName, val1);
    }
    return hashmap;
  }


  protected void fillCropValues() {
    for (Map.Entry<String, Map<String, Double>> entry : crops.entrySet()) {

      List<String> cropparamList = Arrays.asList("prdx", "ppdf1", "ppdf2", "ppdf3", "ppdf4", "wscoeff1", "wscoeff2", "himax", "hiwsf", "pramn", "pramx", "sfavail");
      for (String param : cropparamList) {
        if (!entry.getValue().containsKey(param)) {

          crops.get(entry.getKey()).put(param, Double.parseDouble(defMap.get(param).get(entry.getKey())));
        }
      }
    }
  }


  protected void fillTreeValues() {
    for (Map.Entry<String, Map<String, Double>> entry : trees.entrySet()) {

      List<String> treeparamList = Arrays.asList("prdx", "ppdf1", "ppdf2");
      for (String param : treeparamList) {
        if (!entry.getValue().containsKey(param)) {

          trees.get(entry.getKey()).put(param, Double.parseDouble(defTreeMap.get(param).get(entry.getKey())));
        }
      }
    }
  }


  protected void fillFileValues(List<String> paramList, Map<String, Map<String, Double>> hashmap, Map<String, String> defName) {

    for (Map.Entry<String, Map<String, Double>> entry : hashmap.entrySet()) {
      for (String param : paramList) {
        if (!entry.getValue().containsKey(param)) {

          hashmap.get(entry.getKey()).put(param, Double.parseDouble(defName.get(param)));
        }
      }
    }

  }


  protected void fillFileFixes(List<String> paramList, Map<String, Map<String, Double>> hashmap, Map<String, String> defName) {

    for (Map.Entry<String, Map<String, Double>> entry : hashmap.entrySet()) {
      for (String param : paramList) {
        if (!entry.getValue().containsKey(param)) {
          String def = defName.get(param);
          if (def == null) {
            String msg = "No default value for parameter " + param;
            throw new NullPointerException(msg);
          }
          hashmap.get(entry.getKey()).put(param, Double.parseDouble(def));
        }
      }
    }

  }


  protected void getJSONParams() throws JSONException, ServiceException {
    List<String> paramList = Arrays.asList(
        "fwloss1", "fwloss2", "fwloss3", "fwloss4",
        "ag_fwloss1", "ag_fwloss2", "ag_fwloss3", "ag_fwloss4",
        "pa_fwloss1", "pa_fwloss2", "pa_fwloss3", "pa_fwloss4",
        "tt_fwloss1", "tt_fwloss2", "tt_fwloss3", "tt_fwloss4",
        "ag_dec32", "pa_dec32", "ag_dec4", "pa_dec4", "pa_dec52", "ag_dec52",
        "dec32", "dec4", "dec52",
        "tt_dec32", "tt_dec4", "tt_dec52",
        "ag_teff1", "ag_teff2", "ag_teff3", "ag_teff4",
        "pa_teff1", "pa_teff2", "pa_teff3", "pa_teff4",
        "tt_teff1", "tt_teff2", "tt_teff3", "tt_teff4",
        "ps1s31", "ps1s32", "ps2s31", "ps2s32", "fleach3",
        "pa_ps1s31", "pa_ps1s32", "pa_ps2s31", "pa_ps2s32",
        "ag_ps1s31", "ag_ps1s32", "ag_ps2s31", "ag_ps2s32",
        "tt_ps1s31", "tt_ps1s32", "tt_ps2s31", "tt_ps2s32",
        "dec11", "dec12", "dec22", "dec21", "dec31", "dec51", "ag_fleach3", "pa_fleach3", "tt_fleach3",
        "pmco22", "p2co22", "ps1co22", "p1co2a2", "p1co2b2", "p3co2",
        "ag_pmco22", "ag_p2co22", "ag_ps1co22", "ag_p1co2a2", "ag_p1co2b2", "ag_p3co2",
        "tt_pmco22", "tt_p2co22", "tt_ps1co22", "tt_p1co2a2", "tt_p1co2b2", "tt_p3co2",
        "pa_pmco22", "pa_p2co22", "pa_ps1co22", "pa_p1co2a2", "pa_p1co2b2", "pa_p3co2",
        "pa_dec11", "ag_dec11", "pa_dec12", "ag_dec12", "pa_dec21", "ag_dec21", "pa_dec22", "ag_dec22", "pa_dec31", "ag_dec31", "pa_dec51", "ag_dec51",
        "tt_dec21", "tt_dec22", "tt_dec31", "tt_dec51", "tt_dec11", "tt_dec12");

    List<String> siteparList = Arrays.asList("dmpflux", "pa_dmpflux", "ag_dmpflux", "tt_dmpflux",
        "MaxNitAmt", "pa_MaxNitAmt", "ag_MaxNitAmt", "tt_MaxNitAmt",
        "wfpsdnitadj", "pa_wfpsdnitadj", "ag_wfpsdnitadj", "tt_wfpsdnitadj",
        "n2n2oadj", "pa_n2n2oadj", "ag_n2n2oadj", "tt_n2n2oadj");
    List<String> lrrmList = Arrays.asList("basef");
    List<String> soilList = Arrays.asList("fieldCapacity1", "fieldCapacity2",
        "fieldCapacity3", "fieldCapacity4", "fieldCapacity5", "fieldCapacity6",
        "fieldCapacity7", "fieldCapacity8", "fieldCapacity9", "fieldCapacity10",
        "wiltingPoint1", "wiltingPoint2", "wiltingPoint3", "wiltingPoint4",
        "wiltingPoint5", "wiltingPoint6", "wiltingPoint7", "wiltingPoint8",
        "wiltingPoint9", "wiltingPoint10"
    );
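    // Route each dotted JSON parameter name to its target map (names are illustrative):
    //   crop.<cropname>.<param>  -> crops      tree.<treename>.<param> -> trees
    //   param.fixes.<param>      -> fixes      param.lrrm.<param>      -> lrrm
    //   param.site.<param>       -> sitepars   param.soil.<param>      -> soilpars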
    for (String s : JSONparams) {
      String[] parts = s.split("\\.");
      if (parts.length > 1) {
        String cropType = parts[0];
        String fileName = parts[1];
        if (cropType.equals("crop")) {
          crops = dictPopulation(crops, s);
        } else if (cropType.equals("tree")) {
          trees = dictPopulation(trees, s);
        } else if (cropType.equals("param") && fileName.equals("fixes")) {
          fixes = dictPopulation(fixes, s);
        } else if (cropType.equals("param") && fileName.equals("lrrm")) {
          lrrm = dictPopulation(lrrm, s);
        } else if (cropType.equals("param") && fileName.equals("site")) {
          sitepars = dictPopulation(sitepars, s);
        } else if (cropType.equals("param") && fileName.equals("soil")) {
          calibonly = Boolean.TRUE; // inject calibrated parameters into soil file
          soilpars = dictPopulation(soilpars, s);
        }
      }
    }

    fillFileFixes(paramList, fixes, fixDef);
    fillFileValues(lrrmList, lrrm, lrrmDef);
    fillFileValues(siteparList, sitepars, siteparDef);
    fillFileValues(soilList, soilpars, soilDef);
    fillCropValues();
    fillTreeValues();
  }


  protected void fetchSoil(Boolean calibonly, Integer counter) throws Exception {
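    // Requests soils.in from the GHG soil-file service. The call itself retries up to
    // 10 times; on a failed call this method also recurses, incrementing the counter
    // and giving up once it reaches 10.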
    ModelDataServiceCall mds = new ModelDataServiceCall()
        .put(STREAM_FILE, fileAsJSON)
        .put("cokey", cokey)
        .put("calibration", calibonly)
        .put("ignore_bad_caco3", Boolean.TRUE)
        .put(soils.AoA.AOA_GEOMETRY, new JSONObject()
            .put(TYPE, aoa_geometry.get(TYPE))
            .put(COORDINATES, aoa_geometry.get(COORDINATES)))
        .url(Config.getString("ghg.soilfile.url",
            request().getCodebase() + "d/insoilfile/2.2"))
        .withRetries(10) // 10 retries for soil as well
        .withRetryPause(1000)
        .call();

    if (mds.serviceFinished()) {
      mds.download("soils.in", workspace().getFile("soils.in"));
    } else if (counter != null && counter >= 10) {
      throw new RuntimeException("GHG Soil service error: " + mds.getError());
    } else {
      if (counter == null) {
        counter = 1;
      } else {
        counter++;
      }
      LOG.info("Soil service Counter: " + counter);
      fetchSoil(calibonly, counter);
    }
  }


}