package m.ghg.daycent;

import csip.Config;
import csip.api.server.Executable;
import csip.ModelDataService;
import csip.api.client.ModelDataServiceCall;
import csip.api.server.ServiceException;
import csip.SessionLogger;
import csip.annotations.Description;
import csip.annotations.Name;
import csip.annotations.Options;
import csip.annotations.Resource;
import csip.utils.Parallel;
import csip.utils.TextParser;
import gisobjects.GISObject;
import gisobjects.GISObjectFactory;
import gisobjects.db.GISEngineFactory;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;
import java.sql.Connection;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Scanner;
import java.util.logging.Level;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import javax.ws.rs.Path;
import m.ghg.ApplicationResources;
import static m.ghg.ApplicationResources.*;
import static m.ghg.daycent.V2_1.yieldConv;
import oms3.ObjectiveFunction;
import org.apache.commons.io.input.ReversedLinesFileReader;
import org.apache.velocity.Template;
import org.apache.velocity.VelocityContext;
import org.apache.velocity.app.VelocityEngine;
import org.apache.velocity.runtime.RuntimeConstants;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import util.CRLMODData;
import util.DateConversion;
import util.DayCentData;
import util.ObjFuncs.*;
import util.ObjFuncs.PBIAS;
import util.ServiceUtils;
import util.WeatherStatistics;

/**
 *
 * @author sidereus, od
 */
@Name("Daycent simulation execution")
@Description("Daycent CSIP service")
//@Path("m/daycent/3.0")
@Options(timeout = "P1H")
@Resource(from = ApplicationResources.class)
public class V3_0 extends ModelDataService {

  private final WeatherStatistics ws = new WeatherStatistics();

  protected String schedule;
  protected String schedule_IC;
  protected boolean fileAsJSON;
  protected String aoaId;
  protected JSONObject aoa_geometry;
  protected double[] latlon = {Double.NaN, Double.NaN};
  protected int weatherDuration;
  protected int startingYear;
  protected boolean useClimateWind;

  protected double nlayer;
  protected List<Double> profileDepths;

  // slice by time (start and end) and convert it
  protected JSONObject rotation;
  protected List<Integer> crop_id;
  protected Collection<String> JSONparams;
  protected Integer cokey;
  protected Double area;
  protected Integer yearPractChange;
  protected Integer staticYearPractChange = 2011;
  protected Integer endPractChange;

  protected int colGracc = 0;
  protected int colAbove = 0;
  protected int colBelow = 0;
  protected int colSOM = 0;

  protected CRLMODData crlmod;
  protected DayCentData dayCentData;

  @Override
  protected void preProcess() throws ServiceException {
    schedule = parameter().getString(SCHEDULE_FILE);
    schedule_IC = parameter().getString(SCHEDULE_FILE_IC, "");
    useClimateWind = parameter().getBoolean(USE_CLIMATE_WIND, false);
    fileAsJSON = parameter().getBoolean(STREAM_FILE, false);
    weatherDuration = parameter().getInt(WEATHER_DURATION_TXT);
    startingYear = parameter().getInt(STARTING_YEAR_TXT);

    schedule = ServiceUtils.checkRemoveExtension(schedule);
    aoa_geometry = parameter().getJSON(soils.AoA.AOA_GEOMETRY);
    cokey = parameter().getInt(soils.db.tables.TableComponent.COKEY);
    area = parameter().getDouble("area");
    yearPractChange = parameter().getInt("yearPractChange");
    endPractChange = parameter().getInt("endPractChange", 2020);
    JSONparams = parameter().getNames();

    crlmod = new CRLMODData(parameter().getJSON("crlmod"));
    dayCentData = new DayCentData(parameter().getNames(), parameter());

  }

  @Override
  protected void doProcess() throws Exception {

    //  Get the centroid of the shape; if the geometry is already a point, the centroid is that point.
    try (Connection conn = resources().getJDBC(GISDB_SQLSVR)) {
      GISObject shape = GISObjectFactory.createGISObject(aoa_geometry, GISEngineFactory.createGISEngine(conn));

      //  getLat/Lon functions get point value or the centroid of the shape.
      latlon[0] = shape.getLatitude();
      latlon[1] = shape.getLongitude();
    }

    // Keep an untouched copy of the fix.100 template (fix_2.100) so it can be
    // restored for the second (ag) phase after the pre-ag run fills it in.
    Files.copy(workspace().getFile("fix.100").toPath(),
        new File(workspace().getDir(), "fix_2.100").toPath());

    Parallel.run(Config.getBoolean("ghg.serial.datafetch", false),
        () -> {
          fetchClimate();
          computeWeatherStats();
        },
        () -> {
          fetchSoil();
          countLayers(); // testing if we need the fix structure in Black et al. 2017
        }
    );

    dayCentData.preRunFixes();
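
    // Phase 1 (pre-agriculture): the payload ships the spin-up schedule as
    // pa_Truterra.sch. Rename it to the working schedule name, fill the .100
    // templates from the payload (_pa values for fix.100), and run DayCent
    // plus the list utility.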
    
    // RENAME PRE-AG SCH
    java.nio.file.Path source = workspace().getFile("pa_Truterra.sch").toPath();
    Files.move(source, source.resolveSibling("Truterra.sch"), StandardCopyOption.REPLACE_EXISTING);
    createConfig("crop.100");
    createConfig(getSite100());
    createConfig("fix.100"); // values from payload _pa
    runDaycent();
    runDaycentList();
    
    
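    // Archive the pre-ag outputs (.sch, .bin, .lis) under *_pa names, restore
    // the untouched fix.100 template from fix_2.100, and drop the pre-ag
    // year_summary.out.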
    source = workspace().getFile("Truterra.sch").toPath();
    Files.move(source, source.resolveSibling("Truterra_pa.sch"), StandardCopyOption.REPLACE_EXISTING);
    java.nio.file.Path sourceBin = workspace().getFile("Truterra.bin").toPath();
    Files.move(sourceBin, sourceBin.resolveSibling("Truterra_pa.bin"), StandardCopyOption.REPLACE_EXISTING);
    java.nio.file.Path sourceFix = workspace().getFile("fix.100").toPath();
    Files.move(sourceFix, sourceFix.resolveSibling("fix_pa.100"), StandardCopyOption.REPLACE_EXISTING);
    sourceFix = workspace().getFile("fix_2.100").toPath();
    Files.move(sourceFix, sourceFix.resolveSibling("fix.100"), StandardCopyOption.REPLACE_EXISTING);
    java.nio.file.Path sourceLis = workspace().getFile("Truterra.lis").toPath();
    Files.move(sourceLis, sourceLis.resolveSibling("Truterra_pa.lis"), StandardCopyOption.REPLACE_EXISTING);
    Files.delete(workspace().getFile("year_summary.out").toPath());

    dayCentData.postRunFixes();

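    // Phase 2 (agricultural): swap in ag_Truterra.sch, refill fix.100 from the
    // _ag payload values, and extend the simulation from the pre-ag state
    // (Truterra_pa.bin) via the "-e" option added in buildExeIC().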
    // RENAME AG SCH
    source = workspace().getFile("ag_Truterra.sch").toPath();
    Files.move(source, source.resolveSibling("Truterra.sch"), StandardCopyOption.REPLACE_EXISTING);
    createConfig("fix.100"); // values from payload _ag
    schedule_IC = "Truterra_pa.bin";
    runDaycent();
    runDaycentList();
//    checkHarvest();
  }

  @Override
  protected void postProcess() throws Exception {
    List<String> l = getRequestedObjfunc();
    if (l.isEmpty()) {
      parseLis();
    } else {
      calibrate(l);
    }
  }

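  /**
   * Returns the site .100 file name referenced by the schedule: the first
   * token of the first line in the schedule (.sch) file that contains ".100".
   */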
  protected String getSite100() throws IOException {
    String scheduleWithExtension = schedule + ".sch";
    return new TextParser(workspace().getFile(scheduleWithExtension))
        .toLineContaining(".100")
        .tokens().asStringArray()[0];
  }

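  // outfiles.in lists one candidate output file per line: a flag first
  // (1 presumably meaning "produce this file"), then the file name; only the
  // enabled file names are returned.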
  protected List<String> parseOutfile() throws IOException {
    try (Stream<String> stream = workspace().lines("outfiles.in")) {
      return stream
          .filter(l -> l.startsWith("1"))
          .map(l -> l.split("\\s+")[1])
          .collect(Collectors.toList());
    }
  }

  // The .100 templates arrive as plain workspace inputs, so each one is moved
  // to a temporary name (TMP100) before Velocity writes the filled file back
  // under the original name.
  private File renameInputTemplate(String template) throws IOException {
    File sourceTemplate = workspace().getFile(template);
    File destTemplate = new File(workspace().getDir(), TMP100);

    Files.copy(sourceTemplate.toPath(), destTemplate.toPath());

    // Sanity check that the copy is complete before deleting the source
    // (arguably redundant: Files.copy either succeeds fully or throws).
    if (!Files.readAllLines(sourceTemplate.toPath()).equals(Files.readAllLines(destTemplate.toPath()))) {
      throw new IOException("File not fully copied");
    } else {
      Files.delete(sourceTemplate.toPath());
    }

    return sourceTemplate;
  }

  private void cleanUpWorkspace() throws IOException {
    Files.delete(workspace().getFile(TMP100).toPath());
  }

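  /**
   * Fills a .100 template in place using Velocity. This service instance is
   * put into the Velocity context under the lowercased DAYCENT resource name,
   * so templates can pull values straight from its public accessors.
   */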
  protected void createConfig(String template) throws IOException {
    VelocityEngine velocity = new VelocityEngine();
    velocityInit(velocity);
    VelocityContext context = new VelocityContext();
    context.put(DAYCENT.toLowerCase(), this);

    File filledTemplate = renameInputTemplate(template);

    try (FileWriter w = new FileWriter(filledTemplate.getAbsolutePath())) {
      Template vt = velocity.getTemplate(TMP100, "utf-8");
      vt.merge(context, w);
    }
    cleanUpWorkspace();
  }

  public void velocityInit(VelocityEngine velocity) {
    velocity.setProperty(RuntimeConstants.FILE_RESOURCE_LOADER_PATH,
        workspace().getDir().getAbsolutePath());
    velocity.init();
  }

  private void throwError(int ret, Executable exe) throws IOException, ServiceException {
    File stderr = exe.stderr();
    File stdout = exe.stdout();
    String err = "ret: " + ret + "\n";
    if (stderr.exists() && stderr.length() > 0) {
      err = err + "stderr: \n" + workspace().readString(stderr.getName());
    }
    if (stdout.exists() && stdout.length() > 0) {
      String out = "";
      // collect the last 20 lines of stdout, oldest first
      try (ReversedLinesFileReader r = new ReversedLinesFileReader(stdout, Charset.forName("UTF-8"))) {
        for (int i = 0; i < 20; i++) {
          String l = r.readLine();
          if (l == null) {
            break;
          }
          out = l + '\n' + out;
        }
      }
      err = err + "stdout (last 20 lines): ...\n" + out;
    }
    throw new ServiceException("Error running " + exe.getName() + ": " + err);
  }

  private Executable buildExe(Executable exe) {
    exe.addArguments(
        "-s", schedule,
        "-n", schedule
    );
    return exe;
  }

  private Executable buildExeIC(Executable exe) {
    schedule_IC = ServiceUtils.checkRemoveExtension(schedule_IC);
    exe.addArguments(
        "-s", schedule,
        "-n", schedule,
        "-e", schedule_IC
    );
    return exe;
  }

  protected boolean simulationFailure(Executable exe) throws IOException {
    try (Stream<String> stream = workspace().lines(exe.stderr())) {
      return stream.noneMatch(l -> l.toLowerCase().contains("execution success"));
    }
  }

  protected boolean lisFailure(Executable exe) throws IOException {
    try (Stream<String> stream = workspace().lines(exe.stdout())) {
      return stream.noneMatch(l -> l.contains("Done."));
    }
  }

  protected void runDaycent() throws ServiceException, IOException {
    Executable exe = resources().getExe(DAYCENT);
    exe = (schedule_IC.isEmpty()) ? buildExe(exe) : buildExeIC(exe);
    int ret = exe.exec();
    if (ret != 0 || simulationFailure(exe)) {
      throwError(ret, exe);
    }
  }

  protected void runDaycentList() throws ServiceException, IOException {
    Executable exe = resources().getExe(DAYCENT_LIST);
    exe.addArguments(
        schedule,
        schedule,
        "outvars.txt"
    );
    int ret = exe.exec();
    if (ret != 0 || lisFailure(exe)) {
      throwError(ret, exe);
    }
  }

  protected void computeWeatherStats() throws Exception {
    try (Stream<String> stream = workspace().lines(DAYCENT_WEATHER)) {
      ws.compute(stream);
    }
  }

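  /**
   * Calls the cligen weather service for the AoA geometry and downloads the
   * resulting DayCent weather file into the workspace.
   */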
  protected void fetchClimate() throws Exception {
    ModelDataServiceCall mds = new ModelDataServiceCall()
        .put(STARTING_YEAR_TXT, startingYear)
        .put(WEATHER_DURATION_TXT, weatherDuration)
        .put(STREAM_FILE, fileAsJSON)
        .put(USE_CLIMATE_WIND, useClimateWind)
        .put(soils.AoA.AOA_GEOMETRY, new JSONObject()
            .put(TYPE, aoa_geometry.get(TYPE))
            .put(COORDINATES, aoa_geometry.get(COORDINATES)))
        .url(Config.getString(WEATHER_URL_KEY,
            request().getCodebase() + "d/weather/cligen/1.0"))
        .call();

    if (mds.serviceFinished()) {
      mds.download(DAYCENT_WEATHER, workspace().getFile(DAYCENT_WEATHER));
    } else {
      throw new ServiceException("GHG Weather service error: " + mds.getError());
    }
  }

  //////////// Calibration
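  // Calibration is requested by adding a parameter whose name starts with
  // "pbias." (see getRequestedObjfunc()). Its value is a two-element string
  // array: element 0 is the calibration target ("yield", "biomass" or "soc"),
  // element 1 the comma-separated target details (observed value plus year
  // range and crop type, or observed value plus year for "soc") -- see the
  // examples in main().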
  void calibrate(List<String> l) throws Exception {
    for (String ofName : l) {

      // 2 values
      String[] data = parameter().getStringArray(ofName);
      if (data.length != 2) {
        throw new ServiceException("Invalid objective function content, 2 elements expected");
      }

      // data[0] : calibration target (e.g. yield)
      // data[1] : calibration details (observed value, start/end year, crop type -- or observed value, year for soc)
      double v = calcObjFunc(LOG,
          workspace().getFile("Truterra.sch"),
          workspace().getFile("Truterra.lis"),
          data[0], data[1]);
      results().put(ofName, v);
    }
  }

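  /**
   * Averages the given .lis column over the harvest dates of the given crop
   * type between starty and endy (inclusive), as listed in the schedule file.
   */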
  static double getAvg(SessionLogger LOG, File schFile, File lisFile, String column,
      int starty, int endy, String croptype, String termination) throws Exception {

    // get the raw values from lis file: as map
    // e.g. time -> cgracc
    Map<String, String> c = ServiceUtils.getMapFor(lisFile, column);

    if (LOG.isLoggable(Level.INFO)) {
      LOG.info("caggr : " + c);
    }

    // harvest dates from the schedule file, converted to .lis time format
    List<String> harvestDates = getDates(LOG, starty, endy, schFile, croptype, termination);

    // extract the values based on keys (harvest dates) into columnVals
    List<String> columnVals = new ArrayList<>();
    for (String harvestDate : harvestDates) {
      String caggr = c.get(harvestDate);
      if (caggr == null) {
        throw new ServiceException("No caggr value for harvestDate: " + harvestDate);
      }
      columnVals.add(caggr);
    }

    if (LOG.isLoggable(Level.INFO)) {
      LOG.info("columnVals " + columnVals);
    }

    // create the average of those values
    double avg = columnVals.stream().mapToDouble(Double::parseDouble).average().getAsDouble();

    if (LOG.isLoggable(Level.INFO)) {
      LOG.info("avg: " + avg);
    }

    return avg;
  }

  public static boolean isLeapYear(int year) {
    Calendar cal = Calendar.getInstance();
    cal.set(Calendar.YEAR, year);
    return cal.getActualMaximum(Calendar.DAY_OF_YEAR) > 365;
  }

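  /**
   * Returns the value of the given .lis column on the last day of the given
   * year (day 365, or 366 in a leap year).
   */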
  static double getVal(SessionLogger LOG, File schFile, File lisFile, String column,
      int year) throws Exception {

    // get the raw values from lis file: as map
    // e.g. time -> cgracc
    Map<String, String> c = ServiceUtils.getMapFor(lisFile, column);

    if (LOG.isLoggable(Level.INFO)) {
      LOG.info("caggr : " + c);
    }
    String tdate;
    if (isLeapYear(year)) {
      tdate = String.valueOf(year) + ".366";
    } else {
      tdate = String.valueOf(year) + ".365";
    }
    String date = DateConversion.date2daycentLis(tdate);

    // look up the column value (e.g. somsc) at the end-of-year date
    String somsc = c.get(date);
    if (somsc == null) {
      throw new ServiceException("No somsc value for year: " + year);
    }

    if (LOG.isLoggable(Level.INFO)) {
      LOG.info("somsc " + somsc);
    }

    return Double.parseDouble(somsc);
  }

  static List<String> getDates(SessionLogger LOG, int starty, int endy,
      File schFile, String croptype, String termination) throws IOException {

    List<String> schDates = ServiceUtils.getCropHarvest(schFile, croptype, termination);

    if (LOG.isLoggable(Level.INFO)) {
      LOG.info("schDates: " + schDates);
    }

    List<String> filtered_schDates = new ArrayList<>();

    // filter by date range (starty - endy)
    for (String schDate : schDates) {
      String[] sd = schDate.split("\\.");
      int y = Integer.parseInt(sd[0]);
      if (y >= starty && y <= endy) {
        filtered_schDates.add(schDate);
      }
    }

    if (LOG.isLoggable(Level.INFO)) {
      LOG.info("Filtered dates: " + filtered_schDates);
    }

    // convert all schDates to lisDates
    List<String> lisDates = new ArrayList<>();
    for (String schDate : filtered_schDates) {
      lisDates.add(DateConversion.date2daycentLis(schDate));
    }
    if (LOG.isLoggable(Level.INFO)) {
      LOG.info("lisDates: " + lisDates);
    }
    return lisDates;
  }

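  // Ad-hoc local test of the objective function calculations; the hard-coded
  // workspace paths have to point at an existing DayCent run.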
  public static void main(String[] args) throws Exception {
    calcObjFunc(new SessionLogger(),
        new File("/tmp/csip/work/09/12/16f716fa-e0e0-11eb-ad10-23d87c956bee/Truterra.sch"),
        new File("/tmp/csip/work/09/12/16f716fa-e0e0-11eb-ad10-23d87c956bee/Truterra.lis"),
        "yield", "50.9, 1935, 1960, SYBN1");

    calcObjFunc(new SessionLogger(),
        new File("/tmp/csip/work/09/12/16f716fa-e0e0-11eb-ad10-23d87c956bee/Truterra.sch"),
        new File("/tmp/csip/work/09/12/16f716fa-e0e0-11eb-ad10-23d87c956bee/Truterra.lis"),
        "soc", "50.9, 2019");

    calcObjFunc(new SessionLogger(),
        new File("/tmp/csip/work/09/12/16f716fa-e0e0-11eb-ad10-23d87c956bee/Truterra.sch"),
        new File("/tmp/csip/work/09/12/16f716fa-e0e0-11eb-ad10-23d87c956bee/Truterra.lis"),
        "biomass", "120, 2011, 2020, CC2");

  }

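  /**
   * Computes obsVal - simVal for a single calibration target. Supported
   * targets and their comma-separated targetInfo formats:
   *  - yield   : obs, startYear, endYear, cropType (mean cgracc at HARV events,
   *              converted with yieldConv)
   *  - biomass : obs, startYear, endYear, cropType (mean agcacc at HERB events)
   *  - soc     : obs, year (somsc on the last day of that year)
   */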
  static double calcObjFunc(SessionLogger LOG, File schFile, File lisFile,
      String target, String targetInfo) throws Exception {
    if (target.equals("yield")) {
      String[] ti = targetInfo.split("\\s*,\\s*");
      if (ti.length != 4) {
        throw new ServiceException("Invalid targetinfo for yield.");
      }

      String obsValStr = ti[0];    // obs data value for period
      String start = ti[1];  // start 
      String end = ti[2];    // end
      String ct = ti[3];     // croptype  (e.g. C5)

      if (LOG.isLoggable(Level.INFO)) {
        LOG.info(" objfunc info: " + Arrays.toString(ti));
      }

      double obsVal = Double.parseDouble(obsValStr);

      // simulated yield 
      double simVal = getAvg(LOG, schFile, lisFile,
          "cgracc", Integer.parseInt(start), Integer.parseInt(end), ct, "HARV");

      Double conv = yieldConv.get(ct);
      if (conv == null) {
        throw new ServiceException("No yield conversion factor for: " + ct);
      }

      simVal = simVal * conv;
      if (LOG.isLoggable(Level.INFO)) {
        LOG.info("sim yield: " + simVal);
      }

      return obsVal - simVal;
    } else if (target.equals("biomass")) {
      String[] ti = targetInfo.split("\\s*,\\s*");
      if (ti.length != 4) {
        throw new ServiceException("Invalid targetinfo for biomass.");
      }

      String obsValStr = ti[0];    // obs data value for period
      String start = ti[1];  // start 
      String end = ti[2];    // end
      String ct = ti[3];     // croptype  (e.g. C5)

      if (LOG.isLoggable(Level.INFO)) {
        LOG.info(" objfunc info: " + Arrays.toString(ti));
      }

      double obsVal = Double.parseDouble(obsValStr);

      // simulated biomass
      double simVal = getAvg(LOG, schFile, lisFile,
          "agcacc", Integer.parseInt(start), Integer.parseInt(end), ct, "HERB");
//
//      Double conv = yieldConv.get(ct);
//      if (conv == null) {
//        throw new ServiceException("No yield conversion factor for: " + ct);
//      }

      if (LOG.isLoggable(Level.INFO)) {
        LOG.info("sim biomass: " + simVal);
      }

      return obsVal - simVal;
    } else if (target.equals("soc")) {

      String[] ti = targetInfo.split("\\s*,\\s*");
      if (ti.length != 2) {
        throw new ServiceException("Invalid targetinfo for soc.");
      }

      String obsValStr = ti[0];    // obs data value for period
      String year = ti[1];  // year

      if (LOG.isLoggable(Level.INFO)) {
        LOG.info(" objfunc info: " + Arrays.toString(ti));
      }

      double simVal = getVal(LOG, schFile, lisFile,
          "somsc", Integer.parseInt(year));

      double obsVal = Double.parseDouble(obsValStr);

      return obsVal - simVal;

    } else {
      throw new Exception("calibration target not supported: " + target);
    }
  }

  static final Map<String, ObjectiveFunction> OF = new HashMap<>();

  static {
//    OF.put("kge", new KGE());
//    OF.put("ns", new NS());
//    OF.put("nslog", new NS2LOG());
//    OF.put("nslog1p", new NSLOG1P());
//    OF.put("nslog2", new NSLOG2());
//    OF.put("rmse", new RMSE());
//    OF.put("trmse", new TRMSE());
    OF.put("pbias", new PBIAS());
  }

  private List<String> getRequestedObjfunc() {
    List<String> l = new ArrayList<>();
    for (String p : parameter().getNames()) {
      if (p.toLowerCase().startsWith("pbias.")) {
        l.add(p);
      }
    }
    return l;
  }

  /////////////////////
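  /**
   * Parses the schedule's .lis output: locates the time, cgracc, agcprd,
   * bgcjprd and somsc columns in the header, then emits one JSON result array
   * of carbon values per practice-change year (see getValues()).
   */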
  protected void parseLis() throws FileNotFoundException, JSONException {
    List<Integer> practiceChange = new LinkedList<>();
    // build the list of practice-change years to report on
    for (int i = staticYearPractChange; i <= endPractChange; i++) {
      practiceChange.add(i);
    }
    String filename = schedule + ".lis";
    int lineNumber = 1;
    int colTime = 0;
    boolean skiplastline = Boolean.FALSE;
    try (Scanner scanner = new Scanner(workspace().getFile(filename));) {
      String[] prevData = null;
      Integer yearNewMgt = practiceChange.remove(0); // first element always
      while (scanner.hasNextLine()) {
        String line = scanner.nextLine();
        double prevCgain = 0;
        if (lineNumber == 1) {
          // read header
          String[] data = line.split("\\s+");
          for (int col = 0; col < data.length; col++) {
            if (data[col].equals("cgracc")) {
              colGracc = col;
            } else if (data[col].equals("agcprd")) {
              colAbove = col;
            } else if (data[col].equals("bgcjprd")) {
              colBelow = col;
            } else if (data[col].equals("somsc")) {
              colSOM = col;
            } else if (data[col].equals("time")) {
              colTime = col;
            }
          }
        } else if (lineNumber == 2) {
          // skip line because empty
        } else {
          // check the line
          String[] data = line.split("\\s+");
          double year = Double.parseDouble(data[colTime]);
          if (prevData == null && year == (yearNewMgt + MODEL_OFFSET)) {
            prevData = data;
          }

          if (!skiplastline) {
            // + MODEL_OFFSET because calendar years are shifted onto the
            // simulation time axis (+2000 for the 4000-year simulation)
            // +1 because the .lis time stamps the end of the year
            // (December 2012 shows up as 2013.00)
            if (year == (yearNewMgt + MODEL_OFFSET + 1)) {
              double som = Double.parseDouble(prevData[colSOM]);
              prevCgain = som * 0.00404686 * area;
              Integer actYear = yearNewMgt;
              results().put(actYear.toString(), getValues(data, prevCgain));
              prevData = data;
              // the last year is duplicated in the .lis output and needs to be skipped
              if (!practiceChange.isEmpty()) {
                yearNewMgt = practiceChange.remove(0);
              } else {
                skiplastline = Boolean.TRUE;
              }
            }
          }
        }
        lineNumber++;
      }
    }
  }

  private JSONArray getValues(String[] data, double prevCgain) throws JSONException {
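    // 0.00404686 converts g C per m^2 to metric tons per acre
    // (1 acre = 4046.86 m^2, 1 metric ton = 1e6 g); multiplying by "area",
    // assumed here to be in acres, gives metric tons for the component.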
    double comp_crop_yld_c = Double.parseDouble(data[colGracc]) * 0.00404686 * area;
    double comp_crop_res_c = (Double.parseDouble(data[colAbove])
        + Double.parseDouble(data[colBelow])) * 0.00404686 * area;
    double comp_soil_c_stock = Double.parseDouble(data[colSOM]) * 0.00404686 * area;
    double comp_soil_c_stock_unit_area = Double.parseDouble(data[colSOM]);
    double comp_soil_c_gain = comp_soil_c_stock - prevCgain;
    JSONObject jy = new JSONObject()
        .put("name", "comp_crop_yld_c")
        .put("description", "Crop yield carbon of intersected mapunit soil component leaving field")
        .put("units", "Metric tons")
        .put("value", comp_crop_yld_c);
    JSONObject jc = new JSONObject()
        .put("name", "comp_crop_res_c")
        .put("description", "Crop residue carbon above and below ground of intersected mapunit soil component remaining in field")
        .put("units", "Metric tons")
        .put("value", comp_crop_res_c);
    JSONObject js = new JSONObject()
        .put("name", "comp_soil_c_stock")
        .put("description", "Soil carbon stock of intersected mapunit soil component at the end of crop")
        .put("units", "Metric tons")
        .put("value", comp_soil_c_stock);
    JSONObject jg = new JSONObject()
        .put("name", "comp_soil_c_gain")
        .put("description", "Soil carbon stock gain or loss for intersected mapunit soil component through crop year")
        .put("units", "Metric tons")
        .put("value", comp_soil_c_gain);
    JSONObject jsua = new JSONObject()
        .put("name", "comp_soil_c_stock_unit_area")
        .put("description", "Soil carbon stock per unit area of intersected mapunit soil component at the end of crop")
        .put("units", "g per meter squared")
        .put("value", comp_soil_c_stock_unit_area);
    JSONArray ja = new JSONArray()
        .put(jy)
        .put(jc)
        .put(js)
        .put(jg)
        .put(jsua);
    return ja;
  }

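  /**
   * Sanity check (currently disabled in doProcess()): fails the run if cgracc
   * is zero for any simulation year in the 4011-4020 window, i.e. no grain
   * harvest was produced.
   */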
  public void checkHarvest() throws FileNotFoundException, ServiceException {
    String filename = schedule + ".lis";
    int lineNumber = 1;
    int colGracc = 0;
    int colTime = 0;
    try (Scanner scanner = new Scanner(workspace().getFile(filename));) {
      while (scanner.hasNextLine()) {
        String line = scanner.nextLine();
        if (lineNumber == 1) {
          // read header
          String[] data = line.split("\\s+");
          for (int col = 0; col < data.length; col++) {
            if (data[col].equals("cgracc")) {
              colGracc = col;
            } else if (data[col].equals("time")) {
              colTime = col;
            }
          }
        } else if (lineNumber == 2) {
          // skip line because empty
        } else {
          // check the line
          String[] data = line.split("\\s+");
          double year = Double.parseDouble(data[colTime]);
          if (year > 4010 && year < 4021) {
            double grainHarvest = Double.parseDouble(data[colGracc]);
            if (grainHarvest == 0.0) {
              String msg = "cgracc for year " + year + " is " + grainHarvest;
              throw new ServiceException(msg);
            }
          }
        }
        lineNumber++;
      }
    }
  }

  protected void countLayers() throws ServiceException {
    try {
      profileDepths = new LinkedList<>();
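      // soils.in has one layer per line; column 2 (presumably the layer bottom
      // depth in cm) is read, and the last entry gives the profile bottom.
      // Walk the default DayCent layer thicknesses, accumulating depth until
      // the profile bottom is reached; the final layer is truncated so the
      // thicknesses sum exactly to that depth.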
      List<Double> profiles = Files.lines(workspace().getFile("soils.in").toPath())
          .map(str -> str.split("\\s+"))
          .map(str -> Double.parseDouble(str[1]))
          .collect(Collectors.toList());
      double[] defaultProfile = new double[]{10, 20, 15, 15, 30, 30, 30, 30, 30, 30};
      double val = 0;
      double bottom = profiles.get(profiles.size() - 1);
      for (int i = 0; i < defaultProfile.length; i++) {
        double prof = defaultProfile[i];
        val += prof;
        if (val == bottom) {
          profileDepths.add(prof);
          break;
        } else if (val > bottom) {
          double tmpval = val - prof;
          profileDepths.add(bottom - tmpval);
          break;
        } else {
          profileDepths.add(prof);
        }
      }
      nlayer = profileDepths.size(); // - 2; // the first three layers are meshed into 1
    } catch (IOException ex) {
      throw new ServiceException(ex.getMessage());
    }
  }

  protected void fetchSoil() throws Exception {
    ModelDataServiceCall mds = new ModelDataServiceCall()
        .put(STREAM_FILE, fileAsJSON)
        .put("cokey", cokey)
        .put(soils.AoA.AOA_GEOMETRY, new JSONObject()
            .put(TYPE, aoa_geometry.get(TYPE))
            .put(COORDINATES, aoa_geometry.get(COORDINATES)))
        .url(Config.getString("ghg.soilfile.url",
            request().getCodebase() + "d/insoilfile/2.2"))
        .call();

    if (mds.serviceFinished()) {
      mds.download("soils.in", workspace().getFile("soils.in"));
    } else {
      throw new ServiceException("GHG Soil service error: " + mds.getError());
    }
  }

}