/*
 * $Id$
 *
 * This file is part of the Cloud Services Integration Platform (CSIP),
 * a Model-as-a-Service framework, API, and application suite.
 *
 * 2012-2020, OMSLab, Colorado State University.
 *
 * OMSLab licenses this file to you under the MIT license.
 * See the LICENSE file in the project root for more information.
 */
package m.rhem.rhem01_runmodel;

import csip.Executable;
import csip.ModelDataService;
import csip.ModelDataServiceCall;
import csip.ServiceException;
import csip.annotations.Polling;
import csip.annotations.Resource;
import java.io.IOException;
import java.sql.Connection;
import javax.ws.rs.Path;
import m.rhem.model.AoA;
import m.rhem.model.Parameter;
import m.rhem.model.RhemModel;
import csip.annotations.Description;
import csip.annotations.Name;
import csip.utils.Parallel;
import csip.utils.TextParser;
import java.io.File;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import rhem.utils.DBResources;
import rhem.utils.RhemResources;
import static rhem.utils.RhemResources.RHEM4_INTL_EXE;

/**
 * RHEM-01:Run RHEM Model
 *
 * @version 2.1
 */
@Name("RHEM-01:Run RHEM Model")
@Description("Run RHEM Model utilizing parameters including climate station, "
    + "surface soil texture class, slope percent and length, and vegetative "
    + "cover characteristics (bunchgrass, forbs/annual, shrub, and sodgrass "
    + "foliar cover; plant basal cover; rock cover; litter cover; and "
    + "cryptogam cover. This version uses PRISM/cligen.")
@Path("m/rhem/runrhem/2.1")
@Polling(first = 10000, next = 2000)
@Resource(from = DBResources.class)
@Resource(from = RhemResources.class)
public class V2_1 extends ModelDataService {
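
  // Illustrative request sketch (not taken from this file): the parameter
  // names below are the ones read in preProcess(); the values and the
  // {"parameter": [{name, value}, ...]} envelope are assumptions, shown only
  // to document the expected inputs.
  //
  //   {
  //     "parameter": [
  //       {"name": "site_id",               "value": 1001},
  //       {"name": "site_loc",              "value": [34.5, -104.2]},
  //       {"name": "scenarioname",          "value": "demo scenario"},
  //       {"name": "scenariodescription",   "value": "illustration only"},
  //       {"name": "units",                 "value": 1},
  //       {"name": "soiltexture",           "value": "sandy loam"},
  //       {"name": "slopeshape",            "value": "uniform"},
  //       {"name": "slopesteepness",        "value": 10.0},
  //       {"name": "bunchgrasscanopycover", "value": 15.0},
  //       {"name": "forbscanopycover",      "value": 5.0},
  //       {"name": "shrubscanopycover",     "value": 10.0},
  //       {"name": "sodgrasscanopycover",   "value": 0.0},
  //       {"name": "basalcover",            "value": 8.0},
  //       {"name": "rockcover",             "value": 2.0},
  //       {"name": "littercover",           "value": 20.0},
  //       {"name": "cryptogamscover",       "value": 1.0},
  //       {"name": "sar",                   "value": 0.0}
  //     ]
  //   }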

  static final double DEFAULT_MOISTURE_CONTENT = 25.0;  // default soil moisture content
  static final double DEFAULT_SLOPE_LENGTH_1 = 50.0;    // default slope length, metric units (m)
  static final double DEFAULT_SLOPE_LENGTH_2 = 164.04;  // default slope length, English units (ft)

  // File names used for the climate (cligen) service downloads.
  static final String CLIGEN_RECORD_PRISM_PAR = "cligenRecordPrism.par";
  static final String CLIGEN_RECORD_PAR = "cligenRecord.par";
  static final String CLIGEN_TXT = "cligen.txt";

  AoA aoa;                        // area-of-analysis scenario built from the request
  String parameterFileName;       // RHEM parameter input (.par) file
  String stormFileName;           // storm/climate input (.pre) file
  String sumFileName;             // summary output (.sum) file
  String runFileName;             // run control (.run) file
  String detailedOutputFileName;  // detailed output (.out) file
  Parameter parameter;            // computed RHEM parameters

  RhemModel rhemModel;
  int unit;                       // 1 = metric, 2 = English

  double[] latlon;                // site location as [lat, lon]

  ModelDataServiceCall res;       // result of the climate (cligen) service call


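  /**
   * Reads and validates the request parameters, then builds the area-of-analysis
   * (AoA) scenario and the Parameter object used to generate the RHEM input files.
   */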
  @Override
  public void preProcess() throws ServiceException {

    int rhemSiteId = parameter().getInt("site_id", 0);
    latlon = parameter().getDoubleArray("site_loc");

    String scenarioName = parameter().getString("scenarioname");
    String scenarioDescription = parameter().getString("scenariodescription");
    unit = parameter().getInt("units");

    String soilTexture = parameter().getString("soiltexture");
    String slopeShape = parameter().getString("slopeshape");
    double slopeSteepness = parameter().getDouble("slopesteepness", 0.01);
    double bunchGrassCanopyCover = parameter().getDouble("bunchgrasscanopycover", 0.0);
    double forbsCanopyCover = parameter().getDouble("forbscanopycover", 0.0);
    double shrubsCanopyCover = parameter().getDouble("shrubscanopycover", 0.0);
    double sodGrassCanopyCover = parameter().getDouble("sodgrasscanopycover", 0.0);
    double basalCover = parameter().getDouble("basalcover", 0.0);
    double rockCover = parameter().getDouble("rockcover", 0.0);
    double litterCover = parameter().getDouble("littercover", 0.0);
    double cryptogamsCover = parameter().getDouble("cryptogamscover", 0.0);
    double sar = parameter().getDouble("sar", 0.0);

    ///////////////////////////////
    //  BEGIN input validation
    // The 'units' field accepts 1 (metric) or 2 (English).
    if (latlon.length != 2) {
      throw new ServiceException("Invalid site_loc, array expected [lat,lon]");
    }

    if (sar < 0.0 || sar > 50.0) {
      throw new ServiceException("Invalid sar (0.0 ... 50.0): " + sar);
    }

    if (unit != 1 && unit != 2) {
      throw new ServiceException("Unit: 1-metric or 2-English. The value, " + unit + ", is not valid.");
    }

    double slopeLength = (unit == 1) ? DEFAULT_SLOPE_LENGTH_1 : DEFAULT_SLOPE_LENGTH_2;

    if (slopeSteepness <= 0.0) {
      throw new ServiceException("The slopesteepness input parameter must be greater than 0.");
    }

    //  END input validation
    ///////////////////////////////
    aoa = new AoA(0, rhemSiteId, scenarioName,
        scenarioDescription, unit,
        soilTexture, slopeLength, slopeShape, slopeSteepness,
        bunchGrassCanopyCover, forbsCanopyCover, shrubsCanopyCover,
        sodGrassCanopyCover, basalCover, rockCover, litterCover,
        cryptogamsCover, DEFAULT_MOISTURE_CONTENT, sar);

    parameter = new Parameter(slopeLength);
  }


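  /**
   * Derives the workspace file names from the (truncated, sanitized) scenario
   * name, then in parallel computes the RHEM parameters from the database and
   * writes the model input files while the storm file is fetched from the
   * climate service, and finally runs the RHEM executable.
   */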
  @Override
  public void doProcess() throws Exception {
    String fileName = aoa.getScenarioName();
    if (fileName.length() > 15) {
      fileName = fileName.substring(0, 15);
    }
    fileName = fileName.replace(' ', '_');

    parameterFileName = "scenario_input_" + fileName + ".par";
    stormFileName = "storm_input_" + fileName + ".pre";
    sumFileName = "scenario_output_summary_" + fileName + ".sum";
    detailedOutputFileName = "scenario_output_summary_" + fileName + ".out";
    runFileName = fileName + ".run";

    Parallel.run(
        () -> {
          try (Connection con = resources().getJDBC(DBResources.CRDB)) {
            parameter.computeParameters(con, aoa);
          }
          rhemModel = new RhemModel(aoa.getScenarioName(), workspace().getDir(), parameterFileName,
              stormFileName, runFileName, sumFileName);

          rhemModel.generateParamFile(parameter);
          rhemModel.generateRunFile();
        },
        () -> {
          res = fetchClimate(stormFileName);
        }
    );

    runModel();
  }


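  /**
   * Calls the cligen/PRISM climate service for the site location and downloads
   * the generated storm file and the cligen parameter records into the workspace.
   *
   * @param stormfileName workspace file name for the downloaded storm (.pre) file
   * @return the finished climate service call, used later for station metadata
   */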
  private ModelDataServiceCall fetchClimate(String stormfileName) throws JSONException, Exception {
    ModelDataServiceCall res = new ModelDataServiceCall()
        .put("duration", 300)
        .put("outputFile", CLIGEN_TXT)
        .put("usePRISM", true)
        .put("returnParFiles", true)
        .put("input_zone_features", new JSONObject(
            "{     'type': 'FeatureCollection',"
            + "        'features': [{"
            + "          'type': 'Feature',"
            + "          'properties': {"
            + "            'name': 'pt one',"
            + "            'gid': 1"
            + "          },"
            + "          'geometry': {"
            + "            'type': 'Point',"
            + "            'coordinates': [" + latlon[1] + "," + latlon[0] + "]"
            + "          }"
            + "        }]"
            + "}"))
        .url(RhemResources.cligenUrl)
        .withDefaultLogger()
        .call();

    if (res.serviceFinished()) {
      res.download(CLIGEN_TXT, workspace().getFile(stormfileName));
      res.download(CLIGEN_RECORD_PAR, workspace().getFile(CLIGEN_RECORD_PAR));
      res.download(CLIGEN_RECORD_PRISM_PAR, workspace().getFile(CLIGEN_RECORD_PRISM_PAR));
      return res;
    } else {
      throw new ServiceException("Climate service error: " + res.getError());
    }
  }


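  /**
   * Runs the RHEM executable in batch mode ("-b") against the generated .run
   * file and fails the service if it exits with a non-zero code.
   */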
  private void runModel() throws ServiceException, IOException {
    Executable rh = resources().getExe(RHEM4_INTL_EXE);
    rh.setArguments("-b", workspace().getFile(runFileName));
    int exitCode = rh.exec();
    if (exitCode != 0) {
      throw new ServiceException("RHEM exited with non-zero code: " + exitCode);
    }
  }


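  /**
   * Publishes the computed RHEM parameters, the climate station metadata, the
   * annual averages and return-frequency tables parsed from the summary file,
   * and the generated input and output files as service results.
   */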
  @Override
  public void postProcess() throws Exception {
    File sumFile = workspace().getFile(sumFileName);
    File preFile = workspace().getFile(stormFileName);

//    results().put("AoAID", aoa.getAoaId(), "Area of Analysis Identifier");
//    results().put("rhem_site_id", aoa.getRhemSiteId(), "RHEM Evaluation Site Identifier");
    results().put("clen", Double.parseDouble(parameter.getClen()), "characteristic length of hillsope");
//    results().put("UNITS", (unit == 1) ? "metric" : "English", "unit of measure, metric or English");
    results().put("diams", new TextParser(parameter.getDiams(), "diams").nextLine().tokens().asDoubleArray(),
        "list of representative soil particle diameters for up to 5 particle classes");
    results().put("density", new TextParser(parameter.getDensity(), "density").nextLine().tokens().asDoubleArray(),
        "list of densities corresponding to the DIAMS particle classes");
    results().put("chezy", Double.parseDouble(parameter.getChezy()), "overland flow Chezy coefficient");
    results().put("rchezy", Double.parseDouble(parameter.getRchezy()), "concentrated flow Chezy coefficient");
    results().put("sl", new TextParser(parameter.getSl(), "sl").nextLine().tokens(TextParser.COMMA_SEP).asDoubleArray(),
        "slope expressed as fractional rise/run");
    results().put("sx", new TextParser(parameter.getSx(), "sx").nextLine().tokens(TextParser.COMMA_SEP).asDoubleArray(),
        "normalized distance");
    results().put("kss", Double.parseDouble(parameter.getKss()), "splash and sheet erodibility coefficient");
    results().put("ke", Double.parseDouble(parameter.getKe()), "effective hydraulic conductivity");
    results().put("g", Double.parseDouble(parameter.getG()), "mean capillary drive");
    results().put("dist", Double.parseDouble(parameter.getDist()), "pore size distribution index");
    results().put("por", Double.parseDouble(parameter.getPor()), "porosity");
    results().put("fract", new TextParser(parameter.getFract(), "fract").nextLine().tokens().asDoubleArray(),
        "list of particle class fractions");

    double asl = new TextParser(sumFile).toLineContaining("Avg-Soil-Loss").rightOf("=").asDouble();
    results().put("tds", Double.parseDouble(rhemModel.getTDS(asl)), "total dissolved solids");

    results().put("cli_station_id", res.getInt("stationId", 0));
    results().put("cli_station_name", res.getString("name", "?"));
    results().put("cli_station_state", res.getInt("state"));
    results().put("cli_station_elevation", res.getDouble("elevation"));
    results().put("cli_station_location", new double[]{res.getDouble("stationY"), res.getDouble("stationX")}, "station location (lat,lon)");

//  Observed monthly ave precipitation (mm)
//   24.9  18.9  16.4   8.0   5.6  14.0  65.6  79.6  38.8  24.2  16.6  27.1
    double[] mavg = new TextParser(preFile).toLineContaining("ave precipitation").nextLine().tokens().asDoubleArray();
    results().put("monthly_avg_precip", mavg, "monthly average precip", "mm");

    // summary file content.
    try (TextParser tp = new TextParser(sumFile).autoClose(false)) {

//    Avg. Precipitation (mm/year) =        335.49133
      double avg = tp.toLineContaining("Avg. Precipitation").rightOf("=").asDouble();
      results().put("avg_precip", avg, "yearly average precip", "mm/y");

//  Avg-Runoff(mm/year)=          26.79858
      avg = tp.toLineContaining("Avg-Runoff").rightOf("=").asDouble();
      results().put("avg_runoff", avg, "yearly average runoff", "mm/y");

//  Avg-Soil-Loss(Mg/ha/year)=          0.32337
      avg = tp.toLineContaining("Avg-Soil-Loss").rightOf("=").asDouble();
      results().put("avg_soilloss", avg, "yearly average soil loss", "Mg/ha/y");

//  Avg-SY(Mg/ha/year)=          0.32165
      avg = tp.toLineContaining("Avg-SY").rightOf("=").asDouble();
      results().put("avg_sy", avg, "yearly sediment yield", "Mg/ha/y");

// RETURN-FREQUENCY-RESULTS-YEARLY TOTALS
      //  Rain (mm)      326.50723      397.08432      434.37115      488.36038      506.88202      545.54865
      double[] rf = tp.toLineContaining("Rain").rightOf(")").tokens().asDoubleArray();
      results().put("retfreq_total_rain", rf, "rain return frequency, totals for years 2, 5, 10, 25, 50, 100", "mm");

      //  Runoff(mm)       22.74758       39.80891       51.82090       67.55290       80.73241       97.35841
      rf = tp.toLineContaining("Runoff").rightOf(")").tokens().asDoubleArray();
      results().put("retfreq_total_runoff", rf, "runoff return frequency, totals for years 2, 5, 10, 25, 50, 100", "mm");

      //  Soil-Loss(Mg/ha)        0.25164        0.48592        0.72893        0.98490        1.19312        1.60915
      rf = tp.toLineContaining("Soil-Loss").rightOf(")").tokens().asDoubleArray();
      results().put("retfreq_total_soilloss", rf, "soil loss return frequency, totals for years 2, 5, 10, 25, 50, 100", "Mg/ha");

      //  Sediment-Yield(Mg/ha)        0.25011        0.48314        0.72740        0.98077        1.18747        1.60272
      rf = tp.toLineContaining("Sediment-Yield").rightOf(")").tokens().asDoubleArray();
      results().put("retfreq_total_sy", rf, "sediment yield return frequency, totals for years 2, 5, 10, 25, 50, 100", "Mg/ha");

// RETURN-FREQUENCY-RESULTS-YEARLY MAXIMUM DAILY      
      rf = tp.toLineContaining("Rain").rightOf(")").tokens().asDoubleArray();
      results().put("retfreq_dailymax_rain", rf, "rain return frequency, max daily for years 2, 5, 10, 25, 50, 100 ", "mm");

      //  Runoff(mm)       22.74758       39.80891       51.82090       67.55290       80.73241       97.35841
      rf = tp.toLineContaining("Runoff").rightOf(")").tokens().asDoubleArray();
      results().put("retfreq_dailymax_runoff", rf, "runoff return frequency, max daily for years 2, 5, 10, 25, 50, 100", "mm");

      //  Soil-Loss(Mg/ha)        0.25164        0.48592        0.72893        0.98490        1.19312        1.60915
      rf = tp.toLineContaining("Soil-Loss").rightOf(")").tokens().asDoubleArray();
      results().put("retfreq_dailymax_soilloss", rf, "soil loss return frequency, max daily for years 2, 5, 10, 25, 50, 100", "Mg/ha");

      //  Sediment-Yield(Mg/ha)        0.25011        0.48314        0.72740        0.98077        1.18747        1.60272
      rf = tp.toLineContaining("Sediment-Yield").rightOf(")").tokens().asDoubleArray();
      results().put("retfreq_dailymax_sy", rf, "sediment yield return frequency, max daily for years 2, 5, 10, 25, 50, 100", "Mg/ha");
    }

    // files
    results().put(workspace().getFile(parameterFileName), "Parameter input file");
    results().put(workspace().getFile(stormFileName), "Storm input file");
    results().put(workspace().getFile(sumFileName), "Summary file");
    results().put(workspace().getFile(detailedOutputFileName), "Detailed summary file");
    results().put(workspace().getFile(CLIGEN_RECORD_PAR), "Cligen Par file");
    results().put(workspace().getFile(CLIGEN_RECORD_PRISM_PAR), "Cligen Prism Par file");

//    results().put(getWorkspaceFile("temp_" + sumFileName), "Temp Summary file");
  }

}