/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package m.ghg.daycent;
import csip.Config;
import csip.api.server.Executable;
import csip.api.client.ModelDataServiceCall;
import csip.api.server.PayloadParameter;
import csip.api.server.ServiceException;
import csip.SessionLogger;
import csip.annotations.Description;
import csip.annotations.Name;
import csip.annotations.Options;
import csip.annotations.Resource;
import csip.cosu.ObjFunc;
import csip.utils.Parallel;
import gisobjects.GISObject;
import gisobjects.GISObjectFactory;
import gisobjects.db.GISEngineFactory;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;
import java.sql.Connection;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Scanner;
import java.util.Set;
import java.util.logging.Level;
import java.util.stream.Collectors;
import javax.ws.rs.Path;
import m.ghg.ApplicationResources;
import static m.ghg.ApplicationResources.*;
import oms3.ObjectiveFunction;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import util.DateConversion;
import util.ServiceUtils;
import util.ObjFuncs.*;
import static util.ServiceUtils.getStringColumnData;
/**
*
* @author sidereus, od
*/
@Name("Daycent simulation execution")
@Description("Daycent CSIP service")
@Path("m/daycent/2.2")
//@Options(timeout = "PT30M")
@Resource(from = ApplicationResources.class)
public class V2_2 extends V1_0 {
static final Map<String, Double> yieldConv = new HashMap<>();
// slice by time (start and end) and convert it
/*
Corn: grams/square meter C to corn bushels per acre: cgracc value * 0.41652
Factor: 8.92179 / 0.45 / (56 * 0.85) = 0.41652
Soybean: cgracc value * 0.37982 bu/acre
Factor: 8.92179 / 0.45 / (60 * 0.87) = 0.37982
Wheat: cgracc value * 0.38200 bu/acre
Factor: 8.92179 / 0.45 / (60 * 0.865) = 0.38200
Oats: cgracc value * 0.72043 bu/acre
Factor: 8.92179 / 0.45 / (32 * 0.86) = 0.72043
Sorghum: cgracc value * 0.40694 bu/ac
Factor: 8.92179 / 0.45 / (56 * 0.87) = 0.40694
Sorghum Silage: cgracc value * 0.0124903 tons/acre
Factor: 0.00404686 Mg/ac / 0.45 C fraction / 0.72 dry fraction = 0.0124903 Mg/ac
Alfalfa Hay: cgracc value * 0.011394 tons/acre
Factor: 0.0044609 / 0.45 / 0.87 = 0.011394
Corn Silage: cgracc value * 0.026435 tons/acre
Factor: 0.0044609 / 0.45 / 0.375 = 0.026435
Sorghum Forage: cgracc value * 0.0124903 tons/acre
Barley: cgracc value * 0.48594 bu/acre
*/
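    // Reading the bushel factors above (consistent with the arithmetic, though
    // the constant sources are inferred): 8.92179 converts g/m2 to lb/ac
    // (1 g/m2 = 10 kg/ha = 8.92179 lb/ac), dividing by 0.45 converts grain C to
    // grain biomass, and dividing by (test weight lb/bu * dry-matter fraction)
    // converts lb to bu; 0.0044609 is the g/m2 to short tons/ac analogue.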
static {
yieldConv.put("C1", 0.41652);
yieldConv.put("C3", 0.41652);
yieldConv.put("C5", 0.41652);
yieldConv.put("C7", 0.41652);
yieldConv.put("C9", 0.41652);
yieldConv.put("C10", 0.41652);
yieldConv.put("C11", 0.41652);
yieldConv.put("C12", 0.41652);
yieldConv.put("C13", 0.41652);
yieldConv.put("SC13", 0.41652);
yieldConv.put("SYBN1", 0.37982);
yieldConv.put("SYBN2", 0.37982);
yieldConv.put("SYBN3", 0.37982);
yieldConv.put("SYBN4", 0.37982);
yieldConv.put("OAT3", 0.72043);
yieldConv.put("W4", 0.38200);
yieldConv.put("SW3", 0.38200);
yieldConv.put("SW4", 0.38200);
yieldConv.put("SORG3", 0.40694);
yieldConv.put("FSORG", 0.0124903);
yieldConv.put("ALF", 0.011394);
yieldConv.put("G3CPI", 0.0051275);
yieldConv.put("CSL13", 0.026435);
yieldConv.put("JTOM", 1.770197);
yieldConv.put("SUN", 23.32);
yieldConv.put("COT", 22.52);
yieldConv.put("BAR3", 0.48594);
// yieldConv.put("RYE", 0.42445); // triticale
yieldConv.put("RYE", 0.416517); // rye
yieldConv.put("DBEAN", 23.32495);
yieldConv.put("PEA", 0.384228854);
// yieldConv.put("CLV1", 23.325);
yieldConv.put("CLV1", 0.0116625); // clover in tons/A
yieldConv.put("LENT", 23.325);
yieldConv.put("SAFF", 0.47774); // canola
}
protected JSONObject rotation;
protected List<Integer> crop_id;
protected Collection<String> JSONparams;
protected Integer cokey;
protected Double area;
protected Integer yearPractChange;
protected Integer staticYearPractChange = 2011;
protected Integer endPractChange;
protected int colGracc = 0;
protected int colAbove = 0;
protected int colBelow = 0;
protected int colSOM = 0;
@Override
protected void preProcess() throws ServiceException {
schedule = parameter().getString(SCHEDULE_FILE);
schedule_IC = parameter().getString(SCHEDULE_FILE_IC, "");
useClimateWind = parameter().getBoolean(USE_CLIMATE_WIND, false);
fileAsJSON = parameter().getBoolean(STREAM_FILE, false);
weatherDuration = parameter().getInt(WEATHER_DURATION_TXT);
startingYear = parameter().getInt(STARTING_YEAR_TXT);
schedule = ServiceUtils.checkRemoveExtension(schedule);
aoa_geometry = parameter().getJSON(soils.AoA.AOA_GEOMETRY);
cokey = parameter().getInt("cokey");
area = parameter().getDouble("area");
yearPractChange = parameter().getInt("yearPractChange");
endPractChange = parameter().getInt("endPractChange", 2020);
JSONparams = parameter().getNames();
}
@Override
protected void doProcess() throws Exception {
        // Get the centroid of the shape; if the geometry is already a point, the centroid is that point.
try ( Connection conn = resources().getJDBC(GISDB_SQLSVR)) {
GISObject shape = GISObjectFactory.createGISObject(aoa_geometry, GISEngineFactory.createGISEngine(conn));
// getLat/Lon functions get point value or the centroid of the shape.
latlon[0] = shape.getLatitude();
latlon[1] = shape.getLongitude();
}
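        // keep a pristine copy of fix.100 as fix_2.100; after the pre-ag run it
        // is swapped back in as fix.100 for the ag run (see the renames below)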
String sourcePath = workspace().getFile("fix.100").getAbsolutePath();
String destPath = workspace().getDir().getAbsolutePath()
.concat(File.separator).concat("fix_2.100");
File sourceTemplate = new File(sourcePath);
File destTemplate = new File(destPath);
Files.copy(sourceTemplate.toPath(), destTemplate.toPath());
getJSONParams();
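        // fetch climate and soil data in parallel (or serially when the
        // ghg.serial.datafetch config flag is set, presumably for debugging)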
Parallel.run(Config.getBoolean("ghg.serial.datafetch", false),
() -> {
fetchClimate();
computeWeatherStats();
},
() -> {
fetchSoil();
countLayers(); // testing if we need the fix structure in Black et al. 2017
}
);
createConfig("sitepar.in");
        // Copy the pre-ag ("pa_") fix overrides from the payload onto the base
        // fix keys before the pre-agriculture run.
        if (fixes.size() != 0) {
            Map<String, Double> f = fixes.get("fixes");
            for (String key : Arrays.asList(
                    "dec11", "dec12", "dec21", "dec22", "dec31", "dec32", "dec4",
                    "dec51", "dec52", "teff1", "teff2", "teff3", "teff4",
                    "ps1s31", "ps1s32", "ps2s31", "ps2s32")) {
                if (f.containsKey("pa_" + key)) {
                    f.put(key, f.get("pa_" + key));
                }
            }
        }
// RENAME PRE-AG SCH
java.nio.file.Path source = workspace().getFile("pa_Truterra.sch").toPath();
Files.move(source, source.resolveSibling("Truterra.sch"), StandardCopyOption.REPLACE_EXISTING);
createConfig("crop.100");
createConfig(getSite100());
createConfig("fix.100"); // values from payload _pa
runDaycent();
runDaycentList();
source = workspace().getFile("Truterra.sch").toPath();
Files.move(source, source.resolveSibling("Truterra_pa.sch"), StandardCopyOption.REPLACE_EXISTING);
java.nio.file.Path sourceBin = workspace().getFile("Truterra.bin").toPath();
Files.move(sourceBin, sourceBin.resolveSibling("Truterra_pa.bin"), StandardCopyOption.REPLACE_EXISTING);
java.nio.file.Path sourceFix = workspace().getFile("fix.100").toPath();
Files.move(sourceFix, sourceFix.resolveSibling("fix_pa.100"), StandardCopyOption.REPLACE_EXISTING);
sourceFix = workspace().getFile("fix_2.100").toPath();
Files.move(sourceFix, sourceFix.resolveSibling("fix.100"), StandardCopyOption.REPLACE_EXISTING);
java.nio.file.Path sourceLis = workspace().getFile("Truterra.lis").toPath();
Files.move(sourceLis, sourceLis.resolveSibling("Truterra_pa.lis"), StandardCopyOption.REPLACE_EXISTING);
Files.delete(workspace().getFile("year_summary.out").toPath());
        // Copy the ag ("ag_") fix overrides from the payload onto the base
        // fix keys before the agriculture run.
        if (fixes.size() != 0) {
            Map<String, Double> f = fixes.get("fixes");
            for (String key : Arrays.asList(
                    "dec11", "dec12", "dec21", "dec22", "dec31", "dec32", "dec4",
                    "dec51", "dec52", "teff1", "teff2", "teff3", "teff4",
                    "ps1s31", "ps1s32", "ps2s31", "ps2s32")) {
                if (f.containsKey("ag_" + key)) {
                    f.put(key, f.get("ag_" + key));
                }
            }
        }
// RENAME AG SCH
source = workspace().getFile("ag_Truterra.sch").toPath();
Files.move(source, source.resolveSibling("Truterra.sch"), StandardCopyOption.REPLACE_EXISTING);
createConfig("fix.100"); // values from payload _ag
schedule_IC = "Truterra_pa.bin";
runDaycent();
runDaycentList();
// checkHarvest();
}
public static Map<Integer, Double> getMapFor(File wsFile, String column) throws IOException {
String[] time = getStringColumnData(wsFile, "time");
String[] col = getStringColumnData(wsFile, column);
return createlookup(time, col);
}
    static Map<Integer, Double> createlookup(String[] key, String[] val) {
        Map<Integer, Double> m = new LinkedHashMap<>();
        for (int i = 0; i < val.length; i++) {
            // truncate the .lis time stamp to a year and shift from simulation
            // years back to calendar years
            m.put((int) Double.parseDouble(key[i]) - MODEL_OFFSET, Double.parseDouble(val[i]));
        }
        return m;
    }
    // slice by time: keep entries with start <= key (year) <= end, bounds inclusive
static Map<Integer, Double> slize(int start, int end, Map<Integer, Double> orig) {
return orig.entrySet()
.stream()
.filter(map -> map.getKey() >= start && map.getKey() <= end)
.collect(Collectors.toMap(map -> map.getKey(), map -> map.getValue()));
}
    // convert: scale every value by the given conversion factor
static Map<Integer, Double> convert(double factor, Map<Integer, Double> orig) {
return orig.entrySet()
.stream()
.collect(Collectors.toMap(map -> map.getKey(), map -> map.getValue() * factor));
}
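    // Typical chaining, as in parseYearSummary below:
    //   convert(1.895087, slize(yearPractChange, endPractChange, getMapFor(ys_out, "N2Oflux")))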
void parseYearSummary() throws IOException {
File ys_out = workspace().getFile("year_summary.out");
Map<Integer, Double> N2Oflux = getMapFor(ys_out, "N2Oflux");
Map<Integer, Double> pcChangeN2Oflux = slize(yearPractChange, endPractChange, N2Oflux);
// N2O conversion: 1 g/m2 N2O-N = 1.895087 Mg/ac CO2e
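        // (plausible decomposition, not confirmed by the source: 0.00404686 Mg/ac
        //  per g/m2 * 44/28 N -> N2O * GWP 298 = 1.8951)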
Map<Integer, Double> conv_pcChangeN2Oflux = convert(1.895087, pcChangeN2Oflux);
double sumN2Oflux = conv_pcChangeN2Oflux.values().stream().mapToDouble(Double::doubleValue).sum();
Map<Integer, Double> CH4flux = getMapFor(ys_out, "CH4");
Map<Integer, Double> pcChangeCH4flux = slize(yearPractChange, endPractChange, CH4flux);
// CH4 conversion: 1 g/m2 CH4-C = 0.453248 Mg/ac CO2e
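        // (plausible decomposition, not confirmed by the source: 0.00404686
        //  * 16/12 C -> CH4 * GWP 84 = 0.45325)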
Map<Integer, Double> conv_pcChangeCH4flux = convert(0.453248, pcChangeCH4flux);
double sumCH4flux = conv_pcChangeCH4flux.values().stream().mapToDouble(Double::doubleValue).sum();
// put it into the result payload
results().put("N2O", sumN2Oflux);
results().put("CH4", sumCH4flux);
}
@Override
protected void postProcess() throws Exception {
// List<String> l = getRequestedObjfunc();
File ys_out = workspace().getFile("year_summary.out");
Map<Integer, Double> N2Oflux = getMapFor(ys_out, "N2Oflux");
Map<Integer, Double> CH4flux = getMapFor(ys_out, "CH4");
if (cosu().isRequested()) {
calibrate();
} else {
parseLis(N2Oflux, CH4flux);
// parseYearSummary();
}
}
//////////// Calibration
void calibrate() throws Exception {
for (String name : cosu().getNames()) {
String[] data = cosu().getData(name);
if (data.length != 2) {
throw new ServiceException("2 elements expected");
}
        // returns null if the OF is unknown.
ObjFunc of = cosu().getObjFunc(name);
// data[0] : calibration target (e.g. yield)
// data[1] : calibration details (startyear/endyear)
double v = calcObjFunc(of, LOG,
workspace().getFile("Truterra.sch"),
workspace().getFile("Truterra.lis"),
data[0], data[1]);
cosu().setValue(name, v);
}
}
//
// //////////// Calibration
// void calibrate(List<String> l) throws Exception {
// for (String ofName : l) {
//
// // 2 values
// String[] data = parameter().getStringArray(ofName);
// if (data.length != 2) {
// throw new ServiceException("Invalid of content, 2 elements expected");
// }
//
// // data[0] : calibration target (e.g. yield)
// // data[1] : calibration details (startyear/endyear)
// double v = calcObjFunc(LOG,
// workspace().getFile("Truterra.sch"),
// workspace().getFile("Truterra.lis"),
// data[0], data[1]);
// results().put(ofName, v);
// }
// }
static double getAvg(SessionLogger LOG, File schFile, File lisFile, String column,
int starty, int endy, String croptype, String termination) throws Exception {
// get the raw values from lis file: as map
// e.g. time -> cgracc
Map<String, String> c = ServiceUtils.getMapFor(lisFile, column);
if (LOG.isLoggable(Level.INFO)) {
LOG.info("caggr : " + c);
}
// lisDates of harvest from schedule file
List<String> harvestDates = getDates(LOG, starty, endy, schFile, croptype, termination);
// extract the values based on keys (harvest dates) into columnVals
List<String> columnVals = new ArrayList<>();
for (String harvestDate : harvestDates) {
            String caggr = c.get(harvestDate);
            if (caggr == null) {
                throw new ServiceException("No " + column + " value for harvestDate: " + harvestDate);
            }
            columnVals.add(caggr);
}
if (LOG.isLoggable(Level.INFO)) {
LOG.info("columnVals " + columnVals);
}
// create the average of those values
double avg = columnVals.stream().mapToDouble(Double::parseDouble).average().getAsDouble();
if (LOG.isLoggable(Level.INFO)) {
LOG.info("avg: " + avg);
}
return avg;
}
public static boolean isLeapYear(int year) {
Calendar cal = Calendar.getInstance();
cal.set(Calendar.YEAR, year);
return cal.getActualMaximum(Calendar.DAY_OF_YEAR) > 365;
}
static double getVal(SessionLogger LOG, File schFile, File lisFile, String column,
int year) throws Exception {
// get the raw values from lis file: as map
// e.g. time -> cgracc
Map<String, String> c = ServiceUtils.getMapFor(lisFile, column);
if (LOG.isLoggable(Level.INFO)) {
LOG.info("caggr : " + c);
}
String tdate;
if (isLeapYear(year)) {
tdate = String.valueOf(year) + ".366";
} else {
tdate = String.valueOf(year) + ".365";
}
String date = DateConversion.date2daycentLis(tdate);
// extract the values based on keys (harvest dates) into columnVals
String somsc = c.get(date);
if (somsc == null) {
throw new ServiceException("No somsc value for year: " + year);
}
if (LOG.isLoggable(Level.INFO)) {
LOG.info("somsc " + somsc);
}
return Double.parseDouble(somsc);
}
static List<String> getDates(SessionLogger LOG, int starty, int endy,
File schFile, String croptype, String termination) throws IOException {
List<String> schDates = ServiceUtils.getCropHarvest(schFile, croptype, termination);
if (LOG.isLoggable(Level.INFO)) {
LOG.info("schDates: " + schDates);
}
List<String> filtered_schDates = new ArrayList<>();
// filter by date range (starty - endy)
for (String schDate : schDates) {
String[] sd = schDate.split("\\.");
int y = Integer.parseInt(sd[0]);
if (y >= starty && y <= endy) {
if (termination.equals("LAST")) {
int day = Integer.parseInt(sd[1]) - 30; // one month earlier
schDate = y + "." + day;
}
filtered_schDates.add(schDate);
}
}
if (LOG.isLoggable(Level.INFO)) {
LOG.info("Filtered dates: " + filtered_schDates);
}
// convert all schDates to lisDates
List<String> lisDates = new ArrayList<>();
for (String schDate : filtered_schDates) {
lisDates.add(DateConversion.date2daycentLis(schDate));
}
if (LOG.isLoggable(Level.INFO)) {
LOG.info("lisDates: " + lisDates);
}
return lisDates;
}
    public static void main(String[] args) {
        // ad-hoc check of the time-stamp truncation used in createlookup
        int a = (int) Double.parseDouble("2077.00");
        System.out.println(a);
    }
// public static void main(String[] args) throws Exception {
//
//// calcObjFunc(new SessionLogger(),
//// new File("/tmp/csip/work/22/15/db0e4255-1be8-11ec-b04a-738330b2f29e/Truterra.sch"),
//// new File("/tmp/csip/work/22/15/db0e4255-1be8-11ec-b04a-738330b2f29e/Truterra.lis"),
//// "yield", "60, 2011, 2020, W4");
////
//// calcObjFunc(new SessionLogger(),
//// new File("/home/sidereus/documents/daycentsim/wheat/check/Truterra.sch"),
//// new File("/home/sidereus/documents/daycentsim/wheat/check/Truterra.lis"),
//// "yield", "180, 2011, 2020, C13");
//// calcObjFunc(new SessionLogger(),
//// new File("/tmp/csip/work/22/15/db0e4255-1be8-11ec-b04a-738330b2f29e/Truterra.sch"),
//// new File("/tmp/csip/work/22/15/db0e4255-1be8-11ec-b04a-738330b2f29e/Truterra.lis"),
//// "yield", "111.67, 2011, 2020, SORG3");
// calcObjFunc(new SessionLogger(),
// new File("/tmp/csip/work/22/15/db0e4255-1be8-11ec-b04a-738330b2f29e/Truterra.sch"),
// new File("/tmp/csip/work/22/15/db0e4255-1be8-11ec-b04a-738330b2f29e/Truterra.lis"),
// "soc", "50.9, 1875");
////
//// calcObjFunc(new SessionLogger(),
//// new File("/tmp/csip/work/09/12/16f716fa-e0e0-11eb-ad10-23d87c956bee/Truterra.sch"),
//// new File("/tmp/csip/work/09/12/16f716fa-e0e0-11eb-ad10-23d87c956bee/Truterra.lis"),
//// "biomass", "120, 2011, 2020, CC2");
// }
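    // targetInfo formats accepted by calcObjFunc (cf. the commented examples above):
    //   yield / biomass: "<obsValue>, <startYear>, <endYear>, <cropType>", e.g. "60, 2011, 2020, W4"
    //   soc:             "<obsValue>, <year>", e.g. "50.9, 1875"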
static double calcObjFunc(ObjFunc objfunc, SessionLogger LOG, File schFile, File lisFile,
String target, String targetInfo) throws Exception {
        // objfunc is not used yet
if (target.equals("yield")) {
String[] ti = targetInfo.split("\\s*,\\s*");
if (ti.length != 4) {
throw new ServiceException("Invalid targetinfo for yield.");
}
String obsValStr = ti[0]; // obs data value for period
String start = ti[1]; // start
String end = ti[2]; // end
String ct = ti[3]; // croptype (e.g. C5)
if (LOG.isLoggable(Level.INFO)) {
LOG.info(" objfunc info: " + Arrays.toString(ti));
}
double obsVal = Double.parseDouble(obsValStr);
// simulated yield
double simVal = getAvg(LOG, schFile, lisFile,
"cgracc", Integer.parseInt(start), Integer.parseInt(end), ct, "HARV");
Double conv = yieldConv.get(ct);
if (conv == null) {
throw new ServiceException("No yield conversion factor for: " + ct);
}
simVal = simVal * conv;
if (LOG.isLoggable(Level.INFO)) {
LOG.info("sim yield: " + simVal);
}
return obsVal - simVal;
} else if (target.equals("biomass")) {
String[] ti = targetInfo.split("\\s*,\\s*");
if (ti.length != 4) {
throw new ServiceException("Invalid targetinfo for yield.");
}
String obsValStr = ti[0]; // obs data value for period
String start = ti[1]; // start
String end = ti[2]; // end
String ct = ti[3]; // croptype (e.g. C5)
if (LOG.isLoggable(Level.INFO)) {
LOG.info(" objfunc info: " + Arrays.toString(ti));
}
double obsVal = Double.parseDouble(obsValStr);
// simulated yield
double simVal = getAvg(LOG, schFile, lisFile,
"agcacc", Integer.parseInt(start), Integer.parseInt(end), ct, "LAST");
Double conv = yieldConv.get(ct);
if (conv == null) {
throw new ServiceException("No yield conversion factor for: " + ct);
}
if (LOG.isLoggable(Level.INFO)) {
LOG.info("sim biomass: " + simVal);
}
simVal = simVal * conv * 0.81; // harvest index
return obsVal - simVal;
} else if (target.equals("soc")) {
String[] ti = targetInfo.split("\\s*,\\s*");
if (ti.length != 2) {
throw new ServiceException("Invalid targetinfo for yield.");
}
String obsValStr = ti[0]; // obs data value for period
String year = ti[1]; // year
if (LOG.isLoggable(Level.INFO)) {
LOG.info(" objfunc info: " + Arrays.toString(ti));
}
double simVal = getVal(LOG, schFile, lisFile,
"somsc", Integer.parseInt(year));
double obsVal = Double.parseDouble(obsValStr);
return obsVal - simVal;
} else {
throw new Exception("calibration target not supported: " + target);
}
}
// static final Map<String, ObjectiveFunction> OF = new HashMap<>();
// static {
// OF.put("kge", new KGE());
// OF.put("ns", new NS());
// OF.put("nslog", new NS2LOG());
// OF.put("nslog1p", new NSLOG1P());
// OF.put("nslog2", new NSLOG2());
// OF.put("rmse", new RMSE());
// OF.put("trmse", new TRMSE());
// OF.put("pbias", new PBIAS());
// }
//
// private List<String> getRequestedObjfunc() {
// List<String> l = new ArrayList<>();
// for (String p : parameter().getNames()) {
// if (p.toLowerCase().startsWith("pbias.")) {
// l.add(p);
// }
// }
// return l;
// }
/////////////////////
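    // .lis files are whitespace-delimited: line 1 is a header naming the columns
    // (time, cgracc, agcprd, bgcjprd, somsc, ...), line 2 is blank, and each
    // following line is one output record.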
protected void parseLis(Map<Integer, Double> N2Oflux, Map<Integer, Double> CH4flux) throws FileNotFoundException, JSONException {
List<Integer> practiceChange = new LinkedList<>();
        // report one result entry per year from staticYearPractChange through endPractChange
for (int i = staticYearPractChange; i <= endPractChange; i++) {
practiceChange.add(i);
}
String filename = schedule + ".lis";
int lineNumber = 1;
int colTime = 0;
        boolean skiplastline = false;
try ( Scanner scanner = new Scanner(workspace().getFile(filename));) {
String[] prevData = null;
Integer yearNewMgt = practiceChange.remove(0); // first element always
while (scanner.hasNextLine()) {
String line = scanner.nextLine();
double prevCgain = 0;
if (lineNumber == 1) {
// read header
String[] data = line.split("\\s+");
for (int col = 0; col < data.length; col++) {
if (data[col].equals("cgracc")) {
colGracc = col;
} else if (data[col].equals("agcprd")) {
colAbove = col;
} else if (data[col].equals("bgcjprd")) {
colBelow = col;
} else if (data[col].equals("somsc")) {
colSOM = col;
} else if (data[col].equals("time")) {
colTime = col;
}
}
} else if (lineNumber == 2) {
// skip line because empty
} else {
// check the line
String[] data = line.split("\\s+");
double year = Double.parseDouble(data[colTime]);
if (prevData == null && year == (yearNewMgt + MODEL_OFFSET)) {
prevData = data;
}
if (!skiplastline) {
                    // + MODEL_OFFSET (2000) because of the 4000-year simulation timeline
                    // + 1 because Daycent stamps December of year Y as (Y + 1).00 in the
                    //   .lis file (e.g. December 2012 -> 2013.00)
if (year == (yearNewMgt + MODEL_OFFSET + 1)) {
double som = Double.parseDouble(prevData[colSOM]);
prevCgain = som * 0.00404686 * area;
Integer actYear = yearNewMgt;
results().put(actYear.toString(), getValues(data, prevCgain, N2Oflux.get(actYear), CH4flux.get(actYear)));
prevData = data;
                        // the last year is duplicated in the .lis output and needs to be skipped
if (!practiceChange.isEmpty()) {
yearNewMgt = practiceChange.remove(0);
} else {
                            skiplastline = true;
}
}
}
}
lineNumber++;
}
}
}
private JSONArray getValues(String[] data, double prevCgain, double comp_n2o_emission, double comp_ch4_emission) throws JSONException {
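        // 0.00404686 converts g/m2 to Mg/ac (1 g/m2 = 0.01 Mg/ha, 1 ac = 0.404686 ha);
        // multiplying by area (assumed to be in acres, given the Mg/ac factors
        // below) yields metric tons for the component.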
double comp_crop_yld_c = Double.parseDouble(data[colGracc]) * 0.00404686 * area;
double comp_crop_res_c = (Double.parseDouble(data[colAbove])
+ Double.parseDouble(data[colBelow])) * 0.00404686 * area;
double comp_soil_c_stock = Double.parseDouble(data[colSOM]) * 0.00404686 * area;
double comp_soil_c_stock_unit_area = Double.parseDouble(data[colSOM]);
double comp_soil_c_gain = comp_soil_c_stock - prevCgain;
comp_n2o_emission = comp_n2o_emission * 1.895087; // g/m2 of N2O-N to Mg/ac CO2e
comp_n2o_emission = comp_n2o_emission * area; // Mg/ac CO2e to Mg (metric tons) CO2e
comp_ch4_emission = comp_ch4_emission * 0.453248; // g/m2 of CH4-C to Mg/ac CO2e
comp_ch4_emission = comp_ch4_emission * area; // Mg/ac CO2e to Mg (metric tons) CO2e
JSONObject jy = new JSONObject()
.put("name", "comp_crop_yld_c")
.put("description", "Crop yield carbon of intersected mapunit soil component leaving field")
.put("units", "Metric tons")
.put("value", comp_crop_yld_c);
JSONObject jc = new JSONObject()
.put("name", "comp_crop_res_c")
.put("description", "Crop residue carbon above and below ground of intersected mapunit soil component remaining in field")
.put("units", "Metric tons")
.put("value", comp_crop_res_c);
JSONObject js = new JSONObject()
.put("name", "comp_soil_c_stock")
.put("description", "Soil carbon stock of intersected mapunit soil component at the end of crop")
.put("units", "Metric tons")
.put("value", comp_soil_c_stock);
JSONObject jg = new JSONObject()
.put("name", "comp_soil_c_gain")
.put("description", "Soil carbon stock gain or loss for intersected mapunit soil component through crop year")
.put("units", "Metric tons")
.put("value", comp_soil_c_gain);
JSONObject jsua = new JSONObject()
.put("name", "comp_soil_c_stock_unit_area")
.put("description", "Soil carbon stock per unit area of intersected mapunit soil component at the end of crop")
.put("units", "g per meter squared")
.put("value", comp_soil_c_stock_unit_area);
JSONObject n2o_emission = new JSONObject()
.put("name", "comp_n2o_emission")
.put("description", "Nitrous oxide emissions from farm field in carbon dioxide (CO2) equivalents")
.put("units", "Metric tons")
.put("value", comp_n2o_emission);
JSONObject ch4_emission = new JSONObject()
.put("name", "comp_ch4_emission")
.put("description", "Methane emissions from farm field in carbon dioxide (CO2) equivalents")
.put("units", "Metric tons")
.put("value", comp_ch4_emission);
// JSONObject net_co2_removed = new JSONObject()
// .put("name", "comp_net_co2_removed_unit_area")
// .put("description", "Soil carbon CO2 equivalents minus CO2 equivalents for nitrous oxide and methans")
// .put("units", "Metric tons")
// .put("value", "");
JSONArray ja = new JSONArray()
.put(jy)
.put(jc)
.put(js)
.put(jg)
.put(jsua)
.put(n2o_emission)
.put(ch4_emission);
return ja;
}
public void checkHarvest() throws FileNotFoundException {
String filename = schedule + ".lis";
int lineNumber = 1;
int colGracc = 0;
int colTime = 0;
try ( Scanner scanner = new Scanner(workspace().getFile(filename));) {
while (scanner.hasNextLine()) {
String line = scanner.nextLine();
if (lineNumber == 1) {
// read header
String[] data = line.split("\\s+");
for (int col = 0; col < data.length; col++) {
if (data[col].equals("cgracc")) {
colGracc = col;
} else if (data[col].equals("time")) {
colTime = col;
}
}
} else if (lineNumber == 2) {
// skip line because empty
} else {
// check the line
String[] data = line.split("\\s+");
double year = Double.parseDouble(data[colTime]);
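                    // window 4011-4020 in .lis time, i.e. calendar years
                    // 2011-2020 assuming the 2000-year model offset used in parseLis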
if (year > 4010 && year < 4021) {
double grainHarvest = Double.parseDouble(data[colGracc]);
if (grainHarvest == 0.0) {
String msg = "cgracc for year " + year + " is " + grainHarvest;
throw new UnsupportedOperationException(msg);
}
}
}
lineNumber++;
}
}
}
    static final Map<String, Map<String, String>> defMap = new HashMap<>();
    static {
        defMap.put("prdx", prdxDef);
        defMap.put("ppdf1", ppdf1Def);
        defMap.put("himax", himaxDef);
        defMap.put("ppdf2", ppdf2Def);
        defMap.put("pramn", pramnDef);
        defMap.put("pramx", pramxDef);
    }
protected void countLayers() {
        // try-with-resources so the Files.lines stream is closed after reading
        try ( java.util.stream.Stream<String> lines
                = Files.lines(workspace().getFile("soils.in").toPath())) {
            profileDepths = new LinkedList<>();
            List<Double> profiles = lines
                    .map(str -> str.split("\\s+"))
                    .map(str -> Double.parseDouble(str[1]))
                    .collect(Collectors.toList());
double[] defaultProfile = new double[]{10, 20, 15, 15, 30, 30, 30, 30, 30, 30};
double val = 0;
double bottom = profiles.get(profiles.size() - 1);
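            // e.g. a profile bottom of 60 cm walks 10+20+15+15 and yields
            // profileDepths = [10, 20, 15, 15]; a bottom of 50 yields [10, 20, 15, 5]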
for (int i = 0; i < defaultProfile.length; i++) {
double prof = defaultProfile[i];
val += prof;
if (val == bottom) {
profileDepths.add(prof);
break;
} else if (val > bottom) {
double tmpval = val - prof;
profileDepths.add(bottom - tmpval);
break;
} else {
profileDepths.add(prof);
}
}
nlayer = profileDepths.size(); // - 2; // the first three layers are meshed into 1
} catch (IOException ex) {
throw new RuntimeException(ex.getMessage());
}
}
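    // Dotted payload keys are split as "<group>.<name>.<param>"; an illustrative
    // key "crop.C1.prdx" with value 5.0 would yield crops["C1"]["prdx"] = 5.0.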
protected Map<String, Map<String, Double>> dictPopulation(Map<String, Map<String, Double>> hashmap, String s) throws ServiceException {
String[] parts = s.split("\\.");
String cropName = parts[1];
String parameterName = parts[2];
Double parameterValue = parameter().getDouble(s);
if (hashmap.containsKey(cropName)) {
hashmap.get(cropName).put(parameterName, parameterValue);
} else {
Map<String, Double> val1 = new HashMap<>();
val1.put(parameterName, parameterValue);
hashmap.put(cropName, val1);
}
return hashmap;
}
protected void fillCropValues() {
for (Map.Entry<String, Map<String, Double>> entry : crops.entrySet()) {
List<String> cropparamList = Arrays.asList("prdx", "ppdf1", "ppdf2", "himax");
for (String param : cropparamList) {
if (!entry.getValue().containsKey(param)) {
crops.get(entry.getKey()).put(param, Double.parseDouble(defMap.get(param).get(entry.getKey())));
}
}
}
}
protected void fillFileValues(List<String> paramList, Map<String, Map<String, Double>> hashmap, Map<String, String> defName) {
for (Map.Entry<String, Map<String, Double>> entry : hashmap.entrySet()) {
for (String param : paramList) {
if (!entry.getValue().containsKey(param)) {
hashmap.get(entry.getKey()).put(param, Double.parseDouble(defName.get(param)));
}
}
}
}
    protected void fillFileFixes(List<String> paramList, Map<String, Map<String, Double>> hashmap, Map<String, String> defName) {
        // identical logic to fillFileValues; delegate instead of duplicating the loop
        fillFileValues(paramList, hashmap, defName);
    }
protected void getJSONParams() throws JSONException, ServiceException {
List<String> paramList = Arrays.asList("fwloss1", "fwloss2", "fwloss3", "fwloss4",
"ag_dec32", "pa_dec32", "ag_dec4", "pa_dec4", "pa_dec52", "ag_dec52",
"dec32", "dec4", "dec52",
"ag_teff1", "ag_teff2", "ag_teff3", "ag_teff4",
"pa_teff1", "pa_teff2", "pa_teff3", "pa_teff4",
"ps1s31", "ps1s32", "ps2s31", "ps2s32",
"pa_ps1s31", "pa_ps1s32", "pa_ps2s31", "pa_ps2s32",
"ag_ps1s31", "ag_ps1s32", "ag_ps2s31", "ag_ps2s32", "dec11", "dec12", "dec22", "dec21", "dec31", "dec51",
"pa_dec11", "ag_dec11", "pa_dec12", "ag_dec12", "pa_dec21", "ag_dec21", "pa_dec22", "ag_dec22", "pa_dec31", "ag_dec31", "pa_dec51", "ag_dec51");
List<String> lrrmList = Arrays.asList("basef");
for (String s : JSONparams) {
String[] parts = s.split("\\.");
if (parts.length > 1) {
String cropType = parts[0];
String fileName = parts[1];
if (cropType.equals("crop")) {
crops = dictPopulation(crops, s);
} else if (cropType.equals("param") && fileName.equals("fixes")) {
fixes = dictPopulation(fixes, s);
} else if (cropType.equals("param") && fileName.equals("lrrm")) {
lrrm = dictPopulation(lrrm, s);
}
}
}
fillFileFixes(paramList, fixes, fixDef);
fillFileValues(lrrmList, lrrm, lrrmDef);
fillCropValues();
}
protected void fetchSoil() throws Exception {
ModelDataServiceCall mds = new ModelDataServiceCall()
.put(STREAM_FILE, fileAsJSON)
.put("cokey", cokey)
.put(soils.AoA.AOA_GEOMETRY, new JSONObject()
.put(TYPE, aoa_geometry.get(TYPE))
.put(COORDINATES, aoa_geometry.get(COORDINATES)))
.url(Config.getString("ghg.soilfile.url",
request().getCodebase() + "d/insoilfile/2.2"))
.call();
if (mds.serviceFinished()) {
mds.download("soils.in", workspace().getFile("soils.in"));
} else {
throw new RuntimeException("GHG Soil service error: " + mds.getError());
}
}
}