V2_0.java [src/java/m/watershed/raster_stats] Revision: default Date:
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package m.watershed.raster_stats;
import csip.ModelDataService;
import csip.api.server.PayloadParameter;
import csip.api.server.ServiceException;
import csip.annotations.Description;
import csip.annotations.Name;
import csip.annotations.Resource;
import javax.ws.rs.Path;
import csip.utils.ZipFiles;
import java.io.*;
import java.nio.charset.Charset;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import m.watershed.Resources;
import static m.watershed.Resources.GDAL;
import static m.watershed.Resources.GDALBUILDVRT;
import static m.watershed.Resources.GDALCALC;
import static m.watershed.Resources.GDALTRANSLATE;
import static m.watershed.Resources.OGR;
import static m.watershed.Resources.PYTHON;
import static m.watershed.Resources.ZONALSTATS_MAMC;
import static m.watershed.Utils.checkExist;
import static m.watershed.Utils.exec;
import org.apache.commons.io.FileUtils;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONObject;
/**
* raster_stats
*
* @author HK (using OD's template)
*/
@Name("Raster Stats by Vector data")
@Description("Example ... input data")
@Path("m/raster_stats/2.0")
@Resource(from = Resources.class)
public class V2_0 extends ModelDataService {
static final String BOUNDARY = "boundary";
static final String RESULTID = "result_ID";
static final String STATS = "stats";
static final String RASTERS = "rasters";
static final String FIELDRASTER = "field_raster";
static final String UTMFILE = "utm_file";
boolean shp_file = false;
String shp_name = "";
String used_stats = "";
String used_fieldraster = "";
Map<String, String> Results = new HashMap<>();
JSONObject boundary = null;
JSONArray rasters = new JSONArray();
double Result = Double.NaN;
Integer pixel_width = 5;
@Override
protected void doProcess() throws Exception {
PayloadParameter inputPayload = parameter();
if (inputPayload.has(BOUNDARY) && inputPayload.has(RESULTID) && inputPayload.has(UTMFILE)) {
String b = parameter().getString(BOUNDARY);
String ID = parameter().getString(RESULTID);
used_stats = parameter().getString(STATS, "ALL");
used_fieldraster = parameter().getString(FIELDRASTER, "help.asc");
rasters = parameter().getJSONArray(RASTERS);
if (b.endsWith(".shp")) {
shp_file = true;
File input_file = attachments().getFile(b);
checkExist(workspace().getDir(), shp_name);
shp_name = input_file.getName();
} else {
boundary = parameter().getJSONArray(BOUNDARY).getJSONObject(0);
FileUtils.writeStringToFile(workspace().getFile("boundary.geojson"), boundary.toString(), "UTF-8");
checkExist(workspace().getDir(), "boundary.geojson");
shp_name = "field_extent_utm.shp";
}
run(ID);
} else {
throw new ServiceException("No result-id or No UTM-file or No Extent or no GeoJSON Geometry input listed!");
}
}
private void run(String ID) throws Exception {
int threads = Resources.mpiThreads(parameter());
setProgress("Started...");
String condition_string = "--calc=A*(A>=-900)";
if (used_stats.equals("ALL")) {
condition_string = "--calc=A*(A>=-900)";
}
if (used_stats.equals("NEGATIVE")) {
condition_string = "--calc=A*(A<0)";
}
if (used_stats.equals("POSITIVE")) {
condition_string = "--calc=A*(A>0)";
}
if (used_stats.equals("ZERO")) {
condition_string = "--calc=A*(A==0.0)";
}
checkExist(workspace().getDir(), parameter().getString(UTMFILE));
BufferedReader br = null;
String utmline = "";
br = new BufferedReader(new FileReader(workspace().getFile(parameter().getString(UTMFILE))));
utmline = br.readLine();
if (!shp_file) {
setProgress("No shp file, generate one...");
workspace().writeString("boundary.geojson", boundary.toString());
exec(resources(), LOG,
OGR,
"-t_srs", "EPSG:4326",
"-f", "ESRI Shapefile",
workspace().getFile("field_extent0.shp"),
workspace().getFile("boundary.geojson")
);
checkExist(workspace().getDir(), "field_extent0.shp");
exec(resources(), LOG,
OGR,
"-explodecollections",
"-skipfailures",
"-f", "ESRI Shapefile",
"-dialect", "sqlite",
"-sql", "select ST_buffer(geometry,0), * from field_extent0",
workspace().getFile("field_extent.shp"),
workspace().getFile("field_extent0.shp")
);
checkExist(workspace().getDir(), "field_extent.shp");
setProgress("Get field extend ...");
exec(resources(), LOG,
OGR,
"-f", "CSV",
"-dialect", "sqlite",
"-sql", "select AsText(St_Convexhull(geometry)) AS geom, * from field_extent",
workspace().getFile("field_boundary.csv"),
workspace().getFile("field_extent.shp")
);
checkExist(workspace().getDir(), "field_boundary.csv");
setProgress("Reproject ...");
exec(resources(), LOG,
OGR,
"-t_srs", "" + utmline,
"-f", "ESRI Shapefile",
workspace().getFile("field_extent_utm.shp"),
workspace().getFile("field_extent.shp")
);
checkExist(workspace().getDir(), "field_extent_utm.shp");
} else {
setProgress("Use provided shp file...");
}
String lines = "";
br = new BufferedReader(new FileReader(workspace().getFile(rasters.getString(0))));
while ((lines = br.readLine()) != null) {
if (lines.startsWith("cellsize")) {
String[] splited = lines.split("\\s+");
pixel_width = Double.valueOf(splited[1]).intValue();
break;
}
}
setProgress("ASCII to UTM GeoTIFF...");
setProgress("Translate FlowPathLoss ASCII to GeoTIFF...");
FileWriter result = new FileWriter(workspace().getFile("tiff_list.txt"));
for (int i = 0; i < rasters.length(); i++) {
String file_name = rasters.getString(i);
exec(resources(), LOG,
GDALTRANSLATE,
"-of",
"GTiff",
"-ot", "Float32",
"-a_srs", "" + utmline,
workspace().getFile(file_name),
workspace().getFile("output_" + i + ".tif"),
"-a_nodata", "-999"
// "-a_nodata", "None"
);
checkExist(workspace().getDir(), "output_" + i + ".tif");
result.write("output_" + i + ".tif\n");
}
result.flush();
result.close();
checkExist(workspace().getDir(), "tiff_list.txt");
setProgress("Merging outlet flowpath rasters...");
exec(resources(), LOG,
GDALBUILDVRT,
"-input_file_list", workspace().getFile("tiff_list.txt"),
workspace().getFile("merged.vrt")
);
checkExist(workspace().getDir(), "merged.vrt");
setProgress("Translating merged raster...");
exec(resources(), LOG,
GDALTRANSLATE,
workspace().getFile("merged.vrt"),
workspace().getFile("merged_rasters.tif")
);
checkExist(workspace().getDir(), "merged_rasters.tif");
setProgress("Cropping Raster to Field...");
exec(resources(), LOG,
GDAL,
workspace().getFile("merged_rasters.tif"),
workspace().getFile("field_merged_rasters.tif"),
"-crop_to_cutline",
"-cutline",
//workspace().getFile("field_extent_utm.shp"),
workspace().getFile("field_extent_utm.shp"),
"-tr", pixel_width + "", pixel_width + "",
"-dstnodata", "-999",
//"-dstnodata", "None",
"-tap",
//"-te", coords,
"-t_srs", "" + utmline,
"-multi",
"-of", "GTiff",
"-co", "TILED=YES",
"-co", "COMPRESS=LZW",
//"-co", "BIGTIFF=YES",
"-ot", "Float32",
"-co", "NUM_THREADS=" + threads + "",
"-wo", "NUM_THREADS=" + threads + "",
"-wo", "CUTLINE_ALL_TOUCHED=TRUE",
"--config",
"GDAL_CACHEMAX", "2500",
"--config",
"GDALWARP_IGNORE_BAD_CUTLINE", "YES",
"-wm", "2500"
);
checkExist(workspace().getDir(), "field_merged_rasters.tif");
exec(resources(), LOG,
GDALCALC,
"-A", workspace().getFile("field_merged_rasters.tif"),
"--outfile=" + workspace().getFile("merged0.tif"),
"--calc=numpy.where(A!=-999,A,numpy.nan)"
);
checkExist(workspace().getDir(), "merged0.tif");
setProgress("Translate Field FlowPathLoss ASCII to GeoTIFF...");
exec(resources(), LOG,
GDALTRANSLATE,
"-of",
"GTiff",
"-ot", "Float32",
"-a_srs", "" + utmline,
workspace().getFile(used_fieldraster),
workspace().getFile("field.tif"),
"-a_nodata", "-999"
//"-a_nodata", "None"
);
checkExist(workspace().getDir(), "field.tif");
setProgress("Cropping Raster to Field...");
exec(resources(), LOG,
GDAL,
workspace().getFile("field.tif"),
workspace().getFile("field_field.tif"),
"-crop_to_cutline",
"-cutline",
//workspace().getFile("field_extent_utm.shp"),
workspace().getFile("field_extent_utm.shp"),
"-tr", pixel_width + "", pixel_width + "",
"-dstnodata", "-999",
//"-dstnodata", "None",
"-tap",
//"-te", coords,
"-t_srs", "" + utmline,
"-multi",
"-of", "GTiff",
"-co", "TILED=YES",
"-co", "COMPRESS=LZW",
//"-co", "BIGTIFF=YES",
"-ot", "Float32",
"-co", "NUM_THREADS=" + threads + "",
"-wo", "NUM_THREADS=" + threads + "",
"-wo", "CUTLINE_ALL_TOUCHED=TRUE",
"--config",
"GDAL_CACHEMAX", "2500",
"--config",
"GDALWARP_IGNORE_BAD_CUTLINE", "YES",
"-wm", "2500"
);
checkExist(workspace().getDir(), "field_field.tif");
setProgress("Merged flowpath rasters overlayed with Hillslope result...");
exec(resources(), LOG,
GDALCALC,
"-A", workspace().getFile("merged0.tif"),
"-B", workspace().getFile("field_field.tif"),
"--outfile=" + workspace().getFile("overlayed.tif"),
"--calc=numpy.where(isnan(A),B, A)",
"--NoDataValue=-999"
);
checkExist(workspace().getDir(), "overlayed.tif");
setProgress("Filter flowpath raster...");
exec(resources(), LOG,
GDALCALC,
"-A", workspace().getFile("overlayed.tif"),
"--overwrite",
"--outfile=" + workspace().getFile("output.tif"),
"--NoDataValue=-999",
condition_string
);
checkExist(workspace().getDir(), "output.tif");
exec(resources(), LOG,
GDALTRANSLATE,
"-of",
"AAIGRID",
"-ot", "Float32",
"-a_srs", "" + utmline,
workspace().getFile("output.tif"),
workspace().getFile("new_output.asc"),
"-a_nodata", "-999"
);
checkExist(workspace().getDir(), "new_output.asc");
String resultfile = "";
setProgress("Stats...");
exec(resources(), LOG,
PYTHON,
resources().getFile(ZONALSTATS_MAMC),
workspace().getFile(shp_name),
workspace().getFile("output.tif"),
ID,
"stats_",
"False",
"-999"
);
resultfile = "stats_results.csv";
checkExist(workspace().getDir(), "stats_results.csv");
br = new BufferedReader(new FileReader(workspace().getFile(resultfile)));
Integer c = 0;
String line;
while ((line = br.readLine()) != null) {
if (c > 0) {
String[] infos = line.split(",");
Results.put(infos[0], infos[1]);
}
c++;
}
// pullResult();
setProgress("Finished.");
}
// 3) provide the temperature as a result.
@Override
protected void postProcess() throws Exception {
results().put("reso", pixel_width);
ZipFiles.zip(workspace().getFile("output.zip"),
(Object) workspace().getFiles("*.tif", "*.csv", "*.kml", "*.kmz", "*.json",
"*.geojson", "*.asc", "*.shp", "*.dbf", "*.prj", "*.shx", "*.dat"));
results().put(workspace().getFile("output.zip"));
if (parameter().getBoolean("std_zip", false)) {
ZipFiles.zip(workspace().getFile("std.zip"), (Object) workspace().getFiles("*.txt"));
results().put(workspace().getFile("std.zip"));
}
}
protected void pullResult(String prev) throws IOException, ServiceException {
File resultFile = workspace().getFile(prev + "_results.csv");
List<String> results = FileUtils.readLines(resultFile, Charset.defaultCharset());
if (results.size() >= 2) {
String average = results.get(1);
String[] values = average.split(",");
Result = Double.parseDouble(values[1]);
} else {
throw new ServiceException("No values are found in the resulting csv file.");
}
}
public static double haversine(double x) {
return (1.0 - Math.cos(x)) / 2.0;
}
public double sphericalPolygonArea(double[] lat, double[] lon) {
double lam1 = 0, lam2 = 0, beta1 = 0, beta2 = 0, cosB1 = 0, cosB2 = 0;
double sum = 0;
for (int j = 0; j < lat.length; j++) {
int k = j + 1;
if (j == 0) {
lam1 = lon[j];
beta1 = lat[j];
lam2 = lon[j + 1];
beta2 = lat[j + 1];
cosB1 = Math.cos(beta1);
cosB2 = Math.cos(beta2);
} else {
k = (j + 1) % lat.length;
lam1 = lam2;
beta1 = beta2;
lam2 = lon[k];
beta2 = lat[k];
cosB1 = cosB2;
cosB2 = Math.cos(beta2);
}
if (lam1 != lam2) {
double hav = haversine(beta2 - beta1) + cosB1 * cosB2 * haversine(lam2 - lam1);
double a = 2 * Math.asin(Math.sqrt(hav));
double b = Math.PI / 2 - beta2;
double c = Math.PI / 2 - beta1;
double s = 0.5 * (a + b + c);
double t = Math.tan(s / 2) * Math.tan((s - a) / 2) * Math.tan((s - b) / 2) * Math.tan((s - c) / 2);
double excess = Math.abs(4 * Math.atan(Math.sqrt(Math.abs(t))));
if (lam2 < lam1) {
excess = -excess;
}
sum += excess;
}
}
return Math.abs(sum) * R * R;
}
static final double R = 6371.001; // kilometers
}