PostGIS.java [src/java/c] Revision: 95faef4c0711cc06cca672ad885649ce96de8ec9 Date: Mon Sep 26 13:38:03 MDT 2016
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package c;
import csip.Config;
import csip.ServiceException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.SQLWarning;
import java.sql.Statement;
import java.util.*;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
*
* @author od
*/
public class PostGIS implements GIS_DB{
// JDBC URL of the default PostGIS database; overridable via the "pg.url" config key.
private static String PG_URL = Config.getString("pg.url", "jdbc:postgresql://oms-db.engr.colostate.edu:5432/r2gis");
// Default size of the connection pool (actual size read from "pg.connpool").
private static int MAX_CONNECTIONS = 16;
// Default for "pg.chunks": 0 disables longitude-chunked (sharded) databases.
private static int DEFAULT_PG_CHUNKS = 0;
static final Logger logger = Logger.getLogger(PostGIS.class.getName());
//private java.sql.Connection conn;
// Lazily created shared instance (see singleton()).
private static PostGIS instance;
// Round-robin index into the shared connection pool below.
private static int currentConnection = 0;
// Shared pool of open connections to the default database.
private static java.sql.Connection[] connections;
// True when longitude-based db chunks are configured (set in the constructor).
public static boolean bUseChunks = false;
// // deactivated the singleton pattern, because we want more than 1 db conn
// public static PostGIS singleton() throws Exception {
// return new PostGIS();
// }
/**
 * Returns the shared PostGIS instance, creating it on first use.
 * Synchronized so two concurrent first callers cannot both construct one.
 *
 * @return the lazily created singleton
 * @throws Exception if constructing the instance (driver load / db connect) fails
 */
protected static synchronized PostGIS singleton() throws Exception {
    if (instance != null) {
        return instance;
    }
    instance = new PostGIS();
    return instance;
}
/**
 * Registers the PostgreSQL JDBC driver and initializes the connection pool(s).
 * With "pg.chunks" == 0 a single static pool of "pg.connpool" connections to the
 * default database is created; otherwise one pool per configured longitude chunk
 * is created and stored on the chunk itself, failing over to the default db URL
 * when a chunk server is unreachable.
 * NOTE(review): credentials are hard-coded ("postgres"/"postgres") — these should
 * come from configuration, not source.
 * NOTE(review): in chunk mode the static 'connections' array is never initialized,
 * yet getConnection() falls back to it when no chunk matches — looks like a latent
 * NullPointerException; confirm the chunk config always covers all longitudes.
 */
PostGIS() throws Exception {
    // Ensure the PostgreSQL JDBC driver is registered with DriverManager.
    Class.forName("org.postgresql.Driver");
    //conn = DriverManager.getConnection(PG_URL, "postgres", "postgres");
    // initialize connection pool
    int pgChunks = Config.getInt("pg.chunks", DEFAULT_PG_CHUNKS);
    int maxConnections = Config.getInt("pg.connpool", MAX_CONNECTIONS);
    if (pgChunks == 0) {
        bUseChunks = false;
        logger.log(Level.INFO, "Not using postgresql database chunks.");
        logger.log(Level.INFO, "Initializing {0} db connections for connection pool.", maxConnections);
        connections = new Connection[maxConnections];
        for (int i = 0; i < maxConnections; i++) {
            connections[i] = DriverManager.getConnection(PG_URL, "postgres", "postgres");
        }
    } else {
        bUseChunks = true;
        logger.log(Level.INFO, "Using postgresql database chunks!");
        logger.log(Level.INFO, "Initializing {0} db connections for each db chunk in the connection pool.", maxConnections);
        Collection<Config.PostgresChunk> coll = Config.getPostgresChunks();
        for (Config.PostgresChunk pgc : coll) {
            logger.log(Level.INFO, "Initializing db connection for pg db {0} for longitude " + pgc.getMinLong() + " to " + pgc.getMaxLong() + ".", pgc.getName());
            // This local array intentionally shadows the static field: each
            // chunk keeps its own pool via pgc.setConnections(...).
            Connection[] connections = new Connection[maxConnections];
            for (int i = 0; i < maxConnections; i++) {
                try {
                    logger.info("default-db=" + PG_URL + " chunk-db=" + pgc.getName());
                    connections[i] = DriverManager.getConnection(pgc.getName(), "postgres", "postgres");
                } catch (SQLException sqle) {
                    logger.log(Level.SEVERE, "Connection to db chunk " + i + " server has failed. Failing over to " + PG_URL);
                    // Failover to default db server
                    connections[i] = DriverManager.getConnection(PG_URL, "postgres", "postgres");
                }
            }
            pgc.setConnections(connections);
        }
    }
}
/**
 * Runs a file-lookup query expected to return exactly (id, file_path, file_name).
 *
 * @param select SQL select returning exactly 3 columns
 * @param lng    longitude used to pick the db chunk/connection
 * @return the first matching row, or null if the query returned no rows
 * @throws SQLException             on query failure
 * @throws IllegalArgumentException if the result does not have 3 columns
 */
private FileQryResult findFile(String select, double lng) throws SQLException {
    logger.info("query sql=" + select);
    // Fix: close Statement/ResultSet on every path (the old s.close() calls were
    // commented out, leaking server-side cursors). The pooled Connection itself
    // is NOT closed here — it is shared and reused.
    try (Statement s = createStatement(lng);
            ResultSet r = s.executeQuery(select)) {
        int cols = r.getMetaData().getColumnCount();
        logger.info("Column Count: " + cols);
        if (cols != 3) {
            logger.severe("invalid columns in soil query");
            throw new IllegalArgumentException("invalid Columns");
        }
        if (!r.next()) {
            logger.severe("no result from soil query!");
            return null;
        }
        FileQryResult res = new FileQryResult();
        res.id = r.getString(1);
        res.file_path = r.getString(2);
        res.file_name = r.getString(3);
        logger.info("id=" + res.id);
        logger.info("r2_path=" + res.file_path);
        logger.info("r2_name=" + res.file_name);
        return res;
    }
}
/**
 * Runs a single-value lookup query and returns column 1 as a string.
 *
 * @param select SQL select statement
 * @param lng    longitude used to pick the db chunk/connection
 * @param colcnt expected number of result columns
 * @return column 1 of the first row, or "0" if the query returned no rows
 * @throws SQLException             on query failure
 * @throws IllegalArgumentException if the column count does not match colcnt
 */
private String findCmzName(String select, double lng, int colcnt) throws SQLException {
    logger.info("query sql=" + select);
    // Fix: close Statement/ResultSet on every path (closes were commented out).
    try (Statement s = createStatement(lng);
            ResultSet r = s.executeQuery(select)) {
        int cols = r.getMetaData().getColumnCount();
        if (cols != colcnt) {
            logger.severe("invalid number of columns returned in find dbl query:" + select);
            throw new IllegalArgumentException("invalid Columns");
        }
        if (!r.next()) {
            logger.severe("no result from find dbl query!");
            return "0";
        }
        logger.info("value=" + r.getDouble(1));
        return r.getString(1);
    }
}
/**
 * Runs a RUSLE2 soil/AOI intersection query (13 columns expected) and maps each
 * row to a Soil, deriving the slope length from the representative slope.
 *
 * @param select SQL select returning the 13 soil/intersection columns
 * @param lng    longitude used to pick the db chunk/connection
 * @return all matching soils (possibly empty)
 * @throws SQLException             on query failure
 * @throws IllegalArgumentException if the result does not have 13 columns
 */
private Collection<Soil> findPolygons(String select, double lng) throws SQLException {
    LinkedList<Soil> soilLst = new LinkedList<Soil>();
    logger.info("query sql=" + select);
    // Fix: try-with-resources closes Statement/ResultSet on every path; the old
    // code only closed them on the success path and leaked on the throw paths.
    try (Statement s = createStatement(lng);
            ResultSet r = s.executeQuery(select)) {
        int cols = r.getMetaData().getColumnCount();
        if (cols != 13) {
            logger.severe("invalid columns in soil query");
            throw new IllegalArgumentException("invalid Columns");
        }
        while (r.next()) {
            Soil soil = new Soil();
            soil.coFips = r.getString(1);
            soil.muSym = r.getString(2);
            soil.muKey = r.getString(3);
            soil.r2Path = r.getString(4);
            soil.r2Name = r.getString(5);
            soil.muName = r.getString(6);
            soil.compName = r.getString(7);
            soil.kffact = r.getString(8);
            soil.tfact = r.getDouble(9);
            soil.slopeR = r.getDouble(10);
            soil.coKey = r.getString(11);
            soil.percentAoi = r.getDouble(12);
            soil.sizeAoi = r.getDouble(13);
            // Slope length is derived from representative slope + survey area.
            soil.lengthR = calculateSlopeR(soil.slopeR, soil.coFips);
            String sInfo = "ADDING SOIL TO LIST cokey=" + soil.coKey;
            sInfo += " ,mukey=" + soil.muKey;
            sInfo += " ,r2_path=" + soil.r2Path;
            sInfo += " ,r2_name=" + soil.r2Name;
            logger.info(sInfo);
            soilLst.add(soil);
        }
    }
    return soilLst;
}
/**
 * Runs a RUSLE2 soil/AOI intersection query including sandtotal_r (14 columns)
 * and maps each row to a Soil. Any SQL warning raised by the statement is
 * promoted to an SQLException.
 *
 * @param select SQL select returning the 14 soil/intersection columns
 * @param lng    longitude used to pick the db chunk/connection
 * @return all matching soils (possibly empty)
 * @throws SQLException             on query failure or statement warning
 * @throws IllegalArgumentException if the result does not have 14 columns
 */
private Collection<Soil> findPolygonsWithSand(String select, double lng) throws SQLException {
    LinkedList<Soil> soilLst = new LinkedList<Soil>();
    logger.info("query sql=" + select);
    // Fix: try-with-resources closes Statement/ResultSet on every path; the old
    // code leaked both whenever a warning or the column check threw.
    try (Statement s = createStatement(lng);
            ResultSet r = s.executeQuery(select)) {
        SQLWarning warning = s.getWarnings();
        //SQLWarning warning = r.getWarnings();
        if (warning != null) {
            String sqlWarning = warning.getMessage();
            logger.info("**************************************************SQL WARNINGS! executing polygon with sand query");
            logger.info("**************************************************message=" + sqlWarning);
            logger.info("**************************************************sql-state=" + warning.getSQLState());
            logger.info("**************************************************error-code=" + warning.getErrorCode());
            if ((sqlWarning != null) && (sqlWarning.length() > 0)) {
                throw new SQLException(sqlWarning);
            }
        } else {
            logger.info("**************************************************NO SQL WARNINGS! executing polygon with sand query");
        }
        int cols = r.getMetaData().getColumnCount();
        if (cols != 14) {
            logger.severe("invalid columns in soil query");
            throw new IllegalArgumentException("invalid Columns");
        }
        while (r.next()) {
            Soil soil = new Soil();
            soil.coFips = r.getString(1);
            soil.muSym = r.getString(2);
            soil.muKey = r.getString(3);
            soil.r2Path = r.getString(4);
            soil.r2Name = r.getString(5);
            soil.muName = r.getString(6);
            soil.compName = r.getString(7);
            soil.kffact = r.getString(8);
            soil.tfact = r.getDouble(9);
            soil.slopeR = r.getDouble(10);
            soil.coKey = r.getString(11);
            // issues here with precision from ssurgo are avoided by converting
            // the string value to double
            soil.sandtotalR = Double.parseDouble(r.getString(12));
            soil.percentAoi = r.getDouble(13);
            soil.sizeAoi = r.getDouble(14);
            // Slope length is derived from representative slope + survey area.
            soil.lengthR = calculateSlopeR(soil.slopeR, soil.coFips);
            String sInfo = "ADDING SOIL TO LIST cokey=" + soil.coKey;
            sInfo += " ,mukey=" + soil.muKey;
            sInfo += " ,r2_path=" + soil.r2Path;
            sInfo += " ,r2_name=" + soil.r2Name;
            logger.info(sInfo);
            soilLst.add(soil);
        }
    }
    return soilLst;
}
/**
 * Runs a WEPS soil/AOI intersection query (13 columns expected) and maps each
 * row to a Soil, filling the WEPS file path/name fields.
 *
 * @param select SQL select returning the 13 soil/intersection columns
 * @param lng    longitude used to pick the db chunk/connection
 * @return all matching soils (possibly empty)
 * @throws SQLException             on query failure
 * @throws IllegalArgumentException if the result does not have 13 columns
 */
private Collection<Soil> findPolygonsWeps(String select, double lng) throws SQLException {
    LinkedList<Soil> soilLst = new LinkedList<Soil>();
    logger.info("query sql=" + select);
    // Fix: try-with-resources closes Statement/ResultSet on every path; the old
    // code leaked both when the column-count check threw.
    try (Statement s = createStatement(lng);
            ResultSet r = s.executeQuery(select)) {
        int cols = r.getMetaData().getColumnCount();
        if (cols != 13) {
            logger.severe("invalid columns in soil query");
            throw new IllegalArgumentException("invalid Columns");
        }
        while (r.next()) {
            Soil soil = new Soil();
            soil.coFips = r.getString(1);
            soil.muSym = r.getString(2);
            soil.muKey = r.getString(3);
            soil.wepsPath = r.getString(4);
            soil.wepsName = r.getString(5);
            soil.muName = r.getString(6);
            soil.compName = r.getString(7);
            soil.kffact = r.getString(8);
            soil.tfact = r.getDouble(9);
            soil.slopeR = r.getDouble(10);
            soil.coKey = r.getString(11);
            soil.percentAoi = r.getDouble(12);
            soil.sizeAoi = r.getDouble(13);
            // Slope length is derived from representative slope + survey area.
            soil.lengthR = calculateSlopeR(soil.slopeR, soil.coFips);
            String sInfo = "ADDING SOIL TO LIST cokey=" + soil.coKey;
            sInfo += " ,mukey=" + soil.muKey;
            sInfo += " ,weps_path=" + soil.wepsPath;
            sInfo += " ,weps_name=" + soil.wepsName;
            logger.info(sInfo);
            soilLst.add(soil);
        }
    }
    return soilLst;
}
// Look up table for slope length.
// Each row is {min slope %, max slope % (exclusive), slope length};
// a max of -1 marks the open-ended final row (see calculateSlopeR).
// Source: Lightle and Weesies, 10/1/1996 (see documentation).
static final double[][] LightleWeesiesSlopeLength = {{0, .75, 100},
{.75, 1.5, 200},
{1.5, 2.5, 300},
{2.5, 3.5, 200},
{3.5, 4.5, 180},
{4.5, 5.5, 160},
{5.5, 6.5, 150},
{6.5, 7.5, 140},
{7.5, 8.5, 130},
{8.5, 9.5, 125},
{9.5, 10.5, 120},
{10.5, 11.5, 110},
{11.5, 12.5, 100},
{12.5, 13.5, 90},
{13.5, 14.5, 80},
{14.5, 15.5, 70},
{15.5, 17.5, 60},
{17.5, -1, 50}};
// Same row layout as above, but for the Palouse region.
static final double[][] PalouseSlopeLength = {{0, 5.5, 350},
{5.5, 10.5, 275},
{10.5, 15.5, 225},
{15.5, 20.5, 175},
{20.5, 25.5, 150},
{25.5, 35.5, 125},
{35.5, -1, 100}};
// Soil survey area symbols belonging to the Palouse region (ID/OR/WA).
static final List<String> PalouseAreas = Arrays.asList("ID607", "ID610", "OR021", "OR049", "OR055", "OR625",
"OR667", "OR670", "OR673", "WA001", "WA021", "WA025",
"WA043", "WA063", "WA071", "WA075", "WA603", "WA605",
"WA613", "WA617", "WA623", "WA639", "WA676", "WA677");
/**
 * Looks up the slope length for a representative slope percentage, using the
 * Palouse-region table when the soil survey area is in the Palouse, otherwise
 * the Lightle and Weesies (10/1/1996) table.
 *
 * Fix: the old Palouse loop nested the "open-ended row" fallback
 * ({@code else if (slope_r >= row[0])}) inside the {@code row[1] != -1} guard,
 * so row 0 (minimum 0) matched every non-negative slope and the method always
 * returned 350 for Palouse areas; the open-ended {35.5, -1, 100} row was
 * unreachable. Both tables now use the (correct) structure the
 * LightleWeesies branch already had.
 *
 * @param slope_r    representative slope in percent
 * @param areaSymbol soil survey area symbol (e.g. "WA001")
 * @return slope length from the table, or 0.0 if the slope is below table range
 */
private double calculateSlopeR(double slope_r, String areaSymbol) {
    // Pick the lookup table for this soil survey area.
    double[][] table = PalouseAreas.contains(areaSymbol)
            ? PalouseSlopeLength
            : LightleWeesiesSlopeLength;
    for (double[] row : table) {
        if (row[1] != -1) {
            // Bounded row: matches slopes in [min, max).
            if (slope_r >= row[0] && slope_r < row[1]) {
                return row[2];
            }
        } else if (slope_r >= row[0]) {
            // Open-ended final row: matches everything at or above its minimum.
            return row[2];
        }
    }
    // Only reachable for negative slopes — preserve the original 0.0 default.
    return 0.0;
}
/**
 * Runs a single-boolean-column validity query and interprets the result.
 * PostgreSQL booleans come back as "t"/"f" text, hence the contains("t") check.
 *
 * @param select SQL select returning exactly 1 column
 * @param lng    longitude used to pick the db chunk/connection
 * @return true iff a row was returned and its first column contains "t"
 * @throws SQLException             on query failure
 * @throws IllegalArgumentException if the result does not have 1 column
 */
private boolean isValidCoord(String select, double lng) throws SQLException {
    logger.info("query sql=" + select);
    // Fix: close Statement/ResultSet on every path (closes were commented out).
    try (Statement s = createStatement(lng);
            ResultSet r = s.executeQuery(select)) {
        if (r.getMetaData().getColumnCount() != 1) {
            throw new IllegalArgumentException("invalid Columns");
        }
        if (!r.next()) {
            return false;
        }
        // Fix: getString returns null for SQL NULL — the old code NPEd on
        // .trim(); treat NULL as "not valid".
        String sValidCoord = r.getString(1);
        return sValidCoord != null && sValidCoord.trim().contains("t");
    }
}
@Override
public boolean IsValidCliCoord(double lat, double lon) throws SQLException {
    // Delegates to the server-side IsValidCliCoord(long, lat) SQL function.
    String sql = "SELECT IsValidCliCoord(" + lon + ", " + lat + ");";
    return isValidCoord(sql, lon);
}
@Override
public boolean IsValidManCoord(double lat, double lon) throws SQLException {
    // Delegates to the server-side IsValidManCoord(long, lat) SQL function.
    String sql = "SELECT IsValidManCoord(" + lon + ", " + lat + ");";
    return isValidCoord(sql, lon);
}
@Override
public boolean IsValidSoiCoord(double lat, double lon) throws SQLException {
    // Delegates to the server-side IsValidSoiCoord(long, lat) SQL function.
    String sql = "SELECT IsValidSoiCoord(" + lon + ", " + lat + ");";
    return isValidCoord(sql, lon);
}
@Override
public boolean IsValidGisCoord(double lat, double lon) throws SQLException {
    // Delegates to the server-side IsValidGisCoord(long, lat) SQL function.
    String sql = "SELECT IsValidGisCoord(" + lon + ", " + lat + ");";
    return isValidCoord(sql, lon);
}
@Override
public boolean IsValidRusle2Soil(double lat, double lon) throws SQLException {
    // The point is valid iff a RUSLE2 soil file mapping exists for it.
    return findSoils(lat, lon) != null;
}
@Override
public boolean IsValidWepsSoil(double lat, double lon) throws SQLException {
    // The point is valid iff a WEPS soil file mapping exists for it.
    return findSoilsWeps(lat, lon) != null;
}
@Override
// Determine if a point is within the wind station interpolation boundary for WEPS
public boolean IsInInterpolateBoundary(double lat, double lon) throws SQLException {
    String sql = "SELECT IsInInterpolateBoundary(" + lon + ", " + lat + ");";
    return isValidCoord(sql, lon);
}
@Override
public FileQryResult findSoils(double lat, double lon) throws SQLException {
    // Point-in-polygon lookup of the RUSLE2 soil file for this coordinate
    // (soil geometry joined to the soil file map on mukey, SRID 4326).
    String sql = "SELECT m.musym, r2_path, r2_name FROM soil_geom AS g, map_soils AS m "
            + "WHERE ST_Within(ST_SetSRID(ST_Point(" + lon + "," + lat + "),4326), g.geometry) "
            + "AND g.mukey = m.mukey LIMIT 1;";
    return findFile(sql, lon);
}
@Override
//
// fix me
//
// added string matching to fix a problem which requires us to regenerate r2 soils
//
// Looks up the RUSLE2 soil file (path/name) for a single soil component key.
// NOTE(review): cokey is concatenated directly into the SQL string — if it can
// ever carry untrusted input this is an injection risk; a PreparedStatement
// with a ? placeholder would be safer. TODO confirm callers only pass
// db-sourced component keys.
public FileQryResult findSoilsByCokey(String cokey, double lon) throws SQLException {
    return findFile("SELECT distinct m.musym, r2_path, r2_name FROM soil_geom AS g, map_soils AS m "
            + "WHERE m.cokey='" + cokey + "' "
            + "AND g.mukey = m.mukey and m.r2_name like '%' || m.compname || '%' limit 1; ", lon);
}
@Override
// To calculate area in acres, we use the st_transform function to transform the intersection polygon into
// a NAD83 Kansas projection which has units in feet (not degrees), which produces square feet.
// We then divide by 43,560 sq ft to get acres.
public Collection<Soil> findSoilsForPolygon(String _polygon, double poly_longitude) throws SQLException {
    // Default AOI used when the caller supplies no polygon (no SRID, as before).
    String fallback = "ST_PolygonFromText('POLYGON((-90.111 38.19,-90.110 38.19, -90.110 38.18,-90.111 38.18, -90.111 38.19))')";
    // Fix: validate the RAW input. The old code wrapped _polygon in
    // ST_PolygonFromText FIRST and then null/empty-checked the wrapped string,
    // so the fallback could never trigger (and a null input became the literal
    // text "null" inside the SQL).
    String poly;
    if (_polygon == null || _polygon.length() < 1) {
        poly = fallback;
        poly_longitude = -90.111;
    } else {
        // add PostgreSQL specific call to the WKT polygon string
        poly = "ST_PolygonFromText('" + _polygon + "',4326)";
    }
    logger.info("The input polygon is=" + poly);
    logger.info("The input polygon len len =" + poly.length());
    return findPolygons("SELECT m.co_fips, m.musym, m.mukey, r2_path, r2_name, m.muname, "
            + "m.compname, m.kffact, m.tfact, m.slope_r, m.cokey, "
            + "st_area(st_intersection(" + poly
            + ", geometry)) / st_area(" + poly
            + ")*100 as percentAoi,"
            + "st_area(st_transform(st_intersection(" + poly
            + ", geometry),3541))/43560 as sizeAoi "
            + "FROM soil_geom AS g, map_soils AS m "
            + "WHERE ST_Intersects(" + poly
            + ", g.geometry) "
            + "AND g.mukey = m.mukey "
            + "and st_isvalid(geometry)='t' "
            + "and st_isvalid(" + poly
            + ")='t';", poly_longitude);
}
// To calculate area in acres, we use the st_transform function to transform the intersection polygon into
// a NAD83 Kansas projection which has units in feet (not degrees), which produces square feet.
// We then divide by 43,560 sq ft to get acres.
@Override
public Collection<Soil> findSoilsForPolygonWithSand(String _polygon, double poly_longitude) throws SQLException {
    // Default AOI used when the caller supplies no polygon (no SRID, as before).
    String fallback = "ST_PolygonFromText('POLYGON((-90.111 38.19,-90.110 38.19, -90.110 38.18,-90.111 38.18, -90.111 38.19))')";
    // Fix: validate the RAW input; the old code wrapped first and then
    // null/empty-checked the wrapped string, so the fallback never triggered.
    String poly;
    if (_polygon == null || _polygon.length() < 1) {
        poly = fallback;
        poly_longitude = -90.111;
    } else {
        // add PostgreSQL specific call to the WKT polygon string
        poly = "ST_PolygonFromText('" + _polygon + "',4326)";
    }
    logger.info("The input polygon is=" + poly);
    logger.info("The input polygon len len =" + poly.length());
    // added a fix "and m.r2_name like '%' || m.compname || '%'" to the end of this query because
    // soils have duplicate records presently, and need to be regenerated...
    return findPolygonsWithSand("SELECT m.co_fips, m.musym, m.mukey, r2_path, r2_name, m.muname, "
            + "m.compname, m.kffact, m.tfact, m.slope_r, m.cokey, m.sandtotal_r, "
            + "st_area(st_intersection(" + poly
            + ", geometry)) / st_area(" + poly
            + ")*100 as percentAoi,"
            + "st_area(st_transform(st_intersection(" + poly
            + ", geometry),3541))/43560 as sizeAoi "
            + "FROM soil_geom AS g, map_soils AS m "
            + "WHERE ST_Intersects(" + poly
            + ", g.geometry) "
            + "AND g.mukey = m.mukey "
            + "and st_isvalid(geometry)='t' "
            + "and st_isvalid(" + poly
            + ")='t' and m.r2_name like '%' || m.compname || '%';", poly_longitude);
}
@Override
// To calculate area in acres, we use the st_transform function to transform the intersection polygon into
// a NAD83 Kansas projection which has units in feet (not degrees), which produces square feet.
// We then divide by 43,560 sq ft to get acres.
public Collection<Soil> findSoilsForPolygonWeps(String _polygon, double poly_longitude) throws SQLException {
    // Default AOI used when the caller supplies no polygon (no SRID, as before).
    String fallback = "ST_PolygonFromText('POLYGON((-90.111 38.19,-90.110 38.19, -90.110 38.18,-90.111 38.18, -90.111 38.19))')";
    // Fix: validate the RAW input; the old code wrapped first and then
    // null/empty-checked the wrapped string, so the fallback never triggered.
    String poly;
    if (_polygon == null || _polygon.length() < 1) {
        poly = fallback;
        poly_longitude = -90.111;
    } else {
        // add PostgreSQL specific call to the WKT polygon string
        poly = "ST_PolygonFromText('" + _polygon + "',4326)";
    }
    logger.info("find weps soils - The input polygon is=" + poly);
    logger.info("The input polygon len len =" + poly.length());
    return findPolygonsWeps("SELECT m.co_fips, m.musym, m.mukey, weps_path, weps_name, m.muname, "
            + "m.compname, m.kffact, m.tfact, m.slope_r, m.cokey, "
            + "st_area(st_intersection(" + poly
            + ", geometry)) / st_area(" + poly
            + ")*100 as percentAoi,"
            + "st_area(st_transform(st_intersection(" + poly
            + ", geometry),3541))/43560 as sizeAoi "
            + "FROM soil_geom AS g, map_soils_weps AS m "
            + "WHERE ST_Intersects(" + poly
            + ", g.geometry) "
            + "AND g.mukey = m.mukey "
            + "and st_isvalid(geometry)='t' "
            + "and st_isvalid(" + poly
            + ")='t'", poly_longitude);
}
@Override
public FileQryResult findSoilsWeps(double lat, double lon) throws SQLException {
    // Point-in-polygon lookup of the WEPS soil file for this coordinate.
    String sql = "SELECT m.musym, weps_path, weps_name FROM soil_geom AS g, map_soils_weps AS m "
            + "WHERE ST_Within(ST_GeomFromText('POINT(" + lon + " " + lat + ")',4326), g.geometry) "
            + "AND g.mukey = m.mukey LIMIT 1;";
    return findFile(sql, lon);
}
@Override
// Looks up the WEPS soil file (path/name) for a single soil component key.
// NOTE(review): cokey is concatenated directly into the SQL — injection risk
// if it can ever carry untrusted input; prefer a PreparedStatement. TODO
// confirm callers only pass db-sourced component keys.
public FileQryResult findSoilsWepsByCokey(String cokey, double lon) throws SQLException {
    return findFile("SELECT m.musym, weps_path, weps_name FROM soil_geom AS g, map_soils_weps AS m "
            + "WHERE m.cokey='" + cokey + "' "
            + "AND g.mukey = m.mukey limit 1; ", lon);
}
@Override
// 09-03-2013 WJL: the "DESC" sort favors REQ for Idaho and RMOD for Colorado —
// specialized climate files (rmod, req, ...), where they exist, must always be
// returned in favor of the plain "R" ones.
public FileQryResult findClimate(double lat, double lon) throws SQLException {
    // Point lookup against cli_geom joined to map_climates on county FIPS,
    // also matching the EI range when the geometry row carries one.
    String sql = "SELECT m.co_fips, r2_path, r2_name FROM cli_geom as g, map_climates AS m "
            + "WHERE ST_Within(ST_Point(" + lon + "," + lat + "), g.geometry) "
            + "AND g.co_fips = m.co_fips "
            + "AND (g.ei_rang=m.ei_rang or g.ei_rang is null) "
            + "ORDER BY m.r2_name desc "
            + "LIMIT 1;";
    return findFile(sql, lon);
}
/**
 * Looks up the county containing a point.
 *
 * @return the county's state abbreviation, county code, and name
 * @throws SQLException     on query failure
 * @throws ServiceException if no county contains the point
 */
@Override
public County findCounty(double lat, double lon) throws SQLException, ServiceException {
    String select = "SELECT m.st_abbr, m.co_numb, m.co_name FROM cnty_geom as g, cnty_data AS m "
            + "WHERE ST_Within(ST_Point(" + lon + "," + lat + "), g.geometry) "
            + "AND g.co_fips = m.co_fips;";
    logger.info("query sql=" + select);
    // Fix: close Statement/ResultSet on every path (closes were commented out).
    try (Statement s = createStatement(lon);
            ResultSet r = s.executeQuery(select)) {
        if (r.getMetaData().getColumnCount() != 3) {
            logger.severe("invalid columns in cligen station query");
            throw new IllegalArgumentException("invalid Columns");
        }
        if (!r.next()) {
            logger.severe("no result from cligen station query!");
            throw new ServiceException("no result from cligen station query!");
        }
        County cnty = new County();
        cnty.st_abbr = r.getString(1);
        cnty.county_code = r.getString(2);
        cnty.name = r.getString(3);
        logger.info("state abbr=" + cnty.st_abbr + " county code=" + cnty.county_code + " county name=" + cnty.name);
        return cnty;
    }
}
/**
 * Finds the CLIGEN station nearest to a point (by planar distance in degrees).
 *
 * @return the nearest station, or null if the station table is empty
 * @throws SQLException on query failure
 */
@Override
public StationResult findCligenStation(double lat, double lon) throws SQLException {
    String select = "SELECT s.st_numb, s.station_id, st_x(point), st_y(point), s.elevation, ST_Distance(point, ST_GeomFromText('POINT(" + lon + " " + lat + ")',4326)) as distance, s.name "
            + "FROM cligen_stations as s "
            + "order by distance "
            + "limit 1;";
    logger.info("query sql=" + select);
    // Fix: close Statement/ResultSet on every path (the old code never closed them).
    try (Statement s = createStatement(lon);
            ResultSet r = s.executeQuery(select)) {
        if (r.getMetaData().getColumnCount() != 7) {
            logger.severe("invalid columns in cligen station query");
            throw new IllegalArgumentException("invalid Columns");
        }
        if (!r.next()) {
            logger.severe("no result from cligen station query!");
            return null;
        }
        StationResult cligenst = new StationResult();
        cligenst.state = r.getString(1);
        cligenst.stationId = r.getString(2);
        cligenst.stationX = r.getString(3);
        cligenst.stationY = r.getString(4);
        cligenst.elevation = r.getString(5);
        cligenst.distance = r.getDouble(6);
        // Some stations have no name; substitute a lookup hint.
        cligenst.name = (r.getString(7) != null ? r.getString(7) : "NAME_UNAVAILABLE-LOOKUP-ID#" + r.getString(2));
        logger.info("state number=" + cligenst.state + " station id=" + cligenst.stationId + " station_X=" + cligenst.stationX + " stationY=" + cligenst.stationY + " name=" + cligenst.name);
        logger.info("distance in deg=" + cligenst.distance);
        return cligenst;
    }
}
/**
 * Finds the WINDGEN station nearest to a point (by planar distance in degrees).
 *
 * @return the nearest station, or null if the station table is empty
 * @throws SQLException on query failure
 */
@Override
public StationResult findWindgenStation(double lat, double lon) throws SQLException {
    String select = "SELECT st_numb, station_id, ST_Distance(point, ST_GeomFromText('POINT(" + lon + " " + lat + ")',4326)) as distance "
            + "FROM windgen_stations "
            + "order by distance "
            + "limit 1;";
    logger.info("query sql=" + select);
    // Fix: close Statement/ResultSet on every path (closes were commented out).
    try (Statement s = createStatement(lon);
            ResultSet r = s.executeQuery(select)) {
        if (r.getMetaData().getColumnCount() != 3) {
            logger.severe("invalid columns in windgen station query");
            throw new IllegalArgumentException("invalid Columns");
        }
        if (!r.next()) {
            logger.severe("no result from windgen station query!");
            return null;
        }
        StationResult windgenst = new StationResult();
        windgenst.state = r.getString(1);
        windgenst.stationId = r.getString(2);
        windgenst.distance = r.getDouble(3);
        logger.info("state=" + windgenst.state);
        logger.info("station id=" + windgenst.stationId);
        logger.info("distance in deg=" + windgenst.distance);
        return windgenst;
    }
}
/**
 * Finds the WEPS climate-station polygon containing a point.
 *
 * @return the matching station record, or null if no polygon contains the point
 * @throws SQLException on query failure
 */
@Override
public StationResult cliGeomIntersect(double lat, double lon) throws SQLException {
    String select = "select st, name, st_fips, c1, lat_dd, long_dd, elev_m "
            + "from weps_cli_geom "
            + "where ST_Intersects(geom,ST_GeomFromText('POINT(" + lon + " " + lat + ")',4326));";
    logger.info("query sql=" + select);
    // Fix: close Statement/ResultSet on every path (the old code never closed them).
    try (Statement s = createStatement(lon);
            ResultSet r = s.executeQuery(select)) {
        if (r.getMetaData().getColumnCount() != 7) {
            logger.severe("invalid columns in cliGeomIntersect query");
            throw new IllegalArgumentException("invalid Columns");
        }
        if (!r.next()) {
            logger.severe("no result from cliGeomIntersect query!");
            return null;
        }
        StationResult clist = new StationResult();
        clist.state = r.getString(3);
        clist.name = r.getString(2);
        clist.stationId = r.getString(4);
        clist.stationY = r.getString(5);
        clist.stationX = r.getString(6);
        clist.elevation = r.getString(7);
        logger.info("state=" + clist.state);
        logger.info("cli station id=" + clist.stationId);
        logger.info("cli station name=" + clist.name);
        logger.info("cli station elev=" + clist.elevation);
        return clist;
    }
}
/**
 * Finds the WEPS wind-station polygon containing a point.
 *
 * @return the matching station record, or null if no polygon contains the point
 * @throws SQLException on query failure
 */
@Override
public StationResult windGeomIntersect(double lat, double lon) throws SQLException {
    String select = "select lat_dd, long_dd, wban, cntry, st, name "
            + "from weps_wind_geom "
            + "where ST_Intersects(geom,ST_GeomFromText('POINT(" + lon + " " + lat + ")',4326));";
    logger.info("query sql=" + select);
    // Fix: close Statement/ResultSet on every path (the old code never closed them).
    try (Statement s = createStatement(lon);
            ResultSet r = s.executeQuery(select)) {
        if (r.getMetaData().getColumnCount() != 6) {
            logger.severe("invalid columns in windGeomIntersect query");
            throw new IllegalArgumentException("invalid Columns");
        }
        if (!r.next()) {
            logger.severe("no result from windGeomIntersect query!");
            return null;
        }
        StationResult windgenst = new StationResult();
        windgenst.stationX = r.getString(2);
        windgenst.stationY = r.getString(1);
        windgenst.stationId = r.getString(3);
        windgenst.country = r.getString(4);
        windgenst.state = r.getString(5);
        windgenst.name = r.getString(6);
        logger.info("state=" + windgenst.state);
        logger.info("station id=" + windgenst.stationId);
        logger.info("station name=" + windgenst.name);
        return windgenst;
    }
}
@Override
// Uses a field lat long, to determine the county, then returns the county's centroid lat/long
/**
 * Finds the county containing a point and returns it with the centroid of its
 * geometry filled into county_centroid_X/Y.
 *
 * @return the county, or null if no county polygon contains the point
 * @throws SQLException on query failure
 */
public County countyCentroid(double lat, double lon) throws SQLException {
    String select = "select state, county, name, ST_X(ST_Centroid(geom)) centroid_long, ST_Y(ST_Centroid(geom)) centroid_lat "
            + "from weps_county_geom "
            + "where ST_Intersects(geom,ST_GeomFromText('POINT(" + lon + " " + lat + ")',4326));";
    logger.info("query sql=" + select);
    // Fix: close Statement/ResultSet on every path (the old code never closed them).
    try (Statement s = createStatement(lon);
            ResultSet r = s.executeQuery(select)) {
        if (r.getMetaData().getColumnCount() != 5) {
            logger.severe("invalid columns in countyCentroid query");
            throw new IllegalArgumentException("invalid Columns");
        }
        if (!r.next()) {
            logger.severe("no result from countyCentroid query!");
            return null;
        }
        County county = new County();
        county.st_abbr = r.getString(1);
        county.county_code = r.getString(2);
        county.name = r.getString(3);
        county.county_centroid_X = r.getString(4);
        county.county_centroid_Y = r.getString(5);
        logger.info("county_fips_code=" + county.st_abbr + county.county_code);
        logger.info("name=" + county.name);
        logger.info("centroidX=" + county.county_centroid_X);
        logger.info("centroidY=" + county.county_centroid_Y);
        return county;
    }
}
@Override
public String findCmz(double lat, double lon) throws SQLException {
    // Resolve the crop management zone (CMZ) code containing the point.
    String sql = "select distinct g.co_cmz from cmz_geom as g "
            + "where ST_Within(ST_Point(" + lon + "," + lat + "), g.geometry) LIMIT 1;";
    return findCmzName(sql, lon, 1);
}
/**
 * Returns up to 10 of the most-used management files for the CMZ containing a
 * point, ordered by usage (descending).
 *
 * @return list of (cmz, path, name) results; empty if the point is in no CMZ
 * @throws SQLException on query failure
 */
@Override
public List<FileQryResult> findManagements(double lat, double lon) throws SQLException {
    String select = "SELECT co_cmz, r2_path, r2_name FROM man_names WHERE co_cmz= (SELECT distinct co_cmz FROM cmz_geom "
            + "WHERE Within(GeomFromText( 'POINT(" + lon + " "
            + lat + ")' ), geometry) ) ORDER BY usage DESC LIMIT 10;";
    // Fix: close Statement/ResultSet on every path (closes were commented out).
    try (Statement s = createStatement(lon);
            ResultSet r = s.executeQuery(select)) {
        if (r.getMetaData().getColumnCount() != 3) {
            throw new IllegalArgumentException("invalid Columns");
        }
        List<FileQryResult> l = new ArrayList<FileQryResult>();
        while (r.next()) {
            FileQryResult res = new FileQryResult();
            res.id = r.getString(1);
            res.file_path = r.getString(2);
            res.file_name = r.getString(3);
            l.add(res);
        }
        return l;
    }
}
/**
 * Returns up to 100 management files for the CMZ containing a point, optionally
 * narrowed by two case-insensitive substring filters on path/name.
 *
 * @param filter1 substring filter (lower-cased match); null/empty to skip
 * @param filter2 second substring filter; null/empty to skip
 * @return list of (cmz, path, name) results; empty if nothing matches
 * @throws SQLException on query failure
 */
@Override
public List<FileQryResult> findManagementsFilter(double lat, double lon, String filter1, String filter2) throws SQLException {
    String sSql = "SELECT co_cmz, r2_path, r2_name FROM man_names WHERE co_cmz= (SELECT distinct co_cmz FROM cmz_geom "
            + "WHERE Within(GeomFromText( 'POINT(" + lon + " "
            + lat + ")' ), geometry) ) ";
    // Security fix: filters are caller-supplied free text concatenated into the
    // LIKE literal — escape embedded single quotes so they cannot break out of
    // the string (SQL injection). A PreparedStatement with ? placeholders would
    // be better still. Also tolerate null filters instead of NPEing.
    if (filter1 != null && filter1.length() > 0) {
        String f1 = filter1.replace("'", "''");
        sSql += " and (lower(r2_path) like '%" + f1 + "%' or lower(r2_name) like '%" + f1 + "%') ";
    }
    if (filter2 != null && filter2.length() > 0) {
        String f2 = filter2.replace("'", "''");
        sSql += " and (lower(r2_path) like '%" + f2 + "%' or lower(r2_name) like '%" + f2 + "%') ";
    }
    sSql += " ORDER BY usage DESC LIMIT 100;";
    // Fix: close Statement/ResultSet on every path (closes were commented out).
    try (Statement s = createStatement(lon);
            ResultSet r = s.executeQuery(sSql)) {
        if (r.getMetaData().getColumnCount() != 3) {
            throw new IllegalArgumentException("invalid Columns");
        }
        List<FileQryResult> l = new ArrayList<FileQryResult>();
        while (r.next()) {
            FileQryResult res = new FileQryResult();
            res.id = r.getString(1);
            res.file_path = r.getString(2);
            res.file_name = r.getString(3);
            l.add(res);
        }
        return l;
    }
}
@Override
// Intentionally a no-op: the connection pool is static and shared by every
// PostGIS instance (and by the per-chunk pools), so closing it here would
// break other users of the pool. The commented-out code below shows what a
// real shutdown would do.
public void close() throws SQLException {
    // conn.close();
    // for (int i = 0;i< MAX_CONNECTIONS;i++)
    // connections[i].close();
}
/**
 * Obtains a pooled connection for the given longitude and creates a Statement
 * on it, reconnecting to the default database if the pooled connection turns
 * out to be null or closed.
 * NOTE(review): returns null when statement creation fails with SQLException;
 * callers do not null-check, so a db outage surfaces as a NullPointerException
 * at the call site.
 *
 * @param lon longitude used to select the db chunk/connection
 * @return a Statement on a live connection, or null on SQLException
 */
private Statement createStatement(double lon) {
    try {
        java.sql.Connection conn = getConnection(lon);
        // Fix: only dereference conn after the null check — the old code logged
        // conn.getMetaData() BEFORE checking for null, so a pool miss produced
        // an uncaught NullPointerException instead of the reconnect fallback.
        if ((conn != null) && (!conn.isClosed())) {
            logger.info("db connection to use=" + conn + " with URL=" + conn.getMetaData().getURL());
            Statement st = conn.createStatement();
            logger.info("statement created normally");
            logger.info("connection is read only=" + conn.isReadOnly());
            if (st.getConnection().isClosed()) {
                logger.warning("Reinitializing database connection! Connection obtained through connections pool was closed! This should not happen.");
                conn = DriverManager.getConnection(PG_URL, "postgres", "postgres");
                st = conn.createStatement();
            }
            logger.info("db client info=" + st.getConnection().getClientInfo().toString());
            logger.info("db url=" + conn.getMetaData().getURL());
            return st;
        } else {
            if (conn != null) {
                logger.severe("connection isClosed=" + conn.isClosed());
            }
            // conn is null or closed here, so always reconnect to the default db.
            logger.warning("Database connection not obtained through connections pool! This should not happen.");
            conn = DriverManager.getConnection(PG_URL, "postgres", "postgres");
            return conn.createStatement();
        }
    } catch (SQLException ex) {
        logger.log(Level.SEVERE, "failed to create db statement", ex);
    }
    // Fix: the old final log passed the message as the Object parameter of
    // log(Level, String, Object) with a null message, so the text was lost.
    logger.severe("statement is being returned null");
    return null;
}
// return a rotating connection...
/**
 * Returns the next Connection from the pool, rotating round-robin. When
 * longitude-based db chunks are enabled, the chunk whose [minLong, maxLong]
 * range contains the given longitude is used; otherwise (or when no chunk
 * matches) the default static pool is used.
 *
 * @param lon longitude of the query target (only used for chunk selection)
 * @return a pooled Connection, or null if the matching chunk has none available
 */
private static synchronized java.sql.Connection getConnection(double lon) {
    int maxConnections = Config.getInt("pg.connpool", MAX_CONNECTIONS);
    if (PostGIS.bUseChunks) {
        logger.info("using db chunks for query");
        // find the needed connection array based on the chunk
        Collection<Config.PostgresChunk> coll = Config.getPostgresChunks();
        for (Config.PostgresChunk pgc : coll) {
            logger.info("checking for chunk for longitude=" + lon);
            // Fix: select the chunk whose longitude range CONTAINS lon. The old
            // test (minLong > lon && maxLong < lon) can never be true for a
            // well-formed min<=max range, so chunks were silently never used.
            if (pgc.getMinLong() <= lon && lon <= pgc.getMaxLong()) {
                pgc.connectionInc();
                if (pgc.getCurrentConnectionIdx() < maxConnections) {
                    try {
                        Connection conn = pgc.getCurrentConnection();
                        if (conn != null) {
                            logger.info("using chunk for query=" + pgc.getName() + " with the URL of=" + conn.getMetaData().getURL());
                        } else {
                            logger.info("using chunk for query=" + pgc.getName() + " BUT CONNECTION IS NULL AND NEEDS TO BE REINITIALIZED");
                        }
                    } catch (SQLException sqle) {
                        logger.severe("using chunk for query=" + pgc.getName() + " BUT ERROR WHILE GETTING THE DB URL!!!");
                    }
                } else {
                    // Round-robin wrap of the chunk's own connection counter.
                    logger.info("resetting connection counter; using chunk for query=" + pgc.getName());
                    pgc.setCurrentConnectionIdx(0);
                }
                Connection conn = pgc.getCurrentConnection();
                if (conn != null) {
                    return conn;
                }
                logger.severe("NO AVAILABLE DATABASE CONNECTION WHEN GETTING THE CURRENT CONNECTION!");
                return null;
            }
        }
        // if no chunk is found fall through to the default pool
        logger.warning("no db chunk found for longitude=" + lon);
        logger.warning("trying to use default postgresql db.");
    } else {
        // if chunks are not used, then lat lng coords can be ignored
        logger.info("Not using DB chunks for this query.");
    }
    // Default pool: simple round-robin rotation over the static array.
    currentConnection++;
    if (currentConnection >= maxConnections) {
        currentConnection = 0;
    }
    logger.info("Using DB connection #" + currentConnection);
    return connections[currentConnection];
}
/**
 * Smoke test: soil, climate, and management lookups for a point near
 * Norris, TN (lat 36.250515, lon -84.026264).
 */
public static void main(String[] args) throws Exception {
    PostGIS q = new PostGIS();
    double lat = 36.250515;
    double lon = -84.026264;
    // Fix: the arguments were previously passed as (-84.026264, 36.250515)
    // into (lat, lon) parameters — coordinates swapped, so every lookup ran
    // with latitude -84, which matches nothing.
    FileQryResult s = q.findSoils(lat, lon);
    FileQryResult c = q.findClimate(lat, lon);
    List<FileQryResult> m = q.findManagements(lat, lon);
    System.out.println(s);
    System.out.println(c);
    System.out.println(m);
    q.close();
}
}