@@ -1,6 +1,7 @@
import os, datetime, netCDF4, pandas, csv, bz2, sys, json, bisect, psycopg2, math |
import psycopg2.extras |
from pymongo import MongoClient |
+from shapely.geometry import asShape |
|
from collections import namedtuple |
import numpy as np |
@@ -162,30 +163,44 @@ |
geojson_obj = json.loads(input_zone_file) |
|
# Build tiles by looking up each gid |
+ if cell_table == 'prism': |
+ # PRISM uses cell centroids |
+ if input_zone_file: |
+ geojson_obj = json.loads(input_zone_file) |
+ for feat in geojson_obj['features']: |
+ shape = asShape(feat['geometry']) |
+ tile = [shape.centroid.x, shape.centroid.y] |
+ tiles.append(tile) |
|
- c = getConnBeta().cursor(cursor_factory=psycopg2.extras.DictCursor) |
- for i in range(len(geojson_obj['features'])): |
- feat = geojson_obj['features'][i] |
- # Make sure that there is an SRID associated |
- if not "crs" in feat["geometry"]: |
- feat["geometry"]["crs"] = {"type":"name","properties":{"name":"EPSG:4326"}} |
+ name = feat['properties'].get('name', '{0},{1}'.format(tile[0], tile[1])) |
+ names.append(name) |
+ cell_extents[name] = None |
+ cell_defs[name] = None |
+ |
+ else: |
+ c = getConnBeta().cursor(cursor_factory=psycopg2.extras.DictCursor) |
+ for i in range(len(geojson_obj['features'])): |
+ feat = geojson_obj['features'][i] |
+ # Make sure that there is an SRID associated |
+ if not "crs" in feat["geometry"]: |
+ feat["geometry"]["crs"] = {"type":"name","properties":{"name":"EPSG:4326"}} |
|
- # If the feature is a polygon, then use the centroid |
- geomjson = json.dumps(feat['geometry']) |
- c.execute("SELECT geom FROM (SELECT ST_Centroid(ST_GeomFromGeoJSON(%s)) as geom) as foo", (geomjson,)) |
- geom = c.fetchone()[0] |
+ # If the feature is a polygon, then use the centroid |
+ geomjson = json.dumps(feat['geometry']) |
+ c.execute("SELECT geom FROM (SELECT ST_Centroid(ST_GeomFromGeoJSON(%s)) as geom) as foo", (geomjson,)) |
+ geom = c.fetchone()[0] |
|
- # Find the timeseries. Keep the timeseries cell so that the caller can decide if he can use cached values |
- # options=2 means to add the SRS |
- c.execute("SELECT gid, ST_AsGeoJSON(the_geom, 15, 2) as geojson, ST_Extent(the_geom) as extent FROM {cell_table}.cells WHERE ST_Intersects(the_geom, %s) GROUP BY gid".format(cell_table=cell_table), (geom,)) |
- row = c.fetchone() |
- gid, geojson_cell, extent_cell = row |
+ # Find the timeseries. Keep the timeseries cell so that the caller can decide if he can use cached values |
+ # options=2 means to add the SRS |
+ c.execute("SELECT gid, ST_AsGeoJSON(the_geom, 15, 2) as geojson, ST_Extent(the_geom) as extent FROM {cell_table}.cells WHERE ST_Intersects(the_geom, %s) GROUP BY gid".format(cell_table=cell_table), (geom,)) |
+ row = c.fetchone() |
+ gid, geojson_cell, extent_cell = row |
|
- tiles.append(gid) |
- name = feat['properties'].get('name', '{0}'.format(i+1)) |
- names.append(name) |
- cell_extents[name] = extent_cell |
- cell_defs[name] = geojson_cell |
+ tiles.append(gid) |
+ name = feat['properties'].get('name', '{0}'.format(i+1)) |
+ names.append(name) |
+ cell_extents[name] = extent_cell |
+ cell_defs[name] = geojson_cell |
|
return tiles, names, cell_extents, cell_defs |
|
@@ -437,16 +452,10 @@ |
|
ret = [header,] |
|
- # generate tile info |
- tiles = [] |
- if input_zone_file: |
- geojson_obj = json.loads(input_zone_file) |
- for feat in geojson_obj['features']: |
- pt = feat['geometry']['coordinates'] |
- tiles.append(pt) |
- |
- for tile in tiles: |
- x, y = tile |
+ tiles, names, cell_extents, cell_defs = self.getInputZoneInfo(input_zone_file, 'prism') |
+ |
+ for itile in range(len(tiles)): |
+ x, y = tiles[itile] |
|
# query the locations for x, y data |
query = [ |
@@ -470,12 +479,12 @@ |
} |
] |
|
- results = db.prism_locs.aggregate(query) |
+ results = db.geoPrism.aggregate(query) |
|
cells = [] |
for x, y in [(d['x'], d['y']) for d in results]: |
cells.append([x, y]) |
- assert len(cells) == 1 |
+ #assert len(cells) == 1, 'No PRISM data for cell {0}'.format(tile) |
|
# get data for multiple cells and multiple years |
for cell in cells: |
@@ -500,9 +509,9 @@ |
dt = start_date |
for i in range(len(tmax)): |
# convert precip from mm to cm |
- ret.append(['{0} {1}'.format(cell[0], cell[1]), dt.strftime('%Y-%m-%d'), conv_dict['tmp'](tmin[i]), conv_dict['tmp'](tmax[i]), conv_dict['apcp'](ppt[i])]) |
+ ret.append([names[itile], dt.strftime('%Y-%m-%d'), conv_dict['tmp'](tmin[i]), conv_dict['tmp'](tmax[i]), conv_dict['apcp'](ppt[i])]) |
dt += datetime.timedelta(days=1) |
- return ret |
+ return ret, cell_defs, cell_extents |
|
def WEgetDataGHCND(self, input_zone_file_or_station_list, units, start_date, end_date, generate_average=False, IDW_center=False): |
names = [] |