Displaying differences for changeset
 
display as  

nbproject/private/private.xml

@@ -24,10 +24,13 @@
             <file>file:/od/projects/csip-all/csip-example/test/service_tests/externalexe/STest.java</file>
         </group>
         <group>
+            <file>file:/home/pattersd/work/csip-weather/src/java/m/weather/NARR/V1_0.java</file>
             <file>file:/home/pattersd/work/csip-weather/src/java/m/weather/WeatherModelDataService.java</file>
             <file>file:/home/pattersd/work/csip-weather/test/service_tests/PRISM_V1_0/prism-req.json</file>
-            <file>file:/home/pattersd/work/csip-weather/test/service_tests/NCWCD_V1_0/ncwcd-req.json</file>
-            <file>file:/home/pattersd/work/csip-weather/test/service_tests/MACA_V1_0/maca-req.json</file>
+            <file>file:/home/pattersd/work/csip-weather/test/service_tests/NARR_V1_0/narr-res.json</file>
+            <file>file:/home/pattersd/work/csip-weather/test/service_tests/NARR_V1_0/narr-req.json</file>
+            <file>file:/home/pattersd/work/csip-weather/test/service_tests/PRISM_V1_0/prism-res.json</file>
+            <file>file:/home/pattersd/work/csip-weather/src/java/m/weather/NARR/V1_0.json</file>
         </group>
     </open-files>
 </project-private>

src/java/m/weather/WeatherModelDataService.java

@@ -22,6 +22,7 @@
  */
 public class WeatherModelDataService extends ModelDataService {
     File result_file = null;
+    boolean download_link_only = false;
     
     @Override
     protected void preProcess() throws Exception {
@@ -30,29 +31,12 @@
     @Override
     protected String process() throws Exception {
         // Check for selection by region or point
-        String input_zone_geojson = getStringParam("input_zone_features", "");
-        // Save features to file because a large request can overflow the parameter buffer
-        String geojson_fname = getWorkspaceDir() + "/" + "input_zone_features.geojson";
-        Files.write(Paths.get(geojson_fname), input_zone_geojson.getBytes());
-
-        // Check for selection by station ID list
-        String station_names_json = getStringParam("station_list", null);
+        JSONObject req = getRequest();
+        String req_fname = getWorkspaceDir() + "/" + "req.json";
+        Files.write(Paths.get(req_fname), req.toString().getBytes());
         
-        // MACA stuff
-        String forecast_model = getStringParam("forecast_model", "");
-        String forecast_option = getStringParam("forecast_option", "");
-
-        String units = getStringParam("units", "english");
-        String start_date = getStringParam("start_date", "");
-
-        final SimpleDateFormat format;
-        if (start_date.indexOf("-") > 0) {
-            format = new SimpleDateFormat("yyyy-MM-dd");  
-        }
-        else {
-            format = new SimpleDateFormat("MM/dd/yyyy");  
-        }
-        String end_date = getStringParam("end_date", format.format(new Date()));
+        // Check for file return instead of direct return of results
+        download_link_only = getBooleanParam("download_link_only", false);
         
         File f = getResourceFile("driver");
         getResourceFile("weatherExtraction");
@@ -65,30 +49,60 @@
         int idx = s.indexOf("/")+1;
         String climate_type = s.substring(idx, s.indexOf("/", idx));
 
+//        String input_zone_geojson = getStringParam("input_zone_features", "");
+//        // Save features to file because a large request can overflow the parameter buffer
+//        String geojson_fname = getWorkspaceDir() + "/" + "input_zone_features.geojson";
+//        Files.write(Paths.get(geojson_fname), input_zone_geojson.getBytes());
+//
+//        // Check for selection by station ID list
+//        String station_names_json = getStringParam("station_list", null);
+//        
+//        // MACA stuff
+//        String forecast_model = getStringParam("forecast_model", "");
+//        String forecast_option = getStringParam("forecast_option", "");
+//
+//        String units = getStringParam("units", "english");
+//        String start_date = getStringParam("start_date", "");
+//
+//        final SimpleDateFormat format;
+//        if (start_date.indexOf("-") > 0) {
+//            format = new SimpleDateFormat("yyyy-MM-dd");  
+//        }
+//        else {
+//            format = new SimpleDateFormat("MM/dd/yyyy");  
+//        }
+//        String end_date = getStringParam("end_date", format.format(new Date()));
+        
+//        args.add(input_zone_geojson != "" ? geojson_fname : station_names_json);
+//        if (!forecast_model.isEmpty()) {
+//            args.add(forecast_model);
+//            args.add(forecast_option);
+//        }
+//        args.add(units);
+//        args.add(start_date);
+//        args.add(end_date);
+//        if (hasParam("generate_average")) {
+//            args.add(getBooleanParam("generate_average") ? "True" : "False");
+//        }
+//        if (hasParam("IDW_center")) {
+//            args.add(getStringParam("IDW_center"));
+//        }
+//        if (hasParam("time")) {
+//            args.add(getStringParam("time"));
+//        }
+//        if (hasParam("climate_data")) {
+//            args.add(getStringParam("climate_data"));
+//        }
+//        if (hasParam("return_full_period")) {
+//            args.add(getBooleanParam("return_full_period") ? "True" : "False");
+//        }
+
         List<String> args = new ArrayList<String>();
         args.add("python");
         args.add(f.getAbsolutePath());
         args.add(climate_type);
-        args.add(input_zone_geojson != "" ? geojson_fname : station_names_json);
-        if (!forecast_model.isEmpty()) {
-            args.add(forecast_model);
-            args.add(forecast_option);
-        }
-        args.add(units);
-        args.add(start_date);
-        args.add(end_date);
-        if (hasParam("generate_average")) {
-            args.add(getBooleanParam("generate_average") ? "True" : "False");
-        }
-        if (hasParam("IDW_center")) {
-            args.add(getStringParam("IDW_center"));
-        }
-        if (hasParam("time")) {
-            args.add(getStringParam("time"));
-        }
-        if (hasParam("climate_data")) {
-            args.add(getStringParam("climate_data"));
-        }
+        args.add(req_fname);
+
         for (String arg: args) {
             LOG.info("arguments = " + arg);
         }
@@ -120,8 +134,13 @@
 
     @Override
     protected void postProcess() throws Exception {
-        byte[] result = Files.readAllBytes(Paths.get(result_file.getPath()));
-        JSONObject results = new JSONObject(new String(result));
-        putResult("results", results, "JSON string");
+        if (download_link_only) {
+            putResult(result_file, "results file");
+        }
+        else {
+            byte[] result = Files.readAllBytes(Paths.get(result_file.getPath()));
+            JSONObject results = new JSONObject(new String(result));
+            putResult("results", results, "JSON string");
+        }
     }
 }

src/java/m/weather/coagmet/V1_0.json

@@ -26,5 +26,9 @@
         "name": "climate_data",
         "value": ["tmin", "tmax", "pp", "sr", "rhave", "windspeed_avg"],
         "description": "for daily, choose from ['tave', 'tmax', 'mxtm', 'tmin', 'mntm', 'vp', 'rhmax', 'rhmxtm', 'rhmin', 'rhmntm', 'sr', 'wrun', 'pp', 'st5mx', 'st5mxtm', 'st5mn', 'st5mntm', 'st15mx', 'st15mxtm', 'st15mn', 'st15mntm', 'volts', 'year', 'gust', 'gusttm', 'gustdir']. For hourly choose from ['tmean', 'rh', 'vp', 'sr', 'ws', 'wind_vec', 'wind_std', 'pp', 'st5', 'st15', 'gust', 'gusttm', 'gustdir']"
+    }, {
+        "name": "return_full_period",
+        "value": false,
+        "description": "If true, then the period of record will be set to the start and end dates. Otherwise the start date is the day that climate data starts and the end date is the last date of data."
     }]
 }
\ No newline at end of file

src/java/python/weatherExtraction.py

@@ -101,54 +101,48 @@
         `tiles`: array of [gridx, gridy]
         `units`: either 'metric' or 'english'
         """
-        def parseDates():
-            if start_date_str:
-                try:
-                    start_date = datetime.datetime.strptime(start_date_str, "%m/%d/%Y").date()
-                except:
-                    start_date = datetime.datetime.strptime(start_date_str, "%Y-%m-%d").date()
-            else:
-                start_date = None
-            if end_date_str:
-                try:
-                    end_date = datetime.datetime.strptime(end_date_str, "%m/%d/%Y").date()
-                except:
-                    end_date = datetime.datetime.strptime(end_date_str, "%Y-%m-%d").date()
-            else:
-                end_date = None
+        def parseDates(req=None):
+            start_date, end_date = None, None
+            if req:
+                start_date_str = req.get('start_date')
+                end_date_str = req.get('end_date')
+                
+                if start_date_str:
+                    try:
+                        start_date = datetime.datetime.strptime(start_date_str, "%m/%d/%Y").date()
+                    except:
+                        start_date = datetime.datetime.strptime(start_date_str, "%Y-%m-%d").date()
+                if end_date_str:
+                    try:
+                        end_date = datetime.datetime.strptime(end_date_str, "%m/%d/%Y").date()
+                    except:
+                        end_date = datetime.datetime.strptime(end_date_str, "%Y-%m-%d").date()
+
             return start_date, end_date
 
+        # This is the csip request object
+        if len(args) == 1:
+            with open(args[0]) as fp:
+                request = json.load(fp)
+
+                # convert to key-value pairs
+                arg_dict = {}
+                for name_value in request['parameter']:
+                    arg_dict[name_value['name']] = name_value['value']
+            arg_dict['start_date'], arg_dict['end_date'] = parseDates(arg_dict)
+
         if climate_dataset == 'maca':
-            input_zone_file, model, forecast_option, units, start_date_str, end_date_str = args
-            start_date, end_date = parseDates()
-            return self.WEgetDataMACA(input_zone_file, model, forecast_option, units, start_date, end_date)
+            return self.WEgetDataMACA(arg_dict)
         elif climate_dataset == 'narr':
-            input_zone_file, units, start_date_str, end_date_str = args
-            start_date, end_date = parseDates()
-            return self.WEgetDataNARR(input_zone_file, units, start_date, end_date)
+            return self.WEgetDataNARR(arg_dict)
         elif climate_dataset == 'prism':
-            input_zone_file, units, start_date_str, end_date_str = args
-            start_date, end_date = parseDates()
-            return self.WEgetDataPRISM(input_zone_file, units, start_date, end_date)
+            return self.WEgetDataPRISM(arg_dict)
         elif climate_dataset == 'ncwcd':
-            input_zone_file_or_station_list, units, start_date_str, end_date_str, climate_data = args
-            start_date, end_date = parseDates()
-            return self.WEgetDataNCWCD_live(input_zone_file_or_station_list, units, start_date, end_date, climate_data)
+            return self.WEgetDataNCWCD_live(arg_dict)
         elif climate_dataset == 'coagmet':
-            input_zone_file_or_station_list, units, start_date_str, end_date_str, time_type, climate_data = args
-            start_date, end_date = parseDates()
-            return self.WEgetDataCoAgMet(input_zone_file_or_station_list, units, start_date, end_date, time_type, climate_data)
+            return self.WEgetDataCoAgMet(arg_dict)
         elif climate_dataset == 'ghcnd':
-            generate_average = "False"
-            IDW_center = None
-            if len(args) == 4:
-                input_zone_file_or_station_list, units, start_date_str, end_date_str = args
-            elif len(args) == 5:
-                input_zone_file_or_station_list, units, start_date_str, end_date_str, generate_average = args
-            elif len(args) == 6:
-                input_zone_file_or_station_list, units, start_date_str, end_date_str, generate_average, IDW_center = args
-            start_date, end_date = parseDates()
-            return self.WEgetDataGHCND(input_zone_file_or_station_list, units, start_date, end_date, generate_average.lower() == 'true', IDW_center)
+            return self.WEgetDataGHCND(arg_dict)
 
         raise Exception("Invalid climate type {0}".format(climate_dataset))
 
@@ -160,14 +154,10 @@
         cell_extents = {}
         cell_defs = {}
 
-        if os.path.isfile(input_zone_file):
-            with open(input_zone_file) as fp:
-                geojson_obj = json.load(fp)
-        else:
-            geojson_obj = json.loads(input_zone_file)
+        geojson_obj = input_zone_file
             
         # Build tiles by looking up each gid
-        if cell_table == 'prism':
+        if cell_table == 'prism' or cell_table == 'narr':
             # PRISM uses cell centroids
             if input_zone_file:
                 for feat in geojson_obj['features']:
@@ -207,7 +197,12 @@
 
         return tiles, names, cell_extents, cell_defs
 
-    def WEgetDataNARR(self, input_zone_file, units, start_date, end_date):
+    def WEgetDataNARR_CSIP(self, arg_dict):
+        input_zone_file = arg_dict['input_zone_features']
+        units = arg_dict['units']
+        start_date = arg_dict['start_date']
+        end_date = arg_dict['end_date']
+        
         tiles, names, cell_extents, cell_defs = self.getInputZoneInfo(input_zone_file, 'narr')
         
         ident = lambda x: float(x)
@@ -253,8 +248,115 @@
                               ])
             climate_data += cell_data
         return climate_data, cell_defs, cell_extents
+
+    def WEgetDataNARR(self, arg_dict):
+        input_zone_file = arg_dict['input_zone_features']
+        units = arg_dict['units']
+        start_date = arg_dict['start_date']
+        end_date = arg_dict['end_date']
         
-    def WEgetDataMACA(self, input_zone_file, model, forecast_option, units, start_date, end_date):
+        tiles, names, cell_extents, cell_defs = self.getInputZoneInfo(input_zone_file, 'narr')
+        
+        ident = lambda x: float(x)
+        to_english_fn = {
+            'tmp': lambda x: float(x) * 1.8 + 32,  # C -> F
+            'apcp': lambda x: float(x) / 2.54, # cm -> in
+            'dswrf': lambda x: float(x) * 2.06352,  # W/m^2 -> langley/day
+            'rh': ident,
+            'windgust': lambda x: float(x) * 2.23694  # m/s -> mph
+        }
+        to_metric_fn = {
+            'tmp': ident,
+            'apcp': lambda x: float(x) / 10, # mm -> cm
+            'dswrf': ident,  # W/m^2 
+            'rh': ident,
+            'windgust': ident, #lambda x: x / 2.23694  # mph -> m/s
+        }
+
+        if units == 'metric':
+            conv_dict = to_metric_fn
+            header = ['cell', 'date', 'tmpmin (C)', 'tmpmax (C)', 'apcp (cm)', 'dswrf (W/m^2)', 'rh (%)', 'windspeed (m/s)']
+        elif units == 'english':
+            conv_dict = to_english_fn
+            header = ['cell', 'date', 'tmpmin (F)', 'tmpmax (F)', 'apcp (in)', 'dswrf (langley/day)', 'rh (%)', 'windspeed (mph)']
+
+        mc = MongoClient('eds0.engr.colostate.edu')
+        db = mc.climate
+
+        ret = [header,]
+
+        # NOTE: tiles/names/cell_extents/cell_defs were already computed above; duplicate lookup removed
+
+        for itile in range(len(tiles)):
+            x, y = tiles[itile]
+
+            # query the locations for x, y data
+            query = [
+                {
+                    '$match': {
+                        'boundary': {
+                            '$geoIntersects': {
+                                '$geometry': {
+                                    'type': "Point", 
+                                    'coordinates': [x, y]
+                                }
+                            }
+                        }
+                    }
+                },
+                {
+                    '$project': {'x': 1, 'y': 1}
+                },
+                {
+                    '$limit': 10
+                }
+            ]
+
+            results = db.geoNarr.aggregate(query)
+
+            cells = []
+            for x, y in [(d['x'], d['y']) for d in results]: 
+                cells.append([x, y])
+            #assert len(cells) == 1, 'No NARR data for cell {0}'.format(tile)
+
+            # NARR return datetime dates
+            start_date = datetime.datetime(start_date.year, start_date.month, start_date.day)
+            end_date = datetime.datetime(end_date.year, end_date.month, end_date.day)
+            
+            # get data for multiple cells and multiple years
+            for cell in cells:
+                data_results = db.narr.find({'_id': {'$in': ['{0}_{1}_{2}'.format(cell[1], cell[0], y) for y in range(start_date.year, end_date.year+1)]}})
+                data_results = sorted(data_results, key=lambda d: d['year'])
+
+                for i in range(len(data_results)):
+                    data = data_results[i]['data']
+                    data = sorted(data, key=lambda d: d['date'])
+
+                    for j in range(len(data)):
+                        row = data[j]
+                        if row['date'] >= start_date and row['date'] <= end_date:
+                        
+                            # convert precip from mm to cm
+                            ret.append([names[itile],
+                                        row['date'].strftime('%Y-%m-%d'),
+                                        round(conv_dict['tmp'](row['tmin']), 2),
+                                        round(conv_dict['tmp'](row['tmax']), 2),
+                                        round(conv_dict['apcp'](row['ppt']), 3),
+                                        round(conv_dict['dswrf'](row['dswrf']), 1),
+                                        round(conv_dict['rh'](row['rh']), 0),
+                                        round(conv_dict['windgust'](row['windgust']), 2)])
+
+        return ret, cell_defs, cell_extents
+        
+        
+    def WEgetDataMACA(self, arg_dict):
+        input_zone_features = arg_dict['input_zone_features']
+        model = arg_dict['forecast_model']
+        forecast_option = arg_dict['forecast_option']
+        units = arg_dict['units']
+        start_date = arg_dict['start_date']
+        end_date = arg_dict['end_date']
+        
         testPartitioned = False
         testNew = True
         
@@ -284,12 +386,8 @@
         # get centroid of input_zone_file and map it to the available latitudes and longitudes
         names = []
         tiles = []
-        if input_zone_file:
-            if os.path.isfile(input_zone_file):
-                with open(input_zone_file) as fp:
-                    geojson_obj = json.load(fp)
-            else:
-                geojson_obj = json.loads(input_zone_file)
+        if input_zone_features:
+            geojson_obj = input_zone_features
 
             # Build tiles by looking up each gid
             c = getConnWRAP().cursor()            
@@ -436,7 +534,12 @@
             climate_data += cell_data
         return climate_data, cell_defs, cell_extents
 
-    def WEgetDataPRISM(self, input_zone_file, units, start_date, end_date):
+    def WEgetDataPRISM(self, arg_dict):
+        input_zone_file = arg_dict['input_zone_features']
+        units = arg_dict['units']
+        start_date = arg_dict['start_date']
+        end_date = arg_dict['end_date']
+
         prec = 2
         ident = lambda x: round(x, prec) if x != None else None
         to_metric_fn = {
@@ -515,20 +618,24 @@
 
         return ret, cell_defs, cell_extents
 
-    def WEgetDataGHCND(self, input_zone_file_or_station_list, units, start_date, end_date, generate_average=False, IDW_center=False):
+    def WEgetDataGHCND(self, arg_dict):
+        input_zone_file_or_station_list = arg_dict.get('station_list')
+        units = arg_dict['units']
+        start_date = arg_dict['start_date']
+        end_date = arg_dict['end_date']
+        generate_average = arg_dict.get('generate_average', False)
+        IDW_center = arg_dict.get('IDW_center', False)
+        
         names = []
         stations = []
-        if input_zone_file_or_station_list[0] == '[':
+        if 'station_list' in arg_dict:
             # List of stations
-            stations = json.loads(input_zone_file_or_station_list)
+            stations = arg_dict['station_list']
+            # Check for [id, name] pairs
             names = map(lambda x: x[1] if isinstance(x, list) else x, stations)
         else:
             # Input zone file -- get all features in the polygon
-            if os.path.isfile(input_zone_file):
-                with open(input_zone_file) as fp:
-                    geojson_obj = json.load(fp)
-            else:
-                geojson_obj = json.loads(input_zone_file)
+            geojson_obj = arg_dict['input_zone_features']
             c = getConnWRAP().cursor(cursor_factory=psycopg2.extras.DictCursor)
             
             # Build tiles by looking up each gid
@@ -685,22 +792,36 @@
 
         return self.WEprocessPointData(data, units)
 
-    def WEgetDataNCWCD_live(self, stations_json, units, start_date, end_date, climate_data_json):
-        stations = json.loads(stations_json)
-        climate_data = json.loads(climate_data_json)
-        ncwcd_finder = NCWCDDataFinder(stations, start_date, end_date, climate_data)
+    def WEgetDataNCWCD_live(self, arg_dict):
+        stations = arg_dict['station_list']
+        climate_data = arg_dict.get('climate_data', ["tmax", "tmin", "pp", "sr", "rhave", "windspeed_avg"])
+        units = arg_dict.get('units', 'metric')
+        
+        ncwcd_finder = NCWCDDataFinder(stations, arg_dict['start_date'], arg_dict['end_date'], climate_data)
         data = ncwcd_finder.query()
 
         return self.WEprocessPointData(data, units, 'daily', climate_data)
 
-    def WEgetDataCoAgMet(self, stations_json, units, start_date, end_date, time_type='daily', climate_data_json='["tmax", "tmin", "pp", "sr", "rhave", "windspeed_avg"]'):
-        stations = json.loads(stations_json)
-        climate_data = json.loads(climate_data_json)
-        finder = CoAgMetDataFinder(stations, start_date, end_date, time_type, climate_data)
+    def WEgetDataCoAgMet(self, arg_dict):
+        stations = arg_dict['station_list']
+        climate_data = arg_dict.get('climate_data', ["tmax", "tmin", "pp", "sr", "rhave", "windspeed_avg"])
+        time_type = arg_dict.get('time', 'daily')
+        units = arg_dict.get('units', 'metric')
+        
+        finder = CoAgMetDataFinder(stations, arg_dict['start_date'], arg_dict['end_date'], time_type, climate_data)
         data = finder.query()
-        return self.WEprocessPointData(data, units, time_type, climate_data)
 
-    def WEprocessPointData(self, data, units, time_type='daily', climate_data=['tmin', 'tmax', 'pp']):
+        if arg_dict.get('return_full_period', False):
+            return self.WEprocessPointData(data, units, time_type, climate_data, arg_dict['start_date'], arg_dict['end_date'])
+        else:
+            return self.WEprocessPointData(data, units, time_type, climate_data)
+
+    def WEprocessPointData(self, data, units, time_type='daily', climate_data=['tmin', 'tmax', 'pp'], start_date=None, end_date=None):
+        """
+        arguments:
+        `start_date`: if present, then the client wants any missing data to be filled with null records
+        """
+        
         ident = lambda x: round(float(x), 3) if x != None and str(x).find(':') == -1 else x
         to_english_fn = {
             'C': lambda x: round(float(x) * 1.8 + 32, 3) if x != None else None,  # C -> F
@@ -722,8 +843,6 @@
         }
 
         ret = []
-        start_date = None
-        end_date = None
         fmt = None
 
         if data:
@@ -761,30 +880,20 @@
                     header.append('{0} ({1})'.format(climate_type, self.climate_to_english.get(dt, dt)))
 
             ret.append(header)
-            
+
             # Flatten the dictionary list
             for station, station_data in data.iteritems():
-                # Pad at front
-                s = start_date
-                e = end_date
-                
                 if station_data:
-                    s = station_data[0][1]
-                    e = station_data[-1][1]
-                    if isinstance(s, basestring):
-                        s = datetime.datetime.strptime(s, fmt).date()
-                        e = datetime.datetime.strptime(e, fmt).date()
+                    one_day = datetime.timedelta(days=1)
 
-                one_day = datetime.timedelta(days=1)
+                    # Fill in missing data.
+                    missing_row = [None,]*6
+                    first_date = datetime.datetime.strptime(station_data[0][1], fmt).date()
+                    cur_date = start_date if start_date is not None else first_date
+                    while cur_date < first_date:
+                        ret.append([station, cur_date.strftime('%Y-%m-%d')] + missing_row)
+                        cur_date += one_day
 
-                # Fill in missing data.
-                missing_row = [None,]*6
-                cur_date = start_date
-                while cur_date < s:
-                    ret.append([station, cur_date.strftime('%Y-%m-%d')] + missing_row)
-                    cur_date += one_day
-
-                if station_data:
                     for row in station_data:
                         assert len(row) == len(climate_data) + 2, \
                             "station data has wrong number of columns, expected {0}, got {1} for {2} [{3}]".format(len(climate_data)+2, len(row), row, climate_data)
@@ -806,7 +915,6 @@
                         cur_date += one_day
 
                 # Pad at end
-                cur_date = e + datetime.timedelta(days=1)
                 while cur_date <= end_date:
                     ret.append([station, cur_date.strftime('%Y-%m-%d')] + missing_row)
                     cur_date += one_day

test/service_tests/CoAgMet_V1_0/coagmet-req.json

@@ -4,7 +4,7 @@
     },
     "parameter": [{
         "name": "station_list",
-        "value": ["LCN01", "FTC03"],
+        "value": ["lcn01", "gly04"],
         "description": "List of stations IDs"
     }, {
         "name": "units",
@@ -12,11 +12,11 @@
         "description": "'metric' or 'english'"
     }, {
         "name": "start_date",
-        "value": "1950-1-1",
+        "value": "2015-1-1",
         "description": "mm/dd/yy or yyyy-mm-dd"
     }, {
         "name": "end_date",
-        "value": "2015-12-31",
+        "value": "2016-11-16",
         "description": "mm/dd/yy or yyyy-mm-dd"
     }, {
         "name": "time",
@@ -26,5 +26,9 @@
         "name": "climate_data",
         "value": ["tmin", "tmax", "pp", "sr", "rhave", "windspeed_avg"],
         "description": "for daily, choose from ['tave', 'tmax', 'mxtm', 'tmin', 'mntm', 'vp', 'rhmax', 'rhmxtm', 'rhmin', 'rhmntm', 'sr', 'wrun', 'windspeed_avg', 'pp', 'st5mx', 'st5mxtm', 'st5mn', 'st5mntm', 'st15mx', 'st15mxtm', 'st15mn', 'st15mntm', 'volts', 'year', 'gust', 'gusttm', 'gustdir']. For hourly choose from ['tmean', 'rh', 'vp', 'sr', 'ws', 'wind_vec', 'wind_std', 'pp', 'st5', 'st15', 'gust', 'gusttm', 'gustdir']"
+    }, {
+        "name": "return_full_period",
+        "value": true,
+        "description": "If true, then the period of record will be set to the start and end dates. Otherwise the start date is the day that climate data starts and the end date is the last date of data."
     }]
 }
\ No newline at end of file

test/service_tests/NARR_V1_0/narr-req.json

@@ -31,7 +31,7 @@
                   "type": "Point",
                   "coordinates": [
               -105.75,
-              39.9167
+              51.9167
                   ]
                 }
               }