Displaying differences for changeset
 

nbproject/ant-deploy.xml

@@ -1,42 +1,23 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <!--
-DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
 
-Copyright (c) 2006, 2016 Oracle and/or its affiliates. All rights reserved.
+    Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
 
-Oracle and Java are registered trademarks of Oracle and/or its affiliates.
-Other names may be trademarks of their respective owners.
+      http://www.apache.org/licenses/LICENSE-2.0
 
-The contents of this file are subject to the terms of either the GNU
-General Public License Version 2 only ("GPL") or the Common
-Development and Distribution License("CDDL") (collectively, the
-"License"). You may not use this file except in compliance with the
-License. You can obtain a copy of the License at
-http://www.netbeans.org/cddl-gplv2.html
-or nbbuild/licenses/CDDL-GPL-2-CP. See the License for the
-specific language governing permissions and limitations under the
-License.  When distributing the software, include this License Header
-Notice in each file and include the License file at
-nbbuild/licenses/CDDL-GPL-2-CP.  Oracle designates this
-particular file as subject to the "Classpath" exception as provided
-by Oracle in the GPL Version 2 section of the License file that
-accompanied this code. If applicable, add the following below the
-License Header, with the fields enclosed by brackets [] replaced by
-your own identifying information:
-"Portions Copyrighted [year] [name of copyright owner]"
+    Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
 
-If you wish your version of this file to be governed by only the CDDL
-or only the GPL Version 2, indicate your decision by adding
-"[Contributor] elects to include this software in this distribution
-under the [CDDL or GPL Version 2] license." If you do not indicate a
-single choice of license, a recipient has the option to distribute
-your version of this file under either the CDDL, the GPL Version 2 or
-to extend the choice of license to its licensees as provided above.
-However, if you add GPL Version 2 code and therefore, elected the GPL
-Version 2 license, then the option applies only if the new code is
-made subject to such option by the copyright holder.
-
-Contributor(s):
 -->
 <project default="-deploy-ant" basedir=".">
     <target name="-init" if="deploy.ant.enabled">
@@ -59,18 +40,30 @@
     </target>
     <target name="-deploy-ant" if="deploy.ant.enabled" depends="-init,-check-credentials">
         <echo message="Deploying ${deploy.ant.archive} to ${Context(path)}"/>
-        <taskdef name="deploy" classname="org.apache.catalina.ant.DeployTask"
-                 classpath="${tomcat.home}/server/lib/catalina-ant.jar"/>
-        <deploy url="${tomcat.url}/manager" username="${tomcat.username}"
+        <taskdef name="deploy" classname="org.apache.catalina.ant.DeployTask">
+            <classpath>
+                <pathelement path="${tomcat.home}/lib/catalina-ant.jar"/>
+                <pathelement path="${tomcat.home}/lib/tomcat-coyote.jar"/>
+                <pathelement path="${tomcat.home}/lib/tomcat-util.jar"/>
+                <pathelement path="${tomcat.home}/bin/tomcat-juli.jar"/>
+            </classpath>
+        </taskdef>
+        <deploy url="${tomcat.url}/manager/text" username="${tomcat.username}"
                 password="${tomcat.password}" path="${Context(path)}"
                 war="${deploy.ant.archive}"/>
         <property name="deploy.ant.client.url" value="${tomcat.url}${Context(path)}"/>
     </target>
     <target name="-undeploy-ant" if="deploy.ant.enabled" depends="-init,-check-credentials">
         <echo message="Undeploying ${Context(path)}"/>
-        <taskdef name="undeploy"  classname="org.apache.catalina.ant.UndeployTask"
-                classpath="${tomcat.home}/server/lib/catalina-ant.jar"/>
-        <undeploy url="${tomcat.url}/manager" username="${tomcat.username}" 
+        <taskdef name="undeploy"  classname="org.apache.catalina.ant.UndeployTask">
+            <classpath>
+                <pathelement path="${tomcat.home}/lib/catalina-ant.jar"/>
+                <pathelement path="${tomcat.home}/lib/tomcat-coyote.jar"/>
+                <pathelement path="${tomcat.home}/lib/tomcat-util.jar"/>
+                <pathelement path="${tomcat.home}/bin/tomcat-juli.jar"/>
+            </classpath>
+        </taskdef>
+        <undeploy url="${tomcat.url}/manager/text" username="${tomcat.username}" 
                   password="${tomcat.password}" path="${Context(path)}"/>
     </target>
 </project>
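
Note: the reworked <taskdef> reflects the Tomcat 7+ layout: catalina-ant.jar now lives in ${tomcat.home}/lib rather than server/lib, the new <classpath> adds tomcat-coyote, tomcat-util and tomcat-juli alongside it, and the scriptable manager answers at /manager/text instead of /manager. For reference, a minimal client-side sketch of that text interface (not part of the changeset; host, port and credentials are placeholders):

    // Illustrative only: talks to the same Tomcat text-mode manager that the
    // updated <deploy>/<undeploy> tasks use. The user must have the
    // manager-script role.
    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.HttpURLConnection;
    import java.net.URL;
    import java.util.Base64;

    public class ManagerTextList {
        public static void main(String[] args) throws Exception {
            // Same base path as ${tomcat.url}/manager/text in ant-deploy.xml
            URL url = new URL("http://localhost:8080/manager/text/list");
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            String creds = Base64.getEncoder()
                    .encodeToString("tomcat:secret".getBytes("UTF-8"));
            conn.setRequestProperty("Authorization", "Basic " + creds);
            try (BufferedReader in = new BufferedReader(
                    new InputStreamReader(conn.getInputStream(), "UTF-8"))) {
                String line;
                while ((line = in.readLine()) != null) {
                    System.out.println(line); // e.g. "OK - Listed applications ..."
                }
            }
        }
    }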

nbproject/build-impl.xml~

@@ -1012,7 +1012,7 @@
         <copyfiles files="${file.reference.jcommon-1.0.16.jar}" iftldtodir="${build.web.dir}/WEB-INF" todir="${dist.ear.dir}/lib"/>
         <copyfiles files="${file.reference.jfreechart-1.0.13.jar}" iftldtodir="${build.web.dir}/WEB-INF" todir="${dist.ear.dir}/lib"/>
         <copyfiles files="${file.reference.Jama-1.0.2.jar}" iftldtodir="${build.web.dir}/WEB-INF" todir="${dist.ear.dir}/lib"/>
-        <copyfiles files="${file.reference.csip-lib-water-4.3.1.jar}" iftldtodir="${build.web.dir}/WEB-INF" todir="${dist.ear.dir}/lib"/>
+        <copyfiles files="${file.reference.csip-lib-water-4.4.1.jar}" iftldtodir="${build.web.dir}/WEB-INF" todir="${dist.ear.dir}/lib"/>
         <mkdir dir="${build.web.dir}/META-INF"/>
         <manifest file="${build.web.dir}/META-INF/MANIFEST.MF" mode="update"/>
     </target>
@@ -1023,7 +1023,7 @@
         <copyfiles files="${file.reference.jcommon-1.0.16.jar}" todir="${build.web.dir}/WEB-INF/lib"/>
         <copyfiles files="${file.reference.jfreechart-1.0.13.jar}" todir="${build.web.dir}/WEB-INF/lib"/>
         <copyfiles files="${file.reference.Jama-1.0.2.jar}" todir="${build.web.dir}/WEB-INF/lib"/>
-        <copyfiles files="${file.reference.csip-lib-water-4.3.1.jar}" todir="${build.web.dir}/WEB-INF/lib"/>
+        <copyfiles files="${file.reference.csip-lib-water-4.4.1.jar}" todir="${build.web.dir}/WEB-INF/lib"/>
     </target>
     <target depends="init" if="dist.ear.dir" name="-clean-webinf-lib">
         <delete dir="${build.web.dir}/WEB-INF/lib"/>

nbproject/project.properties

@@ -1,6 +1,7 @@
 file.reference.csip-lib-water-4.4.1.jar=lib/csip-lib-water-4.4.1.jar
+file.reference.gson-2.10.1.jar=lib/gson-2.10.1.jar
 file.reference.Jama-1.0.2.jar=lib/Jama-1.0.2.jar
-j2ee.platform.classpath=${j2ee.server.home}/lib/annotations-api.jar:${j2ee.server.home}/lib/catalina-ant.jar:${j2ee.server.home}/lib/catalina-ha.jar:${j2ee.server.home}/lib/catalina-ssi.jar:${j2ee.server.home}/lib/catalina-storeconfig.jar:${j2ee.server.home}/lib/catalina-tribes.jar:${j2ee.server.home}/lib/catalina.jar:${j2ee.server.home}/lib/ecj-4.20.jar:${j2ee.server.home}/lib/el-api.jar:${j2ee.server.home}/lib/jasper-el.jar:${j2ee.server.home}/lib/jasper.jar:${j2ee.server.home}/lib/jaspic-api.jar:${j2ee.server.home}/lib/jsp-api.jar:${j2ee.server.home}/lib/servlet-api.jar:${j2ee.server.home}/lib/tomcat-api.jar:${j2ee.server.home}/lib/tomcat-coyote.jar:${j2ee.server.home}/lib/tomcat-dbcp.jar:${j2ee.server.home}/lib/tomcat-i18n-cs.jar:${j2ee.server.home}/lib/tomcat-i18n-de.jar:${j2ee.server.home}/lib/tomcat-i18n-es.jar:${j2ee.server.home}/lib/tomcat-i18n-fr.jar:${j2ee.server.home}/lib/tomcat-i18n-ja.jar:${j2ee.server.home}/lib/tomcat-i18n-ko.jar:${j2ee.server.home}/lib/tomcat-i18n-pt-BR.jar:${j2ee.server.home}/lib/tomcat-i18n-ru.jar:${j2ee.server.home}/lib/tomcat-i18n-zh-CN.jar:${j2ee.server.home}/lib/tomcat-jdbc.jar:${j2ee.server.home}/lib/tomcat-jni.jar:${j2ee.server.home}/lib/tomcat-util-scan.jar:${j2ee.server.home}/lib/tomcat-util.jar:${j2ee.server.home}/lib/tomcat-websocket.jar:${j2ee.server.home}/lib/websocket-api.jar
+j2ee.platform.classpath=${j2ee.server.home}/bin/tomcat-juli.jar:${j2ee.server.home}/lib/annotations-api.jar:${j2ee.server.home}/lib/catalina-ant.jar:${j2ee.server.home}/lib/catalina-ha.jar:${j2ee.server.home}/lib/catalina-ssi.jar:${j2ee.server.home}/lib/catalina-storeconfig.jar:${j2ee.server.home}/lib/catalina-tribes.jar:${j2ee.server.home}/lib/catalina.jar:${j2ee.server.home}/lib/ecj-4.20.jar:${j2ee.server.home}/lib/el-api.jar:${j2ee.server.home}/lib/jasper-el.jar:${j2ee.server.home}/lib/jasper.jar:${j2ee.server.home}/lib/jaspic-api.jar:${j2ee.server.home}/lib/jsp-api.jar:${j2ee.server.home}/lib/servlet-api.jar:${j2ee.server.home}/lib/tomcat-api.jar:${j2ee.server.home}/lib/tomcat-coyote.jar:${j2ee.server.home}/lib/tomcat-dbcp.jar:${j2ee.server.home}/lib/tomcat-i18n-cs.jar:${j2ee.server.home}/lib/tomcat-i18n-de.jar:${j2ee.server.home}/lib/tomcat-i18n-es.jar:${j2ee.server.home}/lib/tomcat-i18n-fr.jar:${j2ee.server.home}/lib/tomcat-i18n-ja.jar:${j2ee.server.home}/lib/tomcat-i18n-ko.jar:${j2ee.server.home}/lib/tomcat-i18n-pt-BR.jar:${j2ee.server.home}/lib/tomcat-i18n-ru.jar:${j2ee.server.home}/lib/tomcat-i18n-zh-CN.jar:${j2ee.server.home}/lib/tomcat-jdbc.jar:${j2ee.server.home}/lib/tomcat-jni.jar:${j2ee.server.home}/lib/tomcat-util-scan.jar:${j2ee.server.home}/lib/tomcat-util.jar:${j2ee.server.home}/lib/tomcat-websocket.jar:${j2ee.server.home}/lib/websocket-api.jar
 #Fri May 06 09:05:19 MDT 2016
 javadoc.splitindex=true
 lib.dir=${web.docbase.dir}/WEB-INF/lib
@@ -39,7 +40,7 @@
 javadoc.notree=false
 runmain.jvmargs=
 source.root=src
-platform.active=default_platform
+platform.active=JDK_1.8
 javadoc.preview=true
 j2ee.deploy.on.save=true
 annotation.processing.enabled=true
@@ -54,7 +55,8 @@
     ${file.reference.jcommon-1.0.16.jar}:\
     ${file.reference.jfreechart-1.0.13.jar}:\
     ${file.reference.Jama-1.0.2.jar}:\
-    ${file.reference.csip-lib-water-4.4.1.jar}
+    ${file.reference.csip-lib-water-4.4.1.jar}:\
+    ${file.reference.gson-2.10.1.jar}
 javadoc.noindex=false
 webinf.dir=web/WEB-INF
 annotation.processing.enabled.in.editor=true
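
Note: gson-2.10.1 joins the compile classpath here and the deployed WEB-INF/lib in project.xml below; no code in this changeset references it yet. A purely illustrative sketch of the Gson API now available, kept Java 8 compatible to match platform.active=JDK_1.8:

    import com.google.gson.Gson;
    import java.util.HashMap;
    import java.util.Map;

    public class GsonSmoke {
        public static void main(String[] args) {
            Gson gson = new Gson();
            Map<String, Object> params = new HashMap<>();
            params.put("analysis_type", "c17"); // placeholder values
            params.put("skewness", 0.2349);
            String json = gson.toJson(params);  // serialize to a JSON string
            Map<?, ?> back = gson.fromJson(json, Map.class); // and back again
            System.out.println(json + " -> " + back);
        }
    }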

nbproject/project.xml

@@ -10,6 +10,7 @@
         <data xmlns="http://www.netbeans.org/ns/web-project/3">
             <name>csip-cfa</name>
             <minimum-ant-version>1.6.5</minimum-ant-version>
+            <explicit-platform explicit-source-supported="true"/>
             <web-module-libraries>
                 <library dirs="200">
                     <file>${libs.CSIP-Jersey-2.16.classpath}</file>
@@ -39,6 +40,10 @@
                     <file>${file.reference.csip-lib-water-4.4.1.jar}</file>
                     <path-in-war>WEB-INF/lib</path-in-war>
                 </library>
+                <library dirs="200">
+                    <file>${file.reference.gson-2.10.1.jar}</file>
+                    <path-in-war>WEB-INF/lib</path-in-war>
+                </library>
             </web-module-libraries>
             <web-module-additional-libraries/>
             <source-roots>

src/java/m/cfa/flood/V1_0.java

@@ -5,9 +5,14 @@
 import csip.api.server.PayloadResults;
 import csip.annotations.Description;
 import csip.annotations.Name;
+import csip.annotations.Resource;
+import csip.annotations.ResourceType;
 import csip.annotations.VersionInfo;
+import csip.api.server.Executable;
 import java.io.File;
+import java.io.IOException;
 import javax.ws.rs.Path;
+import m.cfa.baseflow.guiBaseflow_Model;
 import org.apache.commons.io.FileUtils;
 import org.codehaus.jettison.json.JSONArray;
 import org.codehaus.jettison.json.JSONObject;
@@ -16,62 +21,93 @@
 @Description("Time Series: Flood Frequency Analysis")
 @VersionInfo("1.0")
 @Path("m/cfa/flood/1.0")
+@Resource(file = "/bin/win-x86/PeakfqSA.exe", wine = true, id = "peakfqsa", type = ResourceType.EXECUTABLE)
 public class V1_0 extends ModelDataService {
 
+    String analysisType = "";
     guiFlood_Model model = new guiFlood_Model();
-    
+    Bulletin17C c17 = new Bulletin17C();
+
     @Override
     protected void preProcess() throws Exception {
-        model.setDirectory(workspace().getDir().toString());
-        
+
         PayloadParameter inputPayload = parameter();
-        model.setDatabase(inputPayload.getString("database"));
-        model.setOrganizationID(inputPayload.getString("org_id"));
-        model.setStationId(inputPayload.getString("station_id"));
-        model.setStationName(inputPayload.getString("station_name"));
-        model.setAnalysisType(inputPayload.getString("analysis_type", "b17"));
-        model.setStartDate(inputPayload.getString("begin_date", ""));
-        model.setEndDate(inputPayload.getString("end_date", ""));
-        model.setSkewness(inputPayload.getDouble("skewness"));
-        model.setMeanSquareError(inputPayload.getDouble("mean_square_error"));
-        model.setShowLargeFloods(inputPayload.getBoolean("show_large_floods", false));
-        model.setPlotType(inputPayload.getBoolean("plot_type", true));
-        model.setPlotReference(inputPayload.getBoolean("plot_ref", true));
-        model.setMergeDatasets(inputPayload.getBoolean("merge_datasets", false));
-        model.setMergeMethod(inputPayload.getString("merge_method", "user"));
-        model.setUserData(inputPayload.getString("user_data", ""));
+        analysisType = inputPayload.getString( "analysis_type", "b17" );
+        model.setDirectory( workspace().getDir().toString() );
+        model.setDatabase( inputPayload.getString( "database" ) );
+        model.setOrganizationID( inputPayload.getString( "org_id" ) );
+        model.setStationId( inputPayload.getString( "station_id" ) );
+        model.setStationName( inputPayload.getString( "station_name" ) );
+        model.setAnalysisType( inputPayload.getString( "analysis_type", "b17" ) );
+        model.setStartDate( inputPayload.getString( "begin_date", "" ) );
+        model.setEndDate( inputPayload.getString( "end_date", "" ) );
+        model.setMergeDatasets( inputPayload.getBoolean( "merge_datasets", false ) );
+        model.setMergeMethod( inputPayload.getString( "merge_method", "user" ) );
+        model.setUserData( inputPayload.getString( "user_data", "" ) );
+        model.setSkewness( inputPayload.getDouble( "skewness" ) );
+        model.setMeanSquareError( inputPayload.getDouble( "mean_square_error" ) );
+
+        if ( analysisType.equalsIgnoreCase( "b17" ) ) {
+            model.setShowLargeFloods( inputPayload.getBoolean( "show_large_floods", false ) );
+            model.setPlotType( inputPayload.getBoolean( "plot_type", true ) );
+            model.setPlotReference( inputPayload.getBoolean( "plot_ref", true ) );
+        } else if ( analysisType.equalsIgnoreCase( "c17" ) ) {
+            Executable e = resources().getExe( "peakfqsa" );
+            model.setAtSiteSkewOption( inputPayload.getString( "at_site_skew_option" ) );
+            model.setHighSystematicThreshold( inputPayload.getDouble( "high_systematic_threshold" ) );
+            model.setLowOutlierThreshold( inputPayload.getDouble( "low_outlier_threshold" ) );
+            model.setLowOutlierMethod( inputPayload.getString( "low_outlier_method" ) );
+            model.setExecutable( e );
+        }
+
     }
 
     @Override
     protected void doProcess() throws Exception {
-        model.run();
+        if ( analysisType.equalsIgnoreCase( "b17" ) ) {
+            model.run();
+        } else if ( analysisType.equalsIgnoreCase( "c17" ) ) {
+            model.run();
+        }
     }
 
     @Override
     protected void postProcess() throws Exception {
+
         PayloadResults resultPayload = results();
-        // files
-        File wd = workspace().getDir();
-        resultPayload.put(new File(wd, model.getGraph()));
 
-        // values
-        resultPayload.put("len", model.getLen());
-        resultPayload.put("start", model.getStart());
-        resultPayload.put("end", model.getEnd());
-        resultPayload.put("data_source", model.getDataSource());
-        resultPayload.put("skewErrorMessage", model.getSkewErrorMessage());
-        String output = FileUtils.readFileToString(model.getOutputWeightedGenSkew(), "UTF-8");
-        resultPayload.put("output", output);
-        resultPayload.put("weighted_generalized_skew", model.getWeightedGenSkew());
-        String output_stationSkew = FileUtils.readFileToString(model.getOutputStationSkew(), "UTF-8");
-        resultPayload.put("output_station_skew", output_stationSkew);
-        resultPayload.put("station_skew", model.getStationSkew());
-        resultPayload.put("graph", model.getGraph());
-        
-        //Arrays
-        resultPayload.put("lowOutliers_station_skew", model.getLowOutliersStationSkew());
-        resultPayload.put("highOutliers_station_skew", model.getHighOutliersStationSkew());
-        resultPayload.put("lowOutliers_weighted_generalized_skew", model.getLowOutliersWeightedGenSkew());
-        resultPayload.put("highOutliers_weighted_generalized_skew", model.getHighOutliersWeightedGenSkew());
+        if ( analysisType.equalsIgnoreCase( "b17" ) ) {
+            // files
+            File wd = workspace().getDir();
+            resultPayload.put( new File( wd, model.getGraph() ) );
+
+            // values
+            resultPayload.put( "len", model.getLen() );
+            resultPayload.put( "start", model.getStart() );
+            resultPayload.put( "end", model.getEnd() );
+            resultPayload.put( "data_source", model.getDataSource() );
+            resultPayload.put( "skewErrorMessage", model.getSkewErrorMessage() );
+            String output = FileUtils.readFileToString( model.getOutputWeightedGenSkew(), "UTF-8" );
+            resultPayload.put( "output", output );
+            resultPayload.put( "weighted_generalized_skew", model.getWeightedGenSkew() );
+            String output_stationSkew = FileUtils.readFileToString( model.getOutputStationSkew(), "UTF-8" );
+            resultPayload.put( "output_station_skew", output_stationSkew );
+            resultPayload.put( "station_skew", model.getStationSkew() );
+            resultPayload.put( "graph", model.getGraph() );
+
+            //Arrays
+            resultPayload.put( "lowOutliers_station_skew", model.getLowOutliersStationSkew() );
+            resultPayload.put( "highOutliers_station_skew", model.getHighOutliersStationSkew() );
+            resultPayload.put( "lowOutliers_weighted_generalized_skew", model.getLowOutliersWeightedGenSkew() );
+            resultPayload.put( "highOutliers_weighted_generalized_skew", model.getHighOutliersWeightedGenSkew() );
+        } else if ( analysisType.equalsIgnoreCase( "c17" ) ) {
+            resultPayload.put( "17c_station_output", model.stationResult.toJSON() );
+            resultPayload.put( "17c_weighted_output", model.weightedResult.toJSON() );
+            resultPayload.put( "finished", true );
+        }
     }
 }
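
Note: the c17 branch reads four new payload parameters (at_site_skew_option, high_systematic_threshold, low_outlier_threshold, low_outlier_method) on top of the existing ones, and skewness and mean_square_error are read with no default. A sketch of a payload exercising the new path, built with the jettison classes the service already imports; the station id and option strings are placeholders, not known-good inputs:

    import org.codehaus.jettison.json.JSONException;
    import org.codehaus.jettison.json.JSONObject;

    public class C17PayloadSketch {
        public static void main(String[] args) throws JSONException {
            JSONObject p = new JSONObject();
            p.put("database", "USGS");
            p.put("org_id", "n/a");
            p.put("station_id", "06752260");          // placeholder gauge id
            p.put("analysis_type", "c17");            // selects the Bulletin 17C path
            p.put("skewness", 0.2349);                // getDouble has no default
            p.put("mean_square_error", 0.302);        // getDouble has no default
            p.put("at_site_skew_option", "WEIGHTED"); // placeholder option string
            p.put("high_systematic_threshold", 1.0e9);
            p.put("low_outlier_method", "FIXED");
            p.put("low_outlier_threshold", 10.0);     // only used when method is FIXED
            System.out.println(p.toString(2));        // pretty-print the payload
        }
    }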

src/java/m/cfa/flood/guiFlood_Model.java

@@ -2,23 +2,30 @@
 
 import WaterData.WaterData;
 import WaterData.WaterDataInterface;
+import csip.api.server.Executable;
 import m.cfa.DoubleArray;
 import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
 import java.io.FileWriter;
 import java.io.IOException;
 import java.io.PrintWriter;
 import java.text.DateFormat;
+import java.text.DecimalFormat;
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
 import java.util.Date;
+import java.util.Scanner;
 import org.codehaus.jettison.json.JSONArray;
 
 /**
-* Last Updated: 9-April-2019
-* @author Tyler Wible
-* @since 13-June-2012
-*/
+ * Last Updated: 9-April-2019
+ *
+ * @author Tyler Wible
+ * @since 13-June-2012
+ */
 public class guiFlood_Model {
+
     String directory = "C:/Projects/TylerWible_repos/NetBeans/data/CFA";
     String database = "USGS";//"CDWR";//"STORET";//"CDSN";//"UserData";//
     String orgId = "n/a";//"n/a";//"21COL001";//"CITYFTCO_WQX";//"n/a";//
@@ -27,15 +34,16 @@
     String analysisType = "B17";
     String startDate = "";
     String endDate = "";
-    double gg = 0.23490029573440552;
-    double MSERbar = 0.302;
+    double skewness = 0.23490029573440552;
+    double meanSquareError = 0.302;
     boolean showLargeFloods = false;
     boolean plotref = true;
     boolean plottype = true;
     boolean mergeDatasets = false;//true;//
     String mergeMethod = "user";//"public";//"max";//"average";//"min";//
-    String userData  = "";//"Date\tFlood\n2012-04-29\t80000.3\n2013-05-09\t60.2\n2014-05-29\t20.1\n2015-04-29\t80000.3\n2016-05-09\t60.2\n2017-05-29\t20.1\n2018-04-29\t80000.3\n2019-05-09\t60.2\n2020-05-29\t20.1\n2021-04-29\t80000.3\n2022-05-09\t60.2\n2023-05-29\t20.1";
-    
+    String userData = "";//"Date\tFlood\n2012-04-29\t80000.3\n2013-05-09\t60.2\n2014-05-29\t20.1\n2015-04-29\t80000.3\n2016-05-09\t60.2\n2017-05-29\t20.1\n2018-04-29\t80000.3\n2019-05-09\t60.2\n2020-05-29\t20.1\n2021-04-29\t80000.3\n2022-05-09\t60.2\n2023-05-29\t20.1";
+    Executable c17Exe = null;
+
     //Outputs
     String len = "-1";
     String start = "?";
@@ -48,150 +56,448 @@
     JSONArray highOutliersStationSkew = new JSONArray();
     JSONArray lowOutliersWeightedGenSkew = new JSONArray();
     JSONArray highOutliersWeightedGenSkew = new JSONArray();
-    
+    private String atSiteSkewOption = "";
+    private double highSystematicThreshold = Double.NaN;
+    private String lowOutlierMethod = "";
+    private double lowOutlierThreshold = Double.NaN;
+    public Bulletin17CResults stationResult;
+    public Bulletin17CResults weightedResult;
+
     //Gets
-    public File getOutputWeightedGenSkew(){ return new File(directory, "flood_summary.txt"); }
-    public File getOutputStationSkew(){ return new File(directory, "flood_summary_stationskew.txt"); }
-    public String getGraph(){ return "flood_graph.jpg"; }
-    public String getLen(){ return len; }
-    public String getStart(){ return start; }
-    public String getEnd(){ return end; }
-    public String getDataSource(){ return dataSource; }
-    public String getSkewErrorMessage(){ return skewErrorMessage; }
-    public String getStationSkew(){ return String.valueOf(stationSkew); }
-    public String getWeightedGenSkew(){ return String.valueOf(weightedGenSkew); }
-    public JSONArray getLowOutliersStationSkew(){ return lowOutliersStationSkew; }
-    public JSONArray getHighOutliersStationSkew(){ return highOutliersStationSkew; }
-    public JSONArray getLowOutliersWeightedGenSkew(){ return lowOutliersWeightedGenSkew; }
-    public JSONArray getHighOutliersWeightedGenSkew(){ return highOutliersWeightedGenSkew; }
-    
+    public File getOutputWeightedGenSkew() {
+        return new File( directory, "flood_summary.txt" );
+    }
+
+    public File getOutputStationSkew() {
+        return new File( directory, "flood_summary_stationskew.txt" );
+    }
+
+    public String getGraph() {
+        return "flood_graph.jpg";
+    }
+
+    public String getLen() {
+        return len;
+    }
+
+    public String getStart() {
+        return start;
+    }
+
+    public String getEnd() {
+        return end;
+    }
+
+    public String getDataSource() {
+        return dataSource;
+    }
+
+    public String getSkewErrorMessage() {
+        return skewErrorMessage;
+    }
+
+    public String getStationSkew() {
+        return String.valueOf( stationSkew );
+    }
+
+    public String getWeightedGenSkew() {
+        return String.valueOf( weightedGenSkew );
+    }
+
+    public JSONArray getLowOutliersStationSkew() {
+        return lowOutliersStationSkew;
+    }
+
+    public JSONArray getHighOutliersStationSkew() {
+        return highOutliersStationSkew;
+    }
+
+    public JSONArray getLowOutliersWeightedGenSkew() {
+        return lowOutliersWeightedGenSkew;
+    }
+
+    public JSONArray getHighOutliersWeightedGenSkew() {
+        return highOutliersWeightedGenSkew;
+    }
+
+    public String getAtSiteSkewOption() {
+        return atSiteSkewOption;
+    }
+
+    public double getHighSystematicThreshold() {
+        return highSystematicThreshold;
+    }
+
+    public String getLowOutlierMethod() {
+        return lowOutlierMethod;
+    }
+
+    public double getLowOutlierThreshold() {
+        return lowOutlierThreshold;
+    }
+
+    public Executable getC17Exe() {
+        return c17Exe;
+    }
+
     //Sets
-    public void setDirectory(String directory_str){ directory = directory_str; }
-    public void setDatabase(String database_str){ database = database_str; }
-    public void setOrganizationID(String orgId_str){ orgId = orgId_str; }
-    public void setStationId(String stationId_str){ stationId = stationId_str; }
-    public void setStationName(String stationName_str){ stationName = stationName_str; }
-    public void setAnalysisType(String analysisType_str){ analysisType = analysisType_str; }
-    public void setStartDate(String startDate_str){ startDate = startDate_str; }
-    public void setEndDate(String endDate_str){ endDate = endDate_str; }
-    public void setSkewness(double gg_dbl){ gg = gg_dbl; }
-    public void setMeanSquareError(double MSERbar_dbl){ MSERbar = MSERbar_dbl; }
-    public void setShowLargeFloods(boolean showLargeFloods_TF){ showLargeFloods = showLargeFloods_TF; }
-    public void setPlotType(boolean plottype_TF){ plottype = plottype_TF; }
-    public void setPlotReference(boolean plotref_TF){ plotref = plotref_TF; }
-    public void setMergeDatasets(boolean mergeDatasets_TF){ mergeDatasets = mergeDatasets_TF; }
-    public void setMergeMethod(String mergeMethod_str){ mergeMethod = mergeMethod_str; }
-    public void setUserData(String userData_str){ userData = userData_str; }
+    public void setDirectory( String directory_str ) {
+        directory = directory_str;
+    }
+
+    public void setDatabase( String database_str ) {
+        database = database_str;
+    }
+
+    public void setOrganizationID( String orgId_str ) {
+        orgId = orgId_str;
+    }
+
+    public void setStationId( String stationId_str ) {
+        stationId = stationId_str;
+    }
+
+    public void setStationName( String stationName_str ) {
+        stationName = stationName_str;
+    }
+
+    public void setAnalysisType( String analysisType_str ) {
+        analysisType = analysisType_str;
+    }
+
+    public void setStartDate( String startDate_str ) {
+        startDate = startDate_str;
+    }
+
+    public void setEndDate( String endDate_str ) {
+        endDate = endDate_str;
+    }
+
+    public void setSkewness( double skewness ) {
+        this.skewness = skewness;
+    }
+
+    public void setMeanSquareError( double meanSquareError ) {
+        this.meanSquareError = meanSquareError;
+    }
+
+    public void setShowLargeFloods( boolean showLargeFloods_TF ) {
+        showLargeFloods = showLargeFloods_TF;
+    }
+
+    public void setPlotType( boolean plottype_TF ) {
+        plottype = plottype_TF;
+    }
+
+    public void setPlotReference( boolean plotref_TF ) {
+        plotref = plotref_TF;
+    }
+
+    public void setMergeDatasets( boolean mergeDatasets_TF ) {
+        mergeDatasets = mergeDatasets_TF;
+    }
+
+    public void setMergeMethod( String mergeMethod_str ) {
+        mergeMethod = mergeMethod_str;
+    }
+
+    public void setUserData( String userData_str ) {
+        userData = userData_str;
+    }
+
+    public void setAtSiteSkewOption( String atSiteSkewOption ) {
+        this.atSiteSkewOption = atSiteSkewOption;
+    }
+
+    public void setHighSystematicThreshold( double highSystematicThreshold ) {
+        this.highSystematicThreshold = highSystematicThreshold;
+    }
+
+    public void setLowOutlierMethod( String lowOutlierMethod ) {
+        this.lowOutlierMethod = lowOutlierMethod;
+    }
+
+    public void setLowOutlierThreshold( double lowOutlierThreshold ) {
+        this.lowOutlierThreshold = lowOutlierThreshold;
+    }
+
+    public void setExecutable( Executable e ) {
+        c17Exe = e;
+    }
+
     /**
-     * Writes out the dynamically created summary table to be displayed to the user along with the flood graph.  
-     * This function exits when completed
-     * @param dynamicSummary  string[][] array to be written as each line of the text file
+     * Writes out the dynamically created summary table to be displayed to the
+     * user along with the flood graph. This method returns once the file has
+     * been written.
+     *
+     * @param dynamicSummary string[][] array to be written as each line of the
+     * text file
      * @throws IOException
      */
-    public void writeSummary(String[][] dynamicSummary, String resultFileName) throws IOException{
+    public void writeSummary( String[][] dynamicSummary, String resultFileName ) throws IOException {
         String path = directory + File.separator + resultFileName;
-        FileWriter writer =  new FileWriter(path, false);
-        PrintWriter print_line = new PrintWriter(writer);
+        FileWriter writer = new FileWriter( path, false );
+        PrintWriter print_line = new PrintWriter( writer );
 
         //Output data to text file
         String currentLine = "";
-        for(int i=0; i < dynamicSummary.length; i++) {
-            for(int j=0; j<dynamicSummary[i].length; j++){
-                if(j == 0){
-                    currentLine = dynamicSummary[i][j];
-                }else{
-                    currentLine = currentLine + "\t" + dynamicSummary[i][j];
+        for ( int i = 0; i < dynamicSummary.length; i++ ) {
+            for ( int j = 0; j < dynamicSummary[ i ].length; j++ ) {
+                if ( j == 0 ) {
+                    currentLine = dynamicSummary[ i ][ j ];
+                } else {
+                    currentLine = currentLine + "\t" + dynamicSummary[ i ][ j ];
                 }
             }
-            print_line.printf("%s" + "\r\n", currentLine);//Separate the rows with $$ to make substrings easier later in the interface
+            print_line.printf( "%s" + "\r\n", currentLine );//Write each row on its own CRLF-terminated line for the interface to parse
         }
         print_line.close();
         writer.close();
-        System.out.println("Text File located at:\t" + path);
+        System.out.println( "Text File located at:\t" + path );
     }
+
     /**
-     * Writes out the error message, if any, for finding the file and then exits the program
-     * @param error  string array to be written as each line of an error message
+     * Collects the accumulated error messages into a single message and
+     * reports them by throwing an IOException
+     *
+     * @param error string array to be written as each line of an error message
      * @throws IOException
      */
-    public void writeError(ArrayList<String> error) throws IOException{
+    public void writeError( ArrayList<String> error ) throws IOException {
         //Output data to text file
-        String errorContents = error.get(0);
-        for(int i=1; i<error.size(); i++){
-            errorContents = errorContents + "\n" + error.get(i);
+        String errorContents = error.get( 0 );
+        for ( int i = 1; i < error.size(); i++ ) {
+            errorContents = errorContents + "\n" + error.get( i );
         }
-        throw new IOException("Error encountered. Please see the following message for details: \n" + errorContents);
+        throw new IOException( "Error encountered. Please see the following message for details: \n" + errorContents );
     }
+
+    /**
+     * Writes out the input files required by the PeakfqSA executable
+     *
+     * @param peakFlows double[][] array of peak flow data to be written as
+     * each line of the text file
+     * @throws IOException
+     */
+    private void writeC17inputFiles( double[][] peakFlows ) throws IOException {
+        //Fix Start and End year to match the USGS data file.
+        String startYear = new DecimalFormat( "#" ).format( peakFlows[ 0 ][ 0 ] );
+        String endYear = new DecimalFormat( "#" ).format( peakFlows[ peakFlows.length - 1 ][ 0 ] );
+        this.setStartDate( startYear + "-01-01" ); //for simplicity, month and day are extracted later anyway.
+        this.setEndDate( endYear + "-01-01" );
+
+        //Write input data file for PeakfqSA.exe
+        String path = directory + File.separator + "c17Weighted.spc";
+        try ( FileWriter fileWriter = new FileWriter( path, false ); PrintWriter writer = new PrintWriter( fileWriter ) ) {
+            //First the model options
+            writer.print( "STATION     " + this.stationId + "\n" );
+            writer.print( "I     c17Weighted.spc\n" );
+            writer.print( "CSV     YES\n" );
+            writer.print( "BEGYEAR     " + this.startDate.split( "-" )[ 0 ] + "\n" );
+            writer.print( "ENDYEAR     " + this.endDate.split( "-" )[ 0 ] + "\n" );
+            writer.print( "GENSKEW     " + String.valueOf( this.skewness ) + "\n" );
+            writer.print( "SKEWMSE     " + String.valueOf( this.meanSquareError ) + "\n" );
+            writer.print( "PP_ALPHA     0.05\n" );
+            writer.print( "A_S_SKEW_OPT     " + this.atSiteSkewOption + "\n" );
+            if ( !Double.isNaN( this.highSystematicThreshold ) ) {
+                writer.print( "HISYS     " + this.highSystematicThreshold + "\n" );
+            }
+            if ( !this.lowOutlierMethod.isEmpty() ) {
+                writer.print( "LOMETHOD     " + this.lowOutlierMethod + "\n" );
+            }
+            if ( this.lowOutlierMethod.equalsIgnoreCase( "FIXED" ) ) {
+                writer.print( "LOTHRESH     " + this.lowOutlierThreshold + "\n" );
+            }
+            writer.print( "SKEWOPT     WEIGHTED\n" ); //Run twice overall: WEIGHTED here, STATION in the second file
+            writer.print( "THRESHOLD     " + startYear + "     " + endYear + "     0     1.00E+010\n" ); //Just something to make it run for now.
+
+            //Then the peak flow values.
+            for ( double[] peakFlow : peakFlows ) {
+                //Convert values to strings to lop off decimal places.
+                DecimalFormat df = new DecimalFormat( "#" );
+                String year = df.format( peakFlow[ 0 ] );
+                String rate = df.format( peakFlow[ 1 ] );
+
+                //write values to file
+                writer.print( "Q     " + year + "     " + rate + "\n" );
+            }
+        }
+        System.out.println( "Weighted spc file located at:\t" + path );
+
+        path = directory + File.separator + "c17Station.spc";
+        try ( FileWriter fileWriter = new FileWriter( path, false ); PrintWriter writer = new PrintWriter( fileWriter ) ) {
+            writer.print( "STATION     " + this.stationId + "\n" );
+            writer.print( "I     c17Station.spc\n" );
+            writer.print( "CSV     YES\n" );
+            writer.print( "BEGYEAR     " + this.startDate.split( "-" )[ 0 ] + "\n" );
+            writer.print( "ENDYEAR     " + this.endDate.split( "-" )[ 0 ] + "\n" );
+            writer.print( "GENSKEW     " + String.valueOf( this.skewness ) + "\n" );
+            writer.print( "SKEWMSE     " + String.valueOf( this.meanSquareError ) + "\n" );
+            writer.print( "PP_ALPHA     0.05\n" );
+            writer.print( "A_S_SKEW_OPT     " + this.atSiteSkewOption + "\n" );
+            if ( !Double.isNaN( this.highSystematicThreshold ) ) {
+                writer.print( "HISYS     " + this.highSystematicThreshold + "\n" );
+            }
+            if ( !this.lowOutlierMethod.isEmpty() ) {
+                writer.print( "LOMETHOD     " + this.lowOutlierMethod + "\n" );
+            }
+            if ( this.lowOutlierMethod.equalsIgnoreCase( "FIXED" ) ) {
+                writer.print( "LOTHRESH     " + this.lowOutlierThreshold + "\n" );
+            }
+            writer.print( "SKEWOPT     STATION\n" );
+            writer.print( "THRESHOLD     " + startYear + "     " + endYear + "     0     1.00E+010\n" ); //Just something to make it run for now.
+            //TODO: writer.println("PCPT_TRESH     ");
+
+            for ( double[] peakFlow : peakFlows ) {
+                //Convert values to strings to lop off decimal places.
+                DecimalFormat df = new DecimalFormat( "#" );
+                String year = df.format( peakFlow[ 0 ] );
+                String rate = df.format( peakFlow[ 1 ] );
+
+                //write values to file
+                writer.print( "Q     " + year + "     " + rate + "\n" );
+            }
+        }
+
+        //Write cmd file for PeakfqSA.exe
+        path = directory + File.separator + "c17.cmd";
+        try ( FileWriter fileWriter = new FileWriter( path, false ); PrintWriter writer = new PrintWriter( fileWriter ) ) {
+            writer.print( "c17Weighted.spc\n" );
+            writer.print( "c17Station.spc\n" );
+        }
+    }
+
+    public Bulletin17CResults readC17outputFile( String path ) throws FileNotFoundException {
+        Bulletin17CResults finalResult = new Bulletin17CResults();
+
+        boolean foundFreqValues = false;
+        boolean foundWYValues = false;
+        try ( Scanner scanner = new Scanner( new File( path ) ) ) {
+            while ( scanner.hasNext() ) {
+                String line = scanner.nextLine().trim();
+                if ( line.startsWith( "PP," ) ) {
+                    foundFreqValues = true;
+                } else if ( line.startsWith( "WY," ) ) {
+                    foundWYValues = true;
+                } else if ( line.isEmpty() ) {
+                    foundFreqValues = false;
+                    foundWYValues = false;
+                } else if ( foundFreqValues ) {
+                    String[] tokens = line.split( "," );
+                    Bulletin17CFreqResult res = new Bulletin17CFreqResult();
+                    res.probabilityPct = Double.parseDouble( tokens[ 0 ] );
+                    res.returnPeriod = Double.parseDouble( tokens[ 1 ] );
+                    res.zScore = Double.parseDouble( tokens[ 2 ] );
+                    res.estimatedDischarge = Double.parseDouble( tokens[ 3 ] );
+                    res.lowerCI = Double.parseDouble( tokens[ 4 ] );
+                    res.upperCI = Double.parseDouble( tokens[ 5 ] );
+                    finalResult.addResult( res );
+
+                } else if ( foundWYValues ) {
+                    String[] tokens = line.split( "," );
+                    BulletinC17WYResult res = new BulletinC17WYResult();
+                    res.waterYear = Integer.parseInt( tokens[ 0 ] );
+                    res.probability = Double.parseDouble( tokens[ 1 ] );
+                    res.zScore = Double.parseDouble( tokens[ 2 ] );
+                    res.lowDischargeEstimate = Double.parseDouble( tokens[ 3 ] );
+                    res.highDischargeEstiamte = Double.parseDouble( tokens[ 4 ] );
+                    res.fittedDischarge = Double.parseDouble( tokens[ 5 ] );
+                    finalResult.addResult( res );
+                }
+            }
+        }
+
+        return finalResult;
+    }
+
+    public void readC17outputFiles() throws FileNotFoundException, IOException {
+        String path = directory + File.separator + "c17Station.csv";
+        this.stationResult = readC17outputFile( path );
+
+        path = directory + File.separator + "c17Weighted.csv";
+        this.weightedResult = readC17outputFile( path );
+    }
+
     public void run() throws IOException, Exception {
         //If no date input, make it the maximum of available data
-        if(startDate == null || startDate.equalsIgnoreCase("")){
+        if ( startDate == null || startDate.equalsIgnoreCase( "" ) ) {
             startDate = "1850-01-01";
         }
-        if(endDate == null || endDate.equalsIgnoreCase("")){
+        if ( endDate == null || endDate.equalsIgnoreCase( "" ) ) {
             // Pull current date for upper limit of data search
-            DateFormat desiredDateFormat = new SimpleDateFormat("yyyy-MM-dd");
+            DateFormat desiredDateFormat = new SimpleDateFormat( "yyyy-MM-dd" );
             Date currentDate = new Date();
-            endDate = desiredDateFormat.format(currentDate);
+            endDate = desiredDateFormat.format( currentDate );
         }
-        
+
+        //Check if any flow data exists
+        WaterDataInterface waterLib = WaterData.getNewWaterDataInterface( database, userData );
+        double[][] peakFlowData = waterLib.extractFloodData_formatted( directory, orgId, stationId, startDate, endDate );
+        dataSource = waterLib.getDataSourceCitation();
+
+        //Check if merging the datasets is desired, if so get the user data
+        double[][] peakFlowData_user = new double[ 0 ][ 0 ];
+        if ( mergeDatasets ) {
+            WaterDataInterface waterLibUser = WaterData.getNewWaterDataInterface( "UserData", userData );
+            peakFlowData_user = waterLibUser.extractFloodData_formatted( directory, orgId, stationId, startDate, endDate );
+        }
+
+        //Merge the two datasets (if user data is empty nothing will be merged)
+        double[][] peakFlowData_combined = DoubleArray.mergeData( peakFlowData, peakFlowData_user, mergeMethod );
+        if ( peakFlowData_combined.length == 0 ) {
+            ArrayList<String> errorMessage = new ArrayList<>();
+            if ( peakFlowData.length == 0 ) {
+                errorMessage.add( "There is no available flood data in the " + database + " database for station '" + stationId + "' and the specified date range." );
+                if ( database.equalsIgnoreCase( "CDWR" ) ) {
+                    errorMessage.add( "The CDWR database is sensitive to the begin date used, try specifying a later begin date" );
+                }
+            }
+            if ( peakFlowData_user.length == 0 ) {
+                errorMessage.add( "There is no available uploaded flow data for station '" + stationId + "' and the specified date range" );
+            }
+            writeError( errorMessage );
+        }
+
         //Decide which analysis to perform
-        if(analysisType.equalsIgnoreCase("B17")){
-            //Check if any flow data exists
-            WaterDataInterface waterLib = WaterData.getNewWaterDataInterface(database, userData);
-            double[][] peakFlowData = waterLib.extractFloodData_formatted(directory, orgId, stationId, startDate, endDate);
-            dataSource = waterLib.getDataSourceCitation();
-            
-            //Check if merging the datasets is desired, if so get the user data
-            double[][] peakFlowData_user = new double[0][0];
-            if(mergeDatasets){
-                WaterDataInterface waterLibUser = WaterData.getNewWaterDataInterface("UserData", userData);
-                peakFlowData_user = waterLibUser.extractFloodData_formatted(directory, orgId, stationId, startDate, endDate);
-            }
-            
-            //Merge the two datasets (if user data is empty nothing will be merged)
-            double[][] peakFlowData_combined = DoubleArray.mergeData(peakFlowData, peakFlowData_user, mergeMethod);
-            if(peakFlowData_combined.length == 0){
-                ArrayList<String> errorMessage = new ArrayList<>();
-                if(peakFlowData.length == 0){
-                    errorMessage.add("There is no available flood data in the " + database + " database for station '" + stationId + "' and the specified date range.");
-                    if(database.equalsIgnoreCase("CDWR")){
-                        errorMessage.add("The CDWR database is sensitive to the begin date used, try specifying a later begin date");
-                    }
-                }
-                if(peakFlowData_user.length == 0){
-                    errorMessage.add("There is no available uploaded flow data for station '" + stationId + "' and the specified date range");
-                }
-                writeError(errorMessage);
-            }
-            
+        if ( analysisType.equalsIgnoreCase( "B17" ) ) {
+
             //Run Bulletin 17 function and return graph
             Bulletin17B bulletin17B = new Bulletin17B();
-            Object[] returnArrayStationSkew = bulletin17B.b17(peakFlowData_combined, Double.NaN, MSERbar, directory, database, stationId, stationName, showLargeFloods, plotref, plottype);
-            String[][] dataSummaryStationSkew = (String[][]) returnArrayStationSkew[0];
-            stationSkew = (double) returnArrayStationSkew[1];
-            lowOutliersStationSkew = (JSONArray) returnArrayStationSkew[2];
-            highOutliersStationSkew = (JSONArray) returnArrayStationSkew[3];
-            Object[] returnArrayWeightedGenSkew = bulletin17B.b17(peakFlowData_combined, gg, MSERbar, directory, database, stationId, stationName, showLargeFloods, plotref, plottype);
-            String[][] dataSummaryWeightedGenSkew = (String[][]) returnArrayWeightedGenSkew[0];
-            weightedGenSkew = (double) returnArrayWeightedGenSkew[1];
-            lowOutliersWeightedGenSkew = (JSONArray) returnArrayStationSkew[2];
-            highOutliersWeightedGenSkew = (JSONArray) returnArrayStationSkew[3];
-            len = String.valueOf(peakFlowData_combined.length);
-            start = String.valueOf(peakFlowData_combined[0][0]);
-            end = String.valueOf(peakFlowData_combined[peakFlowData_combined.length - 1][0]);
+            Object[] returnArrayStationSkew = bulletin17B.b17( peakFlowData_combined, Double.NaN, meanSquareError, directory, database, stationId, stationName, showLargeFloods, plotref, plottype );
+            String[][] dataSummaryStationSkew = (String[][]) returnArrayStationSkew[ 0 ];
+            stationSkew = (double) returnArrayStationSkew[ 1 ];
+            lowOutliersStationSkew = (JSONArray) returnArrayStationSkew[ 2 ];
+            highOutliersStationSkew = (JSONArray) returnArrayStationSkew[ 3 ];
+            Object[] returnArrayWeightedGenSkew = bulletin17B.b17( peakFlowData_combined, skewness, meanSquareError, directory, database, stationId, stationName, showLargeFloods, plotref, plottype );
+            String[][] dataSummaryWeightedGenSkew = (String[][]) returnArrayWeightedGenSkew[ 0 ];
+            weightedGenSkew = (double) returnArrayWeightedGenSkew[ 1 ];
+            lowOutliersWeightedGenSkew = (JSONArray) returnArrayWeightedGenSkew[ 2 ];
+            highOutliersWeightedGenSkew = (JSONArray) returnArrayWeightedGenSkew[ 3 ];
+            len = String.valueOf( peakFlowData_combined.length );
+            start = String.valueOf( peakFlowData_combined[ 0 ][ 0 ] );
+            end = String.valueOf( peakFlowData_combined[ peakFlowData_combined.length - 1 ][ 0 ] );
             skewErrorMessage = bulletin17B.skewErrorMessage;
 
             //Write out the data summary to be displayed with the graph
-            writeSummary(dataSummaryWeightedGenSkew, getOutputWeightedGenSkew().getName());
-            writeSummary(dataSummaryStationSkew, getOutputStationSkew().getName());
-            
-        }else{
-            throw new IOException("Error: Flood analysis method specified is not 'B17'");
+            writeSummary( dataSummaryWeightedGenSkew, getOutputWeightedGenSkew().getName() );
+            writeSummary( dataSummaryStationSkew, getOutputStationSkew().getName() );
+
+        } else if ( analysisType.equalsIgnoreCase( "c17" ) ) {
+
+            writeC17inputFiles( peakFlowData_combined );
+            File inputFile = new File( directory + File.separator + "c17.cmd" );
+            c17Exe.addArguments( inputFile.getAbsolutePath() );
+            c17Exe.exec();
+
+            readC17outputFiles();
+
+        } else {
+            throw new IOException( "Error: Flood analysis method specified is not 'B17' or 'C17'" );
         }
     }
-    public static void main(String[] args) throws IOException, Exception{
+
+    public static void main( String[] args ) throws IOException, Exception {
         //Run Model
         guiFlood_Model floodModel = new guiFlood_Model();
         floodModel.run();
     }
-}
\ No newline at end of file
+}
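
Note: readC17outputFile expects a frequency-curve section headed by a "PP," row (columns: probability percent, return period, z-score, estimated discharge, lower CI, upper CI) and a water-year section headed by a "WY," row (water year, probability, z-score, low estimate, high estimate, fitted discharge), each ended by a blank line. A self-contained smoke test under that assumed layout; every number below is fabricated:

    import java.io.File;
    import java.io.FileWriter;

    public class C17ParserSmokeTest {
        public static void main(String[] args) throws Exception {
            File csv = File.createTempFile("c17Station", ".csv");
            try (FileWriter w = new FileWriter(csv)) {
                // Frequency-curve section: six comma-separated columns per row
                w.write("PP,RP,Z,EST,LOW,HIGH\n");
                w.write("1.0,100.0,2.33,54321.0,41000.0,72000.0\n");
                w.write("\n"); // blank line ends the section
                // Water-year section, also six columns per row
                w.write("WY,P,Z,QLOW,QHIGH,QFIT\n");
                w.write("1998,0.5,0.0,1200.0,1500.0,1350.0\n");
            }
            guiFlood_Model model = new guiFlood_Model();
            Bulletin17CResults r = model.readC17outputFile(csv.getAbsolutePath());
            System.out.println("parsed: " + r.toJSON()); // toJSON per postProcess usage
            csv.delete();
        }
    }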