V1_0.java [src/java/m/cfa/flood] Revision: 10c1e1c64828299e898620a0cc734637ac001dd9  Date: Sat May 04 08:58:59 MDT 2024
package m.cfa.flood;

import csip.ModelDataService;
import csip.api.server.PayloadParameter;
import csip.api.server.PayloadResults;
import csip.annotations.Description;
import csip.annotations.Name;
import csip.annotations.Resource;
import csip.annotations.ResourceType;
import csip.annotations.VersionInfo;
import csip.api.server.Executable;
import java.io.File;
import javax.ws.rs.Path;
import org.apache.commons.io.FileUtils;

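/**
 * CSIP model service for flood frequency analysis of a peak-flow time series.
 * Supports two analysis types, "b17" and "c17"; the "c17" path delegates to
 * the external PeakfqSA executable registered via the {@code @Resource}
 * annotation below.
 *
 * <p>Illustrative request parameters (the parameter names are taken from
 * {@link #preProcess()}; the values shown are hypothetical and only indicate
 * the expected shape of a request):
 * <pre>
 * {
 *   "analysis_type":     "b17",
 *   "database":          "peaks.db",
 *   "org_id":            "EXAMPLE_ORG",
 *   "station_id":        "STATION_01",
 *   "station_name":      "Example Station",
 *   "begin_date":        "1950-01-01",
 *   "end_date":          "2020-12-31",
 *   "skewness":          -0.1,
 *   "mean_square_error": 0.3
 * }
 * </pre>
 */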
@Name("flood")
@Description("Time Series: Flood Frequency Analysis")
@VersionInfo("1.0")
@Path("m/cfa/flood/1.0")
@Resource(file = "/bin/win-x86/PeakfqSA.exe", wine = true, id = "peakfqsa", type = ResourceType.EXECUTABLE)
public class V1_0 extends ModelDataService {

    String analysisType = "";
    guiFlood_Model model = new guiFlood_Model();

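    /**
     * Reads the request payload and configures the flood model: common
     * parameters first, then the options that apply only to the selected
     * analysis type ("b17" or "c17").
     */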
    @Override
    protected void preProcess() throws Exception {

        PayloadParameter inputPayload = parameter();
        analysisType = inputPayload.getString( "analysis_type", "b17" );
        model.setDirectory( workspace().getDir().toString() );
        model.setDatabase( inputPayload.getString( "database" ) );
        model.setOrganizationID( inputPayload.getString( "org_id" ) );
        model.setStationId( inputPayload.getString( "station_id" ) );
        model.setStationName( inputPayload.getString( "station_name" ) );
        model.setAnalysisType( analysisType );
        model.setStartDate( inputPayload.getString( "begin_date", "" ) );
        model.setEndDate( inputPayload.getString( "end_date", "" ) );
        model.setMergeDatasets( inputPayload.getBoolean( "merge_datasets", false ) );
        model.setMergeMethod( inputPayload.getString( "merge_method", "user" ) );
        model.setUserData( inputPayload.getString( "user_data", "" ) );
        model.setSkewness( inputPayload.getDouble( "skewness" ) );
        model.setMeanSquareError( inputPayload.getDouble( "mean_square_error" ) );
        if ( analysisType.equalsIgnoreCase( "b17" ) ) {
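            // "b17"-specific plotting options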
            model.setShowLargeFloods( inputPayload.getBoolean( "show_large_floods", false ) );
            model.setPlotType( inputPayload.getBoolean( "plot_type", true ) );
            model.setPlotReference( inputPayload.getBoolean( "plot_ref", true ) );
        } else if ( analysisType.equalsIgnoreCase( "c17" ) ) {
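            // the "c17" analysis runs the external PeakfqSA executable
            // declared by the @Resource annotation on this class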
            Executable e = resources().getExe( "peakfqsa" );
            model.setAtSiteSkewOption( inputPayload.getString( "at_site_skew_option" ) );
            model.setHighSystematicThreshold( inputPayload.getDouble( "high_systematic_threshold" ) );
            model.setLowOutlierThreshold( inputPayload.getDouble( "low_outlier_threshold" ) );
            model.setLowOutlierMethod( inputPayload.getString( "low_outlier_method" ) );
            model.setExecutable( e );
        }

    }

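    /**
     * Runs the configured analysis. Both analysis types use the same model
     * entry point; the type-specific behavior is set up in preProcess().
     */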
    @Override
    protected void doProcess() throws Exception {
        if ( analysisType.equalsIgnoreCase( "b17" ) || analysisType.equalsIgnoreCase( "c17" ) ) {
            model.run();
        }
    }

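    /**
     * Builds the result payload: for "b17" the plot file, summary values,
     * skew outputs, and outlier arrays; for "c17" the station and weighted
     * results as JSON plus the skew values.
     */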
    @Override
    protected void postProcess() throws Exception {

        PayloadResults resultPayload = results();

        if ( analysisType.equalsIgnoreCase( "b17" ) ) {
            // files
            File wd = workspace().getDir();
            resultPayload.put( new File( wd, model.getGraph() ) );

            // values
            resultPayload.put( "len", model.getLen() );
            resultPayload.put( "start", model.getStart() );
            resultPayload.put( "end", model.getEnd() );
            resultPayload.put( "data_source", model.getDataSource() );
            resultPayload.put( "skewErrorMessage", model.getSkewErrorMessage() );
            String output = FileUtils.readFileToString( model.getOutputWeightedGenSkew(), "UTF-8" );
            resultPayload.put( "output", output );
            resultPayload.put( "weighted_generalized_skew", model.getWeightedGenSkew() );
            String output_stationSkew = FileUtils.readFileToString( model.getOutputStationSkew(), "UTF-8" );
            resultPayload.put( "output_station_skew", output_stationSkew );
            resultPayload.put( "station_skew", model.getStationSkew() );
            resultPayload.put( "graph", model.getGraph() );

            // arrays
            resultPayload.put( "lowOutliers_station_skew", model.getLowOutliersStationSkew() );
            resultPayload.put( "highOutliers_station_skew", model.getHighOutliersStationSkew() );
            resultPayload.put( "lowOutliers_weighted_generalized_skew", model.getLowOutliersWeightedGenSkew() );
            resultPayload.put( "highOutliers_weighted_generalized_skew", model.getHighOutliersWeightedGenSkew() );
        } else if ( analysisType.equalsIgnoreCase( "c17" ) ) {
            resultPayload.put( "17c_station_output", model.stationResult.toJSON() );
            resultPayload.put( "17c_weighted_output", model.weightedResult.toJSON() );
            resultPayload.put( "weighted_generalized_skew", model.getWeightedGenSkew() );
            resultPayload.put( "station_skew", model.getStationSkew() );
        }
    }
}