V2_0.java [src/java/m/cfa/flood] Revision: 017f661d0041f1386c002d47a83b1f6052176b16  Date: Tue Apr 30 15:56:23 MDT 2024
package m.cfa.flood;

import csip.ModelDataService;
import csip.api.server.PayloadParameter;
import csip.api.server.PayloadResults;
import csip.annotations.Description;
import csip.annotations.Name;
import csip.annotations.Resource;
import csip.annotations.ResourceType;
import csip.annotations.VersionInfo;
import csip.api.server.Executable;
import java.io.File;
import javax.ws.rs.Path;
import org.apache.commons.io.FileUtils;

@Name("flood")
@Description("Time Series: Flood Frequency Analysis")
@VersionInfo("1.0")
@Path("m/cfa/flood/1.0")
@Resource(file = "/bin/win-x86/PeakfqSA.exe", wine = true, id = "peakfqsa", type = ResourceType.EXECUTABLE)
public class V2_0 extends ModelDataService {

    // Requested analysis type, captured once in preProcess() and reused by
    // doProcess()/postProcess(). Payload default is "b17".
    String analysisType = "";
    guiFlood_Model model = new guiFlood_Model();

    /**
     * Returns true when the requested analysis is Bulletin 17B.
     *
     * NOTE(review): the original code compared against "17b" in preProcess()/
     * doProcess() but "b17" in postProcess() — and "b17" is the payload
     * default — so no single input value matched all three phases (with the
     * default, model.run() was never invoked). Both spellings are accepted
     * here, case-insensitively, to stay compatible with either convention.
     */
    private boolean is17B() {
        return analysisType.equalsIgnoreCase( "b17" ) || analysisType.equalsIgnoreCase( "17b" );
    }

    /** Returns true when the requested analysis is Bulletin 17C (accepts "c17" and "17c"). */
    private boolean is17C() {
        return analysisType.equalsIgnoreCase( "c17" ) || analysisType.equalsIgnoreCase( "17c" );
    }

    /**
     * Reads the request payload and configures the model.
     * Common parameters are applied for both analysis types; the 17C analysis
     * additionally needs the PeakfqSA executable resource.
     */
    @Override
    protected void preProcess() throws Exception {

        PayloadParameter inputPayload = parameter();

        // Read analysis_type once and reuse it (the original read it twice).
        analysisType = inputPayload.getString( "analysis_type", "b17" );

        model.setDirectory( workspace().getDir().toString() );
        model.setDatabase( inputPayload.getString( "database" ) );
        model.setOrganizationID( inputPayload.getString( "org_id" ) );
        model.setStationId( inputPayload.getString( "station_id" ) );
        model.setStationName( inputPayload.getString( "station_name" ) );
        model.setAnalysisType( analysisType );
        model.setStartDate( inputPayload.getString( "begin_date", "" ) );
        model.setEndDate( inputPayload.getString( "end_date", "" ) );
        model.setMergeDatasets( inputPayload.getBoolean( "merge_datasets", false ) );
        model.setMergeMethod( inputPayload.getString( "merge_method", "user" ) );
        model.setUserData( inputPayload.getString( "user_data", "" ) );
        model.setSkewness( inputPayload.getDouble( "skewness" ) );
        model.setMeanSquareError( inputPayload.getDouble( "mean_square_error" ) );
        model.setAtSiteSkewOption( inputPayload.getString( "at_site_skew_option" ) );
        model.setHighSystematicThreshold( inputPayload.getDouble( "high_systematic_threshold" ) );
        model.setLowOutlierThreshold( inputPayload.getDouble( "low_outlier_threshold" ) );
        model.setLowOutlierMethod( inputPayload.getString( "low_outlier_method" ) );

        if ( is17B() ) {
            // Plot options only apply to the 17B analysis.
            model.setShowLargeFloods( inputPayload.getBoolean( "show_large_floods", false ) );
            model.setPlotType( inputPayload.getBoolean( "plot_type", true ) );
            model.setPlotReference( inputPayload.getBoolean( "plot_ref", true ) );
        } else if ( is17C() ) {
            // The 17C analysis delegates to the external PeakfqSA executable.
            // (The skew/threshold setters duplicated here in the original were
            // identical to the unconditional ones above and have been removed.)
            model.setExecutable( resources().getExe( "peakfqsa" ) );
        }

    }

    /**
     * Runs the configured model. Both analysis types use the same entry
     * point; the original had two branches with identical bodies.
     */
    @Override
    protected void doProcess() throws Exception {
        if ( is17B() || is17C() ) {
            model.run();
        }
    }

    /**
     * Copies model outputs into the result payload.
     * 17B emits the plot file plus scalar/array skew statistics; 17C emits
     * the station and weighted result JSON documents.
     */
    @Override
    protected void postProcess() throws Exception {

        PayloadResults resultPayload = results();

        if ( is17B() ) {
            // files
            File wd = workspace().getDir();
            resultPayload.put( new File( wd, model.getGraph() ) );

            // values
            resultPayload.put( "len", model.getLen() );
            resultPayload.put( "start", model.getStart() );
            resultPayload.put( "end", model.getEnd() );
            resultPayload.put( "data_source", model.getDataSource() );
            resultPayload.put( "skewErrorMessage", model.getSkewErrorMessage() );
            String output = FileUtils.readFileToString( model.getOutputWeightedGenSkew(), "UTF-8" );
            resultPayload.put( "output", output );
            resultPayload.put( "weighted_generalized_skew", model.getWeightedGenSkew() );
            String output_stationSkew = FileUtils.readFileToString( model.getOutputStationSkew(), "UTF-8" );
            resultPayload.put( "output_station_skew", output_stationSkew );
            resultPayload.put( "station_skew", model.getStationSkew() );
            resultPayload.put( "graph", model.getGraph() );

            //Arrays
            resultPayload.put( "lowOutliers_station_skew", model.getLowOutliersStationSkew() );
            resultPayload.put( "highOutliers_station_skew", model.getHighOutliersStationSkew() );
            resultPayload.put( "lowOutliers_weighted_generalized_skew", model.getLowOutliersWeightedGenSkew() );
            resultPayload.put( "highOutliers_weighted_generalized_skew", model.getHighOutliersWeightedGenSkew() );
        } else if ( is17C() ) {
            resultPayload.put( "17c_station_output", model.stationResult.toJSON() );
            resultPayload.put( "17c_weighted_output", model.weightedResult.toJSON() );
            resultPayload.put( "finished", true );
        }
    }
}