Example usage for weka.classifiers.functions RBFNetwork buildClassifier

List of usage examples for weka.classifiers.functions RBFNetwork buildClassifier

Introduction

On this page you can find example usage of weka.classifiers.functions RBFNetwork buildClassifier.

Prototype

@Override
public void buildClassifier(Instances instances) throws Exception 

Source Link

Document

Builds the classifier
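
A minimal, self-contained sketch of calling buildClassifier on its own is shown below, before the full examples. The ARFF path ("iris.arff") and the parameter values are placeholders chosen for illustration, not taken from this page's source files.

import weka.classifiers.functions.RBFNetwork;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class RBFNetworkBuildExample {
    public static void main(String[] args) throws Exception {
        // load a dataset and use the last attribute as the class ("iris.arff" is a placeholder path)
        Instances data = DataSource.read("iris.arff");
        data.setClassIndex(data.numAttributes() - 1);

        RBFNetwork rbf = new RBFNetwork();
        rbf.setNumClusters(3); // number of basis functions (K-means clusters), chosen for illustration
        rbf.setRidge(1e-8);    // ridge factor for the regression step

        rbf.buildClassifier(data); // trains the network on the loaded instances

        // classify the first instance with the trained model
        double label = rbf.classifyInstance(data.instance(0));
        System.out.println("Predicted class: " + data.classAttribute().value((int) label));
    }
}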

Usage

From source file:org.jaqpot.algorithm.resource.WekaRBF.java

License:Open Source License

@POST
@Path("training")
public Response training(TrainingRequest request) {

    try {
        if (request.getDataset().getDataEntry().isEmpty()
                || request.getDataset().getDataEntry().get(0).getValues().isEmpty()) {
            return Response.status(Response.Status.BAD_REQUEST).entity(
                    ErrorReportFactory.badRequest("Dataset is empty", "Cannot train model on empty dataset"))
                    .build();
        }
        List<String> features = request.getDataset().getDataEntry().stream().findFirst().get().getValues()
                .keySet().stream().collect(Collectors.toList());

        Instances data = InstanceUtils.createFromDataset(request.getDataset(), request.getPredictionFeature());

        Map<String, Object> parameters = request.getParameters() != null ? request.getParameters()
                : new HashMap<>();

        Double minStdDev = Double.parseDouble(parameters.getOrDefault("minStdDev", _minStdDev).toString());
        Double ridge = Double.parseDouble(parameters.getOrDefault("ridge", _ridge).toString());
        Integer seed = Integer.parseInt(parameters.getOrDefault("seed", _seed).toString());
        Integer maxIts = Integer.parseInt(parameters.getOrDefault("maxIts", _maxIts).toString());
        Integer numClusters = Integer.parseInt(parameters.getOrDefault("numClusters", _numClusters).toString());

        RBFNetwork rbf = new RBFNetwork();

        rbf.setMinStdDev(minStdDev);
        rbf.setRidge(ridge);
        rbf.setClusteringSeed(seed);
        rbf.setMaxIts(maxIts);
        rbf.setNumClusters(numClusters);

        rbf.buildClassifier(data);

        WekaModel model = new WekaModel();
        model.setClassifier(rbf);

        TrainingResponse response = new TrainingResponse();
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        ObjectOutput out = new ObjectOutputStream(baos);
        out.writeObject(model);
        out.flush(); // flush buffered object data into the byte array before encoding
        String base64Model = Base64.getEncoder().encodeToString(baos.toByteArray());
        response.setRawModel(base64Model);
        List<String> independentFeatures = features.stream()
                .filter(feature -> !feature.equals(request.getPredictionFeature()))
                .collect(Collectors.toList());
        response.setIndependentFeatures(independentFeatures);
        //            response.setPmmlModel(pmml);
        response.setAdditionalInfo(request.getPredictionFeature());
        response.setPredictedFeatures(
                Arrays.asList("Weka RBF prediction of " + request.getPredictionFeature()));

        return Response.ok(response).build();
    } catch (Exception ex) {
        Logger.getLogger(WekaRBF.class.getName()).log(Level.SEVERE, null, ex);
        return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(ex.getMessage()).build();
    }
}
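
The response above carries the trained network as a base64-encoded, serialized WekaModel. A hedged sketch of the reverse step on the consuming side follows; getClassifier() is assumed by symmetry with setClassifier(), and testData stands in for whatever Instances the caller wants to score.

static double predictFromBase64(String base64Model, Instances testData) throws Exception {
    byte[] modelBytes = Base64.getDecoder().decode(base64Model);
    try (ObjectInput in = new ObjectInputStream(new ByteArrayInputStream(modelBytes))) {
        // WekaModel is the same serializable wrapper used during training;
        // getClassifier() is an assumed accessor, not confirmed by this page
        WekaModel restored = (WekaModel) in.readObject();
        RBFNetwork rbf = (RBFNetwork) restored.getClassifier();
        return rbf.classifyInstance(testData.instance(0)); // score the first test instance
    }
}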

From source file:org.jaqpot.algorithms.resource.WekaRBF.java

License:Open Source License

@POST
@Path("training")
public Response training(TrainingRequest request) {

    try {
        if (request.getDataset().getDataEntry().isEmpty()
                || request.getDataset().getDataEntry().get(0).getValues().isEmpty()) {
            return Response.status(Response.Status.BAD_REQUEST)
                    .entity("Dataset is empty. Cannot train model on empty dataset.").build();
        }
        List<String> features = request.getDataset().getDataEntry().stream().findFirst().get().getValues()
                .keySet().stream().collect(Collectors.toList());

        Instances data = InstanceUtils.createFromDataset(request.getDataset(), request.getPredictionFeature());

        Map<String, Object> parameters = request.getParameters() != null ? request.getParameters()
                : new HashMap<>();

        Double minStdDev = Double.parseDouble(parameters.getOrDefault("minStdDev", _minStdDev).toString());
        Double ridge = Double.parseDouble(parameters.getOrDefault("ridge", _ridge).toString());
        Integer seed = Integer.parseInt(parameters.getOrDefault("seed", _seed).toString());
        Integer maxIts = Integer.parseInt(parameters.getOrDefault("maxIts", _maxIts).toString());
        Integer numClusters = Integer.parseInt(parameters.getOrDefault("numClusters", _numClusters).toString());

        RBFNetwork rbf = new RBFNetwork();

        rbf.setMinStdDev(minStdDev);
        rbf.setRidge(ridge);
        rbf.setClusteringSeed(seed);
        rbf.setMaxIts(maxIts);
        rbf.setNumClusters(numClusters);

        rbf.buildClassifier(data);

        WekaModel model = new WekaModel();
        model.setClassifier(rbf);

        TrainingResponse response = new TrainingResponse();
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        ObjectOutput out = new ObjectOutputStream(baos);
        out.writeObject(model);
        out.flush(); // flush buffered object data into the byte array before encoding
        String base64Model = Base64.getEncoder().encodeToString(baos.toByteArray());
        response.setRawModel(base64Model);
        List<String> independentFeatures = features.stream()
                .filter(feature -> !feature.equals(request.getPredictionFeature()))
                .collect(Collectors.toList());
        response.setIndependentFeatures(independentFeatures);
        //            response.setPmmlModel(pmml);
        response.setAdditionalInfo(request.getPredictionFeature());
        response.setPredictedFeatures(
                Arrays.asList("Weka RBF prediction of " + request.getPredictionFeature()));

        return Response.ok(response).build();
    } catch (Exception ex) {
        Logger.getLogger(WekaRBF.class.getName()).log(Level.SEVERE, null, ex);
        return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(ex.getMessage()).build();
    }
}

From source file:tcc.FeatureExtraction.java

public void rbf() throws IOException {
    //parsing CSV to Arff
    CSVLoader loader = new CSVLoader();
    loader.setSource(new File("/root/TCC/Resultados/Parte 4 - Novos Casos/TamuraHaralickMomentos.csv"));
    Instances inst = loader.getDataSet();

    ArffSaver saver = new ArffSaver();
    saver.setInstances(inst);
    saver.setFile(new File("/root/TCC/Resultados/Parte 4 - Novos Casos/TamuraHaralickMomentos.arff"));
    saver.setDestination(new File("/root/TCC/Resultados/Parte 4 - Novos Casos/TamuraHaralickMomentos.arff"));
    saver.writeBatch();

    BufferedReader reader = new BufferedReader(
            new FileReader("/root/TCC/Resultados/Parte 4 - Novos Casos/TamuraHaralickMomentos.arff"));
    Instances data = new Instances(reader);
    reader.close();
    data.setClassIndex(data.numAttributes() - 1);

    // Normalizing the data
    try {
        Normalize norm = new Normalize();
        norm.setInputFormat(data);
        data = Filter.useFilter(data, norm);

    } catch (Exception ex) {
        Logger.getLogger(FeatureExtraction.class.getName()).log(Level.SEVERE, null, ex);
    }

    File csv = new File("/root/TCC/Resultados/rbf.csv");
    FileWriter fw = new FileWriter(csv);
    BufferedWriter bw = new BufferedWriter(fw);

    for (int i = 1; i < 51; i++) {
        // instantiating the classifier
        RBFNetwork rbf = new RBFNetwork();
        rbf.setNumClusters(i);

        try {

            rbf.buildClassifier(data);
            Evaluation eval = new Evaluation(data);
            //System.out.println(eval.toSummaryString("\nResults\n======\n", false));
            eval.crossValidateModel(rbf, data, 10, new Random(1));
            double auc = eval.areaUnderROC(1);
            System.out.println(auc);
            bw.write(Double.toString(auc));
            bw.newLine();

        } catch (Exception ex) {
            Logger.getLogger(FeatureExtraction.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
    bw.close();

}
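
As a follow-up to the loop above, a single cross-validation run can be inspected in more detail. This is a sketch meant to live in a method that throws Exception; the cluster count (10) is an assumed value for illustration, and data is the same normalized Instances prepared earlier.

    RBFNetwork rbf = new RBFNetwork();
    rbf.setNumClusters(10); // assumed value; the loop above sweeps 1 to 50
    Evaluation eval = new Evaluation(data);
    eval.crossValidateModel(rbf, data, 10, new Random(1));
    System.out.println(eval.toSummaryString("\nResults\n======\n", false));
    System.out.println(eval.toMatrixString()); // confusion matrix
    System.out.println("AUC for class index 1: " + eval.areaUnderROC(1));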