Example usage for weka.classifiers Evaluation evaluateModel

List of usage examples for weka.classifiers Evaluation evaluateModel

Introduction

On this page you can find example usage for weka.classifiers Evaluation evaluateModel.

Prototype

public static String evaluateModel(Classifier classifier, String[] options) throws Exception 

Source Link

Document

Evaluates a classifier with the options given in an array of strings.
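Both forms of the method appear in the examples below: the static form from the prototype above, which parses command-line style options (training file, class index, model output file, ...) and returns the evaluation statistics as a String, and the instance method eval.evaluateModel(classifier, instances), which evaluates an already trained classifier on a given dataset. The following minimal sketch shows both; the ARFF paths, the J48 classifier, and the assumption of a nominal class attribute in the last position are placeholders for illustration only.

import weka.classifiers.Classifier;
import weka.classifiers.Evaluation;
import weka.classifiers.trees.J48;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class EvaluateModelDemo {
    public static void main(String[] args) throws Exception {
        // Static form: Weka parses the options itself ("-t" = training file, class
        // attribute defaults to the last one). With no "-T" test file it performs a
        // 10-fold cross-validation and returns the statistics as a String.
        String[] options = { "-t", "/path/to/train.arff" };
        String results = Evaluation.evaluateModel(new J48(), options);
        System.out.println(results);

        // Instance form: evaluate an already trained classifier on a separate test set.
        Instances train = new DataSource("/path/to/train.arff").getDataSet();
        Instances test = new DataSource("/path/to/test.arff").getDataSet();
        train.setClassIndex(train.numAttributes() - 1);
        test.setClassIndex(test.numAttributes() - 1);

        Classifier cls = new J48();
        cls.buildClassifier(train);
        Evaluation eval = new Evaluation(train);
        eval.evaluateModel(cls, test);
        System.out.println(eval.toSummaryString("\nResults\n======\n", false));
    }
}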

Usage

From source file:org.opentox.jaqpot3.qsar.trainer.SvmRegression.java

License:Open Source License

@Override
public Model train(Instances data) throws JaqpotException {
    try {
        Attribute target = data.attribute(targetUri.toString());
        if (target == null) {
            throw new QSARException("The prediction feature you provided was not found in the dataset");
        } else {
            if (!target.isNumeric()) {
                throw new QSARException("The prediction feature you provided is not numeric.");
            }
        }
        data.setClass(target);
        //data.deleteAttributeAt(0);//remove the first attribute, i.e. 'compound_uri' or 'URI'
        /* Very important: place the target feature at the end! (target = last)*/
        int numAttributes = data.numAttributes();
        int classIndex = data.classIndex();
        Instances orderedTrainingSet = null;
        List<String> properOrder = new ArrayList<String>(numAttributes);
        for (int j = 0; j < numAttributes; j++) {
            if (j != classIndex) {
                properOrder.add(data.attribute(j).name());
            }
        }
        properOrder.add(data.attribute(classIndex).name());
        try {
            orderedTrainingSet = InstancesUtil.sortByFeatureAttrList(properOrder, data, -1);
        } catch (JaqpotException ex) {
            logger.error(null, ex);
        }
        orderedTrainingSet.setClass(orderedTrainingSet.attribute(targetUri.toString()));

        getTask().getMeta()
                .addComment("Dataset successfully retrieved and converted into a weka.core.Instances object");
        UpdateTask firstTaskUpdater = new UpdateTask(getTask());
        firstTaskUpdater.setUpdateMeta(true);
        firstTaskUpdater.setUpdateTaskStatus(true);//TODO: Is this necessary?
        try {
            firstTaskUpdater.update();
        } catch (DbException ex) {
            throw new JaqpotException(ex);
        } finally {
            try {
                firstTaskUpdater.close();
            } catch (DbException ex) {
                throw new JaqpotException(ex);
            }
        }

        Model m = new Model(Configuration.getBaseUri().augment("model", getUuid().toString()));

        // INITIALIZE THE REGRESSOR
        SVMreg regressor = new SVMreg();
        final String[] regressorOptions = { "-P", Double.toString(epsilon), "-T", Double.toString(tolerance) };
        Kernel svm_kernel = null;
        if (kernel.equalsIgnoreCase("rbf")) {
            RBFKernel rbf_kernel = new RBFKernel();
            rbf_kernel.setGamma(gamma);
            rbf_kernel.setCacheSize(cacheSize);
            svm_kernel = rbf_kernel;
        } else if (kernel.equalsIgnoreCase("polynomial")) {
            PolyKernel poly_kernel = new PolyKernel();
            poly_kernel.setExponent(degree);
            poly_kernel.setCacheSize(cacheSize);
            poly_kernel.setUseLowerOrder(true);
            svm_kernel = poly_kernel;
        } else if (kernel.equalsIgnoreCase("linear")) {
            PolyKernel poly_kernel = new PolyKernel();
            poly_kernel.setExponent(1.0);
            poly_kernel.setCacheSize(cacheSize);
            poly_kernel.setUseLowerOrder(true);
            svm_kernel = poly_kernel;
        }

        try {
            regressor.setOptions(regressorOptions);
        } catch (final Exception ex) {
            throw new QSARException("Bad options in SVM trainer for epsilon = {" + epsilon + "} or "
                    + "tolerance = {" + tolerance + "}.", ex);
        }
        regressor.setKernel(svm_kernel);
        // START TRAINING & CREATE MODEL
        try {
            regressor.buildClassifier(orderedTrainingSet);

            // evaluate classifier and print some statistics
            Evaluation eval = new Evaluation(orderedTrainingSet);
            eval.evaluateModel(regressor, orderedTrainingSet);
            String stats = eval.toSummaryString("", false);

            ActualModel am = new ActualModel(regressor);
            am.setStatistics(stats);
            m.setActualModel(am);
            // m.setStatistics(stats);
        } catch (NotSerializableException ex) {
            String message = "Model is not serializable";
            logger.error(message, ex);
            throw new JaqpotException(message, ex);
        } catch (final Exception ex) {
            throw new QSARException("Unexpected condition while trying to train "
                    + "the model. Possible explanation : {" + ex.getMessage() + "}", ex);
        }

        m.setAlgorithm(getAlgorithm());
        m.setCreatedBy(getTask().getCreatedBy());
        m.setDataset(datasetUri);
        m.addDependentFeatures(dependentFeature);
        try {
            dependentFeature.loadFromRemote();
        } catch (ServiceInvocationException ex) {
            java.util.logging.Logger.getLogger(SvmRegression.class.getName()).log(Level.SEVERE, null, ex);
        }
        m.addDependentFeatures(dependentFeature);

        m.setIndependentFeatures(independentFeatures);

        String predictionFeatureUri = null;
        Feature predictedFeature = publishFeature(m, dependentFeature.getUnits(),
                "Feature created as prediction feature for SVM model " + m.getUri(), datasetUri,
                featureService);
        m.addPredictedFeatures(predictedFeature);
        predictionFeatureUri = predictedFeature.getUri().toString();

        getTask().getMeta().addComment("Prediction feature " + predictionFeatureUri + " was created.");

        /* SET PARAMETERS FOR THE TRAINED MODEL */
        m.setParameters(new HashSet<Parameter>());
        Parameter<String> kernelParam = new Parameter("kernel", new LiteralValue<String>(kernel))
                .setScope(Parameter.ParameterScope.OPTIONAL);
        kernelParam.setUri(Services.anonymous().augment("parameter", RANDOM.nextLong()));
        Parameter<Double> costParam = new Parameter("cost", new LiteralValue<Double>(cost))
                .setScope(Parameter.ParameterScope.OPTIONAL);
        costParam.setUri(Services.anonymous().augment("parameter", RANDOM.nextLong()));
        Parameter<Double> gammaParam = new Parameter("gamma", new LiteralValue<Double>(gamma))
                .setScope(Parameter.ParameterScope.OPTIONAL);
        gammaParam.setUri(Services.anonymous().augment("parameter", RANDOM.nextLong()));
        Parameter<Double> epsilonParam = new Parameter("epsilon", new LiteralValue<Double>(epsilon))
                .setScope(Parameter.ParameterScope.OPTIONAL);
        epsilonParam.setUri(Services.anonymous().augment("parameter", RANDOM.nextLong()));
        Parameter<Integer> degreeParam = new Parameter("degree", new LiteralValue<Integer>(degree))
                .setScope(Parameter.ParameterScope.OPTIONAL);
        degreeParam.setUri(Services.anonymous().augment("parameter", RANDOM.nextLong()));
        Parameter<Double> toleranceParam = new Parameter("tolerance", new LiteralValue<Double>(tolerance))
                .setScope(Parameter.ParameterScope.OPTIONAL);
        toleranceParam.setUri(Services.anonymous().augment("parameter", RANDOM.nextLong()));

        m.getParameters().add(kernelParam);
        m.getParameters().add(costParam);
        m.getParameters().add(gammaParam);
        m.getParameters().add(epsilonParam);
        m.getParameters().add(degreeParam);
        m.getParameters().add(toleranceParam);

        //save the instances being predicted to abstract trainer for calculating DoA
        predictedInstances = orderedTrainingSet;
        excludeAttributesDoA.add(dependentFeature.getUri().toString());

        return m;
    } catch (QSARException ex) {
        logger.debug(null, ex);
        throw new JaqpotException(ex);
    }
}

From source file:org.opentox.qsar.processors.trainers.classification.NaiveBayesTrainer.java

License:Open Source License

public QSARModel train(Instances data) throws QSARException {

    // GET A UUID AND DEFINE THE TEMPORARY FILE WHERE THE TRAINING DATA
    // ARE STORED IN ARFF FORMAT PRIOR TO TRAINING.
    final String rand = java.util.UUID.randomUUID().toString();
    final String temporaryFilePath = ServerFolders.temp + "/" + rand + ".arff";
    final File tempFile = new File(temporaryFilePath);

    // SAVE THE DATA IN THE TEMPORARY FILE
    try {
        ArffSaver dataSaver = new ArffSaver();
        dataSaver.setInstances(data);
        dataSaver.setDestination(new FileOutputStream(tempFile));
        dataSaver.writeBatch();
        if (!tempFile.exists()) {
            throw new IOException("Temporary File was not created");
        }
    } catch (final IOException ex) {/*
                                    * The content of the dataset cannot be
                                    * written to the destination file due to
                                    * some communication issue.
                                    */
        tempFile.delete();
        throw new RuntimeException(
                "Unexpected condition while trying to save the " + "dataset in a temporary ARFF file", ex);
    }

    NaiveBayes classifier = new NaiveBayes();

    String[] generalOptions = { "-c", Integer.toString(data.classIndex() + 1), "-t", temporaryFilePath,
            /// Save the model in the following directory
            "-d", ServerFolders.models_weka + "/" + uuid };

    try {
        Evaluation.evaluateModel(classifier, generalOptions);
    } catch (final Exception ex) {
        tempFile.delete();
        throw new QSARException(Cause.XQReg350, "Unexpected condition while trying to train "
                + "a Naive Bayes model. Possible explanation : {" + ex.getMessage() + "}", ex);
    }

    QSARModel model = new QSARModel();

    model.setParams(getParameters());
    model.setCode(uuid.toString());
    model.setAlgorithm(YaqpAlgorithms.NAIVE_BAYES);
    model.setDataset(datasetUri);
    model.setModelStatus(ModelStatus.UNDER_DEVELOPMENT);

    ArrayList<Feature> independentFeatures = new ArrayList<Feature>();
    for (int i = 0; i < data.numAttributes(); i++) {
        Feature f = new Feature(data.attribute(i).name());
        if (data.classIndex() != i) {
            independentFeatures.add(f);
        }
    }

    Feature dependentFeature = new Feature(data.classAttribute().name());
    Feature predictedFeature = dependentFeature;
    model.setDependentFeature(dependentFeature);
    model.setIndependentFeatures(independentFeatures);
    model.setPredictionFeature(predictedFeature);
    tempFile.delete();
    return model;
}

From source file:org.opentox.qsar.processors.trainers.classification.SVCTrainer.java

License:Open Source License

public QSARModel train(Instances data) throws QSARException {

    // GET A UUID AND DEFINE THE TEMPORARY FILE WHERE THE TRAINING DATA
    // ARE STORED IN ARFF FORMAT PRIOR TO TRAINING.
    final String rand = java.util.UUID.randomUUID().toString();
    final String temporaryFilePath = ServerFolders.temp + "/" + rand + ".arff";
    final File tempFile = new File(temporaryFilePath);

    // SAVE THE DATA IN THE TEMPORARY FILE
    try {
        ArffSaver dataSaver = new ArffSaver();
        dataSaver.setInstances(data);
        dataSaver.setDestination(new FileOutputStream(tempFile));
        dataSaver.writeBatch();
        if (!tempFile.exists()) {
            throw new IOException("Temporary File was not created");
        }
    } catch (final IOException ex) {/*
                                    * The content of the dataset cannot be
                                    * written to the destination file due to
                                    * some communication issue.
                                    */
        tempFile.delete();
        throw new RuntimeException(
                "Unexpected condition while trying to save the " + "dataset in a temporary ARFF file", ex);
    }

    // INITIALIZE THE CLASSIFIER
    SMO classifier = new SMO();
    classifier.setEpsilon(0.1);
    classifier.setToleranceParameter(tolerance);

    // CONSTRUCT A KERNEL ACCORDING TO THE POSTED PARAMETERS
    // SUPPORTED KERNELS ARE {rbf, linear, polynomial}
    Kernel svc_kernel = null;
    if (this.kernel.equalsIgnoreCase("rbf")) {
        RBFKernel rbf_kernel = new RBFKernel();
        rbf_kernel.setGamma(gamma);
        rbf_kernel.setCacheSize(cacheSize);
        svc_kernel = rbf_kernel;
    } else if (this.kernel.equalsIgnoreCase("polynomial")) {
        PolyKernel poly_kernel = new PolyKernel();
        poly_kernel.setExponent(degree);
        poly_kernel.setCacheSize(cacheSize);
        poly_kernel.setUseLowerOrder(true);
        svc_kernel = poly_kernel;
    } else if (this.kernel.equalsIgnoreCase("linear")) {
        PolyKernel linear_kernel = new PolyKernel();
        linear_kernel.setExponent(1.0);
        linear_kernel.setCacheSize(cacheSize);
        linear_kernel.setUseLowerOrder(true);
        svc_kernel = linear_kernel;
    }
    classifier.setKernel(svc_kernel);

    String modelFilePath = ServerFolders.models_weka + "/" + uuid.toString();
    String[] generalOptions = { "-c", Integer.toString(data.classIndex() + 1), "-t", temporaryFilePath,
            /// Save the model in the following directory
            "-d", modelFilePath };

    // AFTER ALL, BUILD THE CLASSIFICATION MODEL AND SAVE IT AS A SERIALIZED
    // WEKA FILE IN THE CORRESPONDING DIRECTORY.
    try {
        Evaluation.evaluateModel(classifier, generalOptions);
    } catch (final Exception ex) {
        tempFile.delete();
        throw new QSARException(Cause.XQReg350, "Unexpected condition while trying to train "
                + "a support vector classification model. Possible explanation : {" + ex.getMessage() + "}",
                ex);
    }

    ArrayList<Feature> independentFeatures = new ArrayList<Feature>();
    for (int i = 0; i < data.numAttributes(); i++) {
        Feature f = new Feature(data.attribute(i).name());
        if (data.classIndex() != i) {
            independentFeatures.add(f);
        }
    }

    Feature dependentFeature = new Feature(data.classAttribute().name());
    Feature predictedFeature = dependentFeature;

    QSARModel model = new QSARModel();
    model.setCode(uuid.toString());
    model.setAlgorithm(YaqpAlgorithms.SVC);
    model.setPredictionFeature(predictedFeature);
    model.setDependentFeature(dependentFeature);
    model.setIndependentFeatures(independentFeatures);
    model.setDataset(datasetUri);
    model.setParams(getParameters());
    model.setModelStatus(ModelStatus.UNDER_DEVELOPMENT);

    tempFile.delete();
    return model;
}

From source file:org.opentox.qsar.processors.trainers.regression.MLRTrainer.java

License:Open Source License

/**
 * Trains the MLR model given an Instances object with the training data. The prediction
 * feature (class attribute) is specified in the constructor of the class.
 * @param data The training data as <code>weka.core.Instances</code> object.
 * @return The QSARModel corresponding to the trained model.
 * @throws QSARException In case the model cannot be trained
 * <p>
 * <table>
 * <thead>
 * <tr>
 * <td><b>Code</b></td><td><b>Explanation</b></td>
 * </tr>
 * </thead>
 * <tbody>
 * <tr>
 * <td>XQReg1</td><td>Could not train the MLR model</td>
 * </tr>
 * <tr>
 * <td>XQReg2</td><td>Could not generate PMML representation for the model</td>
 * </tr>
 * <tr>
 * <td>XQReg202</td><td>The prediction feature you provided is not a valid numeric attribute of the dataset</td>
 * </tr>
 * </tbody>
 * </table>
 * </p>
 * @throws NullPointerException
 *      In case the provided training data is null.
 */
public QSARModel train(Instances data) throws QSARException {

    // GET A UUID AND DEFINE THE TEMPORARY FILE WHERE THE TRAINING DATA
    // ARE STORED IN ARFF FORMAT PRIOR TO TRAINING.
    final String rand = java.util.UUID.randomUUID().toString();
    final String temporaryFilePath = ServerFolders.temp + "/" + rand + ".arff";
    final File tempFile = new File(temporaryFilePath);

    // SAVE THE DATA IN THE TEMPORARY FILE
    try {
        ArffSaver dataSaver = new ArffSaver();
        dataSaver.setInstances(data);
        dataSaver.setDestination(new FileOutputStream(tempFile));
        dataSaver.writeBatch();
    } catch (final IOException ex) {
        tempFile.delete();
        throw new RuntimeException(
                "Unexpected condition while trying to save the " + "dataset in a temporary ARFF file", ex);
    }

    LinearRegression linreg = new LinearRegression();
    String[] linRegOptions = { "-S", "1", "-C" };
    try {
        linreg.setOptions(linRegOptions);
        linreg.buildClassifier(data);
    } catch (final Exception ex) {// illegal options or could not build the classifier!
        String message = "MLR Model could not be trained";
        YaqpLogger.LOG.log(new Trace(getClass(), message + " :: " + ex));
        throw new QSARException(Cause.XQReg1, message, ex);
    }

    try {
        generatePMML(linreg, data);
    } catch (final YaqpIOException ex) {
        String message = "Could not generate PMML representation for MLR model :: " + ex;
        throw new QSARException(Cause.XQReg2, message, ex);
    }

    // PERFORM THE TRAINING
    String[] generalOptions = { "-c", Integer.toString(data.classIndex() + 1), "-t", temporaryFilePath,
            /// Save the model in the following directory
            "-d", ServerFolders.models_weka + "/" + uuid };
    try {
        Evaluation.evaluateModel(linreg, generalOptions);
    } catch (final Exception ex) {
        tempFile.delete();
        throw new QSARException(Cause.XQReg350, "Unexpected condition while trying to train "
                + "an MLR model. Possible explanation : {" + ex.getMessage() + "}", ex);
    }

    ArrayList<Feature> independentFeatures = new ArrayList<Feature>();
    for (int i = 0; i < data.numAttributes(); i++) {
        Feature f = new Feature(data.attribute(i).name());
        if (data.classIndex() != i) {
            independentFeatures.add(f);
        }
    }

    Feature dependentFeature = new Feature(data.classAttribute().name());
    Feature predictedFeature = dependentFeature;

    QSARModel model = new QSARModel(uuid.toString(), predictedFeature, dependentFeature, independentFeatures,
            YaqpAlgorithms.MLR, new User(), null, datasetUri, ModelStatus.UNDER_DEVELOPMENT);
    model.setParams(new HashMap<String, AlgorithmParameter>());

    return model;

}

From source file:org.opentox.qsar.processors.trainers.regression.SVMTrainer.java

License:Open Source License

/**
 *
 * @param data
 * @return
 * @throws QSARException
 */
public QSARModel train(Instances data) throws QSARException {

    // NOTE: The checks (whether data is null and whether the prediction feature is
    //       acceptable) are performed in WekaRegressor. The method preprocessData(Instances)
    //       does this job.

    // GET A UUID AND DEFINE THE TEMPORARY FILE WHERE THE TRAINING DATA
    // ARE STORED IN ARFF FORMAT PRIOR TO TRAINING.
    final String rand = java.util.UUID.randomUUID().toString();
    final String temporaryFilePath = ServerFolders.temp + "/" + rand + ".arff";
    final File tempFile = new File(temporaryFilePath);

    // SAVE THE DATA IN THE TEMPORARY FILE
    try {
        ArffSaver dataSaver = new ArffSaver();
        dataSaver.setInstances(data);
        dataSaver.setDestination(new FileOutputStream(tempFile));
        dataSaver.writeBatch();
    } catch (final IOException ex) {
        tempFile.delete();
        throw new RuntimeException(
                "Unexpected condition while trying to save the " + "dataset in a temporary ARFF file", ex);
    }

    // INITIALIZE THE REGRESSOR
    SVMreg regressor = new SVMreg();
    final String[] regressorOptions = { "-P", Double.toString(epsilon), "-T", Double.toString(tolerance) };

    Kernel svm_kernel = null;
    if (kernel.equalsIgnoreCase("rbf")) {
        RBFKernel rbf_kernel = new RBFKernel();
        rbf_kernel.setGamma(gamma);
        rbf_kernel.setCacheSize(cacheSize);
        svm_kernel = rbf_kernel;
    } else if (kernel.equalsIgnoreCase("polynomial")) {
        PolyKernel poly_kernel = new PolyKernel();
        poly_kernel.setExponent(degree);
        poly_kernel.setCacheSize(cacheSize);
        poly_kernel.setUseLowerOrder(true);
        svm_kernel = poly_kernel;
    } else if (kernel.equalsIgnoreCase("linear")) {
        PolyKernel poly_kernel = new PolyKernel();
        poly_kernel.setExponent(1.0);
        poly_kernel.setCacheSize(cacheSize);
        poly_kernel.setUseLowerOrder(true);
        svm_kernel = poly_kernel;
    }
    regressor.setKernel(svm_kernel);
    try {
        regressor.setOptions(regressorOptions);
    } catch (final Exception ex) {
        tempFile.delete();
        throw new IllegalArgumentException("Bad options in SVM trainer for epsilon = {" + epsilon + "} or "
                + "tolerance = {" + tolerance + "}.", ex);
    }

    // PERFORM THE TRAINING
    String[] generalOptions = { "-c", Integer.toString(data.classIndex() + 1), "-t", temporaryFilePath,
            /// Save the model in the following directory
            "-d", ServerFolders.models_weka + "/" + uuid };
    try {
        Evaluation.evaluateModel(regressor, generalOptions);
    } catch (final Exception ex) {
        tempFile.delete();
        throw new QSARException(Cause.XQReg350, "Unexpected condition while trying to train "
                + "an SVM model. Possible explanation : {" + ex.getMessage() + "}", ex);
    }

    QSARModel model = new QSARModel();

    model.setParams(getParameters());
    model.setCode(uuid.toString());
    model.setAlgorithm(YaqpAlgorithms.SVM);
    model.setDataset(datasetUri);
    model.setModelStatus(ModelStatus.UNDER_DEVELOPMENT);

    ArrayList<Feature> independentFeatures = new ArrayList<Feature>();
    for (int i = 0; i < data.numAttributes(); i++) {
        Feature f = new Feature(data.attribute(i).name());
        if (data.classIndex() != i) {
            independentFeatures.add(f);
        }
    }

    Feature dependentFeature = new Feature(data.classAttribute().name());
    Feature predictedFeature = dependentFeature;
    model.setDependentFeature(dependentFeature);
    model.setIndependentFeatures(independentFeatures);
    model.setPredictionFeature(predictedFeature);
    tempFile.delete();
    return model;
}

From source file:org.processmining.analysis.clusteranalysis.DecisionAnalyzer.java

License:Open Source License

/**
 * Creates an evaluation overview of the built classifier.
 *
 * @return the panel to be displayed as result evaluation view for the
 *         current decision point
 */
protected JPanel createEvaluationVisualization(Instances data) {
    // build text field to display evaluation statistics
    JTextPane statistic = new JTextPane();

    try {
        // build evaluation statistics
        Evaluation evaluation = new Evaluation(data);
        evaluation.evaluateModel(myClassifier, data);
        statistic.setText(evaluation.toSummaryString() + "\n\n" + evaluation.toClassDetailsString() + "\n\n"
                + evaluation.toMatrixString());

    } catch (Exception ex) {
        ex.printStackTrace();
        return createMessagePanel("Error while creating the decision tree evaluation view");
    }

    statistic.setFont(new Font("Courier", Font.PLAIN, 14));
    statistic.setEditable(false);
    statistic.setCaretPosition(0);

    JPanel resultViewPanel = new JPanel();
    resultViewPanel.setLayout(new BoxLayout(resultViewPanel, BoxLayout.PAGE_AXIS));
    resultViewPanel.add(new JScrollPane(statistic));

    return resultViewPanel;
}

From source file:personality_prediction.Evaluation_Result.java

void eval_result() {
    try {
        DataSource source_train = new DataSource(
                "C:\\Users\\divya\\Desktop\\Personality Mining\\WEKA_DataSet\\Training dataset\\training_data_neur.csv");
        Instances train = source_train.getDataSet();
        DataSource source_test = new DataSource(
                "C:\\Users\\divya\\Desktop\\Personality Mining\\WEKA_DataSet\\Testing dataset\\Testing_data_neur.csv");
        Instances test = source_test.getDataSet();
        train.setClassIndex(train.numAttributes() - 1);
        test.setClassIndex(test.numAttributes() - 1);
        // train classifier
        Classifier cls = new J48();
        cls.buildClassifier(train);
        Evaluation eval = new Evaluation(train);
        eval.evaluateModel(cls, test);
        System.out.println(eval.toSummaryString("\nResults\n======\n", false));

    } catch (Exception e) {
        System.out.println(e.getLocalizedMessage());
    }
}

From source file:predictor.Predictor.java

public static void multilayerPerceptron() throws Exception {

    DataSource train = new DataSource(configuration.getWorkspace() + "train_common.arff");
    DataSource test = new DataSource(configuration.getWorkspace() + "test_common.arff");

    Instances trainInstances = train.getDataSet();
    Instances testInstances = test.getDataSet();

    //last attribute classify
    trainInstances.setClassIndex(trainInstances.numAttributes() - 1);
    testInstances.setClassIndex(testInstances.numAttributes() - 1);
    //        Classifier cModel = (Classifier)new MultilayerPerceptron();  
    //        cModel.buildClassifier(trainInstances);  
    //
    //        weka.core.SerializationHelper.write("/some/where/nBayes.model", cModel);
    //
    //        Classifier cls = (Classifier) weka.core.SerializationHelper.read("/some/where/nBayes.model");
    //
    //        // Test the model
    //        Evaluation eTest = new Evaluation(trainInstances);
    //        eTest.evaluateModel(cls, testInstances);

    // configure the network before training; options set after buildClassifier()
    // have no effect on an already trained model
    MultilayerPerceptron mlp = new MultilayerPerceptron();
    mlp.setHiddenLayers(configuration.getHiddenLayers());
    mlp.setLearningRate(configuration.getLearningRate());
    mlp.setTrainingTime(configuration.getEpocs());
    mlp.setMomentum(configuration.getMomentum());

    // train classifier
    mlp.buildClassifier(trainInstances);

    // evaluate classifier and print some statistics
    Evaluation eval = new Evaluation(trainInstances);
    eval.evaluateModel(mlp, testInstances);

    System.out.println(eval.toSummaryString());
}

From source file:regression.logisticRegression.LogisticRegressionCorrect.java

public void weka(JTextArea output) throws FileNotFoundException, IOException, Exception {
    this.finalPoints = new ArrayList<>();

    BufferedReader reader = new BufferedReader(new FileReader("weka.arff"));
    Instances instances = new Instances(reader);
    instances.setClassIndex(instances.numAttributes() - 1);
    String[] options = new String[4];
    options[0] = "-R";

    options[1] = "1.0E-8";
    options[2] = "-M";
    options[3] = "-1";

    logistic.setOptions(options);

    logistic.buildClassifier(instances);

    for (int i = 0; i < instances.numInstances(); i++) {
        weka.core.Instance inst = instances.instance(i);
        Double classifiedClass = 1.0;
        if (logistic.classifyInstance(inst) == 1.0) {
            classifiedClass = 0.0;
        }

        System.out.println("classify: " + inst.attribute(0) + "|" + inst.value(0) + "->" + classifiedClass);
        double[] distributions = logistic.distributionForInstance(inst);
        output.append("Dla x= " + inst.value(0) + " prawdopodobiestwo wystpnienia zdarzenia wynosi: "
                + distributions[0] + " zatem naley on do klasy: " + classifiedClass + "\n");
        this.finalPoints.add(new Point(inst.value(0), classifiedClass));
        this.finalProbPoints.add(new Point(inst.value(0), distributions[0]));
        for (int j = 0; j < distributions.length; j++) {
            System.out.println("distribution: " + inst.value(0) + "->" + distributions[j]);

        }

    }

    // evaluate classifier and print some statistics
    Evaluation eval = new Evaluation(instances);

    eval.evaluateModel(logistic, instances);
    FastVector pred = eval.predictions();

    System.out.println(eval.toSummaryString("\nResults\n======\n", false));
}

From source file:sentinets.Prediction.java

License:Open Source License

public String updateModel(String inputFile, ArrayList<Double[]> metrics) {
    String output = "";
    this.setInstances(inputFile);
    FilteredClassifier fcls = (FilteredClassifier) this.cls;
    SGD cls = (SGD) fcls.getClassifier();
    Filter filter = fcls.getFilter();
    Instances insAll;
    try {
        insAll = Filter.useFilter(this.unlabled, filter);
        if (insAll.size() > 0) {
            Random rand = new Random(10);
            int folds = 10 > insAll.size() ? 2 : 10;
            Instances randData = new Instances(insAll);
            randData.randomize(rand);
            if (randData.classAttribute().isNominal()) {
                randData.stratify(folds);
            }
            Evaluation eval = new Evaluation(randData);
            eval.evaluateModel(cls, insAll);
            System.out.println("Initial Evaluation");
            System.out.println(eval.toSummaryString());
            System.out.println(eval.toClassDetailsString());
            metrics.add(new Double[] { eval.fMeasure(0), eval.fMeasure(1), eval.weightedFMeasure() });
            output += "\n====" + "Initial Evaluation" + "====\n";
            output += "\n" + eval.toSummaryString();
            output += "\n" + eval.toClassDetailsString();
            System.out.println("Cross Validated Evaluation");
            output += "\n====" + "Cross Validated Evaluation" + "====\n";
            for (int n = 0; n < folds; n++) {
                Instances train = randData.trainCV(folds, n);
                Instances test = randData.testCV(folds, n);

                for (int i = 0; i < train.numInstances(); i++) {
                    cls.updateClassifier(train.instance(i));
                }

                eval.evaluateModel(cls, test);
                System.out.println("Cross Validated Evaluation fold: " + n);
                output += "\n====" + "Cross Validated Evaluation fold (" + n + ")====\n";
                System.out.println(eval.toSummaryString());
                System.out.println(eval.toClassDetailsString());
                output += "\n" + eval.toSummaryString();
                output += "\n" + eval.toClassDetailsString();
                metrics.add(new Double[] { eval.fMeasure(0), eval.fMeasure(1), eval.weightedFMeasure() });
            }
            for (int i = 0; i < insAll.numInstances(); i++) {
                cls.updateClassifier(insAll.instance(i));
            }
            eval.evaluateModel(cls, insAll);
            System.out.println("Final Evaluation");
            System.out.println(eval.toSummaryString());
            System.out.println(eval.toClassDetailsString());
            output += "\n====" + "Final Evaluation" + "====\n";
            output += "\n" + eval.toSummaryString();
            output += "\n" + eval.toClassDetailsString();
            metrics.add(new Double[] { eval.fMeasure(0), eval.fMeasure(1), eval.weightedFMeasure() });
            fcls.setClassifier(cls);
            String modelFilePath = outputDir + "/" + Utils.getOutDir(Utils.OutDirIndex.MODELS)
                    + "/updatedClassifier.model";
            weka.core.SerializationHelper.write(modelFilePath, fcls);
            output += "\n" + "Updated Model saved at: " + modelFilePath;
        } else {
            output += "No new instances for training the model.";
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
    return output;
}