Example usage for weka.classifiers Evaluation toSummaryString

Introduction

On this page you can find example usage of weka.classifiers.Evaluation.toSummaryString.

Prototype

public String toSummaryString(String title, boolean printComplexityStatistics) 

Document

Outputs the performance statistics in summary form. The title string is prepended to the output; if printComplexityStatistics is true, entropy-based complexity statistics are included as well.
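
Before the per-project examples below, here is a minimal, self-contained sketch of the call. The data file (weather.nominal.arff), the J48 classifier, and the fold count are illustrative assumptions rather than part of any example on this page.

import java.util.Random;

import weka.classifiers.Evaluation;
import weka.classifiers.trees.J48;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class ToSummaryStringExample {
    public static void main(String[] args) throws Exception {
        // Illustrative assumption: an ARFF file whose last attribute is the class.
        Instances data = new DataSource("weather.nominal.arff").getDataSet();
        data.setClassIndex(data.numAttributes() - 1);

        // Cross-validate a J48 tree; the seeded Random makes the folds reproducible.
        Evaluation eval = new Evaluation(data);
        eval.crossValidateModel(new J48(), data, 10, new Random(1));

        // First argument: a title prepended to the summary.
        // Second argument: whether to include entropy-based complexity statistics.
        System.out.println(eval.toSummaryString("\n=== Summary ===\n", false));
    }
}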

Usage

From source file:org.opentox.jaqpot3.qsar.trainer.SvmRegression.java

License:Open Source License

@Override
public Model train(Instances data) throws JaqpotException {
    try {
        Attribute target = data.attribute(targetUri.toString());
        if (target == null) {
            throw new QSARException("The prediction feature you provided was not found in the dataset");
        } else {
            if (!target.isNumeric()) {
                throw new QSARException("The prediction feature you provided is not numeric.");
            }
        }
        data.setClass(target);
        //data.deleteAttributeAt(0);//remove the first attribute, i.e. 'compound_uri' or 'URI'
        /* Very important: place the target feature at the end! (target = last)*/
        int numAttributes = data.numAttributes();
        int classIndex = data.classIndex();
        Instances orderedTrainingSet = null;
        List<String> properOrder = new ArrayList<String>(numAttributes);
        for (int j = 0; j < numAttributes; j++) {
            if (j != classIndex) {
                properOrder.add(data.attribute(j).name());
            }
        }
        properOrder.add(data.attribute(classIndex).name());
        try {
            orderedTrainingSet = InstancesUtil.sortByFeatureAttrList(properOrder, data, -1);
        } catch (JaqpotException ex) {
            logger.error(null, ex);
        }
        orderedTrainingSet.setClass(orderedTrainingSet.attribute(targetUri.toString()));

        getTask().getMeta()
                .addComment("Dataset successfully retrieved and converted into a weka.core.Instances object");
        UpdateTask firstTaskUpdater = new UpdateTask(getTask());
        firstTaskUpdater.setUpdateMeta(true);
        firstTaskUpdater.setUpdateTaskStatus(true);//TODO: Is this necessary?
        try {
            firstTaskUpdater.update();
        } catch (DbException ex) {
            throw new JaqpotException(ex);
        } finally {
            try {
                firstTaskUpdater.close();
            } catch (DbException ex) {
                throw new JaqpotException(ex);
            }
        }

        Model m = new Model(Configuration.getBaseUri().augment("model", getUuid().toString()));

        // Initialize the regressor
        SVMreg regressor = new SVMreg();
        final String[] regressorOptions = { "-P", Double.toString(epsilon), "-T", Double.toString(tolerance) };
        Kernel svm_kernel = null;
        if (kernel.equalsIgnoreCase("rbf")) {
            RBFKernel rbf_kernel = new RBFKernel();
            rbf_kernel.setGamma(gamma);
            rbf_kernel.setCacheSize(cacheSize);
            svm_kernel = rbf_kernel;
        } else if (kernel.equalsIgnoreCase("polynomial")) {
            PolyKernel poly_kernel = new PolyKernel();
            poly_kernel.setExponent(degree);
            poly_kernel.setCacheSize(cacheSize);
            poly_kernel.setUseLowerOrder(true);
            svm_kernel = poly_kernel;
        } else if (kernel.equalsIgnoreCase("linear")) {
            // A polynomial kernel with exponent 1 is a linear kernel.
            PolyKernel poly_kernel = new PolyKernel();
            poly_kernel.setExponent(1.0);
            poly_kernel.setCacheSize(cacheSize);
            poly_kernel.setUseLowerOrder(true);
            svm_kernel = poly_kernel;
        }

        try {
            regressor.setOptions(regressorOptions);
        } catch (final Exception ex) {
            throw new QSARException("Bad options in SVM trainer for epsilon = {" + epsilon + "} or "
                    + "tolerance = {" + tolerance + "}.", ex);
        }
        regressor.setKernel(svm_kernel);
        // START TRAINING & CREATE MODEL
        try {
            regressor.buildClassifier(orderedTrainingSet);

            // evaluate classifier and print some statistics
            Evaluation eval = new Evaluation(orderedTrainingSet);
            eval.evaluateModel(regressor, orderedTrainingSet);
            String stats = eval.toSummaryString("", false);

            ActualModel am = new ActualModel(regressor);
            am.setStatistics(stats);
            m.setActualModel(am);
            // m.setStatistics(stats);
        } catch (NotSerializableException ex) {
            String message = "Model is not serializable";
            logger.error(message, ex);
            throw new JaqpotException(message, ex);
        } catch (final Exception ex) {
            throw new QSARException("Unexpected condition while trying to train "
                    + "the model. Possible explanation : {" + ex.getMessage() + "}", ex);
        }

        m.setAlgorithm(getAlgorithm());
        m.setCreatedBy(getTask().getCreatedBy());
        m.setDataset(datasetUri);
        try {
            dependentFeature.loadFromRemote();
        } catch (ServiceInvocationException ex) {
            java.util.logging.Logger.getLogger(SvmRegression.class.getName()).log(Level.SEVERE, null, ex);
        }
        m.addDependentFeatures(dependentFeature);

        m.setIndependentFeatures(independentFeatures);

        Feature predictedFeature = publishFeature(m, dependentFeature.getUnits(),
                "Feature created as prediction feature for SVM model " + m.getUri(), datasetUri,
                featureService);
        m.addPredictedFeatures(predictedFeature);
        String predictionFeatureUri = predictedFeature.getUri().toString();

        getTask().getMeta().addComment("Prediction feature " + predictionFeatureUri + " was created.");

        /* SET PARAMETERS FOR THE TRAINED MODEL */
        m.setParameters(new HashSet<Parameter>());
        Parameter<String> kernelParam = new Parameter("kernel", new LiteralValue<String>(kernel))
                .setScope(Parameter.ParameterScope.OPTIONAL);
        kernelParam.setUri(Services.anonymous().augment("parameter", RANDOM.nextLong()));
        Parameter<Double> costParam = new Parameter("cost", new LiteralValue<Double>(cost))
                .setScope(Parameter.ParameterScope.OPTIONAL);
        costParam.setUri(Services.anonymous().augment("parameter", RANDOM.nextLong()));
        Parameter<Double> gammaParam = new Parameter("gamma", new LiteralValue<Double>(gamma))
                .setScope(Parameter.ParameterScope.OPTIONAL);
        gammaParam.setUri(Services.anonymous().augment("parameter", RANDOM.nextLong()));
        Parameter<Double> epsilonParam = new Parameter("epsilon", new LiteralValue<Double>(epsilon))
                .setScope(Parameter.ParameterScope.OPTIONAL);
        epsilonParam.setUri(Services.anonymous().augment("parameter", RANDOM.nextLong()));
        Parameter<Integer> degreeParam = new Parameter("degree", new LiteralValue<Integer>(degree))
                .setScope(Parameter.ParameterScope.OPTIONAL);
        degreeParam.setUri(Services.anonymous().augment("parameter", RANDOM.nextLong()));
        Parameter<Double> toleranceParam = new Parameter("tolerance", new LiteralValue<Double>(tolerance))
                .setScope(Parameter.ParameterScope.OPTIONAL);
        toleranceParam.setUri(Services.anonymous().augment("parameter", RANDOM.nextLong()));

        m.getParameters().add(kernelParam);
        m.getParameters().add(costParam);
        m.getParameters().add(gammaParam);
        m.getParameters().add(epsilonParam);
        m.getParameters().add(degreeParam);
        m.getParameters().add(toleranceParam);

        //save the instances being predicted to abstract trainer for calculating DoA
        predictedInstances = orderedTrainingSet;
        excludeAttributesDoA.add(dependentFeature.getUri().toString());

        return m;
    } catch (QSARException ex) {
        logger.debug(null, ex);
        throw new JaqpotException(ex);
    }
}

From source file:org.wkwk.classifier.Access.java

/**
 * @param args the command line arguments
 * args[0] = filename train set
 * args[1] = filename test set
 * args[2] = remove attribute
 * args[3] = bias resample
 * @throws java.lang.Exception
 */
public static void main(String[] args) throws Exception {

    // Read Dataset (arff, csv)
    DataSource source = new DataSource("../data/weather.nominal.arff");
    //DataSource testSource = new DataSource(args[1]);
    Instances data = source.getDataSet();

    if (data.classIndex() == -1) {
        data.setClassIndex(data.numAttributes() - 1);
    }

    // Remove attribute filter (disabled)
    //        String[] rangeOps = new String[2];
    //        rangeOps[0] = "-R";                                    // attribute range option
    //        rangeOps[1] = args[2];                                 // indices of attributes to remove
    //        Remove remove = new Remove();                          // new instance of filter
    //        remove.setOptions(rangeOps);                           // set options
    //        remove.setInputFormat(data);                           // inform filter about dataset **AFTER** setting options
    //        Instances newData = Filter.useFilter(data, remove);    // apply filter
    //
    //        // Resample filter (disabled)
    //        String[] biasOps = new String[2];
    //        biasOps[0] = "-B";                                     // bias option
    //        biasOps[1] = args[3];                                  // bias towards a uniform class distribution
    //        Resample resample = new Resample();
    //        resample.setOptions(biasOps);
    //        resample.setInputFormat(data);
    //        newData = Filter.useFilter(data, resample);
    //
    // Build Classifier
    MyC45 tree = new MyC45(); // new instance of tree
    tree.buildClassifier(data); // build classifier

    // Evaluation with test set
    //Instances testSet = testSource.getDataSet();
    // train classifier
    //Classifier cls = new MyId3();
    //cls.buildClassifier(data);
    // evaluate classifier and print some statistics
    //Evaluation eval = new Evaluation(data);
    //eval.evaluateModel(cls, testSet);
    //System.out.println(eval.toSummaryString("\nResults\n======\n", false));

    // Evaluation with 10 Fold-CV
    Evaluation evalCV = new Evaluation(data);
    evalCV.crossValidateModel(tree, data, 10, new Random(1));
    System.out.println(evalCV.toSummaryString("\nResults\n======\n", false));
}

From source file:personality_prediction.Evaluation_Result.java

void eval_result() {
    try {
        DataSource source_train = new DataSource(
                "C:\\Users\\divya\\Desktop\\Personality Mining\\WEKA_DataSet\\Training dataset\\training_data_neur.csv");
        Instances train = source_train.getDataSet();
        DataSource source_test = new DataSource(
                "C:\\Users\\divya\\Desktop\\Personality Mining\\WEKA_DataSet\\Testing dataset\\Testing_data_neur.csv");
        Instances test = source_test.getDataSet();
        train.setClassIndex(train.numAttributes() - 1);
        test.setClassIndex(test.numAttributes() - 1);
        // train classifier
        Classifier cls = new J48();
        cls.buildClassifier(train);
        Evaluation eval = new Evaluation(train);
        eval.evaluateModel(cls, test);
        System.out.println(eval.toSummaryString("\nResults\n======\n", false));

    } catch (Exception e) {
        System.out.println(e.getLocalizedMessage());
    }
}

From source file:prismcrossvalidation.Classifier.java

static public String crossValidationPRISM_DISCRET() throws FileNotFoundException, IOException, Exception {
    String prismResult = "";
    String source = MainWindow.pathChooseField.getText();
    Instances data = DataLoad.loadData(source.replace("\\", "/"));

    data.setClassIndex(data.numAttributes() - 1);

    Discretize filter = new Discretize();
    Prism rules = new Prism();

    FilteredClassifier fClassifier = new FilteredClassifier();
    fClassifier.setFilter(filter); // set the current filter
    fClassifier.setClassifier(rules); // set the current classifier

    Evaluation eval = new MyEvaluation(data);
    eval.crossValidateModel(fClassifier, data, fold, new Random(1)); // cross-validation over the configured number of folds

    System.out.println("amount of folds: " + fold);
    MainWindow.logArea.append("Amount of folds: " + fold);

    System.out.println(eval.toSummaryString("Wyniki:", false));
    MainWindow.logArea.append(eval.toSummaryString("Wyniki:", false));

    return prismResult = eval.toSummaryString("Wyniki:", false);
}

From source file:regression.logisticRegression.LogisticRegressionCorrect.java

public void weka(JTextArea output) throws FileNotFoundException, IOException, Exception {
    this.finalPoints = new ArrayList<>();

    BufferedReader reader = new BufferedReader(new FileReader("weka.arff"));
    Instances instances = new Instances(reader);
    instances.setClassIndex(instances.numAttributes() - 1);
    String[] options = new String[4];
    options[0] = "-R";

    options[1] = "1.0E-8";
    options[2] = "-M";
    options[3] = "-1";

    logistic.setOptions(options);

    logistic.buildClassifier(instances);

    for (int i = 0; i < instances.numInstances(); i++) {
        weka.core.Instance inst = instances.instance(i);
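        // Note: the predicted label is inverted here, so a model output of 1.0 is reported as class 0.0.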
        Double classifiedClass = 1.0;
        if (logistic.classifyInstance(inst) == 1.0) {
            classifiedClass = 0.0;
        }

        System.out.println("classify: " + inst.attribute(0) + "|" + inst.value(0) + "->" + classifiedClass);
        double[] distributions = logistic.distributionForInstance(inst);
        output.append("Dla x= " + inst.value(0) + " prawdopodobiestwo wystpnienia zdarzenia wynosi: "
                + distributions[0] + " zatem naley on do klasy: " + classifiedClass + "\n");
        this.finalPoints.add(new Point(inst.value(0), classifiedClass));
        this.finalProbPoints.add(new Point(inst.value(0), distributions[0]));
        for (int j = 0; j < distributions.length; j++) {
            System.out.println("distribution: " + inst.value(0) + "->" + distributions[j]);

        }

    }

    // evaluate classifier and print some statistics
    Evaluation eval = new Evaluation(instances);

    eval.evaluateModel(logistic, instances);
    System.out.println(eval.toSummaryString("\nResults\n======\n", false));
}

From source file:SpamDetector.SpamDetector.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) throws IOException, Exception {
    ArrayList<ArrayList<String>> notSpam = processCSV("notspam.csv");
    ArrayList<ArrayList<String>> spam = processCSV("spam.csv");

    // Try generating attributes & data
    FeatureExtraction fe = new FeatureExtraction();
    fe.generateArff(spam, notSpam);

    // Try CART
    BufferedReader br = new BufferedReader(new FileReader("data.arff"));

    ArffReader arff = new ArffReader(br);
    Instances data = arff.getData();
    data.setClassIndex(data.numAttributes() - 1);

    SimpleCart tree = new SimpleCart();
    tree.buildClassifier(data);
    System.out.println(tree.toString());

    Evaluation eval = new Evaluation(data);
    eval.evaluateModel(tree, data);
    System.out.println(eval.toSummaryString("\n\n\n\nResults\n======\n", false));

    // Use a fresh Evaluation for cross-validation so its statistics are not
    // accumulated on top of the training-set results; the seed makes folds reproducible.
    Evaluation evalCV = new Evaluation(data);
    evalCV.crossValidateModel(tree, data, 10, new Random(1));
    System.out.println(evalCV.toSummaryString("\n\n\n\n10-Fold\n======\n", false));

}

From source file:trainableSegmentation.WekaSegmentation.java

License:GNU General Public License

/**
 * Get training error (from loaded data).
 *
 * @param verbose option to display evaluation information in the log window
 * @return classifier error on the training data set.
 */
public double getTrainingError(boolean verbose) {
    if (null == this.trainHeader)
        return -1;

    double error = -1;
    try {
        final Evaluation evaluation = new Evaluation(this.loadedTrainingData);
        evaluation.evaluateModel(classifier, this.loadedTrainingData);
        if (verbose)
            IJ.log(evaluation.toSummaryString("\n=== Training set evaluation ===\n", false));
        error = evaluation.errorRate();
    } catch (Exception e) {
        e.printStackTrace();
    }

    return error;
}

From source file:trainableSegmentation.WekaSegmentation.java

License:GNU General Public License

/**
 * Get test error of current classifier on a specific image and its binary labels
 *
 * @param image input image
 * @param labels binary labels
 * @param whiteClassIndex index of the white class
 * @param blackClassIndex index of the black class
 * @param verbose option to display evaluation information in the log window
 * @return pixel classification error
 */
public double getTestError(ImagePlus image, ImagePlus labels, int whiteClassIndex, int blackClassIndex,
        boolean verbose) {
    IJ.showStatus("Creating features for test image...");
    if (verbose)
        IJ.log("Creating features for test image " + image.getTitle() + "...");

    // Set proper class names (skip empty list ones)
    ArrayList<String> classNames = new ArrayList<String>();
    if (null == loadedClassNames) {
        for (int i = 0; i < numOfClasses; i++)
            if (examples[0].get(i).size() > 0)
                classNames.add(getClassLabels()[i]);
    } else
        classNames = loadedClassNames;

    // Apply labels
    final int height = image.getHeight();
    final int width = image.getWidth();
    final int depth = image.getStackSize();

    Instances testData = null;

    for (int z = 1; z <= depth; z++) {
        final ImagePlus testSlice = new ImagePlus(image.getImageStack().getSliceLabel(z),
                image.getImageStack().getProcessor(z));
        // Create feature stack for test image
        IJ.showStatus("Creating features for test image (slice " + z + ")...");
        if (verbose)
            IJ.log("Creating features for test image (slice " + z + ")...");
        final FeatureStack testImageFeatures = new FeatureStack(testSlice);
        // Use the same features as the current classifier
        testImageFeatures.setEnabledFeatures(featureStackArray.getEnabledFeatures());
        testImageFeatures.setMaximumSigma(maximumSigma);
        testImageFeatures.setMinimumSigma(minimumSigma);
        testImageFeatures.setMembranePatchSize(membranePatchSize);
        testImageFeatures.setMembraneSize(membraneThickness);
        testImageFeatures.updateFeaturesMT();
        testImageFeatures.setUseNeighbors(featureStackArray.useNeighborhood());
        filterFeatureStackByList(this.featureNames, testImageFeatures);

        final Instances data = testImageFeatures.createInstances(classNames);
        data.setClassIndex(data.numAttributes() - 1);
        if (verbose)
            IJ.log("Assigning classes based on the labels...");

        final ImageProcessor slice = labels.getImageStack().getProcessor(z);
        for (int n = 0, y = 0; y < height; y++)
            for (int x = 0; x < width; x++, n++) {
                final double newValue = slice.getPixel(x, y) > 0 ? whiteClassIndex : blackClassIndex;
                data.get(n).setClassValue(newValue);
            }

        if (null == testData)
            testData = data;
        else {
            for (int i = 0; i < data.numInstances(); i++)
                testData.add(data.get(i));
        }
    }
    if (verbose)
        IJ.log("Evaluating test data...");

    double error = -1;
    try {
        final Evaluation evaluation = new Evaluation(testData);
        evaluation.evaluateModel(classifier, testData);
        if (verbose) {
            IJ.log(evaluation.toSummaryString("\n=== Test data evaluation ===\n", false));
            IJ.log(evaluation.toClassDetailsString() + "\n");
            IJ.log(evaluation.toMatrixString());
        }
        error = evaluation.errorRate();
    } catch (Exception e) {
        e.printStackTrace();
    }

    return error;
}

From source file:trainableSegmentation.WekaSegmentation.java

License:GNU General Public License

/**
 * Get test error of current classifier on a specific image and its binary labels
 *
 * @param image input image
 * @param labels binary labels
 * @param filters list of filters to create features
 * @param whiteClassIndex index of the white class
 * @param blackClassIndex index of the black class
 * @param verbose option to display evaluation information in the log window
 * @return pixel classification error
 */
public double getTestError(ImagePlus image, ImagePlus labels, ImagePlus filters, int whiteClassIndex,
        int blackClassIndex, boolean verbose) {
    IJ.showStatus("Creating features for test image...");
    if (verbose)
        IJ.log("Creating features for test image " + image.getTitle() + "...");

    // Set proper class names (skip empty list ones)
    ArrayList<String> classNames = new ArrayList<String>();
    if (null == loadedClassNames) {
        for (int i = 0; i < numOfClasses; i++)
            if (examples[0].get(i).size() > 0)
                classNames.add(getClassLabels()[i]);
    } else
        classNames = loadedClassNames;

    // Apply labels
    final int height = image.getHeight();
    final int width = image.getWidth();
    final int depth = image.getStackSize();

    Instances testData = null;

    for (int z = 1; z <= depth; z++) {
        final ImagePlus testSlice = new ImagePlus(image.getImageStack().getSliceLabel(z),
                image.getImageStack().getProcessor(z));
        // Create feature stack for test image
        IJ.showStatus("Creating features for test image...");
        if (verbose)
            IJ.log("Creating features for test image " + z + "...");
        final FeatureStack testImageFeatures = new FeatureStack(testSlice);
        // Create features by applying the filters
        testImageFeatures.addFeaturesMT(filters);

        final Instances data = testImageFeatures.createInstances(classNames);
        data.setClassIndex(data.numAttributes() - 1);
        if (verbose)
            IJ.log("Assigning classes based on the labels...");

        final ImageProcessor slice = labels.getImageStack().getProcessor(z);
        for (int n = 0, y = 0; y < height; y++)
            for (int x = 0; x < width; x++, n++) {
                final double newValue = slice.getPixel(x, y) > 0 ? whiteClassIndex : blackClassIndex;
                data.get(n).setClassValue(newValue);
            }

        if (null == testData)
            testData = data;
        else {
            for (int i = 0; i < data.numInstances(); i++)
                testData.add(data.get(i));
        }
    }
    if (verbose)
        IJ.log("Evaluating test data...");

    double error = -1;
    try {
        final Evaluation evaluation = new Evaluation(testData);
        evaluation.evaluateModel(classifier, testData);
        if (verbose) {
            IJ.log(evaluation.toSummaryString("\n=== Test data evaluation ===\n", false));
            IJ.log(evaluation.toClassDetailsString() + "\n");
            IJ.log(evaluation.toMatrixString());
        }
        error = evaluation.errorRate();
    } catch (Exception e) {
        e.printStackTrace();
    }

    return error;
}

From source file:tucil.dua.ai.TucilDuaAi.java

public static void fullTrainingSet() throws Exception {
    Classifier j48 = new J48();
    j48.buildClassifier(datas);

    Evaluation eval = new Evaluation(datas);
    eval.evaluateModel(j48, datas);
    System.out.println("=====Run Information======");
    System.out.println("======Classifier Model======");
    System.out.println(j48.toString());
    System.out.println(eval.toSummaryString("====Stats======\n", false));
    System.out.println(eval.toClassDetailsString("====Detailed Result=====\n"));
    System.out.println(eval.toMatrixString("======Confusion Matrix======\n"));
}