Example usage for weka.classifiers.meta FilteredClassifier buildClassifier

Introduction

This page lists example usages of the weka.classifiers.meta.FilteredClassifier method buildClassifier.

Prototype

public void buildClassifier(Instances data) throws Exception 

Document

Build the classifier on the filtered data.
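
Below is a minimal, self-contained sketch of calling buildClassifier; the iris.arff file name and the choice of J48 with a Remove filter are illustrative assumptions, not taken from the examples that follow.

import weka.classifiers.meta.FilteredClassifier;
import weka.classifiers.trees.J48;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;
import weka.filters.unsupervised.attribute.Remove;

public class BuildClassifierSketch {
    public static void main(String[] args) throws Exception {
        Instances data = DataSource.read("iris.arff"); // placeholder dataset
        data.setClassIndex(data.numAttributes() - 1);

        FilteredClassifier fc = new FilteredClassifier();
        fc.setClassifier(new J48());
        Remove remove = new Remove();
        remove.setAttributeIndices("first"); // e.g. hide an identifier column
        fc.setFilter(remove);

        // buildClassifier sets the filter's input format, filters the
        // training data, and builds the wrapped J48 on the result
        fc.buildClassifier(data);
        System.out.println(fc);
    }
}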

Usage

From source file:mulan.classifier.transformation.MultiLabelStacking.java

License:Open Source License

/**
 * Builds the base-level classifiers.
 * Their predictions are gathered in the baseLevelPredictions member
 * @param trainingSet 
 * @throws Exception
 */
public void buildBaseLevel(MultiLabelInstances trainingSet) throws Exception {
    train = new Instances(trainingSet.getDataSet());
    baseLevelData = new Instances[numLabels];
    baseLevelEnsemble = AbstractClassifier.makeCopies(baseClassifier, numLabels);
    if (normalize) {
        maxProb = new double[numLabels];
        minProb = new double[numLabels];
        Arrays.fill(minProb, 1);
    }
    // initialize the table holding the predictions of the first level
    // classifiers for each label for every instance of the training set
    baseLevelPredictions = new double[train.numInstances()][numLabels];

    for (int labelIndex = 0; labelIndex < numLabels; labelIndex++) {
        debug("Label: " + labelIndex);
        // transform the dataset according to the BR method
        baseLevelData[labelIndex] = BinaryRelevanceTransformation.transformInstances(train, labelIndices,
                labelIndices[labelIndex]);
        // attach indexes in order to keep track of the original positions
        baseLevelData[labelIndex] = new Instances(attachIndexes(baseLevelData[labelIndex]));
        // prepare the transformed dataset for stratified x-fold cv
        Random random = new Random(1);
        baseLevelData[labelIndex].randomize(random);
        baseLevelData[labelIndex].stratify(numFolds);
        debug("Creating meta-data");
        for (int j = 0; j < numFolds; j++) {
            debug("Label=" + labelIndex + ", Fold=" + j);
            Instances subtrain = baseLevelData[labelIndex].trainCV(numFolds, j, random);
            // create a filtered meta classifier, used to ignore
            // the index attribute in the build process
            // perform stratified x-fold cv and get predictions
            // for each class for every instance
            FilteredClassifier fil = new FilteredClassifier();
            fil.setClassifier(baseLevelEnsemble[labelIndex]);
            Remove remove = new Remove();
            remove.setAttributeIndices("first");
            remove.setInputFormat(subtrain);
            fil.setFilter(remove);
            fil.buildClassifier(subtrain);

            // Classify test instance
            Instances subtest = baseLevelData[labelIndex].testCV(numFolds, j);
            for (int i = 0; i < subtest.numInstances(); i++) {
                double[] distribution = fil.distributionForInstance(subtest.instance(i));
                // Ensure correct predictions both for class values {0,1}
                // and {1,0}: look up the index of the positive value "1"
                Attribute classAttribute = baseLevelData[labelIndex].classAttribute();
                double posProb = distribution[classAttribute.indexOfValue("1")];
                // the first attribute holds the instance's original index
                baseLevelPredictions[(int) subtest.instance(i).value(0)][labelIndex] = posProb;
                if (normalize) {
                    if (posProb > maxProb[labelIndex]) {
                        maxProb[labelIndex] = posProb;
                    }
                    if (posProb < minProb[labelIndex]) {
                        minProb[labelIndex] = posProb;
                    }
                }
            }
        }
        // now we can detach the indexes from the first level datasets
        baseLevelData[labelIndex] = detachIndexes(baseLevelData[labelIndex]);

        debug("Building base classifier on full data");
        // build base classifier on the full training data
        baseLevelEnsemble[labelIndex].buildClassifier(baseLevelData[labelIndex]);
        baseLevelData[labelIndex].delete();
    }

    if (normalize) {
        normalizePredictions();
    }

}
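
Note the design choice above: an index attribute is attached to each transformed dataset so that fold predictions can be written back to the instance's original row of baseLevelPredictions, while the Remove filter wrapped in the FilteredClassifier keeps that index attribute hidden from the base classifier while it is built.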

From source file:nl.uva.sne.commons.ClusterUtils.java

private static FilteredClassifier buildModel(int[] indicesToRemove, int classIndex, Instances trainDataset,
        Classifier cl) throws Exception {
    FilteredClassifier model = new FilteredClassifier();
    model.setClassifier(AbstractClassifier.makeCopy(cl));
    Remove remove = new Remove();
    remove.setAttributeIndicesArray(indicesToRemove);
    remove.setInputFormat(trainDataset);
    remove.setInvertSelection(false);
    model.setFilter(remove);
    trainDataset.setClassIndex(classIndex);
    model.buildClassifier(trainDataset);
    //        int foldHash = trainDataset.toString().hashCode();
    //        String modelKey = createKey(indicesToRemove, foldHash);
    //        existingModels.put(modelKey, model);
    return model;
}
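
A hypothetical call to the helper above (the ARFF file name, the removed index, and the NaiveBayes base learner from weka.classifiers.bayes are assumptions for illustration):

Instances train = ConverterUtils.DataSource.read("features.arff");
int[] indicesToRemove = { 0 };               // e.g. drop an id column
int classIndex = train.numAttributes() - 1;  // class as the last attribute
FilteredClassifier model = buildModel(indicesToRemove, classIndex, train, new NaiveBayes());
double pred = model.classifyInstance(train.instance(0));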

From source file:org.vimarsha.classifier.impl.FunctionWiseClassifier.java

License:Open Source License

/**
 * Classifies function-wise test instances associated with the labels named in the list passed as the argument.
 *
 * @param list - labels of instances contained in the test set that need to be classified.
 * @return TreeMap containing the instance labels and the associated classification results.
 * @throws ClassificationFailedException
 */
@Override
public LinkedHashMap<String, String> classify(LinkedList<String> list) throws ClassificationFailedException {
    output = new LinkedHashMap<String, String>();
    J48 j48 = new J48();
    Remove rm = new Remove();
    rm.setAttributeIndices("1");
    FilteredClassifier fc = new FilteredClassifier();
    fc.setFilter(rm);
    fc.setClassifier(j48);
    try {
        fc.buildClassifier(trainSet);
        for (int i = 0; i < testSet.numInstances(); i++) {
            double pred = fc.classifyInstance(testSet.instance(i));
            if (list.isEmpty()) {
                output.put(String.valueOf(i + 1), testSet.classAttribute().value((int) pred));
            } else {
                output.put(list.get(i), testSet.classAttribute().value((int) pred));
            }
        }
    } catch (Exception ex) {
        throw new ClassificationFailedException();
    }
    return output;
}
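
This example and the two that follow share one pattern: because the Remove filter lives inside the FilteredClassifier, the same filtering is applied automatically to every instance passed to classifyInstance, so the test instances keep their first attribute and still match the format the wrapped J48 was trained on.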

From source file:org.vimarsha.classifier.impl.TimeslicedClassifier.java

License:Open Source License

/**
 * Classifies Timesliced test data instances.
 *
 * @return Resulting linked list with timesliced classification results.
 * @throws ClassificationFailedException
 */
@Override
public Object classify() throws ClassificationFailedException {
    output = new LinkedList<String>();
    J48 j48 = new J48();
    Remove rm = new Remove();
    rm.setAttributeIndices("1");
    FilteredClassifier fc = new FilteredClassifier();
    fc.setFilter(rm);
    fc.setClassifier(j48);
    try {
        fc.buildClassifier(trainSet);

        for (int i = 0; i < testSet.numInstances(); i++) {
            //System.out.println(testSet.instance(i));
            double pred = fc.classifyInstance(testSet.instance(i));
            output.add(testSet.classAttribute().value((int) pred));
        }
    } catch (Exception ex) {
        System.out.println(ex.toString());
        throw new ClassificationFailedException();
    }
    return output;
}

From source file:org.vimarsha.classifier.impl.WholeProgramClassifier.java

License:Open Source License

/**
 * Classifies whole-program test instances.
 *
 * @return String containing the classification result of the evaluated program's dataset.
 * @throws ClassificationFailedException
 */
@Override
public Object classify() throws ClassificationFailedException {
    J48 j48 = new J48();
    Remove rm = new Remove();
    String output = null;
    rm.setAttributeIndices("1");
    FilteredClassifier fc = new FilteredClassifier();
    fc.setFilter(rm);
    fc.setClassifier(j48);
    try {
        fc.buildClassifier(trainSet);
        this.treeModel = j48.toString();
        double pred = fc.classifyInstance(testSet.instance(0));
        output = testSet.classAttribute().value((int) pred);
        classificationResult = output;
    } catch (Exception ex) {
        throw new ClassificationFailedException();
    }
    return output;
}

From source file:pl.nask.hsn2.service.analysis.JSWekaAnalyzer.java

License:Open Source License

private void createClassifier(String classifierName) {
    try {
        Classifier classifier = (Classifier) Class.forName(classifierName).newInstance();
        FilteredClassifier filteredClassifier = new FilteredClassifier();
        filteredClassifier.setClassifier(classifier);
        filteredClassifier.setFilter(new StringToWordVector());
        filteredClassifier.buildClassifier(trainingSet);
        fc = filteredClassifier;
    } catch (Exception e) {
        LOGGER.error(e.getMessage(), e);
    }
}
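
As a follow-up sketch, the trained fc could classify a new document along these lines, assuming trainingSet has a single string attribute at index 0 plus a nominal class (the attribute layout and sample text are assumptions; DenseInstance comes from weka.core):

Instances unlabeled = new Instances(trainingSet, 0);           // header only, no rows
DenseInstance inst = new DenseInstance(unlabeled.numAttributes());
inst.setDataset(unlabeled);
inst.setValue(0, "var x = eval(input);");                      // raw text to classify
// FilteredClassifier pushes the instance through StringToWordVector itself
double pred = fc.classifyInstance(inst);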