Example usage for weka.classifiers AbstractClassifier makeCopies

List of usage examples for weka.classifiers AbstractClassifier makeCopies

Introduction

On this page you can find an example usage for weka.classifiers AbstractClassifier makeCopies.

Prototype

public static Classifier[] makeCopies(Classifier model, int num) throws Exception 

Source Link

Document

Creates a given number of deep copies of the given classifier using serialization.

Usage

From source file:meka.classifiers.multilabel.BRq.java

License:Open Source License

/**
 * Builds one binary model per label (Binary Relevance), optionally
 * downsampling negative examples of each per-label dataset.
 *
 * @param data the multi-label training data (labels occupy the first classIndex() attributes)
 * @throws Exception if filtering or building a base classifier fails
 */
@Override
public void buildClassifier(Instances data) throws Exception {
    testCapabilities(data);

    int c = data.classIndex();

    if (getDebug())
        System.out.print("-: Creating " + c + " models (" + m_Classifier.getClass().getName() + "): ");
    // One deep copy (via serialization) of the base classifier per label.
    m_MultiClassifiers = AbstractClassifier.makeCopies(m_Classifier, c);

    Instances sub_data = null;

    for (int i = 0; i < c; i++) {

        // Indices of every label attribute except 'i' (the label this model predicts).
        // (The original allocated a full c x (c-1) matrix per iteration but only used row i.)
        int indices[] = new int[c - 1];
        for (int j = 0, k = 0; j < c; j++) {
            if (j != i) {
                indices[k++] = j;
            }
        }

        //Select only class attribute 'i'
        Remove FilterRemove = new Remove();
        FilterRemove.setAttributeIndicesArray(indices);
        FilterRemove.setInputFormat(data);
        FilterRemove.setInvertSelection(true);
        sub_data = Filter.useFilter(data, FilterRemove);
        sub_data.setClassIndex(0);

        /* BEGIN downsample for this link */
        sub_data.randomize(m_Random);
        int numToRemove = sub_data.numInstances()
                - (int) Math.round(sub_data.numInstances() * m_DownSampleRatio);
        // FIX: only mark instances when there is something to remove. The original
        // checked '++removed >= numToRemove' AFTER marking, so numToRemove == 0
        // still discarded one negative instance.
        if (numToRemove > 0) {
            for (int m = 0, removed = 0; m < sub_data.numInstances(); m++) {
                // Only negative (classValue <= 0) examples are eligible for removal.
                if (sub_data.instance(m).classValue() <= 0.0) {
                    sub_data.instance(m).setClassMissing();
                    if (++removed >= numToRemove)
                        break;
                }
            }
        }
        sub_data.deleteWithMissingClass();
        /* END downsample for this link */

        //Build the classifier for that class
        m_MultiClassifiers[i].buildClassifier(sub_data);
        if (getDebug())
            System.out.print(" " + (i + 1));

    }

    if (getDebug())
        System.out.println(" :-");

    // Empty copy of the last per-label dataset, used as a test-time template.
    m_InstancesTemplate = new Instances(sub_data, 0);

}

From source file:meka.classifiers.multilabel.HASEL.java

License:Open Source License

/**
 * Builds one multi-label model per label-set partition, where the partition
 * is derived from the dataset's label hierarchy.
 *
 * @param D the multi-label training data
 * @throws Exception if partitioning or building a base classifier fails
 */
@Override
public void buildClassifier(Instances D) throws Exception {

    // Get partition from dataset hierarchy.
    // (Removed dead locals L and N from the original — they were never used.)
    kMap = SuperLabelUtils.getPartitionFromDatasetHierarchy(D);
    m_M = kMap.length;
    // One deep copy (via serialization) of the base classifier per partition.
    m_Classifiers = AbstractClassifier.makeCopies(m_Classifier, m_M);
    m_InstancesTemplates = new Instances[m_M];

    for (int i = 0; i < m_M; i++) {

        if (getDebug())
            System.out.println("Building model " + (i + 1) + "/" + m_M + ": " + Arrays.toString(kMap[i]));
        // Dataset restricted to the labels in partition i.
        Instances D_i = SuperLabelUtils.makePartitionDataset(D, kMap[i]);
        m_Classifiers[i].buildClassifier(D_i);
        // Empty copy kept as the test-time template for this model.
        m_InstancesTemplates[i] = new Instances(D_i, 0);
    }

}

From source file:meka.classifiers.multilabel.RAkEL.java

License:Open Source License

/**
 * Builds m_M models, each trained on a random k-subset of the labels
 * (the RAkEL ensemble scheme).
 *
 * @param D the multi-label training data
 * @throws Exception if building any base classifier fails
 */
@Override
public void buildClassifier(Instances D) throws Exception {
    // Verify this classifier can handle the given data.
    testCapabilities(D);

    int numLabels = D.classIndex();
    Random rand = new Random(m_S);

    if (getDebug())
        System.out.println("Building " + m_M + " models of " + m_K + " random subsets:");

    // One classifier, one label subset, and one dataset template per model.
    m_Classifiers = AbstractClassifier.makeCopies(m_Classifier, m_M);
    kMap = new int[m_M][m_K];
    m_InstancesTemplates = new Instances[m_M];

    for (int m = 0; m < m_M; m++) {
        // Draw a random k-subset of the label indices for this model.
        kMap[m] = SuperLabelUtils.get_k_subset(numLabels, m_K, rand);
        if (getDebug())
            System.out.println("\tmodel " + (m + 1) + "/" + m_M + ": " + Arrays.toString(kMap[m]) + ", P=" + m_P
                    + ", N=" + m_N);
        Instances subset = SuperLabelUtils.makePartitionDataset(D, kMap[m], m_P, m_N);
        m_Classifiers[m].buildClassifier(subset);
        m_InstancesTemplates[m] = new Instances(subset, 0);
    }
}

From source file:meka.classifiers.multilabel.RAkELd.java

License:Open Source License

/**
 * Builds models over a random disjoint partition of the label set
 * (the RAkELd scheme), one model per partition cell.
 *
 * @param D the multi-label training data
 * @throws Exception if partitioning or building a base classifier fails
 */
@Override
public void buildClassifier(Instances D) throws Exception {

    int L = D.classIndex();
    Random r = new Random(m_S);

    // Number of disjoint cells needed to cover L labels with cells of size m_K.
    // FIX: the original computed Math.ceil(L / m_K) with INTEGER division, which
    // truncates before ceil() runs (e.g. L=7, m_K=3 yielded 2 instead of 3).
    int num = (int) Math.ceil((double) L / m_K);
    kMap = SuperLabelUtils.generatePartition(A.make_sequence(L), num, r, true);
    m_M = kMap.length;
    // One deep copy (via serialization) of the base classifier per partition cell.
    m_Classifiers = AbstractClassifier.makeCopies(m_Classifier, m_M);
    m_InstancesTemplates = new Instances[m_M];

    if (getDebug())
        System.out.println("Building " + m_M + " models of " + m_K + " partitions:");

    for (int i = 0; i < m_M; i++) {

        if (getDebug())
            System.out.println("\tpartitioning model " + (i + 1) + "/" + m_M + ": " + Arrays.toString(kMap[i])
                    + ", P=" + m_P + ", N=" + m_N);
        // Dataset restricted to the labels in partition cell i.
        Instances D_i = SuperLabelUtils.makePartitionDataset(D, kMap[i], m_P, m_N);
        if (getDebug())
            System.out.println("\tbuilding model " + (i + 1) + "/" + m_M + ": " + Arrays.toString(kMap[i]));

        m_Classifiers[i].buildClassifier(D_i);
        // Empty copy kept as the test-time template for this model.
        m_InstancesTemplates[i] = new Instances(D_i, 0);

    }

}

From source file:meka.classifiers.multitarget.RAkELd.java

License:Open Source License

@Override
public void buildClassifier(Instances D) throws Exception {
    /*/*from w  ww  . ja va 2  s .co m*/
    This is a slow way of doing things at the moment, making use of multitarget.SCC functionality,
    even though multilabel.RAkELd is not a meta multi-label classifier.
     */

    int L = D.classIndex();
    int N = D.numInstances();
    Random r = new Random(m_S);

    // Note: a slightly round-about way of doing it:
    int num = (int) Math.ceil(L / m_K);
    kMap = SuperLabelUtils.generatePartition(A.make_sequence(L), num, r, true);
    m_M = kMap.length;
    vMap = new int[m_M][][];
    m_Classifiers = AbstractClassifier.makeCopies(m_Classifier, m_M);
    m_InstancesTemplates = new Instances[m_M];

    if (getDebug())
        System.out.println("Building " + m_M + " models of " + m_K + " partitions:");

    D = SuperLabelUtils.SLTransformation(D, kMap, m_P, m_N);

    for (int i = 0; i < m_M; i++) {

        /*
        if (getDebug()) 
           System.out.println("\tpartitioning model "+(i+1)+"/"+m_M+": "+Arrays.toString(kMap[i])+", P="+m_P+", N="+m_N);
                
        Instances D_i = SuperLabelUtils.makePartitionDataset(D,kMap[i],m_P,m_N);
        */

        Instances D_i = F.keepLabels(D, D.classIndex(), new int[] { i });
        D_i.setClassIndex(0);

        //vMap[i] = SuperLabelUtils.extractValues(D_i);

        if (getDebug())
            System.out.println("\tbuilding model " + (i + 1) + "/" + m_M + ": " + Arrays.toString(kMap[i]));

        m_Classifiers[i].buildClassifier(D_i);
        m_InstancesTemplates[i] = new Instances(D_i, 0);

    }

}

From source file:mulan.classifier.transformation.CalibratedLabelRanking.java

License:Open Source License

/**
 * Builds the calibrated label ranking model: a Binary Relevance model for the
 * virtual (calibration) label plus one one-vs-one model per pair of labels.
 *
 * @param trainingSet the multi-label training data
 * @throws Exception if filtering or building a classifier fails
 */
@Override
protected void buildInternal(MultiLabelInstances trainingSet) throws Exception {
    // Virtual label models
    debug("Building calibration label models");
    // FIX: removed leftover System.out.println calls that duplicated debug()
    // output unconditionally, regardless of the debug setting.
    virtualLabelModels = new BinaryRelevance(getBaseClassifier());
    virtualLabelModels.setDebug(getDebug());
    virtualLabelModels.build(trainingSet);

    // One-vs-one models: one per unordered pair of labels.
    numModels = ((numLabels) * (numLabels - 1)) / 2;
    oneVsOneModels = AbstractClassifier.makeCopies(getBaseClassifier(), numModels);
    nodata = new boolean[numModels];
    metaDataTest = new Instances[numModels];

    Instances trainingData = trainingSet.getDataSet();

    int counter = 0;
    // Creation of one-vs-one models
    for (int label1 = 0; label1 < numLabels - 1; label1++) {
        // Attribute of label 1
        Attribute attrLabel1 = trainingData.attribute(labelIndices[label1]);
        for (int label2 = label1 + 1; label2 < numLabels; label2++) {
            debug("Building one-vs-one model " + (counter + 1) + "/" + numModels);
            // Attribute of label 2
            Attribute attrLabel2 = trainingData.attribute(labelIndices[label2]);

            // initialize training set
            Instances dataOneVsOne = new Instances(trainingData, 0);
            // filter out examples with no preference (both labels have the same value)
            for (int i = 0; i < trainingData.numInstances(); i++) {
                Instance tempInstance;
                if (trainingData.instance(i) instanceof SparseInstance) {
                    tempInstance = new SparseInstance(trainingData.instance(i));
                } else {
                    tempInstance = new DenseInstance(trainingData.instance(i));
                }

                int nominalValueIndex;
                nominalValueIndex = (int) tempInstance.value(labelIndices[label1]);
                String value1 = attrLabel1.value(nominalValueIndex);
                nominalValueIndex = (int) tempInstance.value(labelIndices[label2]);
                String value2 = attrLabel2.value(nominalValueIndex);

                if (!value1.equals(value2)) {
                    // NOTE(review): re-assigns label1's own value to the instance —
                    // appears redundant, kept to preserve original behavior.
                    tempInstance.setValue(attrLabel1, value1);
                    dataOneVsOne.add(tempInstance);
                }
            }

            // remove all labels apart from label1 and place it at the end
            Reorder filter = new Reorder();
            int numPredictors = trainingData.numAttributes() - numLabels;
            int[] reorderedIndices = new int[numPredictors + 1];
            for (int i = 0; i < numPredictors; i++) {
                reorderedIndices[i] = featureIndices[i];
            }
            reorderedIndices[numPredictors] = labelIndices[label1];
            filter.setAttributeIndicesArray(reorderedIndices);
            filter.setInputFormat(dataOneVsOne);
            dataOneVsOne = Filter.useFilter(dataOneVsOne, filter);
            dataOneVsOne.setClassIndex(numPredictors);

            // build model label1 vs label2
            if (dataOneVsOne.size() > 0) {
                oneVsOneModels[counter].buildClassifier(dataOneVsOne);
            } else {
                // No instance expresses a preference between this pair.
                nodata[counter] = true;
            }
            // Keep only the (now empty) header as the test-time template.
            dataOneVsOne.delete();
            metaDataTest[counter] = dataOneVsOne;
            counter++;
        }
    }
}

From source file:mulan.classifier.transformation.MultiLabelStacking.java

License:Open Source License

/**
 * Initializes all the parameters used in the meta-level.
 * Calculates the correlated labels if meta-level pruning is applied.
 *
 * @param dataSet        the multi-label training data used when computing label correlations
 * @param metaClassifier the classifier copied (once per label) to form the meta-level ensemble
 * @param includeAttrs   whether the original attributes are included at the meta-level
 * @param metaPercentage fraction of labels kept as correlated labels (controls pruning)
 * @param eval           attribute evaluator used for feature selection; when null the
 *                       Phi coefficient is used to rank correlated labels instead
 * @throws Exception if copying the classifier or performing attribute selection fails
 */
public void initializeMetaLevel(MultiLabelInstances dataSet, Classifier metaClassifier, boolean includeAttrs,
        double metaPercentage, ASEvaluation eval) throws Exception {
    this.metaClassifier = metaClassifier;
    // One deep copy (via serialization) of the meta classifier per label.
    metaLevelEnsemble = AbstractClassifier.makeCopies(metaClassifier, numLabels);
    metaLevelData = new Instances[numLabels];
    metaLevelFilteredEnsemble = new FilteredClassifier[numLabels];
    this.includeAttrs = includeAttrs;
    // calculate the number of correlated labels that corresponds to the
    // given percentage
    topkCorrelated = (int) Math.floor(metaPercentage * numLabels);
    if (topkCorrelated < 1) {
        debug("Too small percentage, selecting k=1");
        topkCorrelated = 1;
    }
    if (topkCorrelated < numLabels) {// pruning should be applied
        selectedAttributes = new int[numLabels][];
        if (eval == null) {// calculate the PhiCoefficient
            Statistics phi = new Statistics();
            phi.calculatePhi(dataSet);
            for (int i = 0; i < numLabels; i++) {
                // Top-k labels most Phi-correlated with label i.
                selectedAttributes[i] = phi.topPhiCorrelatedLabels(i, topkCorrelated);
            }
        } else {// apply feature selection
            AttributeSelection attsel = new AttributeSelection();
            Ranker rankingMethod = new Ranker();
            rankingMethod.setNumToSelect(topkCorrelated);
            attsel.setEvaluator(eval);
            attsel.setSearch(rankingMethod);
            // create a dataset consisting of all the classes of each
            // instance plus the class we want to select attributes from
            for (int i = 0; i < numLabels; i++) {
                ArrayList<Attribute> attributes = new ArrayList<Attribute>();

                for (int j = 0; j < numLabels; j++) {
                    attributes.add(train.attribute(labelIndices[j]));
                }
                // A copy of label i acts as the temporary class attribute ("meta").
                attributes.add(train.attribute(labelIndices[i]).copy("meta"));

                Instances iporesult = new Instances("Meta format", attributes, 0);
                iporesult.setClassIndex(numLabels);
                for (int k = 0; k < train.numInstances(); k++) {
                    double[] values = new double[numLabels + 1];
                    for (int m = 0; m < numLabels; m++) {
                        // Nominal label values are parsed from their string form
                        // (assumes label values are numeric strings such as "0"/"1").
                        values[m] = Double.parseDouble(train.attribute(labelIndices[m])
                                .value((int) train.instance(k).value(labelIndices[m])));
                    }
                    values[numLabels] = Double.parseDouble(train.attribute(labelIndices[i])
                            .value((int) train.instance(k).value(labelIndices[i])));
                    Instance metaInstance = DataUtils.createInstance(train.instance(k), 1, values);
                    metaInstance.setDataset(iporesult);
                    iporesult.add(metaInstance);
                }
                // Rank the label-attributes against label i and keep the selection.
                attsel.SelectAttributes(iporesult);
                selectedAttributes[i] = attsel.selectedAttributes();
                iporesult.delete();
            }
        }
    }
}

From source file:mulan.classifier.transformation.MultiLabelStacking.java

License:Open Source License

/**
 * Builds the base-level classifiers.
 * Their predictions are gathered in the baseLevelPredictions member.
 *
 * @param trainingSet the multi-label training data
 * @throws Exception if a transformation or classifier build fails
 */
public void buildBaseLevel(MultiLabelInstances trainingSet) throws Exception {
    train = new Instances(trainingSet.getDataSet());
    baseLevelData = new Instances[numLabels];
    // One deep copy (via serialization) of the base classifier per label.
    baseLevelEnsemble = AbstractClassifier.makeCopies(baseClassifier, numLabels);
    if (normalize) {
        // Track min/max predicted probabilities per label for normalization.
        maxProb = new double[numLabels];
        minProb = new double[numLabels];
        Arrays.fill(minProb, 1);
    }
    // initialize the table holding the predictions of the first level
    // classifiers for each label for every instance of the training set
    baseLevelPredictions = new double[train.numInstances()][numLabels];

    for (int labelIndex = 0; labelIndex < numLabels; labelIndex++) {
        debug("Label: " + labelIndex);
        // transform the dataset according to the BR method
        baseLevelData[labelIndex] = BinaryRelevanceTransformation.transformInstances(train, labelIndices,
                labelIndices[labelIndex]);
        // attach indexes in order to keep track of the original positions
        baseLevelData[labelIndex] = new Instances(attachIndexes(baseLevelData[labelIndex]));
        // prepare the transformed dataset for stratified x-fold cv
        Random random = new Random(1);
        baseLevelData[labelIndex].randomize(random);
        baseLevelData[labelIndex].stratify(numFolds);
        debug("Creating meta-data");
        for (int j = 0; j < numFolds; j++) {
            debug("Label=" + labelIndex + ", Fold=" + j);
            Instances subtrain = baseLevelData[labelIndex].trainCV(numFolds, j, random);
            // create a filtered meta classifier, used to ignore
            // the index attribute in the build process
            // perform stratified x-fold cv and get predictions
            // for each class for every instance
            FilteredClassifier fil = new FilteredClassifier();
            fil.setClassifier(baseLevelEnsemble[labelIndex]);
            Remove remove = new Remove();
            remove.setAttributeIndices("first");
            remove.setInputFormat(subtrain);
            fil.setFilter(remove);
            fil.buildClassifier(subtrain);

            // Classify test instance
            Instances subtest = baseLevelData[labelIndex].testCV(numFolds, j);
            for (int i = 0; i < subtest.numInstances(); i++) {
                double distribution[] = new double[2];
                distribution = fil.distributionForInstance(subtest.instance(i));
                // Ensure correct predictions both for class values {0,1}
                // and {1,0}
                Attribute classAttribute = baseLevelData[labelIndex].classAttribute();
                // The attached index (attribute 0) maps the fold instance back
                // to its original row in the training set.
                baseLevelPredictions[(int) subtest.instance(i)
                        .value(0)][labelIndex] = distribution[classAttribute.indexOfValue("1")];
                if (normalize) {
                    // Update the running min/max of P(label = "1") for this label.
                    if (distribution[classAttribute.indexOfValue("1")] > maxProb[labelIndex]) {
                        maxProb[labelIndex] = distribution[classAttribute.indexOfValue("1")];
                    }
                    if (distribution[classAttribute.indexOfValue("1")] < minProb[labelIndex]) {
                        minProb[labelIndex] = distribution[classAttribute.indexOfValue("1")];
                    }
                }
            }
        }
        // now we can detach the indexes from the first level datasets
        baseLevelData[labelIndex] = detachIndexes(baseLevelData[labelIndex]);

        debug("Building base classifier on full data");
        // build base classifier on the full training data
        baseLevelEnsemble[labelIndex].buildClassifier(baseLevelData[labelIndex]);
        // Free instance storage; only the header is needed afterwards.
        baseLevelData[labelIndex].delete();
    }

    if (normalize) {
        normalizePredictions();
    }

}

From source file:mulan.classifier.transformation.MultiLabelStacking.java

License:Open Source License

/**
 * Sets the type of the meta classifier and initializes the ensemble.
 *
 * @param metaClassifier the classifier copied once per label for the meta-level
 * @throws Exception if the classifier copies cannot be created
 */
public void setMetaAlgorithm(Classifier metaClassifier) throws Exception {
    // One deep copy (via serialization) of the meta classifier per label.
    metaLevelEnsemble = AbstractClassifier.makeCopies(metaClassifier, numLabels);
    this.metaClassifier = metaClassifier;
}

From source file:mulan.classifier.transformation.Pairwise.java

License:Open Source License

@Override
protected void buildInternal(MultiLabelInstances train) throws Exception {
    numModels = ((numLabels) * (numLabels - 1)) / 2;
    oneVsOneModels = AbstractClassifier.makeCopies(getBaseClassifier(), numModels);
    nodata = new boolean[numModels];

    debug("preparing shell");
    pt = new PairwiseTransformation(train);

    int counter = 0;
    // Creation of one-vs-one models
    for (int label1 = 0; label1 < numLabels - 1; label1++) {
        for (int label2 = label1 + 1; label2 < numLabels; label2++) {
            debug("Building one-vs-one model " + (counter + 1) + "/" + numModels);
            // initialize training set
            Instances dataOneVsOne = pt.transformInstances(label1, label2);

            // build model label1 vs label2
            if (dataOneVsOne.size() > 0) {
                oneVsOneModels[counter].buildClassifier(dataOneVsOne);
            } else {
                nodata[counter] = true;/*w w w. j  a va2  s  .c  o  m*/
            }
            counter++;
        }
    }
}