Example usage for weka.core Instances classIndex

List of usage examples for weka.core Instances classIndex

Introduction

On this page you can find usage examples for weka.core Instances classIndex.

Prototype


public int classIndex()

Document

Returns the class attribute's index.
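
A minimal sketch of typical usage follows (the class name ClassIndexDemo and the file name data.arff are illustrative assumptions, not taken from the examples below). classIndex() returns -1 until a class attribute has been set via setClassIndex().

import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class ClassIndexDemo {
    public static void main(String[] args) throws Exception {
        // Load a dataset (the path "data.arff" is a placeholder assumption)
        Instances data = DataSource.read("data.arff");

        // classIndex() returns -1 while no class attribute is set
        System.out.println("Class index before setting: " + data.classIndex());

        // Common convention: use the last attribute as the class
        data.setClassIndex(data.numAttributes() - 1);
        System.out.println("Class index after setting: " + data.classIndex());
    }
}

In the MEKA examples below, D.classIndex() is typically read as the number of labels L, since MEKA places the label attributes first and sets the class index to the label count.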

Usage

From source file: meka.classifiers.multilabel.LabelTransformationClassifier.java

License: Open Source License

@Override
public void buildClassifier(Instances D) throws Exception {
    testCapabilities(D);

    int L = D.classIndex();

    if (getDebug())
        System.out.print("transforming labels with size: " + L + " baseModel: "
                + m_Classifier.getClass().getName() + " ");

    Instances transformed_D = this.transformLabels(D);

    m_Classifier.buildClassifier(transformed_D);
}

From source file: meka.classifiers.multilabel.MajorityLabelset.java

License: Open Source License

@Override
public void buildClassifier(Instances D) throws Exception {
    testCapabilities(D);

    int L = D.classIndex();
    this.prediction = new double[L];

    for (int i = 0; i < D.numInstances(); i++) {
        updateCount(D.instance(i), L);
    }

}

From source file: meka.classifiers.multilabel.Maniac.java

License: Open Source License

@Override
public Instance transformInstance(Instance x) throws Exception {

    Instances tmpInst = new Instances(x.dataset());

    tmpInst.delete();
    tmpInst.add(x);

    Instances features = this.extractPart(tmpInst, false);

    Instances pseudoLabels = new Instances(this.compressedTemplateInst);
    Instance tmpin = pseudoLabels.instance(0);
    pseudoLabels.delete();

    pseudoLabels.add(tmpin);

    for (int i = 0; i < pseudoLabels.classIndex(); i++) {
        pseudoLabels.instance(0).setMissing(i);
    }

    Instances newDataSet = Instances.mergeInstances(pseudoLabels, features);
    newDataSet.setClassIndex(pseudoLabels.numAttributes());

    return newDataSet.instance(0);
}

From source file: meka.classifiers.multilabel.MCC.java

License: Open Source License

@Override
public void buildClassifier(Instances D) throws Exception {
    testCapabilities(D);

    // Variables

    int L = D.classIndex();
    int N = D.numInstances();
    int d = D.numAttributes() - L;
    m_R = new Random(m_S);

    prepareChain(L);
    int s[] = retrieveChain();

    if (getDebug())
        System.out.println("s_[0] = " + Arrays.toString(s));

    // If we want to optimize the chain space ...
    if (m_Is > 0) {

        // Make CC
        CC h = CCUtils.buildCC(s, D, m_Classifier);

        if (getDebug())
            System.out.println("Optimising s ... (" + m_Is + " iterations):");

        double w = payoff(h, new Instances(D));
        if (getDebug())
            System.out.println("h_{t=" + 0 + "} := " + Arrays.toString(s)); //+"; w = "+w);

        for (int t = 0; t < m_Is; t++) {

            // propose a chain s' by swapping two elements in s
            int s_[] = Arrays.copyOf(A.swap(s, m_R), s.length);

            // build h'
            CC h_ = CCUtils.buildCC(s_, D, m_Classifier);

            // rate h'
            double w_ = payoff(h_, new Instances(D));

            // accept h' over h ? 
            if (w_ > w) {
                w = w_;
                s = s_;
                h = h_;
                if (getDebug())
                    System.out.println("h_{t=" + (t + 1) + "} := " + Arrays.toString(s)); //+"; w = "+w);
                //if (getDebug()) System.out.print("& "+Utils.doubleToString(likelihood(h_,new Instances(D),1),8,2));
                //if (getDebug()) System.out.print("& "+Utils.doubleToString(likelihood(h_,new Instances(D),2),8,2));
                //if (getDebug()) System.out.println("& "+Utils.doubleToString(likelihood(h_,new Instances(D),5),8,2));
            }
        }
    }
    if (getDebug())
        System.out.println("---");

    this.prepareChain(s);
    super.buildClassifier(D);
}

From source file: meka.classifiers.multilabel.meta.DeepML.java

License: Open Source License

@Override
public void buildClassifier(Instances D) throws Exception {
    testCapabilities(D);

    // Extract variables

    int L = D.classIndex();
    int d = D.numAttributes() - L;
    double X_[][] = MLUtils.getXfromD(D);

    // Pre Tune ===========================
    /*
    if (m_M < 0 || m_R < 0 || m_H < 0) {
       System.out.println("We will do some pre-tuning here ...");
       //BR h = new BR();
       //h.setClassifier(new SMO());
       String ops[] = RBMTools.tuneRBM((MultilabelClassifier)m_Classifier,D,m_R,m_M,m_H,m_E);
       System.out.println("got: "+Arrays.toString(ops));
       this.setOptions(ops);
    }
    */
    // ====================================

    // Build DBM
    dbm = createDBM(d);
    dbm.setSeed(m_Seed);

    dbm.setE(m_E);

    // Train RBM, get Z
    long before = System.currentTimeMillis();
    dbm.train(X_, L);
    rbm_time = System.currentTimeMillis() - before;
    double Z[][] = dbm.prob_Z(X_);
    if (getDebug()) {
        Matrix tW[] = dbm.getWs();
        System.out.println("X = \n" + MatrixUtils.toString(X_));
        System.out.println("W = \n" + MatrixUtils.toString(tW[0].getArray()));
        System.out.println("Y = \n" + MatrixUtils.toString(MLUtils.getYfromD(D), 0));
        System.out.println("Z = \n" + MatrixUtils.toString(MatrixUtils.threshold(Z, 0.5), 0));
        /*
        Instances newD = RBMTools.makeDataset(D,M.threshold(Z,0.5));
        System.out.println(""+newD);
        ArffSaver saver = new ArffSaver();
        saver.setInstances(newD);
        saver.setFile(new File("newD.arff"));
        saver.writeBatch();
        System.exit(1);
        */
    }

    // Train Classifier
    m_InstancesTemplate = new Instances(MLUtils.replaceZasAttributes(D, Z, L)); // did not clear
    m_Classifier.buildClassifier(m_InstancesTemplate);
}

From source file: meka.classifiers.multilabel.meta.MBR.java

License: Open Source License

@Override
public void buildClassifier(Instances data) throws Exception {
    testCapabilities(data);

    int c = data.classIndex();

    // Base BR

    if (getDebug())
        System.out.println("Build BR Base (" + c + " models)");
    m_BASE = (BR) AbstractClassifier.forName(getClassifier().getClass().getName(),
            ((AbstractClassifier) getClassifier()).getOptions());
    m_BASE.buildClassifier(data);

    // Meta BR

    if (getDebug())
        System.out.println("Prepare Meta data           ");
    Instances meta_data = new Instances(data);

    FastVector BinaryClass = new FastVector(c);
    BinaryClass.addElement("0");
    BinaryClass.addElement("1");

    for (int i = 0; i < c; i++) {
        meta_data.insertAttributeAt(new Attribute("metaclass" + i, BinaryClass), c);
    }

    for (int i = 0; i < data.numInstances(); i++) {
        double cfn[] = m_BASE.distributionForInstance(data.instance(i));
        for (int a = 0; a < cfn.length; a++) {
            meta_data.instance(i).setValue(a + c, cfn[a]);
        }
    }

    meta_data.setClassIndex(c);
    m_InstancesTemplate = new Instances(meta_data, 0);

    if (getDebug())
        System.out.println("Build BR Meta (" + c + " models)");

    m_META = (BR) AbstractClassifier.forName(getClassifier().getClass().getName(),
            ((AbstractClassifier) getClassifier()).getOptions());
    m_META.buildClassifier(meta_data);
}

From source file: meka.classifiers.multilabel.meta.RandomSubspaceML.java

License: Open Source License

@Override
public void buildClassifier(Instances D) throws Exception {
    testCapabilities(D);

    m_InstancesTemplates = new Instances[m_NumIterations];
    m_InstanceTemplates = new Instance[m_NumIterations];

    if (getDebug())
        System.out.println("-: Models: ");

    m_Classifiers = ProblemTransformationMethod.makeCopies((ProblemTransformationMethod) m_Classifier,
            m_NumIterations);

    Random r = new Random(m_Seed);

    int N_sub = (D.numInstances() * m_BagSizePercent / 100);

    int L = D.classIndex();
    int d = D.numAttributes() - L;
    int d_new = d * m_AttSizePercent / 100;
    m_IndicesCut = new int[m_NumIterations][];

    for (int i = 0; i < m_NumIterations; i++) {

        // Downsize the instance space (exactly like in EnsembleML.java)

        if (getDebug())
            System.out.print("\t" + (i + 1) + ": ");
        D.randomize(r);
        Instances D_cut = new Instances(D, 0, N_sub);
        if (getDebug())
            System.out.print("N=" + D.numInstances() + " -> N'=" + D_cut.numInstances() + ", ");

        // Downsize attribute space

        D_cut.setClassIndex(-1);
        int indices_a[] = A.make_sequence(L, d + L);
        A.shuffle(indices_a, r);
        indices_a = Arrays.copyOfRange(indices_a, 0, d - d_new);
        Arrays.sort(indices_a);
        m_IndicesCut[i] = A.invert(indices_a, D.numAttributes());
        D_cut = F.remove(D_cut, indices_a, false);
        D_cut.setClassIndex(L);
        if (getDebug())
            System.out.print(" A:=" + (D.numAttributes() - L) + " -> A'=" + (D_cut.numAttributes() - L) + " ("
                    + m_IndicesCut[i][L] + ",...," + m_IndicesCut[i][m_IndicesCut[i].length - 1] + ")");

        // Train multi-label classifier

        if (m_Classifiers[i] instanceof Randomizable)
            ((Randomizable) m_Classifiers[i]).setSeed(m_Seed + i);
        if (getDebug())
            System.out.println(".");

        m_Classifiers[i].buildClassifier(D_cut);
        m_InstanceTemplates[i] = D_cut.instance(1);
        m_InstancesTemplates[i] = new Instances(D_cut, 0);
    }
    if (getDebug())
        System.out.println(":-");
}

From source file: meka.classifiers.multilabel.meta.SubsetMapper.java

License: Open Source License

@Override
public void buildClassifier(Instances D) throws Exception {
    testCapabilities(D);

    for (int i = 0; i < D.numInstances(); i++) {
        m_Count.put(MLUtils.toBitString(D.instance(i), D.classIndex()), 0);
    }

    m_Classifier.buildClassifier(D);

}

From source file: meka.classifiers.multilabel.MLCBMaD.java

License: Open Source License

@Override
public Instance transformInstance(Instance x) throws Exception {
    Instances tmpInst = new Instances(x.dataset());

    tmpInst.delete();
    tmpInst.add(x);

    Instances features = this.extractPart(tmpInst, false);

    Instances pseudoLabels = new Instances(this.compressedMatrix);
    Instance tmpin = pseudoLabels.instance(0);
    pseudoLabels.delete();

    pseudoLabels.add(tmpin);

    for (int i = 0; i < pseudoLabels.classIndex(); i++) {
        pseudoLabels.instance(0).setMissing(i);
    }

    Instances newDataSet = Instances.mergeInstances(pseudoLabels, features);
    newDataSet.setClassIndex(this.size);

    return newDataSet.instance(0);
}

From source file: meka.classifiers.multilabel.MULAN.java

License: Open Source License

@Override
public void buildClassifier(Instances instances) throws Exception {
    testCapabilities(instances);

    long before = System.currentTimeMillis();
    if (getDebug())
        System.err.print(" moving target attributes to the beginning ... ");

    Random r = instances.getRandomNumberGenerator(0);
    String name = "temp_" + MLUtils.getDatasetName(instances) + "_" + r.nextLong() + ".arff";
    System.err.println("Using temporary file: " + name);
    int L = instances.classIndex();

    // rename attributes, because MULAN doesn't deal well with hyphens etc.
    for (int i = L; i < instances.numAttributes(); i++) {
        instances.renameAttribute(i, "a_" + i);
    }
    BufferedWriter writer = new BufferedWriter(new FileWriter(name));
    m_InstancesTemplate = F.meka2mulan(new Instances(instances), L);
    writer.write(m_InstancesTemplate.toString());
    writer.flush();
    writer.close();
    MultiLabelInstances train = new MultiLabelInstances(name, L);
    try {
        new File(name).delete();
    } catch (Exception e) {
        System.err.println(
                "[Error] Failed to delete temporary file: " + name + ". You may want to delete it manually.");
    }

    if (getDebug())
        System.out.println(" done ");
    long after = System.currentTimeMillis();

    System.err.println("[Note] Discount " + ((after - before) / 1000.0) + " seconds from this build time");

    m_InstancesTemplate = new Instances(train.getDataSet(), 0);

    System.out.println("CLASSIFIER " + m_Classifier);

    //m_InstancesTemplate.delete();
    if (m_MethodString.equals("BR"))
        m_MULAN = new BinaryRelevance(m_Classifier);
    else if (m_MethodString.equals("LP"))
        m_MULAN = new LabelPowerset(m_Classifier);
    else if (m_MethodString.equals("CLR"))
        m_MULAN = new CalibratedLabelRanking(m_Classifier);
    else if (m_MethodString.equals("RAkEL1")) {
        m_MULAN = new RAkEL(new LabelPowerset(m_Classifier), 10, L / 2);
        System.out.println("m=10,k=" + (L / 2));
    } else if (m_MethodString.equals("RAkEL2")) {
        m_MULAN = new RAkEL(new LabelPowerset(m_Classifier), 2 * L, 3);
        System.out.println("m=" + (L * 2) + ",k=3");
    } else if (m_MethodString.equals("MLkNN"))
        m_MULAN = new MLkNN(10, 1.0);
    else if (m_MethodString.equals("IBLR_ML"))
        m_MULAN = new IBLR_ML(10);
    else if (m_MethodString.equals("BPMLL")) { //BPMLL is run withthe number of hidden units equal to 20% of the input units.
        m_MULAN = new BPMLL();
        ((BPMLL) m_MULAN).setLearningRate(0.01);
        ((BPMLL) m_MULAN).setHiddenLayers(new int[] { 30 });
        ((BPMLL) m_MULAN).setTrainingEpochs(100);
    } else if (m_MethodString.startsWith("HOMER")) {
        //Class m = Class.forName("HierarchyBuilder.Method.Random");
        //Class w = Class.forName("mulan.classifier.LabelPowerset");
        //Constructor c = new h.getConstructor(new Class[]{MultiLabelLearner.class, Integer.TYPE, HierarchyBuilder.Method.class});
        //Object obj = h.newInstance();

        String ops[] = m_MethodString.split("\\.");

        // number of clusters
        int n = 3;
        try {
            n = Integer.parseInt(ops[2]);
        } catch (Exception e) {
            System.err.println("[Warning] Could not parse number of clusters, using default: " + n);
        }

        // learner
        // @TODO use reflection here
        MultiLabelLearner mll = new LabelPowerset(m_Classifier);
        if (ops[3].equalsIgnoreCase("BinaryRelevance")) {
            mll = new BinaryRelevance(m_Classifier);
        } else if (ops[3].equalsIgnoreCase("ClassifierChain")) {
            mll = new ClassifierChain(m_Classifier);
        } else if (ops[3].equalsIgnoreCase("LabelPowerset")) {
            // already set
        } else {
            System.err.println(
                    "[Warning] Did not recognise classifier type String, using default: LabelPowerset");
        }

        if (getDebug()) {
            System.out.println("HOMER(" + mll + "," + n + "," + ops[1] + ")");
        }

        m_MULAN = new HOMER(mll, n, HierarchyBuilder.Method.valueOf(ops[1]));
    } else
        throw new Exception("Could not find MULAN Classifier by that name: " + m_MethodString);

    m_MULAN.setDebug(getDebug());
    m_MULAN.build(train);
}