Example usage for weka.core Instance deleteAttributeAt

List of usage examples for weka.core Instance deleteAttributeAt

Introduction

On this page you can find example usages of the weka.core Instance method deleteAttributeAt.

Prototype

public void deleteAttributeAt(int position);

Source Link

Document

Deletes an attribute at the given position (0 to numAttributes() - 1).

Usage

From source file:machinelearningcw.RandomLinearPerceptron.java

@Override
public double[] distributionForInstance(Instance instnc) throws Exception {
    // Vote tally: classify[0] = ensemble votes for class 0, classify[1] = votes for class 1.
    // (The original contained an empty loop over the attributes here that had no
    // effect; it has been removed.)
    double[] classify = new double[2];
    for (int i = 0; i < ensemble.length; i++) {
        // Work on a copy so deleting attributes does not mutate the caller's instance.
        Instance newInstance = new DenseInstance(instnc);
        // Sort the indices in descending order so that deleting one attribute
        // does not shift the positions of attributes still to be deleted
        // (avoids out-of-bounds deletions).
        Arrays.sort(attributesdeleted[i], Collections.reverseOrder());
        for (int j = 0; j < attributesdeleted[i].length; j++) {
            // Drop the same attributes that were removed when this ensemble
            // member was built, so the instance matches the member's format.
            newInstance.deleteAttributeAt(attributesdeleted[i][j]);
        }
        // Add up the predictions of every member of the ensemble.
        double result = ensemble[i].classifyInstance(newInstance);
        if (result == 0) {
            classify[0] += 1;
        } else {
            classify[1] += 1;
        }
    }
    return classify;
}

From source file:meka.classifiers.multilabel.cc.CNode.java

License:Open Source License

/**
 * Transform - turn [y1,y2,y3,x1,x2] into [y1,y2,x1,x2].
 * @return transformed Instance/*  w  ww. ja v  a  2s  . co  m*/
 */
public Instance transform(Instance x, double ypred[]) throws Exception {
    x = (Instance) x.copy();
    int L = x.classIndex();
    int L_c = (paY.length + 1);
    x.setDataset(null);
    for (int j = 0; j < (L - L_c); j++) {
        x.deleteAttributeAt(0);
    }
    for (int pa : paY) {
        //System.out.println("x_["+map[pa]+"] <- "+ypred[pa]);
        x.setValue(map[pa], ypred[pa]);
    }
    x.setDataset(T);
    x.setClassMissing();
    return x;
}

From source file:meka.core.CCUtils.java

License:Open Source License

/**
 * LinkTransform - prepare 'x' for testing at a node 'j' of the chain, by excluding 'exl'.
 * @param   x      instance/* www  . j a v a  2  s .  com*/
 * @param   excl   indices of labels which are NOT parents of j
 * @param   _D      the dataset template to use
 * @return   the transformed instance
 */
public static Instance linkTransformation(Instance x, int excl[], Instances _D) {
    // copy
    Instance copy = (Instance) x.copy();
    copy.setDataset(null);

    // delete attributes we don't need
    for (int i = excl.length - 1; i >= 0; i--) {
        copy.deleteAttributeAt(excl[i]);
    }

    //set template
    copy.setDataset(_D);

    return copy;
}

From source file:meka.core.MLUtils.java

License:Open Source License

/**
 * Delete attributes from an instance 'x' indexed by 'indicesToRemove[]'.
 * @param   x               instance/*from   ww  w . ja v a  2  s .  c om*/
 * @param   indicesToRemove      array of attribute indices
 * @return   the modified dataset
 */
public static final Instance deleteAttributesAt(Instance x, int indicesToRemove[]) {//, boolean keep) {
    Arrays.sort(indicesToRemove);
    for (int j = indicesToRemove.length - 1; j >= 0; j--) {
        x.deleteAttributeAt(indicesToRemove[j]);
    }
    return x;
}

From source file:meka.core.MLUtils.java

License:Open Source License

/**
 * Fit a copy of instance 'x' to the given template by deleting leading label
 * attributes until its class index matches the template's, then attaching it.
 * @param   x                  instance to re-template (not modified; a copy is returned)
 * @param   instancesTemplate  dataset template to attach
 * @return  the re-templated copy
 */
public static final Instance setTemplate(Instance x, Instances instancesTemplate) {
    int L = x.classIndex();
    int L_t = instancesTemplate.classIndex();
    x = (Instance) x.copy();
    x.setDataset(null);
    // Drop leading attributes until the label count matches the template.
    int toDrop = L - L_t;
    while (toDrop-- > 0) {
        x.deleteAttributeAt(0);
    }
    x.setDataset(instancesTemplate);
    return x;
}

From source file:meka.core.PSUtils.java

License:Open Source License

/**
 * Convert a multi-label instance into a multi-class instance, according to a template:
 * strip the L leading label attributes, insert a single class attribute at the
 * front, and attach the template dataset.
 */
public static Instance convertInstance(Instance x, int L, Instances template) {
    Instance x_ = (Instance) x.copy();
    x_.setDataset(null);
    // Remove every one of the L leading label attributes.
    int removed = 0;
    while (removed++ < L) {
        x_.deleteAttributeAt(0);
    }
    // Insert one placeholder class attribute in their place; setting the
    // dataset afterwards gives it its definition from the template.
    x_.insertAttributeAt(0);
    x_.setDataset(template);
    return x_;
}

From source file:milk.classifiers.MIBoost.java

License:Open Source License

/**
  * Builds the boosted multi-instance classifier: repeatedly fits the base
  * learner to the flattened bag data and re-weights each bag (exemplar) by
  * its bag-level classification error.
  *
  * @param exps the training exemplars (bags) to be used for generating the
  * boosted classifier.
  * @exception Exception if the classifier could not be built successfully
  */
 public void buildClassifier(Exemplars exps) throws Exception {

     Exemplars train = new Exemplars(exps);

     // Only two-class nominal problems without string attributes are supported.
     if (train.classAttribute().type() != Attribute.NOMINAL) {
         throw new Exception("Class attribute must be nominal.");
     }
     if (train.checkForStringAttributes()) {
         throw new Exception("Can't handle string attributes!");
     }

     m_ClassIndex = train.classIndex();
     m_IdIndex = train.idIndex();
     m_NumClasses = train.numClasses();
     m_NumIterations = m_MaxIterations;

     if (m_NumClasses > 2) {
         throw new Exception("Not yet prepared to deal with multiple classes!");
     }

     if (m_Classifier == null)
         throw new Exception("A base classifier has not been specified!");
     // Instance weights drive the boosting, so the base learner must honour them.
     if (!(m_Classifier instanceof WeightedInstancesHandler))
         throw new Exception("Base classifier cannot handle weighted instances!");

     // One base-model copy per boosting iteration.
     m_Models = Classifier.makeCopies(m_Classifier, getMaxIterations());
     if (m_Debug)
         System.err.println("Base classifier: " + m_Classifier.getClass().getName());

     m_Beta = new double[m_NumIterations];
     m_Attributes = new Instances(train.exemplar(0).getInstances(), 0);

     double N = (double) train.numExemplars(), sumNi = 0;
     Instances data = new Instances(m_Attributes, 0); // flattened data the model is learned from
     data.deleteAttributeAt(m_IdIndex); // the bag-ID attribute is not a predictive feature
     Instances dataset = new Instances(data, 0);

     // Initialize weights: each bag gets total weight sumNi/N, split evenly
     // over its instances, so every bag contributes equally at the start.
     for (int i = 0; i < N; i++)
         sumNi += train.exemplar(i).getInstances().numInstances();

     for (int i = 0; i < N; i++) {
         Exemplar exi = train.exemplar(i);
         exi.setWeight(sumNi / N);
         Instances insts = exi.getInstances();
         double ni = (double) insts.numInstances();
         for (int j = 0; j < ni; j++) {
             Instance ins = new Instance(insts.instance(j)); // copy

             ins.deleteAttributeAt(m_IdIndex);
             ins.setDataset(dataset);
             ins.setWeight(exi.weight() / ni);
             data.add(ins);
         }
     }

     // Assume the order of the instances is preserved by the Discretize filter
     // (dataIdx below indexes 'data' in bag order and relies on this).
     if (m_DiscretizeBin > 0) {
         m_Filter = new Discretize();
         m_Filter.setInputFormat(new Instances(data, 0));
         m_Filter.setBins(m_DiscretizeBin);
         data = Filter.useFilter(data, m_Filter);
     }

     // Main boosting loop.
     int dataIdx;
     iterations: for (int m = 0; m < m_MaxIterations; m++) {
         if (m_Debug)
             System.err.println("\nIteration " + m);
         // Build a model on the current instance weights
         m_Models[m].buildClassifier(data);

         // Compute each bag's error: fraction of its instances whose predicted
         // class differs from the bag's class.
         double[] err = new double[(int) N], weights = new double[(int) N];
         boolean perfect = true, tooWrong = true;
         dataIdx = 0;
         for (int n = 0; n < N; n++) {
             Exemplar exn = train.exemplar(n);
             // Prediction of each instance and the predicted class distribution
             // of the bag
             double nn = (double) exn.getInstances().numInstances();
             for (int p = 0; p < nn; p++) {
                 Instance testIns = data.instance(dataIdx++);
                 if ((int) m_Models[m].classifyInstance(testIns) != (int) exn.classValue()) // Weighted instance-wise 0-1 errors
                     err[n]++;
             }
             weights[n] = exn.weight();
             err[n] /= nn;
             // 'perfect' stays true only if every bag errs above 0.5;
             // 'tooWrong' stays true only if every bag errs below 0.5.
             if (err[n] > 0.5)
                 perfect = false;
             if (err[n] < 0.5)
                 tooWrong = false;
         }

         if (perfect || tooWrong) { // No or 100% classification error, cannot find beta
             if (m == 0)
                 m_Beta[m] = 1.0;
             else
                 m_Beta[m] = 0;
             m_NumIterations = m + 1;
             if (m_Debug)
                 System.err.println("No errors");
             break iterations;
         }

         // One-dimensional numeric search for the model weight c (beta):
         // start at 0, both bounds NaN = unconstrained.
         double[] x = new double[1];
         x[0] = 0;
         double[][] b = new double[2][x.length];
         b[0][0] = Double.NaN;
         b[1][0] = Double.NaN;

         OptEng opt = new OptEng();
         opt.setWeights(weights);
         opt.setErrs(err);
         if (m_Debug)
             System.out.println("Start searching for c... ");
         x = opt.findArgmin(x, b);
         // findArgmin returns null when it runs out of iterations; restart
         // from the current variable values until it converges.
         while (x == null) {
             x = opt.getVarbValues();
             if (m_Debug)
                 System.out.println("200 iterations finished, not enough!");
             x = opt.findArgmin(x, b);
         }
         if (m_Debug)
             System.out.println("Finished.");
         m_Beta[m] = x[0];

         if (m_Debug)
             System.err.println("c = " + m_Beta[m]);

         // Stop if error too small or error too big and ignore this model
         if (Double.isInfinite(m_Beta[m]) || Utils.smOrEq(m_Beta[m], 0)) {
             if (m == 0)
                 m_Beta[m] = 1.0;
             else
                 m_Beta[m] = 0;
             m_NumIterations = m + 1;
             if (m_Debug)
                 System.err.println("Errors out of range!");
             break iterations;
         }

         // Update bag weights: exp(beta*(2*err-1)) increases the weight of
         // bags with error above 0.5 (beta > 0 is guaranteed here).
         dataIdx = 0;
         double totWeights = 0;
         for (int r = 0; r < N; r++) {
             Exemplar exr = train.exemplar(r);
             exr.setWeight(weights[r] * Math.exp(m_Beta[m] * (2.0 * err[r] - 1.0)));
             totWeights += exr.weight();
         }

         if (m_Debug)
             System.err.println("Total weights = " + totWeights);

         // Renormalize so the total weight stays sumNi, then push each bag's
         // weight down evenly onto its instances for the next iteration.
         for (int r = 0; r < N; r++) {
             Exemplar exr = train.exemplar(r);
             double num = (double) exr.getInstances().numInstances();
             exr.setWeight(sumNi * exr.weight() / totWeights);
             for (int s = 0; s < num; s++) {
                 Instance inss = data.instance(dataIdx);
                 inss.setWeight(exr.weight() / num);
                 if (Double.isNaN(inss.weight()))
                     throw new Exception("instance " + s + " in bag " + r + " has weight NaN!");
                 dataIdx++;
             }
         }
     }
 }

From source file:milk.classifiers.MIWrapper.java

License:Open Source License

/**
 * Flatten a set of exemplars (bags) into a single weighted dataset, dropping
 * the bag-ID attribute. Each instance is weighted sumNi/(N*ni) so that every
 * bag contributes the same total weight regardless of its size.
 */
public Instances transform(Exemplars train) throws Exception {

    Instances data = new Instances(m_Attributes); // data to learn a model from
    data.deleteAttributeAt(m_IdIndex); // the bag-ID attribute is not a learning feature
    Instances dataset = new Instances(data, 0);

    double N = train.numExemplars(); // number of exemplars (bags)
    double sumNi = 0;                // total number of instances across all bags
    for (int i = 0; i < N; i++) {
        sumNi += train.exemplar(i).getInstances().numInstances();
    }

    // Copy every instance of every bag, stripped of its ID and re-weighted.
    for (int i = 0; i < N; i++) {
        Instances insts = train.exemplar(i).getInstances();
        double ni = (double) insts.numInstances();
        for (int j = 0; j < ni; j++) {
            Instance ins = new Instance(insts.instance(j)); // copy
            ins.deleteAttributeAt(m_IdIndex);
            ins.setDataset(dataset);
            ins.setWeight(sumNi / (N * ni));
            data.add(ins);
        }
    }

    return data;
}

From source file:moa.core.utils.Converter.java

License:Open Source License

/**
 * Re-shape a multi-label instance to the internal template's format: strip all
 * m_L leading class attributes, then insert a single class attribute at the
 * beginning and attach the template dataset (which defines that attribute).
 */
public Instance formatInstance(Instance original) {

    // Copy the original instance and detach it from its dataset.
    Instance converted = (Instance) original.copy();
    converted.setDataset(null);

    // Delete all m_L class attributes from the front.
    int removed = 0;
    while (removed < m_L) {
        converted.deleteAttributeAt(0);
        removed++;
    }

    // Add one class attribute back at the beginning; setting the dataset
    // below is expected to configure that attribute properly.
    converted.insertAttributeAt(0);
    converted.setDataset(m_InstancesTemplate);

    return converted;
}

From source file:moa.gui.BatchCmd.java

License:Apache License

public void run() {
    // Points collected during the current evaluation horizon.
    ArrayList<DataPoint> pointBuffer0 = new ArrayList<DataPoint>();
    int m_timestamp = 0;
    int decayHorizon = stream.getDecayHorizon();

    // decay_rate is chosen so a point's weight decays to decay_threshold
    // after decayHorizon timesteps.
    double decay_threshold = stream.getDecayThreshold();
    double decay_rate = (-1 * Math.log(decay_threshold) / decayHorizon);

    int counter = decayHorizon;

    while (m_timestamp < totalInstances && stream.hasMoreInstances()) {
        m_timestamp++;
        counter--;
        Instance next = stream.nextInstance();
        DataPoint point0 = new DataPoint(next, m_timestamp);
        pointBuffer0.add(point0);

        Instance traininst0 = new DenseInstance(point0);
        if (clusterer instanceof ClusterGenerator)
            traininst0.setDataset(point0.dataset());
        else
            // strip the class attribute before training the clusterer
            traininst0.deleteAttributeAt(point0.classIndex());

        clusterer.trainOnInstanceImpl(traininst0);

        // Evaluate once per horizon.
        if (counter <= 0) {
            // Apply time decay to every buffered point.
            for (DataPoint p : pointBuffer0)
                p.updateWeight(m_timestamp, decay_rate);

            Clustering gtClustering0;
            Clustering clustering0 = null;

            // Ground truth: the buffered points, or the generator's own
            // micro-clustering when requested and available.
            gtClustering0 = new Clustering(pointBuffer0);
            if (useMicroGT && stream instanceof RandomRBFGeneratorEvents) {
                gtClustering0 = ((RandomRBFGeneratorEvents) stream).getMicroClustering();
            }

            clustering0 = clusterer.getClusteringResult();
            if (clusterer.implementsMicroClusterer()) {
                if (clusterer instanceof ClusterGenerator && stream instanceof RandomRBFGeneratorEvents) {
                    ((ClusterGenerator) clusterer)
                            .setSourceClustering(((RandomRBFGeneratorEvents) stream).getMicroClustering());
                }
                Clustering microC = clusterer.getMicroClusteringResult();
                if (clusterer.evaluateMicroClusteringOption.isSet()) {
                    clustering0 = microC;
                } else {
                    // derive a macro clustering from the micro clusters when no
                    // macro result was produced directly
                    if (clustering0 == null && microC != null)
                        clustering0 = moa.clusterers.KMeans.gaussianMeans(gtClustering0, microC);
                }
            }

            // Run every configured measure on this horizon's clusterings;
            // a failing measure is reported but does not stop the batch.
            for (int i = 0; i < measures.length; i++) {
                try {
                    /*double sec =*/ measures[i].evaluateClusteringPerformance(clustering0, gtClustering0,
                            pointBuffer0);
                } catch (Exception ex) {
                    ex.printStackTrace();
                }
            }

            // Start the next horizon with an empty buffer.
            pointBuffer0.clear();
            counter = decayHorizon;
        }
    }
}