List of usage examples for weka.core.Instance.setWeight
public void setWeight(double weight);
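Before the call-site listings below, here is a minimal, self-contained sketch of basic usage against the standard Weka API. New instances default to a weight of 1.0; setWeight stores a new weight that weight-aware learners and resampling schemes (such as the MOA online bagging/boosting code below) read back via weight(). The dataset name, attribute names, and values are made up purely for illustration.

import java.util.ArrayList;

import weka.core.Attribute;
import weka.core.DenseInstance;
import weka.core.Instance;
import weka.core.Instances;

public class SetWeightBasicUsage {
    public static void main(String[] args) {
        // Hypothetical in-memory dataset with one numeric attribute and a binary class.
        ArrayList<Attribute> attrs = new ArrayList<>();
        attrs.add(new Attribute("x"));
        ArrayList<String> classLabels = new ArrayList<>();
        classLabels.add("neg");
        classLabels.add("pos");
        attrs.add(new Attribute("class", classLabels));

        Instances data = new Instances("demo", attrs, 0);
        data.setClassIndex(data.numAttributes() - 1);

        Instance inst = new DenseInstance(data.numAttributes());
        inst.setDataset(data);
        inst.setValue(0, 1.5);
        inst.setClassValue("pos");

        System.out.println(inst.weight()); // 1.0 -- new instances default to weight 1
        inst.setWeight(3.0);               // scale the instance's influence, e.g. by a
                                           // Poisson draw as in the MOA examples below
        System.out.println(inst.weight()); // 3.0

        data.add(inst); // the copy stored in the dataset keeps the weight
    }
}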
From source file:moa.classifiers.meta.OzaBagLambda.java
License:Open Source License
@Override
public void trainOnInstanceImpl(Instance inst) {
    for (int i = 0; i < this.ensemble.length; i++) {
        int k = MiscUtils.poisson(this.lambdaOption.getValue(), this.classifierRandom);
        if (!m_debug) {
            if (k > 0) {
                Instance weightedInst = (Instance) inst.copy();
                weightedInst.setWeight(inst.weight() * k);
                this.ensemble[i].trainOnInstance(weightedInst);
            }
        }
        if (m_debug) {
            System.out.println(inst.weight() * k);
        }
    }
}
From source file:moa.classifiers.meta.OzaBoost.java
License:Open Source License
@Override
public void trainOnInstanceImpl(Instance inst) {
    double lambda_d = 1.0;
    for (int i = 0; i < this.ensemble.length; i++) {
        double k = this.pureBoostOption.isSet() ? lambda_d
                : MiscUtils.poisson(lambda_d, this.classifierRandom);
        if (k > 0.0) {
            Instance weightedInst = (Instance) inst.copy();
            weightedInst.setWeight(inst.weight() * k);
            this.ensemble[i].trainOnInstance(weightedInst);
        }
        if (this.ensemble[i].correctlyClassifies(inst)) {
            this.scms[i] += lambda_d;
            lambda_d *= this.trainingWeightSeenByModel / (2 * this.scms[i]);
        } else {
            this.swms[i] += lambda_d;
            lambda_d *= this.trainingWeightSeenByModel / (2 * this.swms[i]);
        }
    }
}
From source file:moa.classifiers.meta.OzaBoostAdwin.java
License:Open Source License
@Override
public void trainOnInstanceImpl(Instance inst) {
    int numClasses = inst.numClasses();
    // Set log(k-1) and (k-1) for the SAMME method
    if (this.sammeOption.isSet()) {
        this.Km1 = numClasses - 1;
        this.logKm1 = Math.log(this.Km1);
        this.initKm1 = false;
    }
    // Output codes
    if (this.initMatrixCodes == true) {
        this.matrixCodes = new int[this.ensemble.length][inst.numClasses()];
        for (int i = 0; i < this.ensemble.length; i++) {
            int numberOnes;
            int numberZeros;
            do { // until we have the same number of zeros and ones
                numberOnes = 0;
                numberZeros = 0;
                for (int j = 0; j < numClasses; j++) {
                    int result = 0;
                    if (j == 1 && numClasses == 2) {
                        result = 1 - this.matrixCodes[i][0];
                    } else {
                        result = (this.classifierRandom.nextBoolean() ? 1 : 0);
                    }
                    this.matrixCodes[i][j] = result;
                    if (result == 1) {
                        numberOnes++;
                    } else {
                        numberZeros++;
                    }
                }
            } while ((numberOnes - numberZeros) * (numberOnes - numberZeros) > (this.ensemble.length % 2));
        }
        this.initMatrixCodes = false;
    }
    boolean Change = false;
    double lambda_d = 1.0;
    Instance weightedInst = (Instance) inst.copy();
    for (int i = 0; i < this.ensemble.length; i++) {
        double k = this.pureBoostOption.isSet() ? lambda_d
                : MiscUtils.poisson(lambda_d * this.Km1, this.classifierRandom);
        if (k > 0.0) {
            if (this.outputCodesOption.isSet()) {
                weightedInst.setClassValue((double) this.matrixCodes[i][(int) inst.classValue()]);
            }
            weightedInst.setWeight(inst.weight() * k);
            this.ensemble[i].trainOnInstance(weightedInst);
        }
        boolean correctlyClassifies = this.ensemble[i].correctlyClassifies(weightedInst);
        if (correctlyClassifies) {
            this.scms[i] += lambda_d;
            lambda_d *= this.trainingWeightSeenByModel / (2 * this.scms[i]);
        } else {
            this.swms[i] += lambda_d;
            lambda_d *= this.trainingWeightSeenByModel / (2 * this.swms[i]);
        }
        double ErrEstim = this.ADError[i].getEstimation();
        if (this.ADError[i].setInput(correctlyClassifies ? 0 : 1)) {
            if (this.ADError[i].getEstimation() > ErrEstim) {
                Change = true;
            }
        }
    }
    if (Change) {
        numberOfChangesDetected++;
        double max = 0.0;
        int imax = -1;
        for (int i = 0; i < this.ensemble.length; i++) {
            if (max < this.ADError[i].getEstimation()) {
                max = this.ADError[i].getEstimation();
                imax = i;
            }
        }
        if (imax != -1) {
            this.ensemble[imax].resetLearning();
            //this.ensemble[imax].trainOnInstance(inst);
            this.ADError[imax] = new ADWIN((double) this.deltaAdwinOption.getValue());
            this.scms[imax] = 0;
            this.swms[imax] = 0;
        }
    }
}
From source file:moa.classifiers.meta.PAME.java
License:Open Source License
@Override
public void trainOnInstanceImpl(Instance inst) {
    // get the prediction vector back
    double[] ht = this.getPredictions(inst);
    double yt = inst.classValue();
    if (inst.classIndex() == 0) {
        this.rareCount += 1.0;
    }
    this.count += 1.0;
    // convert to a positive / negative classification scenario
    if (yt == 0) {
        yt = 1.0;
    } else {
        yt = -1.0;
    }
    /*
     * update expert weights
     */
    if (this.updateMethodOption.getChosenIndex() == PAME1) {
        pame1_weights(ht, yt);
    } else if (this.updateMethodOption.getChosenIndex() == PAME2) {
        pame2_weights(ht, yt);
    } else if (this.updateMethodOption.getChosenIndex() == PAME3) {
        pame3_weights(ht, yt);
    }
    /*
     * we are going to use an online bagging / boosting strategy to update the
     * experts. In the end, our objective with the weight formulation is a bit
     * more of a decision-theoretic approach.
     */
    for (int i = 0; i < this.ensemble.length; i++) {
        // sample from a Poisson probability distribution as implemented in
        // online bagging and boosting
        double w;
        if (this.overSampleOption.isSet() && inst.classIndex() == 0) {
            w = 1.0 / (this.rareCount / this.count);
            if (this.logTransformOption.isSet()) {
                w = Math.log(w);
            }
        } else if (this.underSampleOption.isSet() && inst.classIndex() != 0) {
            w = 1.0 - this.rareCount / this.count;
        } else {
            w = 1.0;
        }
        int k = MiscUtils.poisson(w, this.classifierRandom);
        // update the expert accordingly
        if (k > 0) {
            // this works by updating the expert k times with the same example:
            // if k = 4, the expert is trained on the same example 4 times in a row.
            Instance weightedInst = (Instance) inst.copy();
            weightedInst.setWeight(inst.weight() * k); // set the # of training times
            this.ensemble[i].trainOnInstance(weightedInst); // update expert
        }
    }
    this.n_negativeWeights = 0;
    for (int i = 0; i < this.weights.length; i++) {
        if (this.weights[i] < 0.0)
            this.n_negativeWeights++;
    }
}
From source file:moa.classifiers.meta.PAMEAdwin.java
License:Open Source License
@Override
public void trainOnInstanceImpl(Instance inst) {
    // get the prediction vector back
    double[] ht = this.getPredictions(inst);
    double yt = inst.classValue();
    if (inst.classIndex() == 0) {
        this.rareCount += 1.0;
    }
    this.count += 1.0;
    // convert to a positive / negative classification scenario
    if (yt == 0) {
        //System.out.println("Y is positive" + yt);
        yt = 1.0;
    } else {
        //System.out.println("Y is negative" + yt);
        yt = -1.0;
    }
    /*
     * update expert weights
     */
    if (this.updateMethodOption.getChosenIndex() == PAME1) {
        pame1_weights(ht, yt);
    } else if (this.updateMethodOption.getChosenIndex() == PAME2) {
        pame2_weights(ht, yt);
    } else if (this.updateMethodOption.getChosenIndex() == PAME3) {
        pame3_weights(ht, yt);
    } else if (this.updateMethodOption.getChosenIndex() == PAME23) {
        pame23_weights(ht, yt);
    }
    /*
     * we are going to use an online bagging / boosting strategy to update the
     * experts. In the end, our objective with the weight formulation is a bit
     * more of a decision-theoretic approach.
     */
    boolean Change = false;
    for (int i = 0; i < this.ensemble.length; i++) {
        // sample from a Poisson probability distribution as implemented in
        // online bagging and boosting
        double w;
        if (this.overSampleOption.isSet() && inst.classIndex() == 0) {
            w = 1.0 / (this.rareCount / this.count);
            if (this.logTransformOption.isSet()) {
                w = Math.log(w);
            }
        } else if (this.underSampleOption.isSet() && inst.classIndex() != 0) {
            w = 1.0 - this.rareCount / this.count;
        } else {
            w = 1.0;
        }
        int k = MiscUtils.poisson(w, this.classifierRandom);
        // update the expert accordingly
        if (k > 0) {
            // this works by updating the expert k times with the same example:
            // if k = 4, the expert is trained on the same example 4 times in a row.
            Instance weightedInst = (Instance) inst.copy();
            weightedInst.setWeight(inst.weight() * k); // set the # of training times
            this.ensemble[i].trainOnInstance(weightedInst); // update expert
        }
        boolean correctlyClassifies = this.ensemble[i].correctlyClassifies(inst);
        double ErrEstim = this.ADError[i].getEstimation();
        if (this.ADError[i].setInput(correctlyClassifies ? 0 : 1)) {
            if (this.ADError[i].getEstimation() > ErrEstim) {
                Change = true;
            }
        }
    }
    /*
     * if change was detected, remove the worst expert from the ensemble of
     * classifiers.
     */
    if (Change) {
        double max = 0.0;
        int imax = -1;
        for (int i = 0; i < this.ensemble.length; i++) {
            if (max < this.ADError[i].getEstimation()) {
                max = this.ADError[i].getEstimation();
                imax = i;
            }
        }
        if (imax != -1) {
            this.ensemble[imax].resetLearning();
            //this.ensemble[imax].trainOnInstance(inst);
            this.ADError[imax] = new ADWIN();
        }
    }
    this.n_negativeWeights = 0;
    for (int i = 0; i < this.weights.length; i++) {
        if (this.weights[i] < 0.0)
            this.n_negativeWeights++;
    }
}
From source file:moa.classifiers.meta.RandomRules.java
License:Open Source License
@Override
public void trainOnInstanceImpl(Instance inst) {
    for (int i = 0; i < this.ensemble.length; i++) {
        int k = 1;
        if (this.useBaggingOption.isSet()) {
            k = MiscUtils.poisson(1.0, this.classifierRandom);
        }
        if (k > 0) {
            Instance weightedInst = transformInstance(inst, i);
            weightedInst.setWeight(inst.weight() * k);
            this.ensemble[i].trainOnInstance(weightedInst);
        }
    }
}
From source file:moa.classifiers.multilabel.meta.MLOzaBagAdwin.java
License:Open Source License
@Override
public void trainOnInstanceImpl(Instance inst) {
    boolean Change = false;
    for (int i = 0; i < this.ensemble.length; i++) {
        int k = MiscUtils.poisson(1.0, this.classifierRandom);
        if (k > 0) {
            Instance weightedInst = (Instance) inst.copy();
            weightedInst.setWeight(inst.weight() * k);
            this.ensemble[i].trainOnInstance(weightedInst);
        }
        double[] prediction = this.ensemble[i].getVotesForInstance(inst);
        // Compute accuracy
        double actual[] = new double[prediction.length];
        for (short j = 0; j < prediction.length; j++) {
            actual[j] = inst.value(j);
        }
        // calculate
        int p_sum = 0, r_sum = 0;
        int set_union = 0;
        int set_inter = 0;
        double t = 0.01;
        for (int j = 0; j < prediction.length; j++) {
            int p = (prediction[j] >= t) ? 1 : 0;
            int R = (int) actual[j];
            if (p == 1) {
                p_sum++;
                if (R == 1) { // predicted 1, real 1
                    set_inter++;
                    set_union++;
                } else { // predicted 1, real 0
                    set_union++;
                }
            } else {
                if (R == 1) { // predicted 0, real 1
                    set_union++;
                } else { // predicted 0, real 0
                }
            }
        }
        double accuracy = 0.0;
        if (set_union > 0) { // avoid NaN
            accuracy = ((double) set_inter / (double) set_union);
        }
        double ErrEstim = this.ADError[i].getEstimation();
        if (this.ADError[i].setInput(1.0 - accuracy)) {
            if (this.ADError[i].getEstimation() > ErrEstim) {
                Change = true;
            }
        }
    }
    if (Change) {
        System.err.println("change!");
        double max = 0.0;
        int imax = -1;
        for (int i = 0; i < this.ensemble.length; i++) {
            if (max < this.ADError[i].getEstimation()) {
                max = this.ADError[i].getEstimation();
                imax = i;
            }
        }
        if (imax != -1) {
            this.ensemble[imax] = null;
            this.ensemble[imax] = (Classifier) getPreparedClassOption(this.baseLearnerOption);
            this.ensemble[imax].setModelContext(this.modelContext);
            this.ensemble[imax].trainOnInstance(inst);
            this.ADError[imax] = new ADWIN();
        }
    }
}
From source file:moa.classifiers.novelClass.SluiceBox.SluiceBoxClassifier.java
License:Apache License
@Override
public void trainOnInstanceImpl(Instance inst) {
    Instance pseudoPoint = augmentInstance(inst);
    if (inst.weight() < weka.core.Utils.SMALL) {
        double[] votes = roughClassifier.getVotesForInstance(inst);
        pseudoPoint.setClassValue(weka.core.Utils.maxIndex(votes));
        pseudoPoint.setWeight(hypothesisWeightOption.getValue());
    } else {
        roughClassifier.trainOnInstance(inst);
    }
    if (this.warmupCache != null) {
        if (this.warmupCache.size() < this.warmupLengthOption.getValue()) {
            this.warmupCache.add(pseudoPoint);
        } else {
            this.dynamicStreamClustering.initialize(warmupCache);
            this.warmupCache.clear();
            this.warmupCache = null;
        }
    } else {
        this.dynamicStreamClustering.trainOnInstance(pseudoPoint);
    }
}
From source file:moa.classifiers.OzaBagAdwin.java
License:Open Source License
@Override
public void trainOnInstanceImpl(Instance inst) {
    boolean Change = false;
    for (int i = 0; i < this.ensemble.length; i++) {
        int k = MiscUtils.poisson(1.0, this.classifierRandom);
        if (k > 0) {
            Instance weightedInst = (Instance) inst.copy();
            weightedInst.setWeight(inst.weight() * k);
            this.ensemble[i].trainOnInstance(weightedInst);
        }
        boolean correctlyClassifies = this.ensemble[i].correctlyClassifies(inst);
        double ErrEstim = this.ADError[i].getEstimation();
        if (this.ADError[i].setInput(correctlyClassifies ? 0 : 1))
            if (this.ADError[i].getEstimation() > ErrEstim)
                Change = true;
    }
    if (Change) {
        double max = 0.0;
        int imax = -1;
        for (int i = 0; i < this.ensemble.length; i++) {
            if (max < this.ADError[i].getEstimation()) {
                max = this.ADError[i].getEstimation();
                imax = i;
            }
        }
        if (imax != -1) {
            this.ensemble[imax].resetLearning();
            //this.ensemble[imax].trainOnInstance(inst);
            this.ADError[imax] = new ADWIN();
        }
    }
}
From source file:moa.clusterer.FeS2.java
License:Apache License
/**
 * @param c cluster that is being compared against
 * @param x real data instance
 * @return DenseInstance made to work with the outlier-detecting perceptron
 */
private Instance makePerceptronInstance(Riffle c, Instance x) {
    Instance pseudoPoint = new DenseInstance(this.outlierPerceptronTrainingSet.numAttributes());
    pseudoPoint.setDataset(outlierPerceptronTrainingSet);
    double p = c.getInclusionProbability(x);
    double r = (c.getRadius() != 0) ? c.getRadius() : 1;
    //double w = c.getWeight();
    double N = Math.min(c.size(), 1.0 / (this.learningRateAlphaOption.getValue() + 1e-9));
    double d = c.getCenterDistance(x);
    double logP = (p == 0) ? 0 : Math.log(p);
    double logDR = (r == 0 || (d / r) == 0) ? 0 : Math.log(d / r);
    pseudoPoint.setValue(0, logP);
    pseudoPoint.setValue(1, logDR);
    pseudoPoint.setValue(2, logDR * logP);
    pseudoPoint.setValue(3,
            logP - Math.log(1.0 / Math.pow(2.0 * N, this.universalCluster.getHeader().numAttributes())));
    pseudoPoint.setClassValue(0);
    pseudoPoint.setWeight(0.0);
    return pseudoPoint;
}