Example usage for weka.core Instance weight

List of usage examples for weka.core Instance weight

Introduction

On this page you can find example usage for weka.core Instance.weight().

Prototype

public double weight();

Document

Returns the instance's weight.
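
A minimal standalone sketch of reading and rescaling the weight (assuming Weka 3.7 or later, where DenseInstance is the concrete implementation of the Instance interface; the attribute values below are purely illustrative):

import weka.core.DenseInstance;
import weka.core.Instance;

public class InstanceWeightSketch {
    public static void main(String[] args) {
        // Two numeric values, created here with a weight of 1.0.
        Instance inst = new DenseInstance(1.0, new double[] { 1.5, 2.5 });

        System.out.println(inst.weight());   // 1.0
        // Rescale the weight, as the bagging examples below do with Poisson draws.
        inst.setWeight(inst.weight() * 3.0);
        System.out.println(inst.weight());   // 3.0
    }
}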

Usage

From source file:moa.classifiers.meta.RandomRules.java

License:Open Source License

@Override
public void trainOnInstanceImpl(Instance inst) {
    for (int i = 0; i < this.ensemble.length; i++) {
        int k = 1;
        if (this.useBaggingOption.isSet()) {
            k = MiscUtils.poisson(1.0, this.classifierRandom);
        }
        if (k > 0) {
            Instance weightedInst = transformInstance(inst, i);
            weightedInst.setWeight(inst.weight() * k);
            this.ensemble[i].trainOnInstance(weightedInst);
        }
    }
}

From source file:moa.classifiers.meta.WeightedMajorityAlgorithm.java

License:Open Source License

@Override
public void trainOnInstanceImpl(Instance inst) {
    double totalWeight = 0.0;
    for (int i = 0; i < this.ensemble.length; i++) {
        boolean prune = false;
        if (!this.ensemble[i].correctlyClassifies(inst)) {
            if (this.ensembleWeights[i] > this.gammaOption.getValue() / this.ensembleWeights.length) {
                this.ensembleWeights[i] *= this.betaOption.getValue() * inst.weight();
            } else if (this.pruneOption.isSet()) {
                prune = true;
                discardModel(i);
                i--;
            }
        }
        if (!prune) {
            totalWeight += this.ensembleWeights[i];
            this.ensemble[i].trainOnInstance(inst);
        }
    }
    // normalize weights
    for (int i = 0; i < this.ensembleWeights.length; i++) {
        this.ensembleWeights[i] /= totalWeight;
    }
}

From source file:moa.classifiers.multilabel.MajorityLabelset.java

License:Open Source License

protected void updateCount(Instance x, int L) {

    String y = toBitString(x, L);

    if (classFreqs.containsKey(y)) {
        double freq = classFreqs.get(y) + x.weight();
        classFreqs.put(y, freq);
        if (freq >= maxValue) {
            maxValue = freq;
            this.prediction = new double[L];
            for (int j = 0; j < L; j++) {
                this.prediction[j] = x.value(j);
            }
        }
    } else {
        classFreqs.put(y, x.weight());
    }
}

From source file:moa.classifiers.multilabel.meta.MLOzaBagAdwin.java

License:Open Source License

@Override
public void trainOnInstanceImpl(Instance inst) {

    boolean Change = false;
    for (int i = 0; i < this.ensemble.length; i++) {
        int k = MiscUtils.poisson(1.0, this.classifierRandom);
        if (k > 0) {
            Instance weightedInst = (Instance) inst.copy();
            weightedInst.setWeight(inst.weight() * k);
            this.ensemble[i].trainOnInstance(weightedInst);
        }
        double[] prediction = this.ensemble[i].getVotesForInstance(inst);
        //Compute accuracy
        double actual[] = new double[prediction.length];
        for (short j = 0; j < prediction.length; j++) {
            actual[j] = inst.value(j);
        }
        // calculate
        int p_sum = 0, r_sum = 0;
        int set_union = 0;
        int set_inter = 0;
        double t = 0.01;
        for (int j = 0; j < prediction.length; j++) {
            int p = (prediction[j] >= t) ? 1 : 0;
            int R = (int) actual[j];
            if (p == 1) {
                p_sum++;
                // predt 1, real 1
                if (R == 1) {
                    set_inter++;
                    set_union++;
                } // predt 1, real 0
                else {
                    set_union++;
                }
            } else {
                // predt 0, real 1
                if (R == 1) {
                    set_union++;
                } // predt 0, real 0
                else {
                }
            }
        }
        double accuracy = 0.0;
        if (set_union > 0) //avoid NaN
        {
            accuracy = ((double) set_inter / (double) set_union);
        }
        double ErrEstim = this.ADError[i].getEstimation();
        if (this.ADError[i].setInput(1.0 - accuracy)) {
            if (this.ADError[i].getEstimation() > ErrEstim) {
                Change = true;
            }
        }
    }
    if (Change) {
        System.err.println("change!");
        double max = 0.0;
        int imax = -1;
        for (int i = 0; i < this.ensemble.length; i++) {
            if (max < this.ADError[i].getEstimation()) {
                max = this.ADError[i].getEstimation();
                imax = i;
            }
        }
        if (imax != -1) {

            this.ensemble[imax] = null;
            this.ensemble[imax] = (Classifier) getPreparedClassOption(this.baseLearnerOption);
            this.ensemble[imax].setModelContext(this.modelContext);
            this.ensemble[imax].trainOnInstance(inst);
            this.ADError[imax] = new ADWIN();
        }
    }
}

From source file:moa.classifiers.NaiveBayesMultinomial.java

License:Open Source License

/**
 * Trains the classifier with the given instance.
 *
 * @param inst    the new training instance to include in the model
 */
@Override
public void trainOnInstanceImpl(Instance inst) {
    if (this.reset == true) {
        this.m_numClasses = inst.numClasses();
        double laplace = this.laplaceCorrectionOption.getValue();
        int numAttributes = inst.numAttributes();

        m_probOfClass = new double[m_numClasses];
        Arrays.fill(m_probOfClass, laplace);

        m_classTotals = new double[m_numClasses];
        Arrays.fill(m_classTotals, laplace * numAttributes);

        m_wordTotalForClass = new double[numAttributes][m_numClasses];
        for (double[] wordTotal : m_wordTotalForClass) {
            Arrays.fill(wordTotal, laplace);
        }
        this.reset = false;
    }
    // Update classifier
    int classIndex = inst.classIndex();
    int classValue = (int) inst.value(classIndex);

    double w = inst.weight();
    m_probOfClass[classValue] += w;

    m_classTotals[classValue] += w * totalSize(inst);
    double total = m_classTotals[classValue];

    for (int i = 0; i < inst.numValues(); i++) {
        int index = inst.index(i);
        if (index != classIndex && !inst.isMissing(i)) {
            m_wordTotalForClass[index][classValue] += w * inst.valueSparse(i);
        }
    }
}

From source file:moa.classifiers.novelClass.AbstractNovelClassClassifier.java

License:Apache License

@Override
public void trainOnInstance(Instance inst) {
    // By convention, instances with weight == 0 are for unsupervised training
    this.trainingWeightSeenByModel += inst.weight();
    trainOnInstanceImpl(inst);
}
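
The comment above documents the convention that a weight of zero marks an instance for unsupervised training, so only labeled (non-zero-weight) instances add to trainingWeightSeenByModel. A hypothetical caller-side sketch of that convention (the helper class, method name, and Classifier parameter are illustrative, not taken from the original source):

import moa.classifiers.Classifier;
import weka.core.Instance;

// Hypothetical helper: pass an instance to a MOA classifier for unsupervised
// training by zeroing the weight on a copy first.
final class UnsupervisedTrainingSketch {
    static void trainUnsupervised(Classifier classifier, Instance inst) {
        Instance unlabeled = (Instance) inst.copy();
        unlabeled.setWeight(0.0); // weight == 0 -> treated as unsupervised
        classifier.trainOnInstance(unlabeled);
    }
}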

From source file:moa.classifiers.novelClass.SluiceBox.SluiceBoxClassifier.java

License:Apache License

@Override
public void trainOnInstanceImpl(Instance inst) {
    Instance pseudoPoint = augmentInstance(inst);

    if (inst.weight() < weka.core.Utils.SMALL) {
        double[] votes = roughClassifier.getVotesForInstance(inst);
        pseudoPoint.setClassValue(weka.core.Utils.maxIndex(votes));
        pseudoPoint.setWeight(hypothesisWeightOption.getValue());
    } else {
        roughClassifier.trainOnInstance(inst);
    }

    if (this.warmupCache != null) {
        if (this.warmupCache.size() < this.warmupLengthOption.getValue()) {
            this.warmupCache.add(pseudoPoint);
        } else {
            this.dynamicStreamClustering.initialize(warmupCache);
            this.warmupCache.clear();
            this.warmupCache = null;
        }
    } else {
        this.dynamicStreamClustering.trainOnInstance(pseudoPoint);
    }
}

From source file:moa.classifiers.OzaBagAdwin.java

License:Open Source License

@Override
public void trainOnInstanceImpl(Instance inst) {
    boolean Change = false;
    for (int i = 0; i < this.ensemble.length; i++) {
        int k = MiscUtils.poisson(1.0, this.classifierRandom);
        if (k > 0) {
            Instance weightedInst = (Instance) inst.copy();
            weightedInst.setWeight(inst.weight() * k);
            this.ensemble[i].trainOnInstance(weightedInst);
        }
        boolean correctlyClassifies = this.ensemble[i].correctlyClassifies(inst);
        double ErrEstim = this.ADError[i].getEstimation();
        if (this.ADError[i].setInput(correctlyClassifies ? 0 : 1))
            if (this.ADError[i].getEstimation() > ErrEstim)
                Change = true;
    }
    if (Change) {
        double max = 0.0;
        int imax = -1;
        for (int i = 0; i < this.ensemble.length; i++) {
            if (max < this.ADError[i].getEstimation()) {
                max = this.ADError[i].getEstimation();
                imax = i;
            }
        }
        if (imax != -1) {
            this.ensemble[imax].resetLearning();
            //this.ensemble[imax].trainOnInstance(inst);
            this.ADError[imax] = new ADWIN();
        }
    }
}

From source file:moa.classifiers.rules.GeRules.java

License:Open Source License

@Override
public void trainOnInstanceImpl(Instance inst) {
    // TODO Auto-generated method stub

    // add weight of respective class to classification distribution
    observedClassDistribution.addToValue((int) inst.classValue(), inst.weight());

    // only add the instance to the sliding-window buffer if no rule covers it
    if (RulesCoveredInstance(inst).isEmpty()) {
        slidingWindowsBuffer.add(inst);
    }
    // if one or more rules cover the instance, update the statistics in each rule
    else {

        // for each rule matched the instance,
        // update class distribution statistic
        for (Rule rule : RulesCoveredInstance(inst)) {
            rule.updateClassDistribution(inst);

            rule.noOfCovered++;

            // also update the counter if the rule correctly covers the instance's class
            if (inst.classValue() == rule.classification) {
                rule.noOfCorrectlyCovered++;
            } else { // validate the current rule
                if (rule.ruleShouldBeRemoved()) {
                    rulesList.remove(rule);
                }
            }
        }

    }

    // check if the sliding windows buffer is filled to the criteria
    if (slidingWindowsBuffer.size() == slidingWindowsSizeOption.getValue()) {

        // learn rules with the classifier
        ArrayList<Rule> learntRules = prismClassifier.learnRules(slidingWindowsBuffer);

        if (learntRules != null) {
            rulesList.addAll(learntRules);
        }

        // clear sliding window buffer to take more instances
        slidingWindowsBuffer.clear();
    }
}

From source file:moa.classifiers.rules.RuleClassifier.java

License:Apache License

@Override
public void trainOnInstanceImpl(Instance inst) {
    int countRuleFiredTrue = 0;
    boolean ruleFired = false;
    this.instance = inst;
    this.numAttributes = instance.numAttributes() - 1;
    this.numClass = instance.numClasses();
    this.numInstance = numInstance + 1;
    int conta1 = 0;
    for (int j = 0; j < ruleSet.size(); j++) {
        if (this.ruleSet.get(j).ruleEvaluate(inst) == true) {
            countRuleFiredTrue = countRuleFiredTrue + 1;

            double anomaly = 0.0;
            if (this.Supervised.isSet()) {
                anomaly = computeAnomalySupervised(this.ruleSet.get(j), j, inst); // compute anomaly (Supervised method)
            } else if (this.Unsupervised.isSet()) {
                anomaly = computeAnomalyUnsupervised(this.ruleSet.get(j), j, inst); // compute anomaly (Unsupervised method)
            }

            if (anomaly >= this.anomalyProbabilityThresholdOption.getValue()) {
                conta1 = conta1 + 1;
            }
            //   System.out.print(numInstance+";"+anomaly+"\n");
            try {

                File dir = new File("SeaAnomaliesUnsupervised.txt");
                FileWriter fileWriter = new FileWriter(dir, true);
                PrintWriter printWriter = new PrintWriter(fileWriter);
                printWriter.println(numInstance + ";" + anomaly);
                printWriter.flush();
                printWriter.close();

            } catch (IOException e) {
                e.printStackTrace();
            }
            if ((this.ruleSet.get(j).instancesSeen <= this.anomalyNumInstThresholdOption.getValue())
                    || (anomaly < this.anomalyProbabilityThresholdOption.getValue()
                            && this.anomalyDetectionOption.isSet())
                    || !this.anomalyDetectionOption.isSet()) {
                this.ruleSet.get(j).obserClassDistrib.addToValue((int) inst.classValue(), inst.weight());
                for (int i = 0; i < inst.numAttributes() - 1; i++) {
                    int instAttIndex = modelAttIndexToInstanceAttIndex(i, inst);

                    if (!inst.isMissing(instAttIndex)) {
                        AttributeClassObserver obs = this.ruleSet.get(j).observers.get(i); // Nominal and binary tree.
                        AttributeClassObserver obsGauss = this.ruleSet.get(j).observersGauss.get(i); // Gaussian.
                        if (obs == null) {
                            obs = inst.attribute(instAttIndex).isNominal() ? newNominalClassObserver()
                                    : newNumericClassObserver();
                            this.ruleSet.get(j).observers.set(i, obs);
                        }
                        if (obsGauss == null) {
                            obsGauss = inst.attribute(instAttIndex).isNumeric() ? newNumericClassObserver2()
                                    : null;
                            this.ruleSet.get(j).observersGauss.set(i, obsGauss);
                        }
                        obs.observeAttributeClass(inst.value(instAttIndex), (int) inst.classValue(),
                                inst.weight());
                        if (inst.attribute(instAttIndex).isNumeric()) {
                            obsGauss.observeAttributeClass(inst.value(instAttIndex), (int) inst.classValue(),
                                    inst.weight());
                        }
                    }
                }
                expandeRule(this.ruleSet.get(j), inst, j); // This function expands the rule
            }
            if (this.orderedRulesOption.isSet()) { // Ordered rules
                break;
            }
        }
    }
    if (countRuleFiredTrue > 0) {
        ruleFired = true;
    } else {
        ruleFired = false;
    }
    if (ruleFired == false) { // If none of the rules cover the example, update the sufficient statistics of the default rule
        this.observedClassDistribution.addToValue((int) inst.classValue(), inst.weight());
        for (int i = 0; i < inst.numAttributes() - 1; i++) {
            int instAttIndex = modelAttIndexToInstanceAttIndex(i, inst);
            if (!inst.isMissing(instAttIndex)) {
                AttributeClassObserver obs = this.attributeObservers.get(i);
                AttributeClassObserver obsGauss = this.attributeObserversGauss.get(i);
                if (obs == null) {
                    obs = inst.attribute(instAttIndex).isNominal() ? newNominalClassObserver()
                            : newNumericClassObserver();
                    this.attributeObservers.set(i, obs);
                }
                if (obsGauss == null) {
                    obsGauss = inst.attribute(instAttIndex).isNumeric() ? newNumericClassObserver2() : null;
                    this.attributeObserversGauss.set(i, obsGauss);
                }
                obs.observeAttributeClass(inst.value(instAttIndex), (int) inst.classValue(), inst.weight());
                if (inst.attribute(instAttIndex).isNumeric()) {
                    obsGauss.observeAttributeClass(inst.value(instAttIndex), (int) inst.classValue(),
                            inst.weight());
                }
            }
        }
        createRule(inst); //This function creates a rule
    }
}