Example usage for weka.core Instance attribute

List of usage examples for weka.core Instance attribute

Introduction

On this page you can find example usage for the weka.core Instance attribute method.

Prototype

public Attribute attribute(int index);

Source Link

Document

Returns the attribute with the given index.
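
A minimal, self-contained sketch of the method (assuming Weka is on the classpath and a hypothetical ARFF file at data/iris.arff; adjust the path to your own data):

import weka.core.Attribute;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class AttributeIndexExample {

    public static void main(String[] args) throws Exception {
        // Hypothetical dataset; replace "data/iris.arff" with a real ARFF file.
        Instances data = DataSource.read("data/iris.arff");
        data.setClassIndex(data.numAttributes() - 1);

        Instance inst = data.instance(0);
        // attribute(int) returns the Attribute at the given index, which can be
        // queried for its name and type before reading the instance's value.
        for (int i = 0; i < inst.numAttributes(); i++) {
            Attribute att = inst.attribute(i);
            System.out.println(att.name()
                    + " nominal=" + att.isNominal()
                    + " numeric=" + att.isNumeric()
                    + " value=" + inst.value(i));
        }
    }
}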

Usage

From source file:moa.classifiers.bayes.NaiveBayes.java

License:Open Source License

public static double[] doNaiveBayesPredictionLog(Instance inst, DoubleVector observedClassDistribution,
        AutoExpandVector<AttributeClassObserver> observers,
        AutoExpandVector<AttributeClassObserver> observers2) {
    AttributeClassObserver obs;
    double[] votes = new double[observedClassDistribution.numValues()];
    double observedClassSum = observedClassDistribution.sumOfValues();
    for (int classIndex = 0; classIndex < votes.length; classIndex++) {
        votes[classIndex] = Math.log10(observedClassDistribution.getValue(classIndex) / observedClassSum);
        for (int attIndex = 0; attIndex < inst.numAttributes() - 1; attIndex++) {
            int instAttIndex = modelAttIndexToInstanceAttIndex(attIndex, inst);
            if (inst.attribute(instAttIndex).isNominal()) {
                obs = observers.get(attIndex);
            } else {
                obs = observers2.get(attIndex);
            }

            if ((obs != null) && !inst.isMissing(instAttIndex)) {
                votes[classIndex] += Math
                        .log10(obs.probabilityOfAttributeValueGivenClass(inst.value(instAttIndex), classIndex));

            }
        }
    }
    return votes;

}
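
Here attribute(instAttIndex).isNominal() decides whether the nominal or the numeric attribute observer supplies the conditional probability that is added to each class's log-score.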

From source file:moa.classifiers.DecisionStump.java

License:Open Source License

@Override
public void trainOnInstanceImpl(Instance inst) {
    this.observedClassDistribution.addToValue((int) inst.classValue(), inst.weight());
    for (int i = 0; i < inst.numAttributes() - 1; i++) {
        int instAttIndex = modelAttIndexToInstanceAttIndex(i, inst);
        AttributeClassObserver obs = this.attributeObservers.get(i);
        if (obs == null) {
            obs = inst.attribute(instAttIndex).isNominal() ? newNominalClassObserver()
                    : newNumericClassObserver();
            this.attributeObservers.set(i, obs);
        }
        obs.observeAttributeClass(inst.value(instAttIndex), (int) inst.classValue(), inst.weight());
    }
    if (this.trainingWeightSeenByModel - this.weightSeenAtLastSplit >= this.gracePeriodOption.getValue()) {
        this.bestSplit = findBestSplit((SplitCriterion) getPreparedClassOption(this.splitCriterionOption));
        this.weightSeenAtLastSplit = this.trainingWeightSeenByModel;
    }
}
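
The attribute type returned by attribute(instAttIndex) determines whether a nominal or a numeric class observer is created for each attribute before the observed value and class are recorded.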

From source file:moa.classifiers.meta.RandomRules.java

License:Open Source License

private Instance transformInstance(Instance inst, int classifierIndex) {
    if (this.listAttributes == null) {
        this.numAttributes = (int) (this.numAttributesPercentageOption.getValue() * inst.numAttributes()
                / 100.0);
        this.listAttributes = new int[this.numAttributes][this.ensemble.length];
        this.dataset = new InstancesHeader[this.ensemble.length];
        for (int ensembleIndex = 0; ensembleIndex < this.ensemble.length; ensembleIndex++) {
            for (int attributeIndex = 0; attributeIndex < this.numAttributes; attributeIndex++) {
                boolean isUnique = false;
                while (isUnique == false) {
                    this.listAttributes[attributeIndex][ensembleIndex] = this.classifierRandom
                            .nextInt(inst.numAttributes() - 1);
                    isUnique = true;
                    for (int k = 0; k < attributeIndex; k++) {
                        if (this.listAttributes[attributeIndex][ensembleIndex] == this.listAttributes[k][ensembleIndex]) {
                            isUnique = false;
                            break;
                        }
                    }
                }
                //this.listAttributes[attributeIndex][ensembleIndex] = attributeIndex;
            }
            //Create Header
            FastVector attributes = new FastVector();
            for (int attributeIndex = 0; attributeIndex < this.numAttributes; attributeIndex++) {
                attributes.addElement(inst.attribute(this.listAttributes[attributeIndex][ensembleIndex]));
                System.out.print(this.listAttributes[attributeIndex][ensembleIndex]);
            }
            System.out.println("Number of attributes: " + this.numAttributes + "," + inst.numAttributes());
            attributes.addElement(inst.classAttribute());
            this.dataset[ensembleIndex] = new InstancesHeader(
                    new Instances(getCLICreationString(InstanceStream.class), attributes, 0));
            this.dataset[ensembleIndex].setClassIndex(this.numAttributes);
            this.ensemble[ensembleIndex].setModelContext(this.dataset[ensembleIndex]);
        }
    }
    //Instance instance = new DenseInstance(this.numAttributes+1);
    //instance.setDataset(dataset[classifierIndex]);
    double[] attVals = new double[this.numAttributes + 1];
    for (int attributeIndex = 0; attributeIndex < this.numAttributes; attributeIndex++) {
        //instance.setValue(attributeIndex, inst.value(this.listAttributes[attributeIndex][classifierIndex]));
        attVals[attributeIndex] = inst.value(this.listAttributes[attributeIndex][classifierIndex]);
    }
    Instance instance = new DenseInstance(1.0, attVals);
    instance.setDataset(dataset[classifierIndex]);
    instance.setClassValue(inst.classValue());
    // System.out.println(inst.toString());
    // System.out.println(instance.toString());
    // System.out.println("============");
    return instance;
}
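
attribute(...) is used to copy a random subset of the original attributes, plus the class attribute, into the header of the reduced dataset that each ensemble member trains on.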

From source file:moa.classifiers.rules.AMRules.java

License:Apache License

@Override
public void trainOnInstanceImpl(Instance inst) {
    this.numInstance = this.numInstance + 1;
    int countRuleFiredTrue = 0;
    boolean ruleFired = false;
    this.instance = inst;
    for (int j = 0; j < this.ruleSet.size(); j++) {
        if (this.ruleSet.get(j).ruleEvaluate(inst) == true) {
            countRuleFiredTrue = countRuleFiredTrue + 1;
            this.saveBestValGlobalSDR = new ArrayList<ArrayList<Double>>();
            this.saveBestGlobalSDR = new DoubleVector();
            this.saveTheBest = new ArrayList<Double>();
            double anomaly = computeAnomaly(this.ruleSet.get(j), j, inst); // compute anomaly
            if ((this.ruleSet.get(j).instancesSeen <= this.anomalyNumInstThresholdOption.getValue())
                    || (anomaly < this.anomalyProbabilityThresholdOption.getValue()
                            && this.anomalyDetectionOption.isSet())
                    || !this.anomalyDetectionOption.isSet()) {
                for (int i = 0; i < inst.numAttributes() - 1; i++) {
                    int instAttIndex = modelAttIndexToInstanceAttIndex(i, inst);
                    AttributeClassObserver obs = this.ruleSet.get(j).observers.get(i);
                    if (obs == null) {
                        obs = inst.attribute(instAttIndex).isNominal() ? newNominalClassObserver()
                                : newNumericClassObserverRegression();
                        this.ruleSet.get(j).observers.set(i, obs);
                    }
                    obs.observeAttributeTarget(inst.value(instAttIndex), inst.classValue());
                }

                double RuleError = computeRuleError(inst, this.ruleSet.get(j), j); // compute rule error
                boolean ph = PageHinckleyTest(RuleError, this.pageHinckleyThresholdOption.getValue(),
                        this.ruleSet.get(j));
                if (ph == true) { //Page Hinckley test.
                    //Pruning rule set.
                    //   System.out.print("Pruning rule set \n");
                    this.ruleSet.remove(j);
                    this.targetValue.remove(j);
                    this.numTargetValue.remove(j);
                    this.ruleTargetMean.remove(j);
                } else {
                    this.expandeRule(this.ruleSet.get(j), j, inst); //Expand the rule.
                }
            }
            if (this.orderedRulesOption.isSet()) { // Ordered rules
                break;
            }
        }
    }
    if (countRuleFiredTrue > 0) {
        ruleFired = true;
    } else {
        ruleFired = false;
    }
    if (ruleFired == false) { //Default rule
        this.saveBestValGlobalSDR = new ArrayList<ArrayList<Double>>();
        this.saveBestGlobalSDR = new DoubleVector();
        this.saveTheBest = new ArrayList<Double>();
        double anomalies = computeAnomalyDefaultRules(inst);
        if ((instancesSeenDefault <= this.anomalyNumInstThresholdOption.getValue())
                || (anomalies < this.anomalyProbabilityThresholdOption.getValue()
                        && this.anomalyDetectionOption.isSet())
                || !this.anomalyDetectionOption.isSet()) {
            for (int i = 0; i < inst.numAttributes() - 1; i++) {
                int instAttIndex = modelAttIndexToInstanceAttIndex(i, inst);
                AttributeClassObserver obs = this.attributeObservers.get(i);
                if (obs == null) {
                    obs = inst.attribute(instAttIndex).isNominal() ? newNominalClassObserver()
                            : newNumericClassObserverRegression();
                    this.attributeObservers.set(i, obs);
                }
                obs.observeAttributeTarget(inst.value(instAttIndex), inst.classValue());
            }
            initialyPerceptron(inst); // Initialize Perceptron if necessary.  
            this.updateAttWeight(inst, this.weightAttributeDefault, this.squaredActualClassStatisticsDefault,
                    this.actualClassStatisticsDefault, this.squaredAttributeStatisticsDefault,
                    this.attributeStatisticsDefault, this.instancesSeenDefault, resetDefault); // Update weights. Ensure actual class and the predicted class are normalised first.
            this.updatedefaultRuleStatistics(inst); //Update the default rule statistics.
            this.createRule(inst);//This function creates a rule
        }
    }
}
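
As in the DecisionStump example, attribute(instAttIndex).isNominal() selects which kind of observer to create, both inside the rules that fire on the instance and for the default rule.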

From source file:moa.classifiers.rules.AMRules.java

License:Apache License

public void theBestAttributes(Instance instance, AutoExpandVector<AttributeClassObserver> observersParameter) {
    for (int z = 0; z < instance.numAttributes() - 1; z++) {
        int instAttIndex = modelAttIndexToInstanceAttIndex(z, instance);
        if (instance.attribute(instAttIndex).isNumeric()) {
            this.root = ((BinaryTreeNumericAttributeClassObserverRegression) observersParameter.get(z)).root1;
            this.sumTotalLeft = 0.0;
            this.sumTotalRight = this.root.lessThan[0] + this.root.greaterThan[0];
            this.sumSqTotalLeft = 0.0;
            this.sumSqTotalRight = this.root.lessThan[1] + this.root.greaterThan[1];
            this.rightTotal = this.total = this.root.lessThan[2] + this.root.greaterThan[2];
            this.maxSDR = 0.0;
            this.symbol = 0.0;
            this.sumTotal = 0.0;
            this.numSomaTotal = 0.0;
            findBestSplit(this.root); // The best value (SDR) of a numeric attribute.
            ArrayList<Double> saveTheBestAtt = new ArrayList<Double>(); // Contains the best attribute.
            saveTheBestAtt.add(this.splitpoint);
            saveTheBestAtt.add(this.maxSDR);
            saveTheBestAtt.add(this.symbol);
            saveTheBestAtt.add(this.sumTotal);
            saveTheBestAtt.add(this.numSomaTotal);
            this.saveBestValGlobalSDR.add(saveTheBestAtt);
            this.saveBestGlobalSDR.setValue(z, this.maxSDR);
        }
    }
}
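
attribute(instAttIndex).isNumeric() restricts the search for the best split point (maximum SDR) to numeric attributes.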

From source file:moa.classifiers.rules.AMRules.java

License:Apache License

public double computeAnomaly(Rule rl, int ruleIndex, Instance inst) {
    ArrayList<Integer> caseAnomalyTemp = new ArrayList<Integer>();
    ArrayList<ArrayList<Double>> AttribAnomalyStatisticTemp2 = new ArrayList<ArrayList<Double>>();
    double D = 0.0;
    double N = 0.0;
    if (rl.instancesSeen > this.anomalyNumInstThresholdOption.getValue()
            && this.anomalyDetectionOption.isSet()) {
        for (int x = 0; x < inst.numAttributes() - 1; x++) {
            ArrayList<Double> AttribAnomalyStatisticTemp = new ArrayList<Double>();
            if (inst.attribute(x).isNumeric()) {
                double mean = computeMean(rl.attributeStatistics.getValue(x), rl.instancesSeen);
                double sd = computeSD(rl.squaredAttributeStatistics.getValue(x),
                        rl.attributeStatistics.getValue(x), rl.instancesSeen);
                double probability = computeProbability(mean, sd, inst.value(x));
                if (probability != 0.0) {
                    D = D + Math.log(probability);
                    if (probability < this.probabilityThresholdOption.getValue()) { //0.10
                        N = N + Math.log(probability);
                        AttribAnomalyStatisticTemp.add((double) x);
                        AttribAnomalyStatisticTemp.add(inst.value(x));
                        AttribAnomalyStatisticTemp.add(mean);
                        AttribAnomalyStatisticTemp.add(sd);
                        AttribAnomalyStatisticTemp.add(probability);
                        AttribAnomalyStatisticTemp2.add(AttribAnomalyStatisticTemp);
                    }
                }
            }
        }
    }
    double anomaly = Math.abs(N / D);
    if (anomaly >= this.anomalyProbabilityThresholdOption.getValue()) {
        caseAnomalyTemp.add(this.numInstance);
        double val = anomaly * 100;
        caseAnomalyTemp.add((int) val);
        this.caseAnomaly.add(caseAnomalyTemp);
        this.ruleSetAnomalies.add(this.ruleSet.get(ruleIndex));
        this.ruleTargetMeanAnomalies.add(this.ruleTargetMean.get(ruleIndex));
        this.ruleAnomaliesIndex.add(ruleIndex + 1);
        this.ruleAttribAnomalyStatistics.add(AttribAnomalyStatisticTemp2);
    }
    return anomaly;
}
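
Only numeric attributes, identified via attribute(x).isNumeric(), contribute their mean, standard deviation and probability to the anomaly score.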

From source file:moa.classifiers.rules.AMRules.java

License:Apache License

public double computeAnomalyDefaultRules(Instance inst) {
    double D = 0.0;
    double N = 0.0;
    ArrayList<Integer> caseAnomalyTemp = new ArrayList<Integer>();
    ArrayList<ArrayList<Double>> AttribAnomalyStatisticTemp2 = new ArrayList<ArrayList<Double>>();
    if (this.instancesSeenDefault > this.anomalyNumInstThresholdOption.getValue()
            && this.anomalyDetectionOption.isSet()) {
        for (int x = 0; x < inst.numAttributes() - 1; x++) {
            ArrayList<Double> AttribAnomalyStatisticTemp = new ArrayList<Double>();
            if (inst.attribute(x).isNumeric()) {
                double mean = computeMean(this.attributeStatisticsDefault.getValue(x),
                        this.instancesSeenDefault);
                double sd = computeSD(this.squaredAttributeStatisticsDefault.getValue(x),
                        this.attributeStatisticsDefault.getValue(x), this.instancesSeenDefault);
                double probability = computeProbability(mean, sd, inst.value(x));
                if (probability != 0.0) {
                    D = D + Math.log(probability);
                    if (probability < this.probabilityThresholdOption.getValue()) { //0.10
                        N = N + Math.log(probability);
                        AttribAnomalyStatisticTemp.add((double) x);
                        AttribAnomalyStatisticTemp.add(inst.value(x));
                        AttribAnomalyStatisticTemp.add(mean);
                        AttribAnomalyStatisticTemp.add(sd);
                        AttribAnomalyStatisticTemp.add(probability);
                        AttribAnomalyStatisticTemp2.add(AttribAnomalyStatisticTemp);
                    }
                }
            }
        }
    }
    double anomalies = Math.abs(N / D);
    if (anomalies >= this.anomalyProbabilityThresholdOption.getValue()) {
        caseAnomalyTemp.add(this.numInstance);
        double val = anomalies * 100;
        caseAnomalyTemp.add((int) val);
        this.caseAnomaly.add(caseAnomalyTemp);
        Rule rule = new Rule();
        this.ruleSetAnomalies.add(rule);
        this.ruleTargetMeanAnomalies.add(observersDistrib(this.instance, this.attributeObservers));
        this.ruleAnomaliesIndex.add(-1);
        this.ruleAttribAnomalyStatistics.add(AttribAnomalyStatisticTemp2);
    }
    return anomalies;
}
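
The default-rule variant performs the same numeric-attribute check before accumulating the log-probabilities that make up the anomaly score.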

From source file:moa.classifiers.rules.AMRules.java

License:Apache License

protected int observersNumberInstance(Instance inst, AutoExpandVector<AttributeClassObserver> observerss) {
    int numberInstance = 0;
    for (int z = 0; z < inst.numAttributes() - 1; z++) {
        numberInstance = 0;
        int instAttIndex = modelAttIndexToInstanceAttIndex(z, inst);
        if (inst.attribute(instAttIndex).isNumeric()) {
            Node rootNode = ((BinaryTreeNumericAttributeClassObserverRegression) observerss.get(z)).root1;
            if (rootNode != null) {
                numberInstance = (int) (rootNode.lessThan[2] + rootNode.greaterThan[2]);
                break;
            }
        }
    }
    return numberInstance;
}
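
attribute(instAttIndex).isNumeric() is used to locate the first numeric attribute whose observer tree reports how many instances have been seen.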

From source file:moa.classifiers.rules.AMRules.java

License:Apache License

public double prediction(Instance inst, double[] weightAtt, double squaredActualClassStatistics,
        double actualClassStatistics, int instancesSeen, boolean reset) {
    double prediction = 0;
    if (reset == false) {
        for (int j = 0; j < inst.numAttributes() - 1; j++) {
            if (inst.attribute(j).isNumeric()) {
                prediction += weightAtt[j] * inst.value(j);
            }
        }
        prediction += weightAtt[inst.numAttributes() - 1];
    }
    double sdPredictedClass = computeSD(squaredActualClassStatistics, actualClassStatistics, instancesSeen);
    double outputDesnorm = 0;
    if (sdPredictedClass > 0.0000001) {
        outputDesnorm = 3 * prediction * sdPredictedClass + (actualClassStatistics / instancesSeen);
    }
    return outputDesnorm;
}
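
The linear prediction sums weights only for numeric attributes, selected with attribute(j).isNumeric(), before the result is denormalised.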

From source file:moa.classifiers.rules.AMRules.java

License:Apache License

public double updateAttWeight(Instance inst, double[] weightAtt, double squaredActualClassStatistics,
        double actualClassStatistics, DoubleVector squaredAttributeStatistics, DoubleVector attributeStatistics,
        int instancesSeen, boolean reset) {
    double learningRatio = 0.0;
    if (this.learningRatio_Decay_or_Const_Option.isSet()) { //Decaying learning rate option
        learningRatio = this.learningRatioOption.getValue();
    } else {
        learningRatio = initLearnRate / (1 + instancesSeen * this.learnRateDecay);
    }

    double predict = 0.0;
    if (instancesSeen > 30) {
        predict = this.prediction(inst, weightAtt, squaredActualClassStatistics, actualClassStatistics,
                instancesSeen, reset);
        double sdClass = computeSD(squaredActualClassStatistics, actualClassStatistics, instancesSeen);
        double actualClass = 0.0;
        double predictedClass = 0.0;
        if (sdClass > 0.0000001) {
            actualClass = (inst.classValue() - (actualClassStatistics / instancesSeen)) / (3 * sdClass);
            predictedClass = (predict - (actualClassStatistics / instancesSeen)) / (3 * sdClass);
        }
        double delta = actualClass - predictedClass;
        for (int x = 0; x < inst.numAttributes() - 1; x++) {
            if (inst.attribute(x).isNumeric()) {
                // Update weights. Ensure attribute values are normalised first.
                double sd = Math.sqrt((squaredAttributeStatistics.getValue(x)
                        - ((attributeStatistics.getValue(x) * attributeStatistics.getValue(x)) / instancesSeen))
                        / instancesSeen);
                double instanceValue = 0;
                instanceValue = (inst.value(x) - (attributeStatistics.getValue(x) / instancesSeen));
                if (sd > 0.0000001) {
                    instanceValue = instanceValue / (3 * sd);
                }
                if (sd == 0.0) {
                    weightAtt[x] = 0.0;
                } else {
                    weightAtt[x] += learningRatio * delta * instanceValue;
                }
            }
        }
        weightAtt[inst.numAttributes() - 1] += learningRatio * delta;
    }
    return predict;
}
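
Weights are updated only for numeric attributes, identified with attribute(x).isNumeric(), after the attribute value has been normalised.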