Example usage for weka.core Instances numAttributes

List of usage examples for weka.core Instances numAttributes

Introduction

In this page you can find the example usage for weka.core Instances numAttributes.

Prototype


public int numAttributes()

Source Link

Document

Returns the number of attributes.

Usage

From source file:ANN.MultilayerPerceptron.java

public MultilayerPerceptron(Instances i, double rate, int itter, int numHidden) {
    learningRate = rate;//from w ww  .j a va 2 s .  c  o m
    listHidden = new ArrayList<>();

    for (int num = 0; num < numHidden + 1; num++) {
        listHidden.add(new Node(i.numAttributes()));
    }

    listOutput = new ArrayList<>();
    for (int num = 0; num < i.numClasses(); num++) {
        listOutput.add(new Node(listHidden.size()));
    }
    itteration = itter;
    listDoubleinstance = new double[i.numInstances()];
    for (int numIns = 0; numIns < i.numInstances(); numIns++) {
        listDoubleinstance[numIns] = i.instance(numIns).toDoubleArray()[i.classIndex()];
    }
}

From source file:ANN.MultilayerPerceptron.java

@Override
public void buildClassifier(Instances i) {
    // Trains the network with repeated full passes over the training set until
    // the computed error drops to 0.3 or below. NOTE(review): the 'itteration'
    // field set in the constructor is never consulted here — confirm intent.
    int cnt = 0; // epochs since the last progress printout
    while (true) { // repeat epochs until converged
        for (int idxInstance = 0; idxInstance < i.numInstances(); idxInstance++) {
            // Input vector: bias 1.0 followed by all non-class attribute values.
            ArrayList<Double> listInput = new ArrayList<>();
            listInput.add(1.0);
            for (int idx = 0; idx < i.numAttributes() - 1; idx++) {
                listInput.add(i.get(idxInstance).value(idx));
            }

            // Forward pass, hidden layer: index 0 is the fixed bias output (1.0),
            // so the loop starts at 1.
            ArrayList<Double> hiddenOutput = new ArrayList<>();
            hiddenOutput.add(1.0);
            for (int idxOutput = 1; idxOutput < listHidden.size(); idxOutput++) {
                output(listHidden, listInput, idxOutput);
                hiddenOutput.add(listHidden.get(idxOutput).getValue());
            }
            // Forward pass, output layer.
            for (int idxOutput = 0; idxOutput < listOutput.size(); idxOutput++) {
                output(listOutput, hiddenOutput, idxOutput);
            }

            // Back-propagate the error for this instance...
            calculateError(idxInstance);
            // ...then update the weights.
            updateWeight(listInput);
        }
        // NOTE(review): getError() here reflects only the LAST processed instance,
        // so this double loop effectively multiplies that final error by
        // numInstances() rather than summing per-instance errors — confirm intent.
        double error = 0;
        for (int idxErr = 0; idxErr < i.numInstances(); idxErr++) {
            for (int idx = 0; idx < listOutput.size(); idx++) {
                error += Math.pow(listOutput.get(idx).getError(), 2) / 2;
            }
        }
        // Print progress roughly every 1000 epochs.
        if (cnt == 1000) {
            System.out.println(error);
            System.out.println();
            cnt = 0;
        }
        cnt++;
        if (error <= 0.3)
            break;
    }
}

From source file:ANN.MultilayerPerceptron.java

/**
 * Demo entry point: trains a MultilayerPerceptron on a local iris data set and
 * prints a resubstitution evaluation (trained and evaluated on the same data).
 */
public static void main(String[] args) throws Exception {
    // Load the data set from a fixed local path.
    ConverterUtils.DataSource source = new ConverterUtils.DataSource(
            ("D:\\Program Files\\Weka-3-8\\data\\iris.arff"));
    Instances train = source.getDataSet();

    // Normalize attribute values before training.
    Normalize normalizer = new Normalize();
    normalizer.setInputFormat(train);
    train = Filter.useFilter(train, normalizer);

    // The last attribute is the class.
    train.setClassIndex(train.numAttributes() - 1);
    System.out.println();

    // Train and evaluate on the training data itself.
    MultilayerPerceptron classifier = new MultilayerPerceptron(train, 0.1, 5000, 14);
    classifier.buildClassifier(train);
    Evaluation evaluation = new Evaluation(train);
    evaluation.evaluateModel(classifier, train);
    System.out.println(evaluation.toSummaryString());
    System.out.print(evaluation.toMatrixString());
}

From source file:ANN.MultiplePerceptron.java

public MultiplePerceptron(Instances i, int numNode, double rate) {
    listNodeHidden = new ArrayList<>();
    for (int num = 0; num < numNode + 1; num++) {
        listNodeHidden.add(new Node(i.numAttributes()));
    }//from  w w  w.j  av  a 2  s.  c o m

    listNodeOutput = new ArrayList<>();
    for (int num = 0; num < i.numClasses(); num++) {
        listNodeOutput.add(new Node(listNodeHidden.size()));
    }

    listDoubleinstance = new double[i.numInstances()];
    for (int numIns = 0; numIns < i.numInstances(); numIns++) {
        listDoubleinstance[numIns] = i.instance(numIns).toDoubleArray()[i.classIndex()];
    }
    learningRate = rate;
}

From source file:ANN.MultiplePerceptron.java

@Override
public void buildClassifier(Instances i) {
    // Trains the network for a fixed 5000 epochs (learning rate comes from the
    // constructor), then dumps node values, errors and weights for inspection.
    for (int itt = 0; itt < 5000; itt++) {
        for (int idxInstance = 0; idxInstance < i.numInstances(); idxInstance++) {
            // Input vector: bias 1.0 followed by all non-class attribute values.
            ArrayList<Double> listInput = new ArrayList<>();
            listInput.add(1.0);
            for (int idxInstanceVal = 0; idxInstanceVal < i.numAttributes() - 1; idxInstanceVal++) {
                listInput.add(i.get(idxInstance).value(idxInstanceVal));
            }

            ArrayList<Double> listOutputHidden = new ArrayList<>();
            listOutputHidden.add(1.0); // fixed bias output of the hidden layer

            // Forward pass, hidden layer (index 0 is the bias node, skipped here).
            for (int idxNodeHidden = 1; idxNodeHidden < listNodeHidden.size(); idxNodeHidden++) {
                double outputVal = listNodeHidden.get(idxNodeHidden).output(listInput);
                listNodeHidden.get(idxNodeHidden).setValue(outputVal);
                listOutputHidden.add(outputVal);
            }

            // Forward pass, output layer.
            for (int idxNodeHidden = 0; idxNodeHidden < listNodeOutput.size(); idxNodeHidden++) {
                double outputVal = listNodeOutput.get(idxNodeHidden).output(listOutputHidden);
                listNodeOutput.get(idxNodeHidden).setValue(outputVal);
            }

            // Back-propagate the error for this instance...
            calculateError(idxInstance);
            // ...then recalculate the weights.
            calculateWeight(i.instance(idxInstance));
        }
    }
    // Dump the trained network state.
    for (int idx = 0; idx < listNodeHidden.size(); idx++) {
        System.out.println("Hidden value " + listNodeHidden.get(idx).getValue());
        System.out.println("Hidden error " + listNodeHidden.get(idx).getError());
        for (int idx2 = 0; idx2 < listNodeHidden.get(idx).getWeightSize(); idx2++)
            System.out.println("Hidden weight" + listNodeHidden.get(idx).getWeightFromList(idx2));
    }
    System.out.println();
    for (int idx = 0; idx < listNodeOutput.size(); idx++) {
        System.out.println("Output value " + listNodeOutput.get(idx).getValue());
        System.out.println("Output error " + listNodeOutput.get(idx).getError());
        for (int idx2 = 0; idx2 < listNodeOutput.get(idx).getWeightSize(); idx2++)
            System.out.println("Output weight" + listNodeOutput.get(idx).getWeightFromList(idx2));
    }
}

From source file:ANN.MultiplePerceptron.java

/**
 * Demo entry point: trains a MultiplePerceptron on a local iris data set and
 * prints a resubstitution evaluation (trained and evaluated on the same data).
 */
public static void main(String[] args) throws Exception {
    // Load the data set from a fixed local path.
    ConverterUtils.DataSource source = new ConverterUtils.DataSource(
            ("D:\\Program Files\\Weka-3-8\\data\\iris.arff"));
    Instances train = source.getDataSet();

    // Normalize attribute values before training.
    Normalize normalizer = new Normalize();
    normalizer.setInputFormat(train);
    train = Filter.useFilter(train, normalizer);

    // The last attribute is the class.
    train.setClassIndex(train.numAttributes() - 1);

    // Train and evaluate on the training data itself.
    MultiplePerceptron classifier = new MultiplePerceptron(train, 20, 0.3);
    classifier.buildClassifier(train);
    Evaluation evaluation = new Evaluation(train);
    evaluation.evaluateModel(classifier, train);
    System.out.println(evaluation.toSummaryString());
    System.out.print(evaluation.toMatrixString());
}

From source file:ann.SingleLayerPerceptron.java

/**
 * Trains the single-layer perceptron for up to {@code annOptions.maxIteration}
 * epochs, stopping early once the summed squared error reaches the configured
 * threshold. The last attribute index is reused as the bias input (fixed to 1);
 * the class attribute value itself is never fed in as a feature.
 *
 * @param data training instances; class attribute presumably last — TODO confirm
 */
public void doPerceptron(Instances data) {
    int numAttrs = data.numAttributes();
    for (int epoch = 0; epoch < annOptions.maxIteration; epoch++) {
        // Accumulated (target - output) * input per weight, used in batch mode.
        double[] deltaWeightUpdate = new double[numAttrs];

        for (int i = 0; i < data.numInstances(); i++) {
            for (int j = 0; j < output.size(); j++) {
                // Weighted sum over all inputs, bias included.
                double sum = 0;
                for (int k = 0; k < numAttrs; k++) {
                    double input = (k == numAttrs - 1) ? 1 : data.instance(i).value(k);
                    sum += output.get(j).weights.get(k) * input;
                }

                // Activation and target depend only on (instance, node), so they
                // are hoisted out of the per-weight loop (the original recomputed
                // them for every k with identical results).
                double newOutput = Util.activationFunction(sum, annOptions);
                double target = targetFor(data, i, j);

                // Update input weights: delta = learningRate * (T - O) * xi.
                for (int k = 0; k < numAttrs; k++) {
                    double input = (k == numAttrs - 1) ? 1 : data.instance(i).value(k);
                    double weight = output.get(j).weights.get(k);

                    if (annOptions.topologyOpt == 2) { // batch: accumulate, apply at epoch end
                        deltaWeightUpdate[k] += (target - newOutput) * input;
                        if (i == data.numInstances() - 1) {
                            // BUGFIX: scale only the accumulated delta by the
                            // learning rate. The original computed
                            // learningRate * (weight + delta), which also shrank
                            // the existing weight by the learning rate each epoch.
                            output.get(j).weights.set(k,
                                    weight + annOptions.learningRate * deltaWeightUpdate[k]);
                        }
                    } else { // incremental: apply immediately per instance
                        double deltaWeight = annOptions.learningRate * (target - newOutput) * input;
                        output.get(j).weights.set(k, weight + deltaWeight);
                    }
                }
            }
        }

        // Epoch error: 0.5 * sum of squared (target - output) over all
        // instances and output nodes.
        double errorEpoch = 0;
        for (int i = 0; i < data.numInstances(); i++) {
            for (int j = 0; j < output.size(); j++) {
                double sum = 0;
                for (int k = 0; k < numAttrs; k++) {
                    double input = (k == numAttrs - 1) ? 1 : data.instance(i).value(k);
                    sum += output.get(j).weights.get(k) * input;
                }
                sum = Util.activationFunction(sum, annOptions);
                double error = targetFor(data, i, j) - sum;
                errorEpoch += error * error;
            }
        }
        errorEpoch *= 0.5;
        // Converged: stop early.
        if (errorEpoch <= annOptions.threshold) {
            break;
        }
    }
}

/**
 * Desired output of node {@code j} for instance {@code i}: one-hot class
 * membership when there are multiple output nodes, otherwise the raw class value.
 */
private double targetFor(Instances data, int i, int j) {
    if (output.size() > 1) {
        return data.instance(i).classValue() == j ? 1 : 0;
    }
    return data.instance(i).classValue();
}

From source file:ann.SingleLayerPerceptron.java

/**
 * Classifies every instance in {@code data} with the trained output nodes,
 * printing per-instance target/output and the overall accuracy.
 *
 * @param data instances to classify (a defensive copy is taken; instances with
 *             a missing class are dropped before filtering)
 * @return predicted class index per (filtered) instance
 * @throws Exception if the nominal-to-binary filter fails
 */
public int[] classifyInstances(Instances data) throws Exception {
    int[] classValue = new int[data.numInstances()];
    // remove instances with missing class
    data = new Instances(data);
    data.deleteWithMissingClass();

    // nominal-to-binary filter (same preprocessing as training)
    ntb.setInputFormat(data);
    data = new Instances(Filter.useFilter(data, ntb));
    int right = 0;

    for (int i = 0; i < data.numInstances(); i++) {
        int outputSize = output.size();
        double[] result = new double[outputSize];
        for (int j = 0; j < outputSize; j++) {
            result[j] = 0.0;
            for (int k = 0; k < data.numAttributes(); k++) {
                // The last attribute slot is the bias input (fixed 1).
                double input = 1;
                if (k < data.numAttributes() - 1) {
                    input = data.instance(i).value(k);
                }
                result[j] += output.get(j).weights.get(k) * input;
            }
            result[j] = Util.activationFunction(result[j], annOptions);
        }

        // Multi-class: argmax over nodes; single node: activation cast to int.
        if (outputSize >= 2) {
            for (int j = 0; j < outputSize; j++) {
                if (result[j] > result[classValue[i]]) {
                    classValue[i] = j;
                }
            }
        } else {
            classValue[i] = (int) result[0];
        }
        double target = data.instance(i).classValue();
        // FIX: renamed this local from 'output' — it shadowed the 'output' field
        // (the list of output nodes) read earlier in this same method.
        double predicted = classValue[i];
        System.out.println("Intance-" + i + " target: " + target + " output: " + predicted);
        if (target == predicted) {
            right = right + 1;
        }
    }

    System.out.println("Percentage: " + ((double) right / (double) data.numInstances()));

    return classValue;
}

From source file:anndl.Anndl.java

/**
 * Parses an ANNDL model description from the given stream, trains a Weka
 * MultilayerPerceptron according to it, and saves the trained model to disk
 * under the name taken from the description.
 */
private static void buildModel(InputStream input) throws Exception {
    // Lex and parse the ANNDL description.
    ANNDLLexer lexer = new ANNDLLexer(new ANTLRInputStream(input));
    ANNDLParser parser = new ANNDLParser(new CommonTokenStream(lexer));
    ParseTree tree = parser.model();

    // Walk the parse tree into a classifier description.
    ModelVisitor visitor = new ModelVisitor();
    ModelClassifier model = (ModelClassifier) visitor.visit(tree);
    model.extracthidden();

    System.out.println("Membaca File Training...");
    DataSource trainingSource = new DataSource(model.filetraining);
    Instances trainingData = trainingSource.getDataSet();
    // Default the class attribute to the last one when unset.
    if (trainingData.classIndex() == -1) {
        trainingData.setClassIndex(trainingData.numAttributes() - 1);
    }

    System.out.println("Melakukan konfigurasi ANN ... ");
    MultilayerPerceptron network = new MultilayerPerceptron();
    network.setLearningRate(model.learningrate);
    network.setMomentum(model.momentum);
    network.setTrainingTime(model.epoch);
    network.setHiddenLayers(model.hidden);

    System.out.println("Melakukan Training data ...");
    network.buildClassifier(trainingData);

    // Persist the trained network.
    Debug.saveToFile(model.namamodel + ".model", network);

    System.out.println("\n~~ .. ~~ .. ~~ .. ~~ .. ~~ .. ~~ .. ~~ .. ~~ .. ~~ ..");
    System.out.println("Model ANN Berhasil Diciptakan dengan nama file : " + model.namamodel + ".model");
    System.out.println("~~ .. ~~ .. ~~ .. ~~ .. ~~ .. ~~ .. ~~ .. ~~ .. ~~ .. \n");

}

From source file:ANN_Single.SinglelayerPerceptron.java

@Override
public void buildClassifier(Instances i) {
    // Trains a single-layer perceptron: one output node per class, repeated
    // epochs until the computed error reaches 0 or below.
    // NOTE(review): 'error <= 0' may never hold for non-separable data, so this
    // loop can run forever — confirm intent.
    listOutput = new ArrayList<>();
    for (int num = 0; num < i.numClasses(); num++) {
        listOutput.add(new Node(i.numAttributes()));
    }
    while (true) { // repeat epochs until converged
        for (int idxInstance = 0; idxInstance < i.numInstances(); idxInstance++) {
            // Input vector: bias 1.0 followed by all non-class attribute values.
            ArrayList<Double> listInput = new ArrayList<>();
            listInput.add(1.0);
            for (int idx = 0; idx < i.numAttributes() - 1; idx++) {
                listInput.add(i.get(idxInstance).value(idx));
            }

            // Forward pass through the output layer.
            for (int idxOutput = 0; idxOutput < listOutput.size(); idxOutput++) {
                output(listInput, idxOutput);
            }
            // Compute per-node error for this instance...
            calculateError(idxInstance);
            // ...then update the weights.
            updateWeight(listInput);
        }
        // NOTE(review): getError() here reflects only the LAST processed
        // instance, so this sum is numInstances() times the final per-node
        // error, not a true per-instance sum — confirm intent.
        double error = 0;
        for (int idxErr = 0; idxErr < i.numInstances(); idxErr++) {
            for (int idx = 0; idx < listOutput.size(); idx++) {
                error += Math.pow(listOutput.get(idx).getError(), 2) / 2;
            }
        }
        System.out.println(error);
        System.out.println();
        if (error <= 0)
            break;
    }
    fold++; // running build counter — presumably one per evaluation fold; verify against caller
    System.out.println("Fold ke-" + fold);
    // Recompute and report the same error figure after training.
    double error = 0;
    for (int idxErr = 0; idxErr < i.numInstances(); idxErr++) {
        for (Node listOutput1 : listOutput) {
            error += Math.pow(listOutput1.getError(), 2) / 2;
        }
    }
    System.out.println("error " + error);
    // Dump final node values, errors and weights for inspection.
    for (int idx = 0; idx < listOutput.size(); idx++) {
        System.out.println("Output value " + listOutput.get(idx).getValue());
        System.out.println("Output error " + listOutput.get(idx).getError());
        for (int idx2 = 0; idx2 < listOutput.get(idx).getWeightSize(); idx2++)
            System.out.println("Output weight" + listOutput.get(idx).getWeightFromList(idx2));
    }
}