Example usage for weka.core Instances instance

List of usage examples for weka.core Instances instance

Introduction

In this page you can find the example usage for weka.core Instances instance.

Prototype



public Instance instance(int index)

Source Link

Document

Returns the instance at the given position.

Usage

From source file:ann.ANN.java

/**
 * Classifies every instance in the dataset at {@code data_address} with
 * {@code model} and prints, per instance: its index, the actual class value,
 * the predicted class label and the class probability distribution.
 *
 * @param data_address path to a dataset readable by Weka (e.g. an ARFF file)
 * @param model        a trained Weka classifier
 */
public void classify(String data_address, Classifier model) {
    try {
        Instances test = ConverterUtils.DataSource.read(data_address);
        // The class attribute is assumed to be the last attribute.
        test.setClassIndex(test.numAttributes() - 1);
        System.out.println("====================================");
        System.out.println("=== Predictions on user test set ===");
        System.out.println("====================================");
        System.out.println("# - actual - predicted - distribution");
        for (int i = 0; i < test.numInstances(); i++) {
            double pred = model.classifyInstance(test.instance(i));
            double[] dist = model.distributionForInstance(test.instance(i));
            System.out.print((i + 1) + " - ");
            System.out.print(test.instance(i).toString(test.classIndex()) + " - ");
            System.out.print(test.classAttribute().value((int) pred) + " - ");
            System.out.println(Utils.arrayToString(dist));
        }
        System.out.println("\n");
    } catch (Exception ex) {
        System.out.println("Tidak berhasil memprediksi hasil\n");
        // Fixed: the original swallowed the exception entirely, hiding the
        // cause (bad path, malformed file, model mismatch) from the user.
        ex.printStackTrace();
    }
}

From source file:ann.Main.java

/**
 * Command-line driver for {@code MyANN}: parses the options, loads the
 * train/test/predict datasets, configures and trains the network, evaluates
 * it (on the training data, a separate test set, or via cross-validation),
 * prints the confusion matrix with accuracy/precision/recall, and optionally
 * predicts the class of each instance of an unlabeled dataset.
 *
 * Run without arguments to print the full option listing.
 */
public static void main(String[] args) {
    String trainPath = null;
    String testPath = null;
    String weights = null;
    String predictPath = null;
    char activationFunction = MyANN.SIGMOID_FUNCTION, terminateCondition = MyANN.TERMINATE_MAX_ITERATION,
            learningRule = MyANN.PERCEPTRON_TRAINING_RULE, topology = MyANN.ONE_PERCEPTRON;
    double deltaMSE = 0.01;
    int maxIteration = 500;
    double learningRate = 0.3;
    double momentum = 0.2;
    int nbHidden = 0;
    int[] hiddenConf = null;
    boolean isCV = false;
    int numFolds = 10;
    boolean isEvaluate = false;

    // Options come as "-x value" pairs followed by the train-data path, so a
    // valid invocation always has an odd number of arguments.
    if (args.length < 1 || args.length % 2 == 0) {

        System.out.println("Usage: ANN [-I <path>] [-t O|M] [-r P|B|D] [-h <layer>]"
                + "\n\t [-a N|G|T] [-L <rate>] [-m <momentum>] [-E D|I|B] [-d <mse>]"
                + "\n\t [-i <iteration>] [-e <path>|<n>] [-p <path>] <trainDataPath>");
        System.out.println("");
        System.out.println("-a N|G|T \t set activation function for OnePerceptron");
        System.out.println("\t\t   N=SIGN, G=SIGMOID, T=STEP");
        System.out.println("-d <mse> \t set MSE = <mse> for terminate condition");
        // Fixed: the "B" option was listed but never explained.
        System.out.println("-E D|I|B \t\t set terminate condition, D=by MSE, I=by iteration, B=both");
        System.out.println("-e <path>|<n> \t set test data using <path> or cross-validation w/ folds = <n>");
        System.out.println("-h <layer> \t set hidden layer. <layer>=0 no hidden layer");
        System.out.println("\t\t   <layer>=2 => 1 hidden layer with 2 nodes");
        System.out.println("\t\t   <layer>=2,3 => 2 hidden layer with 2 nodes on first and 3 on second layer");
        System.out.println("-I <path> \t set initial weight from <path>");
        System.out.println("-i <iteration> \t set max iteration for terminate condition");
        System.out.println("-L <rate> \t set learning rate = <rate>");
        System.out.println("-m <momentum> \t set momentum = <momentum>");
        System.out.println("-p <path> \t set data to predict");
        System.out.println("-r P|B|D \t set learning rule for OnePerceptron ");
        System.out.println("\t\t   P=Perceptron training rule,B=Batch, D=DeltaRule");
        System.out.println("-t O|M \t\t set topology, O=OnePerceptron, M=MLP");
        return;
    } else {
        trainPath = args[args.length - 1];

        int i = 0;
        while (i < args.length - 1) {
            switch (args[i]) {
            case "-a": // activation function
                switch (args[i + 1]) {
                case "N":
                    activationFunction = MyANN.SIGN_FUNCTION;
                    break;
                case "G":
                    activationFunction = MyANN.SIGMOID_FUNCTION;
                    break;
                case "T":
                    activationFunction = MyANN.STEP_FUNCTION;
                    break;
                default:
                    break;
                }
                break;
            case "-d": // target MSE
                deltaMSE = Double.valueOf(args[i + 1]);
                break;
            case "-E": // termination condition
                switch (args[i + 1]) {
                case "D":
                    terminateCondition = MyANN.TERMINATE_MSE;
                    break;
                case "I":
                    terminateCondition = MyANN.TERMINATE_MAX_ITERATION;
                    break;
                case "B":
                    terminateCondition = MyANN.TERMINATE_BOTH;
                    break; // fixed: previously fell through to default (harmless but misleading)
                default:
                    break;
                }
                break;
            case "-e": // evaluation: fold count or test-file path
                // Heuristic: an argument of at most 2 characters is a fold
                // count, anything longer is a path.
                // NOTE(review): a path shorter than 3 characters or a fold
                // count >= 100 would be misparsed — confirm acceptable.
                if (args[i + 1].length() <= 2) {
                    numFolds = Integer.parseInt(args[i + 1]);
                    isCV = true;
                } else {
                    isEvaluate = true;
                    testPath = args[i + 1];
                }
                break;
            case "-h": // hidden-layer configuration, e.g. "2,3"
                String[] nbl = args[i + 1].split(",");
                if (nbl.length == 1) {
                    nbHidden = Integer.parseInt(nbl[0]);
                    if (nbHidden != 0) {
                        hiddenConf = new int[1];
                        hiddenConf[0] = nbHidden;
                        nbHidden = 1;
                    }
                } else {
                    nbHidden = nbl.length;
                    hiddenConf = new int[nbHidden];
                    for (int j = 0; j < nbHidden; j++) {
                        hiddenConf[j] = Integer.parseInt(nbl[j]);
                    }
                }
                break;
            case "-I": // initial weights file
                weights = args[i + 1];
                break;
            case "-i": // max iterations
                maxIteration = Integer.parseInt(args[i + 1]);
                break;
            case "-L": // learning rate
                learningRate = Double.parseDouble(args[i + 1]);
                break;
            case "-m": // momentum
                momentum = Double.parseDouble(args[i + 1]);
                break;
            case "-p": // data to predict
                predictPath = args[i + 1];
                break;
            case "-r": // learning rule
                switch (args[i + 1]) {
                case "P":
                    learningRule = MyANN.PERCEPTRON_TRAINING_RULE;
                    break;
                case "B":
                    learningRule = MyANN.BATCH_GRADIENT_DESCENT;
                    break;
                case "D":
                    learningRule = MyANN.DELTA_RULE;
                    break;
                default:
                    break;
                }
                break;
            case "-t": // topology
                switch (args[i + 1]) {
                case "O":
                    topology = MyANN.ONE_PERCEPTRON;
                    break;
                case "M":
                    topology = MyANN.MULTILAYER_PERCEPTRON;
                    break;
                default:
                    break;
                }
                break;
            default:
                break;
            }
            i += 2;
        }
    }

    // Load the datasets; the class attribute defaults to the last attribute.
    Instances trainData = null;
    Instances testData = null;
    Instances predictData = null;
    try {
        ConverterUtils.DataSource source = new ConverterUtils.DataSource(trainPath);
        trainData = source.getDataSet();
        if (trainData.classIndex() == -1) {
            trainData.setClassIndex(trainData.numAttributes() - 1);
        }
        if (testPath != null) {
            source = new ConverterUtils.DataSource(testPath);
            testData = source.getDataSet();
            if (testData.classIndex() == -1) {
                testData.setClassIndex(testData.numAttributes() - 1);
            }
        }
        if (predictPath != null) {
            source = new ConverterUtils.DataSource(predictPath);
            predictData = source.getDataSet();
            if (predictData.classIndex() == -1) {
                predictData.setClassIndex(predictData.numAttributes() - 1);
            }
        }
    } catch (Exception ex) {
        // NOTE(review): trainData stays null on failure and the code below
        // then throws a NullPointerException — consider aborting here.
        Logger.getLogger(Main.class.getName()).log(Level.SEVERE, null, ex);
    }

    // Configure the model and its hyper-parameters.
    MyANN myAnn = new MyANN();
    WeightParser wp = null;
    if (weights != null) {
        wp = new WeightParser(weights);
        myAnn.setInitialWeight(wp.weight);
    }
    myAnn.setActivationFunction(activationFunction);
    myAnn.setDeltaMSE(deltaMSE);
    myAnn.setLearningRate(learningRate);
    myAnn.setLearningRule(learningRule);
    myAnn.setMaxIteration(maxIteration);
    myAnn.setMomentum(momentum);
    myAnn.setTerminationCondition(terminateCondition);
    // NOTE(review): the threshold is set from the momentum value and there is
    // no separate threshold option — looks like a copy-paste slip; confirm.
    myAnn.setThreshold(momentum);
    myAnn.setTopology(topology);

    // Layer sizes: input layer, optional hidden layers, output layer.
    int[] nbLayer = new int[2];
    if (nbHidden != 0) {
        nbLayer = new int[2 + nbHidden];
        for (int j = 1; j < nbLayer.length - 1; j++) {
            nbLayer[j] = hiddenConf[j - 1];
        }
    }
    nbLayer[0] = trainData.numAttributes() - 1;
    if (trainData.classAttribute().isNominal())
        nbLayer[nbLayer.length - 1] = trainData.classAttribute().numValues();
    else
        nbLayer[nbLayer.length - 1] = 1; // numeric class: single output node

    myAnn.setNbLayers(nbLayer);

    // Debug: show the configuration.
    System.out.println("training data: " + trainPath);
    System.out.println("settings:");
    myAnn.printSetting();
    System.out.println("");

    // Train the classifier.
    System.out.println("start classifiying...");
    try {
        myAnn.buildClassifier(trainData);
    } catch (Exception ex) {
        Logger.getLogger(Main.class.getName()).log(Level.SEVERE, null, ex);
    }
    myAnn.printSummary();
    System.out.println("done");

    System.out.println("-------------------------------------------------");

    // Evaluate: cross-validation, a separate test set, or the training data.
    System.out.print("evaluating ");
    int[][] result = null;
    int nbData = trainData.numInstances();
    if (isCV) {
        System.out.println("using " + numFolds + "-folds cross validation");
        result = myAnn.crossValidation(trainData, numFolds, new Random(1));
    } else if (isEvaluate) {
        System.out.println("using testData: " + testPath);
        result = myAnn.evaluate(testData);
        nbData = testData.numInstances();
    } else {
        System.out.println("using trainData");
        result = myAnn.evaluate(trainData);
    }
    System.out.println("");

    System.out.println("result:");

    // Metrics from the confusion matrix M:
    //   accuracy     = sum_i M[i,i] / total
    //   precision[i] = M[i,i] / sum_j M[j,i]   (column sum)
    //   recall[i]    = M[i,i] / sum_j M[i,j]   (row sum)
    // NOTE(review): for a numeric class the matrix is 1x2 (see MyANN.evaluate),
    // so the precision loop below would index out of bounds — confirm the
    // metrics section is only reached with a nominal class.
    double accuracy = 0.0;
    double[] precision = new double[result.length];
    double[] recall = new double[result[0].length];

    for (int i = 0; i < result.length; i++) {
        for (int j = 0; j < result[0].length; j++) {
            System.out.print(result[i][j] + " ");
            if (i == j) {
                accuracy += result[i][j];
            }
        }
        System.out.println("");
    }

    // precision
    for (int i = 0; i < precision.length; i++) {
        double sum = 0.0;
        for (int j = 0; j < result[0].length; j++) {
            sum += result[j][i];
        }
        precision[i] = result[i][i] / sum;
    }

    // recall
    for (int i = 0; i < recall.length; i++) {
        double sum = 0.0;
        for (int j = 0; j < result[0].length; j++) {
            sum += result[i][j];
        }
        recall[i] = result[i][i] / sum;
    }

    accuracy /= nbData;
    System.out.println("");
    System.out.println("accuracy: " + accuracy);
    System.out.println("precision: ");
    for (double p : precision) {
        System.out.println(p);
    }
    System.out.println("");
    System.out.println("recall: ");
    for (double r : recall)
        System.out.println(r);
    System.out.println("");
    System.out.println("-------------------------------------------------");

    // Predict unlabeled data, printing the predicted class of each instance.
    if (predictPath != null) {
        System.out.println("predicting: " + predictPath);
        for (int i = 0; i < predictData.numInstances(); i++) {
            try {
                int idx = myAnn.predictClassIndex(myAnn.distributionForInstance(predictData.instance(i)));
                System.out.println("instance[" + (i) + "]: " + trainData.classAttribute().value(idx));
            } catch (Exception ex) {
                Logger.getLogger(Main.class.getName()).log(Level.SEVERE, null, ex);
            }
        }
        System.out.println("done");
    }
    // Removed: ~100 lines of commented-out experimental code that duplicated
    // the evaluation logic above.
}

From source file:ANN.MultilayerPerceptron.java

public MultilayerPerceptron(Instances i, double rate, int itter, int numHidden) {
    learningRate = rate;//from  ww w . ja v  a 2  s. co m
    listHidden = new ArrayList<>();

    for (int num = 0; num < numHidden + 1; num++) {
        listHidden.add(new Node(i.numAttributes()));
    }

    listOutput = new ArrayList<>();
    for (int num = 0; num < i.numClasses(); num++) {
        listOutput.add(new Node(listHidden.size()));
    }
    itteration = itter;
    listDoubleinstance = new double[i.numInstances()];
    for (int numIns = 0; numIns < i.numInstances(); numIns++) {
        listDoubleinstance[numIns] = i.instance(numIns).toDoubleArray()[i.classIndex()];
    }
}

From source file:ANN.MultiplePerceptron.java

public MultiplePerceptron(Instances i, int numNode, double rate) {
    listNodeHidden = new ArrayList<>();
    for (int num = 0; num < numNode + 1; num++) {
        listNodeHidden.add(new Node(i.numAttributes()));
    }/*  ww w . j  a  va2s  .  c  o m*/

    listNodeOutput = new ArrayList<>();
    for (int num = 0; num < i.numClasses(); num++) {
        listNodeOutput.add(new Node(listNodeHidden.size()));
    }

    listDoubleinstance = new double[i.numInstances()];
    for (int numIns = 0; numIns < i.numInstances(); numIns++) {
        listDoubleinstance[numIns] = i.instance(numIns).toDoubleArray()[i.classIndex()];
    }
    learningRate = rate;
}

From source file:ANN.MultiplePerceptron.java

/**
 * Trains the network for a fixed 5000 epochs. For every instance a forward
 * pass is run through the hidden layer (node 0 is the bias and is skipped)
 * and the output layer, then the error is back-propagated and the weights
 * recalculated. Finally the values, errors and weights of every node are
 * printed for debugging.
 *
 * @param i the training instances
 */
@Override
public void buildClassifier(Instances i) {
    final int epochs = 5000;
    for (int epoch = 0; epoch < epochs; epoch++) {
        for (int insIdx = 0; insIdx < i.numInstances(); insIdx++) {
            // Forward-pass input: bias term followed by every non-class attribute.
            ArrayList<Double> inputs = new ArrayList<>();
            inputs.add(1.0);
            int attrCount = i.numAttributes() - 1;
            for (int attr = 0; attr < attrCount; attr++) {
                inputs.add(i.get(insIdx).value(attr));
            }

            // Hidden-layer outputs, again led by a bias term.
            ArrayList<Double> hiddenOut = new ArrayList<>();
            hiddenOut.add(1.0);
            for (int h = 1; h < listNodeHidden.size(); h++) {
                double val = listNodeHidden.get(h).output(inputs);
                listNodeHidden.get(h).setValue(val);
                hiddenOut.add(val);
            }

            // Output layer.
            for (int o = 0; o < listNodeOutput.size(); o++) {
                double val = listNodeOutput.get(o).output(hiddenOut);
                listNodeOutput.get(o).setValue(val);
            }

            // Back-propagate the error, then recalculate the weights.
            calculateError(insIdx);
            calculateWeight(i.instance(insIdx));
        }
    }

    // Debug dump of the trained network.
    for (int idx = 0; idx < listNodeHidden.size(); idx++) {
        System.out.println("Hidden value " + listNodeHidden.get(idx).getValue());
        System.out.println("Hidden error " + listNodeHidden.get(idx).getError());
        for (int w = 0; w < listNodeHidden.get(idx).getWeightSize(); w++)
            System.out.println("Hidden weight" + listNodeHidden.get(idx).getWeightFromList(w));
    }
    System.out.println();
    for (int idx = 0; idx < listNodeOutput.size(); idx++) {
        System.out.println("Output value " + listNodeOutput.get(idx).getValue());
        System.out.println("Output error " + listNodeOutput.get(idx).getError());
        for (int w = 0; w < listNodeOutput.get(idx).getWeightSize(); w++)
            System.out.println("Output weight" + listNodeOutput.get(idx).getWeightFromList(w));
    }
}

From source file:ann.MyANN.java

/**
 * Evaluates the model on {@code testSet} and returns a confusion matrix.
 * {@code buildClassifier} must have been called first.
 *
 * @param testSet the instances to test the model against
 * @return the confusion matrix: for a nominal class an NxN matrix where N is
 *         the number of class values; for a numeric class a 1x2 matrix whose
 *         first element counts correct predictions (within a 0.001 tolerance)
 *         and whose second element counts wrong ones
 */
public int[][] evaluate(Instances testSet) {
    int[][] confusionMatrix;
    if (testSet.classAttribute().isNominal()) {
        int n = testSet.classAttribute().numValues();
        confusionMatrix = new int[n][n];
    } else {
        confusionMatrix = new int[1][2];
    }
    // Removed: a leftover debug loop whose body was fully commented out.

    for (int i = 0; i < testSet.numInstances(); i++) {
        try {
            double[] prob = distributionForInstance(testSet.instance(i));
            if (testSet.classAttribute().isNominal()) {
                // Row = actual class, column = predicted class.
                int idx = predictClassIndex(prob);
                confusionMatrix[(int) testSet.instance(i).classValue()][idx]++;
            } else {
                // Numeric class: count as correct when within the tolerance.
                if (Math.abs(prob[0] - testSet.instance(i).classValue()) <= 0.001)
                    confusionMatrix[0][0]++;
                else
                    confusionMatrix[0][1]++;
            }
        } catch (Exception ex) {
            Logger.getLogger(MyANN.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
    return confusionMatrix;
}

From source file:ann.MyANN.java

/**
 * Converts {@code instances} into the internal array-of-data representation
 * and stores the result in the {@code datas} field.
 *
 * @param instances the dataset to convert
 */
private void instancesToDatas(Instances instances) {
    datas = new ArrayList<>();
    int count = instances.numInstances();
    for (int idx = 0; idx < count; idx++) {
        datas.add(instanceToData(instances.instance(idx)));
    }
}

From source file:ann.SingleLayerPerceptron.java

/**
 * Trains the single-layer perceptron on {@code data} for up to
 * {@code annOptions.maxIteration} epochs. Weights are updated either
 * incrementally or, when {@code annOptions.topologyOpt == 2}, in batch mode.
 * Training stops early once the summed squared error of an epoch drops to
 * {@code annOptions.threshold} or below.
 *
 * @param data training instances; the last attribute index serves as the
 *             bias input (constant 1)
 */
public void doPerceptron(Instances data) {
    for (int epoch = 0; epoch < annOptions.maxIteration; epoch++) {
        double deltaWeight = 0.0;
        // Accumulators for batch-mode updates, one per attribute slot.
        double[] deltaWeightUpdate = new double[data.numAttributes()];
        for (int i = 0; i < data.numAttributes(); i++) {
            deltaWeightUpdate[i] = 0;
        }
        for (int i = 0; i < data.numInstances(); i++) {
            for (int j = 0; j < output.size(); j++) {
                // Weighted sum of the inputs (xi * wi); the last attribute
                // slot acts as the bias input.
                double sum = 0;
                double weight, input;
                for (int k = 0; k < data.numAttributes(); k++) {
                    if (k == data.numAttributes() - 1) { // bias
                        input = 1;
                    } else {
                        input = data.instance(i).value(k);
                    }
                    weight = output.get(j).weights.get(k);
                    sum += weight * input;
                }

                // Hoisted out of the weight-update loop below: both values
                // depend only on (i, j), not on the attribute index k, yet
                // were recomputed once per attribute in the original.
                double newOutput = Util.activationFunction(sum, annOptions);
                double target;
                if (output.size() > 1) {
                    // One-vs-all encoding: node j targets 1 for its own class.
                    if (data.instance(i).classValue() == j) {
                        target = 1;
                    } else {
                        target = 0;
                    }
                } else {
                    target = data.instance(i).classValue();
                }

                // Update every input weight of output node j.
                for (int k = 0; k < data.numAttributes(); k++) {
                    if (k == data.numAttributes() - 1) { // bias
                        input = 1;
                    } else {
                        input = data.instance(i).value(k);
                    }
                    weight = output.get(j).weights.get(k);

                    // Delta weight: learning rate * (T - O) * xi.
                    if (annOptions.topologyOpt == 2) // batch
                    {
                        deltaWeightUpdate[k] += (target - newOutput) * input;
                        if (i == data.numInstances() - 1) { // apply accumulated update
                            // NOTE(review): this scales the old weight itself by
                            // the learning rate rather than only the delta —
                            // confirm this is intended.
                            output.get(j).weights.set(k,
                                    annOptions.learningRate * (weight + deltaWeightUpdate[k]));
                        }
                    } else {
                        deltaWeight = annOptions.learningRate * (target - newOutput) * input;
                        output.get(j).weights.set(k, weight + deltaWeight);
                    }
                }
            }
        }

        // Compute the epoch error: 0.5 * sum of squared (target - output).
        double errorEpoch = 0;
        for (int i = 0; i < data.numInstances(); i++) {
            double sum = 0;
            for (int j = 0; j < output.size(); j++) {
                for (int k = 0; k < data.numAttributes(); k++) {
                    double input;
                    if (k == data.numAttributes() - 1) { // bias
                        input = 1;
                    } else {
                        input = data.instance(i).value(k);
                    }
                    double weight = output.get(j).weights.get(k);
                    sum += weight * input;
                }
                // Pass through the activation function.
                sum = Util.activationFunction(sum, annOptions);
                double target;
                if (output.size() > 1) {
                    if (data.instance(i).classValue() == j) {
                        target = 1;
                    } else {
                        target = 0;
                    }
                } else {
                    target = data.instance(i).classValue();
                }
                double error = target - sum;
                errorEpoch += error * error;
            }
        }
        errorEpoch *= 0.5;
        // Converged: stop before exhausting the iteration budget.
        if (errorEpoch <= annOptions.threshold) {
            break;
        }
    }
}

From source file:ann.SingleLayerPerceptron.java

/**
 * Classifies every instance in {@code data} with the trained single-layer
 * perceptron, printing each prediction and the overall fraction of correct
 * predictions.
 *
 * @param data instances to classify; a copy is made, instances with a
 *             missing class are removed and the nominal-to-binary filter
 *             {@code ntb} is applied
 * @return the predicted class index per (remaining) instance
 * @throws Exception if the nominal-to-binary filter fails
 */
public int[] classifyInstances(Instances data) throws Exception {
    // NOTE(review): sized before deleteWithMissingClass(); if instances are
    // removed, trailing slots stay 0 — confirm callers expect that.
    int[] classValue = new int[data.numInstances()];
    // Work on a copy; remove instances with a missing class.
    data = new Instances(data);
    data.deleteWithMissingClass();

    // Apply the nominal-to-binary filter.
    ntb.setInputFormat(data);
    data = new Instances(Filter.useFilter(data, ntb));
    int right = 0;

    for (int i = 0; i < data.numInstances(); i++) {
        int outputSize = output.size();
        double[] result = new double[outputSize];
        for (int j = 0; j < outputSize; j++) {
            result[j] = 0.0;
            for (int k = 0; k < data.numAttributes(); k++) {
                double input = 1; // the last attribute slot is the bias input
                if (k < data.numAttributes() - 1) {
                    input = data.instance(i).value(k);
                }
                result[j] += output.get(j).weights.get(k) * input;
            }
            result[j] = Util.activationFunction(result[j], annOptions);
        }

        if (outputSize >= 2) {
            // Multi-class: pick the output node with the highest activation.
            for (int j = 0; j < outputSize; j++) {
                if (result[j] > result[classValue[i]]) {
                    classValue[i] = j;
                }
            }
        } else {
            classValue[i] = (int) result[0];
        }
        double target = data.instance(i).classValue();
        // Renamed from "output": the original local shadowed the "output"
        // field used above, which was legal but confusing.
        double predicted = classValue[i];
        // Fixed typo in the printed message ("Intance" -> "Instance").
        System.out.println("Instance-" + i + " target: " + target + " output: " + predicted);
        if (target == predicted) {
            right = right + 1;
        }
    }

    System.out.println("Percentage: " + ((double) right / (double) data.numInstances()));

    return classValue;
}

From source file:ANN_Single.SinglelayerPerceptron.java

public SinglelayerPerceptron(Instances i, double rate, int itter) {
    learningRate = rate;/*  w  w w  .j a  va 2  s  .c  o m*/
    //        listOutput = new ArrayList<>();
    //        for (int num =0; num<i.numClasses(); num++) {
    //            listOutput.add(new Node(i.numAttributes()));
    //        }
    itteration = itter;
    listDoubleinstance = new double[i.numInstances()];
    for (int numIns = 0; numIns < i.numInstances(); numIns++) {
        listDoubleinstance[numIns] = i.instance(numIns).toDoubleArray()[i.classIndex()];
    }
}