Example usage for weka.core Instances classIndex

List of usage examples for weka.core Instances classIndex

Introduction

In this page you can find the example usage for weka.core Instances classIndex.

Prototype


public int classIndex()

Source Link

Document

Returns the class attribute's index.

Usage

From source file:adams.opt.cso.Measure.java

License: Open Source License

/**
 * Checks whether the data can be used with this measure.
 *
 * @param data   the data to check
 * @return      true if the measure can be obtained for this kind of data
 *              (m_Nominal for a nominal class, m_Numeric for a numeric class)
 * @throws UnassignedClassException if no class attribute has been set on the data
 * @throws IllegalStateException if the class attribute is neither nominal nor numeric
 */
public boolean isValid(Instances data) {
    if (data.classIndex() == -1)
        throw new UnassignedClassException("No class attribute set!");

    if (data.classAttribute().isNominal())
        return m_Nominal;
    else if (data.classAttribute().isNumeric())
        return m_Numeric;
    else
        throw new IllegalStateException("Class attribute '" + data.classAttribute().type() + "' not handled!");
}

From source file:ai.GiniFunction.java

License:GNU General Public License

/**
 * Create split function based on Gini coefficient.
 *
 * Draws numOfFeatures random candidate features (without replacement,
 * excluding the class attribute) and, for each, evaluates every possible
 * split point of the selected samples, keeping the feature index and
 * threshold with the smallest weighted Gini impurity in this.index and
 * this.threshold.
 *
 * @param data original data
 * @param indices indices of the samples to use
 */
public void init(Instances data, ArrayList<Integer> indices) {
    // Degenerate case: no samples to split on.
    if (indices.size() == 0) {
        this.index = 0;
        this.threshold = 0;
        this.allSame = true;
        return;
    }

    final int len = data.numAttributes();
    final int numElements = indices.size();
    final int numClasses = data.numClasses();
    final int classIndex = data.classIndex();

    /** Attribute-class pair comparator, ascending by attribute value.
     *  NOTE(review): equals(Object) unconditionally returning false violates
     *  the Object.equals contract (not reflexive); harmless for sorting but
     *  worth fixing. The exact diff == 0 comparison on doubles is intentional
     *  here (equal raw values compare equal). */
    final Comparator<AttributeClassPair> comp = new Comparator<AttributeClassPair>() {
        public int compare(AttributeClassPair o1, AttributeClassPair o2) {
            final double diff = o2.attributeValue - o1.attributeValue;
            if (diff < 0)
                return 1;
            else if (diff == 0)
                return 0;
            else
                return -1;
        }

        public boolean equals(Object o) {
            return false;
        }
    };

    // Collect the indices of all candidate features (class attribute excluded);
    // features are drawn from this list at random, without replacement.
    ArrayList<Integer> allIndices = new ArrayList<Integer>();
    for (int i = 0; i < len; i++)
        if (i != classIndex)
            allIndices.add(i);

    double minimumGini = Double.MAX_VALUE;

    // NOTE(review): numOfFeatures and random are fields declared elsewhere.
    // If numOfFeatures exceeds the number of non-class attributes,
    // random.nextInt(0) below throws — confirm callers guarantee otherwise.
    for (int i = 0; i < numOfFeatures; i++) {
        // Select the random feature
        final int index = random.nextInt(allIndices.size());
        final int featureToUse = allIndices.get(index);
        allIndices.remove(index); // remove that element to prevent from repetitions

        // Get the smallest Gini coefficient

        // Create list with pairs attribute-class
        final ArrayList<AttributeClassPair> list = new ArrayList<AttributeClassPair>();
        for (int j = 0; j < numElements; j++) {
            final Instance ins = data.get(indices.get(j));
            list.add(new AttributeClassPair(ins.value(featureToUse), (int) ins.value(classIndex)));
        }

        // Sort pairs in increasing order
        Collections.sort(list, comp);

        final double[] probLeft = new double[numClasses];
        final double[] probRight = new double[numClasses];
        // initial class counts (all samples start on the right side)
        for (int n = 0; n < list.size(); n++)
            probRight[list.get(n).classValue]++;

        // Try all splitting points, from position 0 to the end.
        // At split point s, samples [0, s) are on the left, [s, numElements) on
        // the right, and the threshold is the attribute value at position s.
        for (int splitPoint = 0; splitPoint < numElements; splitPoint++) {
            // Calculate Gini coefficient
            double giniLeft = 0;
            double giniRight = 0;
            final int rightNumElements = numElements - splitPoint;

            for (int nClass = 0; nClass < numClasses; nClass++) {
                // left set
                double prob = probLeft[nClass];
                // Divide by the number of elements to get probabilities
                if (splitPoint != 0)
                    prob /= (double) splitPoint;
                giniLeft += prob * prob;

                // right set
                prob = probRight[nClass];
                // Divide by the number of elements to get probabilities
                if (rightNumElements != 0)
                    prob /= (double) rightNumElements;
                giniRight += prob * prob;
            }

            // Total Gini value: impurity of each side weighted by its size
            final double gini = ((1.0 - giniLeft) * splitPoint + (1.0 - giniRight) * rightNumElements)
                    / (double) numElements;

            // Save values of minimum Gini coefficient
            if (gini < minimumGini) {
                minimumGini = gini;
                this.index = featureToUse;
                this.threshold = list.get(splitPoint).attributeValue;
            }

            // move the sample at the split point from the right to the left side
            probLeft[list.get(splitPoint).classValue]++;
            probRight[list.get(splitPoint).classValue]--;
        }
    }

    // free list of possible indices to help garbage collector
    //allIndices.clear();
    //allIndices = null;
}

From source file:ann.ANN.java

/**
 * Loads a test set from the given file, classifies every instance with the
 * supplied model and prints the actual value, the predicted class label and
 * the class distribution for each instance.
 *
 * @param data_address path of the test data file (any format supported by
 *                     Weka's ConverterUtils.DataSource)
 * @param model        the trained classifier used for prediction
 */
public void classify(String data_address, Classifier model) {
    try {
        Instances test = ConverterUtils.DataSource.read(data_address);
        // assumes the class attribute is the last one -- TODO confirm with data format
        test.setClassIndex(test.numAttributes() - 1);
        System.out.println("====================================");
        System.out.println("=== Predictions on user test set ===");
        System.out.println("====================================");
        System.out.println("# - actual - predicted - distribution");
        for (int i = 0; i < test.numInstances(); i++) {
            double pred = model.classifyInstance(test.instance(i));
            double[] dist = model.distributionForInstance(test.instance(i));
            System.out.print((i + 1) + " - ");
            System.out.print(test.instance(i).toString(test.classIndex()) + " - ");
            System.out.print(test.classAttribute().value((int) pred) + " - ");
            System.out.println(Utils.arrayToString(dist));
        }
        System.out.println("\n");
    } catch (Exception ex) {
        System.out.println("Tidak berhasil memprediksi hasil\n");
        // report the cause instead of silently swallowing the exception
        ex.printStackTrace();
    }
}

From source file:ann.Main.java

/**
 * Command-line entry point: parses the options, loads the training (and
 * optionally test/prediction) data, configures and trains a MyANN
 * classifier, then prints the confusion matrix, accuracy, precision and
 * recall, and finally the predictions for the prediction set if given.
 *
 * @param args options as printed by the usage message; the last argument
 *             is always the training data path
 */
public static void main(String[] args) {
    String trainPath = null;
    String testPath = null;
    String weights = null;
    String predictPath = null;
    char activationFunction = MyANN.SIGMOID_FUNCTION, terminateCondition = MyANN.TERMINATE_MAX_ITERATION,
            learningRule = MyANN.PERCEPTRON_TRAINING_RULE, topology = MyANN.ONE_PERCEPTRON;
    double deltaMSE = 0.01;
    int maxIteration = 500;
    double learningRate = 0.3;
    double momentum = 0.2;
    int nbHidden = 0;
    int[] hiddenConf = null;
    boolean isCV = false;
    int numFolds = 10;
    boolean isEvaluate = false;

    // Every option takes a value, so a valid command line has an odd length
    // (option/value pairs plus the trailing training path).
    if (args.length < 1 || args.length % 2 == 0) {

        System.out.println("Usage: ANN [-I <path>] [-t O|M] [-r P|B|D] [-h <layer>]"
                + "\n\t [-a N|G|T] [-L <rate>] [-m <momentum>] [-E D|I|B] [-d <mse>]"
                + "\n\t [-i <iteration>] [-e <path>|<n>] [-p <path>] <trainDataPath>");
        System.out.println("");
        System.out.println("-a N|G|T \t set activation function for OnePerceptron");
        System.out.println("\t\t   N=SIGN, G=SIGMOID, T=STEP");
        System.out.println("-d <mse> \t set MSE = <mse> for terminate condition");
        System.out.println("-E D|I|B \t\t set terminate condition, D=by MSE, I=by iteration");
        System.out.println("-e <path>|<n> \t set test data using <path> or cross-validation w/ folds = <n>");
        System.out.println("-h <layer> \t set hidden layer. <layer>=0 no hidden layer");
        System.out.println("\t\t   <layer>=2 => 1 hidden layer with 2 nodes");
        System.out.println("\t\t   <layer>=2,3 => 2 hidden layer with 2 nodes on first and 3 on second layer");
        System.out.println("-I <path> \t set initial weight from <path>");
        System.out.println("-i <iteration> \t set max iteration for terminate condition");
        System.out.println("-L <rate> \t set learning rate = <rate>");
        System.out.println("-m <momentum> \t set momentum = <momentum>");
        System.out.println("-p <path> \t set data to predict");
        System.out.println("-r P|B|D \t set learning rule for OnePerceptron ");
        System.out.println("\t\t   P=Perceptron training rule,B=Batch, D=DeltaRule");
        System.out.println("-t O|M \t\t set topology, O=OnePerceptron, M=MLP");
        return;
    } else {
        trainPath = args[args.length - 1];

        int i = 0;
        while (i < args.length - 1) {
            switch (args[i]) {
            case "-a":
                switch (args[i + 1]) {
                case "N":
                    activationFunction = MyANN.SIGN_FUNCTION;
                    break;
                case "G":
                    activationFunction = MyANN.SIGMOID_FUNCTION;
                    break;
                case "T":
                    activationFunction = MyANN.STEP_FUNCTION;
                    break;
                default:
                    break;
                }
                break;
            case "-d":
                deltaMSE = Double.valueOf(args[i + 1]);
                break;
            case "-E":
                switch (args[i + 1]) {
                case "D":
                    terminateCondition = MyANN.TERMINATE_MSE;
                    break;
                case "I":
                    terminateCondition = MyANN.TERMINATE_MAX_ITERATION;
                    break;
                // NOTE(review): "B" deliberately(?) falls through to default —
                // there is no break after TERMINATE_BOTH; confirm intended.
                case "B":
                    terminateCondition = MyANN.TERMINATE_BOTH;
                default:
                    break;
                }
                break;
            case "-e":
                // NOTE(review): heuristic — a value of length <= 2 is treated as
                // a fold count, anything longer as a test-file path. A path of
                // 1-2 characters would be misparsed; confirm acceptable.
                if (args[i + 1].length() <= 2) {
                    numFolds = Integer.parseInt(args[i + 1]);
                    isCV = true;
                } else {
                    isEvaluate = true;
                    testPath = args[i + 1];
                }
                break;
            case "-h":
                String[] nbl = args[i + 1].split(",");
                if (nbl.length == 1) {
                    nbHidden = Integer.parseInt(nbl[0]);
                    if (nbHidden != 0) {
                        hiddenConf = new int[1];
                        hiddenConf[0] = nbHidden;
                        nbHidden = 1;
                    }
                } else {
                    nbHidden = nbl.length;
                    hiddenConf = new int[nbHidden];
                    for (int j = 0; j < nbHidden; j++) {
                        hiddenConf[j] = Integer.parseInt(nbl[j]);
                    }
                }
                break;
            case "-I":
                weights = args[i + 1];
                break;
            case "-i":
                maxIteration = Integer.parseInt(args[i + 1]);
                break;
            case "-L":
                learningRate = Double.parseDouble(args[i + 1]);
                break;
            case "-m":
                momentum = Double.parseDouble(args[i + 1]);
                break;
            case "-p":
                predictPath = args[i + 1];
                break;
            case "-r":
                switch (args[i + 1]) {
                case "P":
                    learningRule = MyANN.PERCEPTRON_TRAINING_RULE;
                    break;
                case "B":
                    learningRule = MyANN.BATCH_GRADIENT_DESCENT;
                    break;
                case "D":
                    learningRule = MyANN.DELTA_RULE;
                    break;
                default:
                    break;
                }
                break;
            case "-t":
                switch (args[i + 1]) {
                case "O":
                    topology = MyANN.ONE_PERCEPTRON;
                    break;
                case "M":
                    topology = MyANN.MULTILAYER_PERCEPTRON;
                    break;
                default:
                    break;
                }
                break;
            default:
                break;
            }
            i += 2;
        }
    }

    // prepare the data sets; default the class attribute to the last column
    Instances trainData = null;
    Instances testData = null;
    Instances predictData = null;
    try {
        ConverterUtils.DataSource source = new ConverterUtils.DataSource(trainPath);
        trainData = source.getDataSet();
        if (trainData.classIndex() == -1) {
            trainData.setClassIndex(trainData.numAttributes() - 1);
        }
        if (testPath != null) {
            source = new ConverterUtils.DataSource(testPath);
            testData = source.getDataSet();
            if (testData.classIndex() == -1) {
                testData.setClassIndex(testData.numAttributes() - 1);
            }
        }
        if (predictPath != null) {
            source = new ConverterUtils.DataSource(predictPath);
            predictData = source.getDataSet();
            if (predictData.classIndex() == -1) {
                predictData.setClassIndex(predictData.numAttributes() - 1);
            }
        }
    } catch (Exception ex) {
        // NOTE(review): execution continues with null data after a load
        // failure, which will NPE below — consider aborting here.
        Logger.getLogger(Main.class.getName()).log(Level.SEVERE, null, ex);
    }

    // set up the model and its parameters
    MyANN myAnn = new MyANN();
    WeightParser wp = null;
    if (weights != null) {
        wp = new WeightParser(weights);
        myAnn.setInitialWeight(wp.weight);
    }
    myAnn.setActivationFunction(activationFunction);
    myAnn.setDeltaMSE(deltaMSE);
    myAnn.setLearningRate(learningRate);
    myAnn.setLearningRule(learningRule);
    myAnn.setMaxIteration(maxIteration);
    myAnn.setMomentum(momentum);
    myAnn.setTerminationCondition(terminateCondition);
    // NOTE(review): momentum is passed as the threshold — looks like a
    // copy/paste bug (no separate threshold option is parsed); confirm.
    myAnn.setThreshold(momentum);
    myAnn.setTopology(topology);

    // layer sizes: input layer, optional hidden layers, output layer
    int[] nbLayer = new int[2];
    if (nbHidden != 0) {
        nbLayer = new int[2 + nbHidden];
        for (int j = 1; j < nbLayer.length - 1; j++) {
            nbLayer[j] = hiddenConf[j - 1];
        }
    }
    nbLayer[0] = trainData.numAttributes() - 1;
    if (trainData.classAttribute().isNominal())
        nbLayer[nbLayer.length - 1] = trainData.classAttribute().numValues();
    else
        nbLayer[nbLayer.length - 1] = 1;

    myAnn.setNbLayers(nbLayer);

    // debug: print the configuration
    System.out.println("training data: " + trainPath);
    System.out.println("settings:");
    myAnn.printSetting();
    System.out.println("");

    // classification
    System.out.println("start classifiying...");
    try {
        myAnn.buildClassifier(trainData);
    } catch (Exception ex) {
        Logger.getLogger(Main.class.getName()).log(Level.SEVERE, null, ex);
    }
    myAnn.printSummary();
    System.out.println("done");

    System.out.println("-------------------------------------------------");

    // evaluate on the configured target: cross-validation, test set, or train set
    System.out.print("evaluating ");
    int[][] result = null;
    int nbData = trainData.numInstances();
    if (isCV) {
        System.out.println("using " + numFolds + "-folds cross validation");
        result = myAnn.crossValidation(trainData, numFolds, new Random(1));
    } else if (isEvaluate) {
        System.out.println("using testData: " + testPath);
        result = myAnn.evaluate(testData);
        nbData = testData.numInstances();
    } else {
        System.out.println("using trainData");
        result = myAnn.evaluate(trainData);
    }
    System.out.println("");

    System.out.println("result:");

    double accuracy = 0.0; // a+d/total
    double[] precision = new double[result.length]; // a/a+c;   prec[i] = M[i,i] / sumj(M[j,i])
    double[] recall = new double[result[0].length]; // a/a+b;   rec[i] = M[i,i] / sumj(M[i,j])

    // print the confusion matrix and accumulate the diagonal for accuracy
    for (int i = 0; i < result.length; i++) {
        for (int j = 0; j < result[0].length; j++) {
            System.out.print(result[i][j] + " ");
            if (i == j) {
                accuracy += result[i][j];
            }
        }
        System.out.println("");
    }

    // precision
    // NOTE(review): when a class is never predicted the division is 0/0 and
    // yields NaN; confirm this is acceptable for the report.
    for (int i = 0; i < precision.length; i++) {
        double sum = 0.0;
        for (int j = 0; j < result[0].length; j++) {
            sum += result[j][i];
        }
        precision[i] = result[i][i] / sum;
    }

    // recall
    for (int i = 0; i < recall.length; i++) {
        double sum = 0.0;
        for (int j = 0; j < result[0].length; j++) {
            sum += result[i][j];
        }
        recall[i] = result[i][i] / sum;
    }

    accuracy /= nbData;
    System.out.println("");
    System.out.println("accuracy: " + accuracy);
    System.out.println("precision: ");
    for (double p : precision) {
        System.out.println(p);
    }
    System.out.println("");
    System.out.println("recall: ");
    for (double r : recall)
        System.out.println(r);
    System.out.println("");
    System.out.println("-------------------------------------------------");

    // optional: print a predicted class label for every prediction instance
    if (predictPath != null) {
        System.out.println("predicting: " + predictPath);
        for (int i = 0; i < predictData.numInstances(); i++) {
            try {
                int idx = myAnn.predictClassIndex(myAnn.distributionForInstance(predictData.instance(i)));
                System.out.println("instance[" + (i) + "]: " + trainData.classAttribute().value(idx));
            } catch (Exception ex) {
                Logger.getLogger(Main.class.getName()).log(Level.SEVERE, null, ex);
            }
        }
        System.out.println("done");
    }
}

From source file:ANN.MultilayerPerceptron.java

public MultilayerPerceptron(Instances i, double rate, int itter, int numHidden) {
    learningRate = rate;/*from w  ww.  jav a2 s  .c  om*/
    listHidden = new ArrayList<>();

    for (int num = 0; num < numHidden + 1; num++) {
        listHidden.add(new Node(i.numAttributes()));
    }

    listOutput = new ArrayList<>();
    for (int num = 0; num < i.numClasses(); num++) {
        listOutput.add(new Node(listHidden.size()));
    }
    itteration = itter;
    listDoubleinstance = new double[i.numInstances()];
    for (int numIns = 0; numIns < i.numInstances(); numIns++) {
        listDoubleinstance[numIns] = i.instance(numIns).toDoubleArray()[i.classIndex()];
    }
}

From source file:ANN.MultiplePerceptron.java

public MultiplePerceptron(Instances i, int numNode, double rate) {
    listNodeHidden = new ArrayList<>();
    for (int num = 0; num < numNode + 1; num++) {
        listNodeHidden.add(new Node(i.numAttributes()));
    }/*  w  w w . j av  a2s .c  om*/

    listNodeOutput = new ArrayList<>();
    for (int num = 0; num < i.numClasses(); num++) {
        listNodeOutput.add(new Node(listNodeHidden.size()));
    }

    listDoubleinstance = new double[i.numInstances()];
    for (int numIns = 0; numIns < i.numInstances(); numIns++) {
        listDoubleinstance[numIns] = i.instance(numIns).toDoubleArray()[i.classIndex()];
    }
    learningRate = rate;
}

From source file:anndl.Anndl.java

/**
 * Parses an ANNDL model description from the given stream, loads its
 * training data, trains a Weka MultilayerPerceptron with the parsed
 * parameters and saves the trained model to "&lt;namamodel&gt;.model".
 *
 * @param input stream containing the ANNDL model definition
 * @throws Exception if parsing, data loading, training or saving fails
 */
private static void buildModel(InputStream input) throws Exception {
    // parse the ANNDL description into a ModelClassifier via the ANTLR visitor
    ANNDLLexer lexer = new ANNDLLexer(new ANTLRInputStream(input));
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    ANNDLParser parser = new ANNDLParser(tokens);
    ParseTree tree = parser.model();

    ModelVisitor visitor = new ModelVisitor();

    ModelClassifier themodel = (ModelClassifier) visitor.visit(tree);
    themodel.extracthidden();

    // load the training data; default the class attribute to the last column
    System.out.println("Membaca File Training...");
    DataSource trainingsoure = new DataSource(themodel.filetraining);
    Instances trainingdata = trainingsoure.getDataSet();
    if (trainingdata.classIndex() == -1) {
        trainingdata.setClassIndex(trainingdata.numAttributes() - 1);
    }

    // configure the Weka MLP from the parsed model parameters
    System.out.println("Melakukan konfigurasi ANN ... ");
    MultilayerPerceptron mlp = new MultilayerPerceptron();
    mlp.setLearningRate(themodel.learningrate);
    mlp.setMomentum(themodel.momentum);
    mlp.setTrainingTime(themodel.epoch);
    mlp.setHiddenLayers(themodel.hidden);

    // train and persist the model
    System.out.println("Melakukan Training data ...");
    mlp.buildClassifier(trainingdata);

    Debug.saveToFile(themodel.namamodel + ".model", mlp);

    System.out.println("\n~~ .. ~~ .. ~~ .. ~~ .. ~~ .. ~~ .. ~~ .. ~~ .. ~~ ..");
    System.out.println("Model ANN Berhasil Diciptakan dengan nama file : " + themodel.namamodel + ".model");
    System.out.println("~~ .. ~~ .. ~~ .. ~~ .. ~~ .. ~~ .. ~~ .. ~~ .. ~~ .. \n");

}

From source file:ANN_Single.SinglelayerPerceptron.java

public SinglelayerPerceptron(Instances i, double rate, int itter) {
    learningRate = rate;/*from  www . j a  v  a  2s. co m*/
    //        listOutput = new ArrayList<>();
    //        for (int num =0; num<i.numClasses(); num++) {
    //            listOutput.add(new Node(i.numAttributes()));
    //        }
    itteration = itter;
    listDoubleinstance = new double[i.numInstances()];
    for (int numIns = 0; numIns < i.numInstances(); numIns++) {
        listDoubleinstance[numIns] = i.instance(numIns).toDoubleArray()[i.classIndex()];
    }
}

From source file:ANN_single2.MultilayerPerceptron.java

@Override
public void buildClassifier(Instances i) {

    // convert each instance's class to its numeric index, cached by row
    listDoubleinstance = new double[i.numInstances()];
    for (int numIns = 0; numIns < i.numInstances(); numIns++) {
        listDoubleinstance[numIns] = i.instance(numIns).toDoubleArray()[i.classIndex()];
    }
    // NOTE(review): cnt is never used in the active code (only by the
    // disabled early-stopping block that used to follow the epoch loop).
    int cnt = 0;
    // NOTE(review): epoch count is hard-coded to 10000; MSE-based early
    // stopping is disabled, so training always runs all epochs.
    for (int itt = 0; itt < 10000; itt++) {
        for (int idxInstance = 0; idxInstance < i.numInstances(); idxInstance++) {
            // input vector: bias term followed by all non-class attribute values
            ArrayList<Double> listInput = new ArrayList<>();
            listInput.add(1.0); // bias
            for (int ins = 0; ins < i.get(idxInstance).numAttributes() - 1; ins++) {
                listInput.add(i.get(idxInstance).value(ins));
            }

            // hidden-layer outputs (index 0 is the bias, fixed at 1.0)
            ArrayList<Double> listHide = new ArrayList<>();
            listHide.add(1.0);
            for (int idxHidden = 1; idxHidden < listHidden.size(); idxHidden++) {
                output(listHidden, listInput, idxHidden);
                listHide.add(listHidden.get(idxHidden).getValue());
            }

            // output-layer outputs, fed by the hidden-layer values
            for (int idxOutput = 0; idxOutput < listOutput.size(); idxOutput++) {
                output(listOutput, listHide, idxOutput);
            }

            // compute errors, then update the weights
            calculateError(idxInstance);
            updateBobot(listInput);
        }
    }
    // report the residual error after training for this fold
    // NOTE(review): this accumulates only the errors left from the LAST
    // instance processed, repeated numInstances times — not a true total
    // error over the data set; confirm intended.
    double error = 0;
    fold++;
    for (int idx = 0; idx < i.numInstances(); idx++) {
        for (int idxOut = 0; idxOut < listOutput.size(); idxOut++) {
            error += Math.pow(listOutput.get(idxOut).getError(), 2) / 2;
        }
    }
    System.out.println("Fold " + fold);
    System.out.println("error " + error);

}

From source file:ANN_single2.SinglelayerPerceptron.java

/**
 * Trains the single-layer perceptron on the given data set: builds one
 * output node per class, caches each instance's class index, then runs up
 * to {@code itteration} epochs of forward pass / error computation / weight
 * update, stopping early once the accumulated error drops to the threshold.
 *
 * @param i the training instances (class attribute must be set)
 */
@Override
public void buildClassifier(Instances i) {
    // one output node per class, each holding a weight per attribute
    listOutput = new ArrayList<>();
    for (int idx = 0; idx < i.numClasses(); idx++) {
        listOutput.add(new Node(i.numAttributes()));
    }

    // convert each instance's class to its numeric index, cached by row
    listDoubleinstance = new double[i.numInstances()];
    for (int numIns = 0; numIns < i.numInstances(); numIns++) {
        listDoubleinstance[numIns] = i.instance(numIns).toDoubleArray()[i.classIndex()];
    }

    for (int iter = 0; iter < itteration; iter++) {
        double errorThres = 0;
        for (int idxInstance = 0; idxInstance < i.numInstances(); idxInstance++) {

            // input vector: bias term followed by all non-class attribute values
            ArrayList<Double> listInput = new ArrayList<>();
            listInput.add(1.0); // bias
            for (int idx = 0; idx < i.numAttributes() - 1; idx++) {
                listInput.add(i.get(idxInstance).value(idx));
            }

            // forward pass: sigmoid of the weighted sum for each output node
            for (int idxOut = 0; idxOut < listOutput.size(); idxOut++) {
                output(listInput, idxOut);
            }

            // compute errors, then update the weights
            calculateError(idxInstance);
            updateBobot(listInput);

        }
        // NOTE(review): this sums only the errors left over from the LAST
        // instance of the epoch, not the epoch's total error — confirm intended.
        for (int idxOut = 0; idxOut < listOutput.size(); idxOut++) {
            errorThres += Math.pow(listOutput.get(idxOut).getError(), 2) / 2;
        }
        if (errorThres <= threshold)
            break;
    }
}