Example usage for weka.core Instances numAttributes

List of usage examples for weka.core Instances numAttributes

Introduction

This page collects example usages of weka.core Instances numAttributes.

Prototype


public int numAttributes()

Document

Returns the number of attributes.
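
Note that the count covers every attribute, class attribute included, so the last attribute sits at index numAttributes() - 1; this is why nearly every example below passes that value to setClassIndex. A minimal sketch of the idiom, assuming a placeholder file name weather.arff:

import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class NumAttributesDemo {
    public static void main(String[] args) throws Exception {
        // load any ARFF file; "weather.arff" stands in for a real path
        Instances data = DataSource.read("weather.arff");

        // numAttributes() counts every column, the class column included
        System.out.println("attributes: " + data.numAttributes());

        // common idiom: declare the last attribute to be the class
        if (data.classIndex() == -1) {
            data.setClassIndex(data.numAttributes() - 1);
        }
    }
}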

Usage

From source file:algoritmogeneticocluster.Cromossomo.java

private void classifica() {
    //SMO classifier = new SMO();
    //HyperPipes classifier = new HyperPipes();
    IBk classifier = new IBk(5);
    BufferedReader datafile = readDataFile(inId + ".arff");

    Instances data;
    Evaluation eval;
    try {
        data = new Instances(datafile);
        data.setClassIndex(data.numAttributes() - 1);
        eval = new Evaluation(data);
        Random rand = new Random(1); // using seed = 1
        int folds = 10;
        eval.crossValidateModel(classifier, data, folds, rand);
        //this.fitness = eval.pctCorrect();
        //fitness = new BigDecimal(fitness).setScale(2, RoundingMode.HALF_UP).doubleValue(); // rounding to two decimal places
        pctAcerto = eval.pctCorrect();
        pctAcerto = new BigDecimal(pctAcerto).setScale(2, RoundingMode.HALF_UP).doubleValue();
        microAverage = getMicroAverage(eval, data);
        microAverage = new BigDecimal(microAverage).setScale(2, RoundingMode.HALF_UP).doubleValue();
        macroAverage = getMacroAverage(eval, data);
        macroAverage = new BigDecimal(macroAverage).setScale(2, RoundingMode.HALF_UP).doubleValue();

    } catch (Exception ex) {
        System.out.println("Erro ao tentar fazer a classificacao");
        Logger.getLogger(WekaSimulation.class.getName()).log(Level.SEVERE, null, ex);
    }

    switch (metodoFitness) {
    case 1:
        fitness = pctAcerto;
        break;
    case 2:
        fitness = microAverage;
        break;
    case 3:
        fitness = macroAverage;
        break;
    default:
        break;
    }

}

From source file:algoritmogeneticocluster.NewClass.java

public static void main(String[] args) throws Exception {
    BufferedReader datafile = readDataFile("tabela10.arff");

    Instances data = new Instances(datafile);
    data.setClassIndex(data.numAttributes() - 1);

    // Do 10-split cross validation
    Instances[][] split = crossValidationSplit(data, 10);

    // Separate split into training and testing arrays
    Instances[] trainingSplits = split[0];
    Instances[] testingSplits = split[1];

    // Use a set of classifiers
    Classifier[] models = { new SMO(), new J48(), // a decision tree
            new PART(), new DecisionTable(), //decision table majority classifier
            new DecisionStump() //one-level decision tree

    };

    // Run for each model
    for (int j = 0; j < models.length; j++) {

        // Collect every group of predictions for current model in a FastVector
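        // (note: FastVector is deprecated in recent Weka releases; a java.util.ArrayList serves the same role there)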
        FastVector predictions = new FastVector();

        // For each training-testing split pair, train and test the classifier
        for (int i = 0; i < trainingSplits.length; i++) {
            Evaluation validation = classify(models[j], trainingSplits[i], testingSplits[i]);

            predictions.appendElements(validation.predictions());

            // Uncomment to see the summary for each training-testing pair.
            //System.out.println(models[j].toString());
        }

        // Calculate overall accuracy of current classifier on all splits
        double accuracy = calculateAccuracy(predictions);

        // Print current classifier's name and accuracy in a complicated,
        // but nice-looking way.
        System.out.println("Accuracy of " + models[j].getClass().getSimpleName() + ": "
                + String.format("%.2f%%", accuracy) + "\n---------------------------------");
    }

}

From source file:algoritmogeneticocluster.WekaSimulation.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) {
    SMO classifier = new SMO();
    HyperPipes hy = new HyperPipes();
    //        classifier.buildClassifier(trainset);

    BufferedReader datafile = readDataFile("tabela10.arff");

    Instances data;
    Evaluation eval;
    try {
        data = new Instances(datafile);
        data.setClassIndex(data.numAttributes() - 1);
        eval = new Evaluation(data);
        Random rand = new Random(1); // using seed = 1
        int folds = 10;
        eval.crossValidateModel(classifier, data, folds, rand);
        System.out.println(eval.toString());
        System.out.println(eval.numInstances());
        System.out.println(eval.correct());
        System.out.println(eval.incorrect());
        System.out.println(eval.pctCorrect());
        System.out.println(eval.pctIncorrect());

    } catch (Exception ex) {
        Logger.getLogger(WekaSimulation.class.getName()).log(Level.SEVERE, null, ex);
    }

}

From source file:analysis.SilhouetteIndex.java

public double calculateIndex(SimpleKMeans sk, Instances inst, int c) throws Exception {
    //Map<Integer, Instances> clustermap = sk.clusterInstance;
    sk.setNumClusters(c);
    sk.buildClusterer(inst);
    EuclideanDistance ed = new EuclideanDistance();
    double avgSilhouetteOverAllPoints = 0.d;

    if (sk.getNumClusters() == 1) {
        // the index is not defined for k=1; it needs at least 2 clusters
        return Double.NaN;
    }

    for (int i = 0; i < inst.numInstances(); i++) {
        //for the current element get its cluster
        int currentcluster = sk.clusterInstance(inst.instance(i));
        //System.out.println(inst.instance(i).value(2));
        double[] current_attr = new double[inst.numAttributes()];
        double[] other_attr = new double[inst.numAttributes()];
        //get attributes of the current instance
        for (int attr = 0; attr < inst.numAttributes(); attr++) {
            current_attr[attr] = inst.instance(i).value(attr);
        }
        // int counter
        double[] distances = new double[sk.getNumClusters()];
        int[] counters = new int[sk.getNumClusters()];
        //System.out.println("distances: "+distances.length);
        double avgInClusterDist = 0, dist = 0;
        int countsamecluster = 0;
        distances[currentcluster] = Double.MAX_VALUE;
        for (int j = 0; j < inst.numInstances(); j++) {
            for (int attr = 0; attr < inst.numAttributes(); attr++) {
                other_attr[attr] = inst.instance(j).value(attr);
            }
            //get cluster number of j th element
            int clusternumber = sk.clusterInstance(inst.instance(j));
            //check if j and i in the same cluster
            if (clusternumber == currentcluster) {
                if (inst.instance(i) != inst.instance(j)) {
                    //calculate average dist to other elements in the cluster
                    //inst.

                    dist = ed.compute(current_attr, other_attr);
                    avgInClusterDist = avgInClusterDist + dist;
                    countsamecluster++;
                }
            } else {
                dist = ed.compute(current_attr, other_attr);
                distances[clusternumber] = distances[clusternumber] + dist;
                counters[clusternumber]++;
            }
        }
        //calculate value ai
        if (countsamecluster > 0) {
            avgInClusterDist = avgInClusterDist / countsamecluster; //this is value ai
        }
        //find average distances to other clusters
        for (int k = 0; k < distances.length; k++) {
            if (k != currentcluster) {
                distances[k] = distances[k] / counters[k];
            }
        }
        //Find the min value of average distance to other clusters
        double min = distances[0];
        for (int k = 1; k < distances.length; k++) {
            if (min > distances[k]) {
                min = distances[k];
            }
        }

        //si for current element:
        double si;
        // if we only have one element in our cluster it makes sense to set
        // si = 0
        if (countsamecluster == 1) {
            si = 0.0d;
        } else {
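            // standard silhouette formula: s(i) = (b(i) - a(i)) / max(a(i), b(i)),
            // where a(i) = avgInClusterDist and b(i) = min (nearest other cluster)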
            si = (min - avgInClusterDist) / Math.max(min, avgInClusterDist);
        }
        avgSilhouetteOverAllPoints = avgSilhouetteOverAllPoints + si;
    }
    //System.out.println(inst.numInstances());
    return avgSilhouetteOverAllPoints / inst.numInstances();

}

From source file:AnDE.wdAnDEonline.java

License:Open Source License

@Override
public void buildClassifier(Instances instances) throws Exception {

    // can classifier handle the data?
    getCapabilities().testWithFail(instances);

    // remove instances with missing class
    instances.deleteWithMissingClass();
    nInstances = instances.numInstances();
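    // numAttributes() counts the class attribute as well, hence the -1 for predictive attributes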
    nAttributes = instances.numAttributes() - 1;
    nc = instances.numClasses();

    probs = new double[nc];

    paramsPerAtt = new int[nAttributes];
    for (int u = 0; u < nAttributes; u++) {
        paramsPerAtt[u] = instances.attribute(u).numValues();
    }

    /*
     * Initialize structure array based on m_S
     */
    if (m_S.equalsIgnoreCase("A0DE")) {
        // A0DE
        numTuples = 0;
    } else if (m_S.equalsIgnoreCase("A1DE")) {
        // A1DE         
        numTuples = 1;
    } else if (m_S.equalsIgnoreCase("A2DE")) {
        // A2DE         
        numTuples = 2;
    }

    /* 
     * ----------------------------------------------------------------------------------------
     * Start Parameter Learning Process
     * ----------------------------------------------------------------------------------------
     */

    int scheme = 1;

    /*
     * ---------------------------------------------------------------------------------------------
     * Initialize data structures
     * ---------------------------------------------------------------------------------------------
     */

    scheme = plTechniques.MAP;

    logDComputer = LogDistributionComputerAnDE.getDistributionComputer(numTuples, scheme);

    dParameters_ = new wdAnDEParametersFlat(nAttributes, nc, nInstances, paramsPerAtt, scheme, numTuples,
            m_MVerb);

    if (m_MVerb)
        System.out.println("All data structures are initialized. Starting to estimate parameters.");

    if (nInstances > 0) {
        for (int i = 0; i < nInstances; i++) {
            Instance instance = instances.instance(i);
            dParameters_.updateFirstPass(instance);
        }
    }
}

From source file:ann.ANN.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) throws Exception {
    // TODO code application logic here
    Scanner sc = new Scanner(System.in);
    ANNOptions annOptions = new ANNOptions();
    String datasetURL;

    //        System.out.println("Topology");
    //        System.out.println("1. Perceptron Training Rule");
    //        System.out.println("2. Delta Rule - Batch");
    //        System.out.println("3. Delta Rule - Incremental");
    //        System.out.println("4. Multi Layer Perceptron");
    //        annOptions.topologyOpt = sc.nextInt();

    //        System.out.println("Initial Weight");
    //        System.out.println("1. Random");
    //        System.out.println("2. Given");
    //        annOptions.weightOpt = sc.nextInt();

    //        if(annOptions.topologyOpt == 4){ // Multi Layer Perceptron
    //            System.out.print("Hidden Layer: ");
    //            annOptions.hiddenLayer = sc.nextInt();
    //            for (int i = 0 ; i < annOptions.hiddenLayer ; i++) {
    //                System.out.print("Neuron in Layer " + i+1 + ": ");
    //                int nNeuron = sc.nextInt();
    //                annOptions.layerNeuron.add(nNeuron);
    //            }
    //            System.out.print("Momentum: ");
    //            annOptions.momentum = sc.nextDouble();
    //        }
    //        
    //        System.out.print("Learning Rate: ");
    //        annOptions.learningRate = sc.nextDouble();
    //        
    //        System.out.print("Threshold: ");
    //        annOptions.threshold = sc.nextDouble();
    //        
    //        System.out.print("MaxIteration: ");
    //        annOptions.maxIteration = sc.nextInt();
    //        
    //        System.out.println("Dataset URL: ");
    //        datasetURL = sc.next();

    datasetURL = "dataset/weather.nominal.arff";
    //        datasetURL = "dataset/weather.numeric.arff";
    //        datasetURL = "dataset/iris.arff";

    Instances data = loadDataset(datasetURL);

    Classifier model = null;
    data.setClassIndex(data.numAttributes() - 1);
    if (annOptions.topologyOpt < 4) { // Perceptron Training Rule
        annOptions.initWeightsSLP(data);
        annOptions.saveConfiguration(annOptions);
        try {
            SingleLayerPerceptron singleLayerPerceptron = new SingleLayerPerceptron();
            singleLayerPerceptron.buildClassifier(data);
            model = singleLayerPerceptron;
            crossValidation(model, data);
        } catch (Exception ex) {
            Logger.getLogger(ANN.class.getName()).log(Level.SEVERE, null, ex);
        }
    } else if (annOptions.topologyOpt == 4) { // Multi Layer Perceptron
        annOptions.initWeightsMLP(data);
        annOptions.saveConfiguration(annOptions);
        try {
            MultiLayerPerceptron multiLayerPerceptron = new MultiLayerPerceptron();
            multiLayerPerceptron.buildClassifier(data);
            model = multiLayerPerceptron;
            crossValidation(model, data);
        } catch (Exception ex) {
            Logger.getLogger(ANN.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
}

From source file:ann.ANN.java

public void classify(String data_address, Classifier model) {
    try {
        Instances test = ConverterUtils.DataSource.read(data_address);
        test.setClassIndex(test.numAttributes() - 1);
        System.out.println("====================================");
        System.out.println("=== Predictions on user test set ===");
        System.out.println("====================================");
        System.out.println("# - actual - predicted - distribution");
        for (int i = 0; i < test.numInstances(); i++) {
            double pred = model.classifyInstance(test.instance(i));
            double[] dist = model.distributionForInstance(test.instance(i));
            System.out.print((i + 1) + " - ");
            System.out.print(test.instance(i).toString(test.classIndex()) + " - ");
            System.out.print(test.classAttribute().value((int) pred) + " - ");
            System.out.println(Utils.arrayToString(dist));
        }
        System.out.println("\n");
    } catch (Exception ex) {
        System.out.println("Tidak berhasil memprediksi hasil\n");
    }
}

From source file:ann.ANNOptions.java

public void initWeightsSLP(Instances data) throws Exception {
    ntb.setInputFormat(data);
    data = new Instances(Filter.useFilter(data, ntb));

    //normalize filter
    normalize.setInputFormat(data);
    data = new Instances(Filter.useFilter(data, normalize));

    int nAttr = data.numAttributes();
    Scanner sc = new Scanner(System.in);
    int nOutput;
    if (data.numClasses() <= 2 && topologyOpt == 1) {
        nOutput = 1;
    } else {
        nOutput = data.numClasses();
    }

    for (int j = 0; j < nOutput; j++) {
        Neuron temp = new Neuron();
        if (weightOpt == 1) { // Random
            for (int i = 0; i < nAttr; i++) {
                Random random = new Random();
                temp.weights.add(random.nextDouble());
                //                    temp.weights.add(0.0);
            }
        } else { // Given
            System.out.println("Output-" + j);
            for (int i = 0; i < nAttr - 1; i++) {
                System.out.print("Weight-" + (i + 1) + ": ");
                temp.weights.add(sc.nextDouble());
            }
            System.out.print("Bias weight: ");
            temp.weights.add(sc.nextDouble());
        }

        output.add(temp);
    }
}

From source file:ann.ANNOptions.java

public void initWeightsMLP(Instances data) throws Exception {
    ntb.setInputFormat(data);
    data = new Instances(Filter.useFilter(data, ntb));

    //normalize filter
    normalize.setInputFormat(data);
    data = new Instances(Filter.useFilter(data, normalize));

    int nAttr = data.numAttributes();
    Scanner sc = new Scanner(System.in);

    int nOutput = data.numClasses();

    for (int i = 0; i < hiddenLayer; i++) {
        if (weightOpt == 2) {
            System.out.println("Layer-" + (i + 1));
        }
        List<Neuron> neuronLayer = new ArrayList<Neuron>();
        for (int j = 0; j < layerNeuron.get(i) + 1; j++) {
            if (weightOpt == 2) {
                System.out.println("Neuron-" + (j + 1));
            }
            Neuron neuron = new Neuron();
            if (i == 0) { // weight from input layer
                for (int k = 0; k < nAttr; k++) {
                    if (weightOpt == 1) { // random 
                        Random random = new Random();
                        neuron.weights.add(random.nextDouble());
                        //                            neuron.weights.add(0.0);
                    } else { // given
                        if (k < nAttr - 1) {
                            if (weightOpt == 2) {
                                System.out.print("Weight input-" + (k + 1) + ": ");
                            }
                        } else {
                            if (weightOpt == 2) {
                                System.out.print("Weight bias: ");
                            }
                        }
                        neuron.weights.add(sc.nextDouble());
                    }
                }
                neuronLayer.add(neuron);
            } else if (j < layerNeuron.get(i)) { // weight from hidden layer
                for (int k = 0; k < layerNeuron.get(i - 1) + 1; k++) { // layer neuron + 1, 1 for bias
                    if (weightOpt == 1) { // random 
                        Random random = new Random();
                        neuron.weights.add(random.nextDouble());
                        //                            neuron.weights.add(0.0);
                    } else { // given
                        if (k < layerNeuron.get(i - 1)) {
                            if (weightOpt == 2) {
                                System.out.print("Weight neuron-" + (k + 1) + ": ");
                            }
                        } else {
                            if (weightOpt == 2) {
                                System.out.print("Weight bias: ");
                            }
                        }
                        neuron.weights.add(sc.nextDouble());
                    }
                }
                neuronLayer.add(neuron);
            }

        }
        if (i != 0) {
            Neuron bias = new Neuron();
            neuronLayer.add(bias);
        }
        layer.add(neuronLayer);
    }

    //last hidden layer to output
    List<Neuron> neuronLayer = new ArrayList<Neuron>();
    for (int i = 0; i < nOutput; i++) {
        Neuron neuron = new Neuron();
        for (int j = 0; j < layerNeuron.get(layerNeuron.size() - 1) + 1; j++) {
            if (weightOpt == 1) { // random 
                Random random = new Random();
                //                    neuron.weights.add(random.nextDouble());
                neuron.weights.add(0.0);
            } else { // given
                if (j < layerNeuron.get(layerNeuron.size() - 1)) {
                    System.out.print("Weight neuron-" + (j + 1) + ": ");
                } else {
                    System.out.print("Bias: ");
                }
                neuron.weights.add(sc.nextDouble());
            }
        }
        neuronLayer.add(neuron);
    }
    layer.add(neuronLayer);
}

From source file:ann.Main.java

public static void main(String[] args) {
    String trainPath = null;
    String testPath = null;
    String weights = null;
    String predictPath = null;
    char activationFunction = MyANN.SIGMOID_FUNCTION, terminateCondition = MyANN.TERMINATE_MAX_ITERATION,
            learningRule = MyANN.PERCEPTRON_TRAINING_RULE, topology = MyANN.ONE_PERCEPTRON;
    double deltaMSE = 0.01;
    int maxIteration = 500;
    double learningRate = 0.3;
    double momentum = 0.2;
    int nbHidden = 0;
    int[] hiddenConf = null;
    boolean isCV = false;
    int numFolds = 10;
    boolean isEvaluate = false;

    if (args.length < 1 || args.length % 2 == 0) {

        System.out.println("Usage: ANN [-I <path>] [-t O|M] [-r P|B|D] [-h <layer>]"
                + "\n\t [-a N|G|T] [-L <rate>] [-m <momentum>] [-E D|I|B] [-d <mse>]"
                + "\n\t [-i <iteration>] [-e <path>|<n>] [-p <path>] <trainDataPath>");
        System.out.println("");
        System.out.println("-a N|G|T \t set activation function for OnePerceptron");
        System.out.println("\t\t   N=SIGN, G=SIGMOID, T=STEP");
        System.out.println("-d <mse> \t set MSE = <mse> for terminate condition");
        System.out.println("-E D|I|B \t\t set terminate condition, D=by MSE, I=by iteration");
        System.out.println("-e <path>|<n> \t set test data using <path> or cross-validation w/ folds = <n>");
        System.out.println("-h <layer> \t set hidden layer. <layer>=0 no hidden layer");
        System.out.println("\t\t   <layer>=2 => 1 hidden layer with 2 nodes");
        System.out.println("\t\t   <layer>=2,3 => 2 hidden layer with 2 nodes on first and 3 on second layer");
        System.out.println("-I <path> \t set initial weight from <path>");
        System.out.println("-i <iteration> \t set max iteration for terminate condition");
        System.out.println("-L <rate> \t set learning rate = <rate>");
        System.out.println("-m <momentum> \t set momentum = <momentum>");
        System.out.println("-p <path> \t set data to predict");
        System.out.println("-r P|B|D \t set learning rule for OnePerceptron ");
        System.out.println("\t\t   P=Perceptron training rule,B=Batch, D=DeltaRule");
        System.out.println("-t O|M \t\t set topology, O=OnePerceptron, M=MLP");
        return;
    } else {
        trainPath = args[args.length - 1];

        int i = 0;
        while (i < args.length - 1) {
            switch (args[i]) {
            case "-a":
                switch (args[i + 1]) {
                case "N":
                    activationFunction = MyANN.SIGN_FUNCTION;
                    break;
                case "G":
                    activationFunction = MyANN.SIGMOID_FUNCTION;
                    break;
                case "T":
                    activationFunction = MyANN.STEP_FUNCTION;
                    break;
                default:
                    break;
                }
                break;
            case "-d":
                deltaMSE = Double.valueOf(args[i + 1]);
                break;
            case "-E":
                switch (args[i + 1]) {
                case "D":
                    terminateCondition = MyANN.TERMINATE_MSE;
                    break;
                case "I":
                    terminateCondition = MyANN.TERMINATE_MAX_ITERATION;
                    break;
                case "B":
                    terminateCondition = MyANN.TERMINATE_BOTH;
                default:
                    break;
                }
                break;
            case "-e":
                if (args[i + 1].length() <= 2) {
                    numFolds = Integer.parseInt(args[i + 1]);
                    isCV = true;
                } else {
                    isEvaluate = true;
                    testPath = args[i + 1];
                }
                break;
            case "-h":
                String[] nbl = args[i + 1].split(",");
                if (nbl.length == 1) {
                    nbHidden = Integer.parseInt(nbl[0]);
                    if (nbHidden != 0) {
                        hiddenConf = new int[1];
                        hiddenConf[0] = nbHidden;
                        nbHidden = 1;
                    }
                } else {
                    nbHidden = nbl.length;
                    hiddenConf = new int[nbHidden];
                    for (int j = 0; j < nbHidden; j++) {
                        hiddenConf[j] = Integer.parseInt(nbl[j]);
                    }
                }
                break;
            case "-I":
                weights = args[i + 1];
                break;
            case "-i":
                maxIteration = Integer.parseInt(args[i + 1]);
                break;
            case "-L":
                learningRate = Double.parseDouble(args[i + 1]);
                break;
            case "-m":
                momentum = Double.parseDouble(args[i + 1]);
                break;
            case "-p":
                predictPath = args[i + 1];
                break;
            case "-r":
                switch (args[i + 1]) {
                case "P":
                    learningRule = MyANN.PERCEPTRON_TRAINING_RULE;
                    break;
                case "B":
                    learningRule = MyANN.BATCH_GRADIENT_DESCENT;
                    break;
                case "D":
                    learningRule = MyANN.DELTA_RULE;
                    break;
                default:
                    break;
                }
                break;
            case "-t":
                switch (args[i + 1]) {
                case "O":
                    topology = MyANN.ONE_PERCEPTRON;
                    break;
                case "M":
                    topology = MyANN.MULTILAYER_PERCEPTRON;
                    break;
                default:
                    break;
                }
                break;
            default:
                break;
            }
            i += 2;
        }
    }

    // prepare the data
    Instances trainData = null;
    Instances testData = null;
    Instances predictData = null;
    try {
        ConverterUtils.DataSource source = new ConverterUtils.DataSource(trainPath);
        trainData = source.getDataSet();
        if (trainData.classIndex() == -1) {
            trainData.setClassIndex(trainData.numAttributes() - 1);
        }
        if (testPath != null) {
            source = new ConverterUtils.DataSource(testPath);
            testData = source.getDataSet();
            if (testData.classIndex() == -1) {
                testData.setClassIndex(testData.numAttributes() - 1);
            }
        }
        if (predictPath != null) {
            source = new ConverterUtils.DataSource(predictPath);
            predictData = source.getDataSet();
            if (predictData.classIndex() == -1) {
                predictData.setClassIndex(predictData.numAttributes() - 1);
            }
        }
    } catch (Exception ex) {
        Logger.getLogger(Main.class.getName()).log(Level.SEVERE, null, ex);
    }

    // prepare the model and parameters
    MyANN myAnn = new MyANN();
    WeightParser wp = null;
    if (weights != null) {
        wp = new WeightParser(weights);
        myAnn.setInitialWeight(wp.weight);
    }
    myAnn.setActivationFunction(activationFunction);
    myAnn.setDeltaMSE(deltaMSE);
    myAnn.setLearningRate(learningRate);
    myAnn.setLearningRule(learningRule);
    myAnn.setMaxIteration(maxIteration);
    myAnn.setMomentum(momentum);
    myAnn.setTerminationCondition(terminateCondition);
    myAnn.setThreshold(momentum); // note: the momentum value doubles as the threshold here
    myAnn.setTopology(topology);

    int[] nbLayer = new int[2];
    if (nbHidden != 0) {
        nbLayer = new int[2 + nbHidden];
        for (int j = 1; j < nbLayer.length - 1; j++) {
            nbLayer[j] = hiddenConf[j - 1];
        }
    }
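    // input layer: one node per non-class attribute; output layer: one node per class value (1 for a numeric class)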
    nbLayer[0] = trainData.numAttributes() - 1;
    if (trainData.classAttribute().isNominal())
        nbLayer[nbLayer.length - 1] = trainData.classAttribute().numValues();
    else
        nbLayer[nbLayer.length - 1] = 1;

    myAnn.setNbLayers(nbLayer);

    // debug: check the configuration
    System.out.println("training data: " + trainPath);
    System.out.println("settings:");
    myAnn.printSetting();
    System.out.println("");

    // classification
    System.out.println("start classifiying...");
    try {
        myAnn.buildClassifier(trainData);
    } catch (Exception ex) {
        Logger.getLogger(Main.class.getName()).log(Level.SEVERE, null, ex);
    }
    myAnn.printSummary();
    System.out.println("done");

    System.out.println("-------------------------------------------------");

    System.out.print("evaluating ");
    int[][] result = null;
    int nbData = trainData.numInstances();
    if (isCV) {
        System.out.println("using " + numFolds + "-folds cross validation");
        result = myAnn.crossValidation(trainData, numFolds, new Random(1));
    } else if (isEvaluate) {
        System.out.println("using testData: " + testPath);
        result = myAnn.evaluate(testData);
        nbData = testData.numInstances();
    } else {
        System.out.println("using trainData");
        result = myAnn.evaluate(trainData);
    }
    System.out.println("");

    System.out.println("result:");

    double accuracy = 0.0; // a+d/total
    double[] precision = new double[result.length]; // a/a+c;   prec[i] = M[i,i] / sumj(M[j,i])
    double[] recall = new double[result[0].length]; // a/a+b;   rec[i] = M[i,i] / sumj(M[i,j])

    for (int i = 0; i < result.length; i++) {
        for (int j = 0; j < result[0].length; j++) {
            System.out.print(result[i][j] + " ");
            if (i == j) {
                accuracy += result[i][j];
            }
        }
        System.out.println("");
    }

    // precision
    for (int i = 0; i < precision.length; i++) {
        double sum = 0.0;
        for (int j = 0; j < result[0].length; j++) {
            sum += result[j][i];
        }
        precision[i] = result[i][i] / sum;
    }

    // recall
    for (int i = 0; i < recall.length; i++) {
        double sum = 0.0;
        for (int j = 0; j < result[0].length; j++) {
            sum += result[i][j];
        }
        recall[i] = result[i][i] / sum;
    }

    accuracy /= nbData;
    System.out.println("");
    System.out.println("accuracy: " + accuracy);
    System.out.println("precision: ");
    for (double p : precision) {
        System.out.println(p);
    }
    System.out.println("");
    System.out.println("recall: ");
    for (double r : recall)
        System.out.println(r);
    System.out.println("");
    System.out.println("-------------------------------------------------");

    if (predictPath != null) {
        System.out.println("predicting: " + predictPath);
        for (int i = 0; i < predictData.numInstances(); i++) {
            try {
                int idx = myAnn.predictClassIndex(myAnn.distributionForInstance(predictData.instance(i)));
                System.out.println("instance[" + (i) + "]: " + trainData.classAttribute().value(idx));
            } catch (Exception ex) {
                Logger.getLogger(Main.class.getName()).log(Level.SEVERE, null, ex);
            }
        }
        System.out.println("done");
    }
    /*
    try {
    File file = new File("/media/yusuf/5652859E52858389/Data/Kuliah/Semester 7/ML/WekaMiddle/weather.nominal.arff");
    File unlabel = new File("/media/yusuf/5652859E52858389/Data/Kuliah/Semester 7/ML/WekaMiddle/weather.nominal.unlabeled.arff");
    Instances data, test;
    ConverterUtils.DataSource source = new ConverterUtils.DataSource(file.getPath());
    data = source.getDataSet();
    if (data.classIndex() == -1) {
        data.setClassIndex(data.numAttributes() - 1);
    }
    source = new ConverterUtils.DataSource(unlabel.getPath());
    test = source.getDataSet();
    if (test.classIndex() == -1) {
        test.setClassIndex(data.numAttributes() - 1);
    }
            
    WeightParser wp = new WeightParser("/media/yusuf/5652859E52858389/Data/Kuliah/Semester 7/ML/khaidzir_myANN/initial.weight");
    MyANN myANN = new MyANN();
    int[] nbLayers = {4, 3, 2};
    myANN.setNbLayers(nbLayers);
    myANN.setDeltaMSE(0.001);
    //myANN.setMomentum(0.2);
    myANN.setLearningRate(0.1);
    myANN.setTopology(MyANN.MULTILAYER_PERCEPTRON);
    myANN.setLearningRule(MyANN.PERCEPTRON_TRAINING_RULE);
    myANN.setActivationFunction(MyANN.SIGMOID_FUNCTION);
    myANN.setMaxIteration(10000);
    myANN.setTerminationCondition(MyANN.TERMINATE_MAX_ITERATION);
    //myANN.setInitialWeight(wp.weight);
            
            
    myANN.buildClassifier(data);
    int[][] ev = myANN.evaluate(data);
    for (int[] ev1 : ev) {
        for (int ev2 : ev1) {
            System.out.print(ev2+", ");
        }
        System.out.println("");
    }
    System.out.println("");
    //ev = myANN.crossValidation(data, 10, new Random(1));
    for (int[] ev1 : ev) {
        for (int ev2 : ev1) {
            System.out.print(ev2+", ");
        }
        System.out.println("");
    }
    System.out.println("");
            
    /*
    myANN.buildClassifier(data);
    int[][] cm = myANN.evaluate(data);
    double accuracy = 0.0;      // a+d/total
    double[] precision = new double[cm.length];     // a/a+c;   prec[i] = M[i,i] / sumj(M[j,i])
    double[] recall = new double[cm[0].length];        // a/a+b;   rec[i] = M[i,i] / sumj(M[i,j])
            
    for (int i = 0; i < cm.length; i++) {
        for (int j = 0; j < cm[0].length; j++) {
            System.out.print(cm[i][j] + " ");
            if (i==j) {
                accuracy += cm[i][j];
            }
        }
        System.out.println("");
    }
            
    // precision
    for(int i = 0; i < precision.length; i++) {
        double sum = 0.0;
        for (int j = 0; j < cm[0].length; j++) {
            sum += cm[j][i];
        }
        precision[i] = cm[i][i] / sum;
    }
            
    // recall
    for(int i = 0; i < recall.length; i++) {
        double sum = 0.0;
        for (int j = 0; j < cm[0].length; j++) {
            sum += cm[i][j];
        }
        recall[i] = cm[i][i] / sum;
    }
            
    accuracy /= data.numInstances();
    System.out.println("accuracy: "+accuracy);
    System.out.println("precision: ");
    for(double p : precision) {
        System.out.print(p+", ");
    }
    System.out.println("");
    System.out.println("recall: ");
    for (double r : recall) System.out.print(r+", ");
    System.out.println("");
            
    } catch (Exception ex) {
    Logger.getLogger(Main.class.getName()).log(Level.SEVERE, null, ex);
    }
    */
}