ffnn.FFNNTubesAI.java Source code

Introduction

Here is the source code for ffnn.FFNNTubesAI.java, a feed-forward neural network (FFNN) classifier that extends Weka's AbstractClassifier and uses Apache Commons Math matrices for its weights and biases.

Source

/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package ffnn;

import static java.lang.Math.exp;
import static java.lang.Math.floor;
import static java.lang.Math.random;
import java.util.Scanner;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.commons.math3.linear.BlockRealMatrix;
import org.apache.commons.math3.linear.RealMatrix;
import weka.classifiers.AbstractClassifier;
import weka.classifiers.Evaluation;
import weka.core.Capabilities;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.SerializationHelper;
import weka.filters.Filter;
import weka.filters.unsupervised.attribute.Add;
import weka.filters.unsupervised.attribute.NominalToBinary;

/**
 *
 * @author MaximaXL
 */
public class FFNNTubesAI extends AbstractClassifier {

    /**
     * Network weights, biases, layer sizes, and learning rate.
     */
    public RealMatrix weight1; // weights from the input layer to the hidden layer
    public RealMatrix weight2; // weights from the hidden layer to the output layer
    public RealMatrix bias2; // bias terms for the output layer
    public int input_layer; // number of nodes in each layer
    public int hidden_layer;
    public int output_layer;
    public double learning_rate = 0.01;
    public static final long serialVersionUID = -6079756312492915625L;

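    // Converts nominal attributes to numeric indicator attributes with Weka's
    // NominalToBinary filter, so instance values can be fed into the matrices below.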
    public static Instance filterNominalNumeric(Instance i) throws Exception {
        NominalToBinary filter = new NominalToBinary();
        filter.setInputFormat(i.dataset());
        filter.input(i);
        return filter.output();
    }

    public static Instances filterNominalNumeric(Instances i) {
        NominalToBinary filter = new NominalToBinary();
        Instances temp_instances = new Instances(i);
        if (temp_instances.classIndex() > -1) { // if a class index is set
            temp_instances.setClassIndex(-1); // unset it so the class attribute is converted as well
        }
        try {
            filter.setInputFormat(temp_instances);
            temp_instances = Filter.useFilter(temp_instances, filter);
        } catch (Exception ex) {
            Logger.getLogger(FFNNTubesAI.class.getName()).log(Level.SEVERE, null, ex);
        }
        return temp_instances;
    }

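    // Logistic (sigmoid) activation: sig(x) = 1 / (1 + e^(-x)).
    // Its derivative, sig(x) * (1 - sig(x)), appears as the o * (1 - o) factor
    // in the error terms computed during backpropagation below.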
    public double sig(double x) {
        return 1 / (1 + exp(-x));
    }

    public static void printMatrix(RealMatrix m, int row, int col) {
        for (int j = 0; j < row; j++) {
            for (int i = 0; i < col; i++) {
                System.out.print(m.getEntry(j, i) + " ");
            }
            System.out.println();
        }
        System.out.println();
    }

    @Override
    public void buildClassifier(Instances i) throws Exception {
        Instance temp_instance = null;
        RealMatrix error_output;
        RealMatrix error_hidden;
        RealMatrix input_matrix;
        RealMatrix hidden_matrix;
        RealMatrix output_matrix;
        Instances temp_instances;
        int r = 0;
        Scanner scan = new Scanner(System.in);

        output_layer = i.numDistinctValues(i.classIndex()); // number of distinct class values (e.g. 3)
        temp_instances = filterNominalNumeric(i);

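        // A two-valued class yields only a single indicator column after NominalToBinary,
        // so add a second "dummy" column and one-hot encode the two class values.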
        if (output_layer == 2) {
            Add filter = new Add();
            filter.setAttributeIndex("last");
            filter.setAttributeName("dummy");
            filter.setInputFormat(temp_instances);
            temp_instances = Filter.useFilter(temp_instances, filter);
            //            System.out.println(temp_instances);
            for (int j = 0; j < temp_instances.numInstances(); j++) {
                if (temp_instances.instance(j).value(temp_instances.numAttributes() - 2) == 0) {
                    temp_instances.instance(j).setValue(temp_instances.numAttributes() - 2, 1);
                    temp_instances.instance(j).setValue(temp_instances.numAttributes() - 1, 0);
                } else {
                    temp_instances.instance(j).setValue(temp_instances.numAttributes() - 2, 0);
                    temp_instances.instance(j).setValue(temp_instances.numAttributes() - 1, 1);
                }
            }
        }

        //temp_instances.randomize(temp_instances.getRandomNumberGenerator(1));
        //System.out.println(temp_instances);
        input_layer = temp_instances.numAttributes() - output_layer; // remaining attributes are the network inputs (e.g. 4)
        hidden_layer = 0;
        while (hidden_layer < 1) {
            System.out.print("Hidden layer : ");
            hidden_layer = scan.nextInt();
        }
        int init_hidden = hidden_layer;
        error_hidden = new BlockRealMatrix(1, hidden_layer);
        error_output = new BlockRealMatrix(1, output_layer);
        input_matrix = new BlockRealMatrix(1, input_layer + 1); // extra column for the bias input

        buildWeight(input_layer, hidden_layer, output_layer);

        long last_time = System.nanoTime();
        double last_error_rate = 1;
        double best_error_rate = 1;

        double last_update = System.nanoTime();

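        // Training scheme: online backpropagation over all instances. Roughly every
        // 2 seconds the model is evaluated on the training data; if the error rate
        // improves, the current model is serialized to disk. After 30 seconds without
        // improvement, the learning rate and hidden layer size are re-randomized and
        // training restarts, up to 10 restarts in total.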
        // fixed number of training iterations (an error-threshold stopping rule is kept below for reference)
        //        for( long itr = 0; last_error_rate > 0.001; ++ itr ){
        for (long itr = 0; itr < 50000; ++itr) {
            if (r == 10) { // stop after 10 random restarts
                break;
            }
            long time = System.nanoTime();
            if (time - last_time > 2000000000) { // evaluate roughly every 2 seconds
                Evaluation eval = new Evaluation(i);
                eval.evaluateModel(this, i);

                double accry = eval.correct() / eval.numInstances();
                if (eval.errorRate() < last_error_rate) {
                    last_update = System.nanoTime();
                    if (eval.errorRate() < best_error_rate)
                        SerializationHelper.write(accry + "-" + time + ".model", this);
                }

                if (accry > 0)
                    last_error_rate = eval.errorRate();

                // restart after 30 seconds without improvement
                if (time - last_update > 30000000000L) {
                    last_update = System.nanoTime();
                    learning_rate = random() * 0.05;
                    hidden_layer = (int) (10 + floor(random() * 15));
                    hidden_layer = (int) floor((hidden_layer / 25.0) * init_hidden); // 25.0 avoids integer division truncating the scale factor to 0
                    if (hidden_layer == 0) {
                        hidden_layer = 1;
                    }
                    itr = 0;
                    System.out.println("RESTART " + learning_rate + " " + hidden_layer);
                    buildWeight(input_layer, hidden_layer, output_layer);
                    r++;
                }

                System.out.println(accry + " " + itr);
                last_time = time;
            }

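            // One pass of online (per-instance) backpropagation: forward-propagate each
            // instance, compute the output and hidden deltas, and update the weights
            // and output biases immediately.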
            for (int j = 0; j < temp_instances.numInstances(); j++) {
                // forward pass
                temp_instance = temp_instances.instance(j);

                for (int k = 0; k < input_layer; k++) {
                    input_matrix.setEntry(0, k, temp_instance.value(k));
                }
                input_matrix.setEntry(0, input_layer, 1.0); // bias

                hidden_matrix = input_matrix.multiply(weight1);
                for (int y = 0; y < hidden_layer; ++y) {
                    hidden_matrix.setEntry(0, y, sig(hidden_matrix.getEntry(0, y)));
                }

                output_matrix = hidden_matrix.multiply(weight2).add(bias2);
                for (int y = 0; y < output_layer; ++y) {
                    output_matrix.setEntry(0, y, sig(output_matrix.getEntry(0, y)));
                }

                // backward pass

                // output (layer 2) error: delta = output * (1 - output) * (target - output)
                double total_err = 0;
                for (int k = 0; k < output_layer; k++) {
                    double o = output_matrix.getEntry(0, k);
                    double t = temp_instance.value(input_layer + k);
                    double err = o * (1 - o) * (t - o);
                    total_err += err * err;
                    error_output.setEntry(0, k, err);
                }

                // back propagation layer 2
                for (int y = 0; y < hidden_layer; y++) {
                    for (int x = 0; x < output_layer; ++x) {
                        double wold = weight2.getEntry(y, x);
                        double correction = learning_rate * error_output.getEntry(0, x)
                                * hidden_matrix.getEntry(0, y);
                        weight2.setEntry(y, x, wold + correction);
                    }
                }

                for (int x = 0; x < output_layer; ++x) {
                    double correction = learning_rate * error_output.getEntry(0, x); // the bias input is treated as 1
                    bias2.setEntry(0, x, bias2.getEntry(0, x) + correction);
                }

                // error layer 1
                for (int k = 0; k < hidden_layer; ++k) {
                    double o = hidden_matrix.getEntry(0, k);
                    double t = 0;
                    for (int x = 0; x < output_layer; ++x) {
                        t += error_output.getEntry(0, x) * weight2.getEntry(k, x);
                    }
                    double err = o * (1 - o) * t;
                    error_hidden.setEntry(0, k, err);
                }

                // back propagation layer 1
                for (int y = 0; y < input_layer + 1; ++y) {
                    for (int x = 0; x < hidden_layer; ++x) {
                        double wold = weight1.getEntry(y, x);
                        double correction = learning_rate * error_hidden.getEntry(0, x)
                                * input_matrix.getEntry(0, y);
                        weight1.setEntry(y, x, wold + correction);
                    }
                }
            }
        }
    }

    /**
     * Initializes weight1, weight2, and bias2 with uniform random values in [0, 1).
     *
     * @param input_layer  number of input nodes (one extra bias row is added to weight1)
     * @param hidden_layer number of hidden nodes
     * @param output_layer number of output nodes
     */
    public void buildWeight(int input_layer, int hidden_layer, int output_layer) {
        weight1 = new BlockRealMatrix(input_layer + 1, hidden_layer);
        weight2 = new BlockRealMatrix(hidden_layer, output_layer);
        bias2 = new BlockRealMatrix(1, output_layer);
        for (int y = 0; y < input_layer + 1; ++y)
            for (int x = 0; x < hidden_layer; ++x)
                weight1.setEntry(y, x, random());
        for (int y = 0; y < hidden_layer; ++y)
            for (int x = 0; x < output_layer; ++x)
                weight2.setEntry(y, x, random());
        for (int x = 0; x < output_layer; ++x)
            bias2.setEntry(0, x, random());
    }

    @Override
    public double classifyInstance(Instance instance) throws Exception {
        throw new UnsupportedOperationException("Not supported yet."); // prediction is handled through distributionForInstance
    }

    @Override
    public double[] distributionForInstance(Instance instance) throws Exception {
        RealMatrix input_matrix = new BlockRealMatrix(1, input_layer + 1);

        instance = filterNominalNumeric(instance);

        for (int k = 0; k < input_layer; k++) {
            input_matrix.setEntry(0, k, instance.value(k));
        }
        input_matrix.setEntry(0, input_layer, 1.0); // bias

        RealMatrix hidden_matrix = input_matrix.multiply(weight1);
        for (int y = 0; y < hidden_layer; ++y) {
            hidden_matrix.setEntry(0, y, sig(hidden_matrix.getEntry(0, y)));
        }
        RealMatrix output_matrix = hidden_matrix.multiply(weight2).add(bias2);
        for (int y = 0; y < output_layer; ++y) {
            output_matrix.setEntry(0, y, sig(output_matrix.getEntry(0, y)));
        }
        double[][] m = output_matrix.getData();

        return m[0];
    }

    @Override
    public Capabilities getCapabilities() {
        throw new UnsupportedOperationException("Not supported yet.");
    }

    public static void main(String[] args) {

    }

}
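
Example usage

The sketch below shows one way this classifier could be trained and queried through the standard Weka API. It is illustrative only: the driver class FFNNDemo and the dataset path iris.arff are assumptions, not part of the original project, and buildClassifier will still prompt on standard input for the hidden layer size.

package ffnn;

import weka.classifiers.Evaluation;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class FFNNDemo {

    public static void main(String[] args) throws Exception {
        // Load a dataset and mark the last attribute as the class (path is hypothetical).
        Instances data = DataSource.read("iris.arff");
        data.setClassIndex(data.numAttributes() - 1);

        // Train the network; the hidden layer size is read from standard input.
        FFNNTubesAI classifier = new FFNNTubesAI();
        classifier.buildClassifier(data);

        // Evaluate on the training data, mirroring the periodic check inside buildClassifier.
        Evaluation eval = new Evaluation(data);
        eval.evaluateModel(classifier, data);
        System.out.println(eval.toSummaryString());

        // Per-class output activations for the first instance.
        double[] dist = classifier.distributionForInstance(data.instance(0));
        for (double d : dist) {
            System.out.print(d + " ");
        }
        System.out.println();
    }
}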