Example usage for weka.core Instances numInstances

List of usage examples for weka.core Instances numInstances

Introduction

In this page you can find the example usage for weka.core Instances numInstances.

Prototype


public int numInstances()

Source Link

Document

Returns the number of instances in the dataset.

Usage

From source file:ffnn.FFNNTubesAI.java

/**
 * Trains this feed-forward neural network on dataset {@code i} using on-line
 * (per-instance) back-propagation with a sigmoid activation.
 *
 * <p>Side effects: reads the hidden-layer size interactively from stdin via
 * {@link Scanner} (NOTE(review): interactive I/O inside buildClassifier blocks
 * automated use — consider an option/setter instead), periodically evaluates
 * on the training data, and serializes improved models to
 * "&lt;accuracy&gt;-&lt;nanotime&gt;.model".
 *
 * @param i training instances; a binary class is expanded to two output
 *          columns (one-hot) via an added "dummy" attribute
 * @throws Exception if filtering, evaluation or serialization fails
 */
@Override
public void buildClassifier(Instances i) throws Exception {
    Instance temp_instance = null;
    RealMatrix error_output;
    RealMatrix error_hidden;
    RealMatrix input_matrix;
    RealMatrix hidden_matrix;
    RealMatrix output_matrix;
    Instances temp_instances;
    int r = 0; // number of restarts so far (training aborts after 10)
    Scanner scan = new Scanner(System.in);

    // one output node per distinct class value
    output_layer = i.numDistinctValues(i.classIndex());
    temp_instances = filterNominalNumeric(i);

    if (output_layer == 2) {
        // Binary class: append a "dummy" attribute and rewrite the last two
        // columns as a one-hot encoding (1,0 -> class 0; 0,1 -> class 1).
        Add filter = new Add();
        filter.setAttributeIndex("last");
        filter.setAttributeName("dummy");
        filter.setInputFormat(temp_instances);
        temp_instances = Filter.useFilter(temp_instances, filter);
        for (int j = 0; j < temp_instances.numInstances(); j++) {
            if (temp_instances.instance(j).value(temp_instances.numAttributes() - 2) == 0) {
                temp_instances.instance(j).setValue(temp_instances.numAttributes() - 2, 1);
                temp_instances.instance(j).setValue(temp_instances.numAttributes() - 1, 0);
            } else {
                temp_instances.instance(j).setValue(temp_instances.numAttributes() - 2, 0);
                temp_instances.instance(j).setValue(temp_instances.numAttributes() - 1, 1);
            }
        }
    }

    // all non-output columns feed the input layer
    input_layer = temp_instances.numAttributes() - output_layer;
    hidden_layer = 0;
    while (hidden_layer < 1) {
        System.out.print("Hidden layer : ");
        hidden_layer = scan.nextInt();
    }
    int init_hidden = hidden_layer;
    error_hidden = new BlockRealMatrix(1, hidden_layer);
    error_output = new BlockRealMatrix(1, output_layer);
    input_matrix = new BlockRealMatrix(1, input_layer + 1); // +1 for the bias input

    buildWeight(input_layer, hidden_layer, output_layer);

    long last_time = System.nanoTime();
    double last_error_rate = 1;
    double best_error_rate = 1;

    double last_update = System.nanoTime();

    // fixed iteration budget (an earlier variant looped until error < 0.001)
    for (long itr = 0; itr < 50000; ++itr) {
        if (r == 10) {
            break; // too many restarts -- give up
        }
        long time = System.nanoTime();
        // every ~2 s (2e9 ns) evaluate on the training set, checkpoint, maybe restart
        if (time - last_time > 2000000000) {
            Evaluation eval = new Evaluation(i);
            eval.evaluateModel(this, i);

            double accry = eval.correct() / eval.numInstances();
            if (eval.errorRate() < last_error_rate) {
                last_update = System.nanoTime();
                if (eval.errorRate() < best_error_rate)
                    SerializationHelper.write(accry + "-" + time + ".model", this);
            }

            if (accry > 0)
                last_error_rate = eval.errorRate();

            // Restart with fresh random weights after 30 s (3e10 ns) without
            // improvement. NOTE(review): the original comment said "2 minute"
            // but the threshold checked here is 30 s.
            if (time - last_update > 30000000000L) {
                last_update = System.nanoTime();
                learning_rate = random() * 0.05;
                hidden_layer = (int) (10 + floor(random() * 15));
                // NOTE(review): hidden_layer / 25 is INTEGER division, so it is 0
                // whenever hidden_layer < 25 and the size usually collapses to 1 below.
                hidden_layer = (int) floor((hidden_layer / 25) * init_hidden);
                if (hidden_layer == 0) {
                    hidden_layer = 1;
                }
                itr = 0;
                System.out.println("RESTART " + learning_rate + " " + hidden_layer);
                buildWeight(input_layer, hidden_layer, output_layer);
                r++;
            }

            System.out.println(accry + " " + itr);
            last_time = time;
        }

        for (int j = 0; j < temp_instances.numInstances(); j++) {
            // forward pass
            temp_instance = temp_instances.instance(j);

            for (int k = 0; k < input_layer; k++) {
                input_matrix.setEntry(0, k, temp_instance.value(k));
            }
            input_matrix.setEntry(0, input_layer, 1.0); // bias input

            hidden_matrix = input_matrix.multiply(weight1);
            for (int y = 0; y < hidden_layer; ++y) {
                hidden_matrix.setEntry(0, y, sig(hidden_matrix.getEntry(0, y)));
            }

            output_matrix = hidden_matrix.multiply(weight2).add(bias2);
            for (int y = 0; y < output_layer; ++y) {
                output_matrix.setEntry(0, y, sig(output_matrix.getEntry(0, y)));
            }

            // backward pass (back-propagation)

            // output-layer deltas: err = o * (1 - o) * (target - o)
            double total_err = 0;
            for (int k = 0; k < output_layer; k++) {
                double o = output_matrix.getEntry(0, k);
                double t = temp_instance.value(input_layer + k);
                double err = o * (1 - o) * (t - o);
                total_err += err * err; // NOTE(review): accumulated but never read
                error_output.setEntry(0, k, err);
            }

            // update hidden->output weights
            for (int y = 0; y < hidden_layer; y++) {
                for (int x = 0; x < output_layer; ++x) {
                    double wold = weight2.getEntry(y, x);
                    double correction = learning_rate * error_output.getEntry(0, x)
                            * hidden_matrix.getEntry(0, y);
                    weight2.setEntry(y, x, wold + correction);
                }
            }

            // update output-layer biases (bias input treated as 1)
            for (int x = 0; x < output_layer; ++x) {
                double correction = learning_rate * error_output.getEntry(0, x);
                bias2.setEntry(0, x, bias2.getEntry(0, x) + correction);
            }

            // hidden-layer deltas, back-propagated through weight2
            for (int k = 0; k < hidden_layer; ++k) {
                double o = hidden_matrix.getEntry(0, k);
                double t = 0;
                for (int x = 0; x < output_layer; ++x) {
                    t += error_output.getEntry(0, x) * weight2.getEntry(k, x);
                }
                double err = o * (1 - o) * t;
                error_hidden.setEntry(0, k, err);
            }

            // update input->hidden weights (includes the bias row)
            for (int y = 0; y < input_layer + 1; ++y) {
                for (int x = 0; x < hidden_layer; ++x) {
                    double wold = weight1.getEntry(y, x);
                    double correction = learning_rate * error_hidden.getEntry(0, x)
                            * input_matrix.getEntry(0, y);
                    weight1.setEntry(y, x, wold + correction);
                }
            }
        }
    }
}

From source file:ffnn.MultilayerPerceptron.java

License:Open Source License

/**
 * Records whether the class attribute is numeric (m_numeric) and, when
 * attribute normalization is enabled, rescales every non-class attribute of
 * {@code inst} into roughly [-1, 1] around its midpoint.
 *
 * <p>The per-attribute half-range and midpoint are always computed and stored
 * in m_attributeRanges / m_attributeBases, regardless of the normalize flag.
 *
 * @param inst the instances to inspect (and possibly rewrite in place); may be null
 * @return the same {@code inst} reference, with normalized copies of each
 *         instance substituted when normalization is enabled
 * @throws Exception declared for API compatibility
 */
private Instances setClassType(Instances inst) throws Exception {
    if (inst == null) {
        return inst;
    }

    // Pass 1: per-attribute min/max over the non-missing values,
    // stored as half-range and midpoint.
    m_attributeRanges = new double[inst.numAttributes()];
    m_attributeBases = new double[inst.numAttributes()];
    for (int att = 0; att < inst.numAttributes(); att++) {
        double lo = Double.POSITIVE_INFINITY;
        double hi = Double.NEGATIVE_INFINITY;
        for (int row = 0; row < inst.numInstances(); row++) {
            if (inst.instance(row).isMissing(att)) {
                continue;
            }
            double v = inst.instance(row).value(att);
            lo = Math.min(lo, v);
            hi = Math.max(hi, v);
        }
        m_attributeRanges[att] = (hi - lo) / 2;
        m_attributeBases[att] = (hi + lo) / 2;
    }

    // Pass 2: optionally replace each instance with a centered/scaled copy.
    // The class column is copied through untouched.
    if (m_normalizeAttributes) {
        for (int row = 0; row < inst.numInstances(); row++) {
            Instance current = inst.instance(row);
            double[] values = new double[inst.numAttributes()];
            for (int att = 0; att < inst.numAttributes(); att++) {
                if (att == inst.classIndex()) {
                    values[att] = current.value(att);
                } else if (m_attributeRanges[att] != 0) {
                    values[att] = (current.value(att) - m_attributeBases[att]) / m_attributeRanges[att];
                } else {
                    // constant attribute: just center it
                    values[att] = current.value(att) - m_attributeBases[att];
                }
            }
            inst.set(row, new DenseInstance(current.weight(), values));
        }
    }

    m_numeric = inst.classAttribute().isNumeric();
    return inst;
}

From source file:FFNN.MultiplePerceptron.java

/**
 * Builds the network structure for a multilayer perceptron.
 *
 * @param itt       number of training iterations to run
 * @param learn     learning rate
 * @param numHLayer number of hidden-layer neurons (one extra is added as bias)
 * @param i         dataset used to size the layers and cache class values
 */
public MultiplePerceptron(int itt, double learn, int numHLayer, Instances i) {
    itteration = itt;
    learningRate = learn;
    numHiddenLayer = numHLayer;

    // hidden layer: numHiddenLayer neurons plus one bias neuron,
    // each fed by every attribute
    listNodeHidden = new ArrayList<>();
    for (int h = 0; h <= numHiddenLayer; h++) {
        listNodeHidden.add(new Node(i.numAttributes()));
    }

    // output layer: one neuron per class, fed by every hidden neuron
    listNodeOutput = new ArrayList<>();
    for (int c = 0; c < i.numClasses(); c++) {
        listNodeOutput.add(new Node(listNodeHidden.size()));
    }

    target = new ArrayList<>();

    // cache each instance's class value as a double
    instancesToDouble = new double[i.numInstances()];
    for (int row = 0; row < i.numInstances(); row++) {
        instancesToDouble[row] = i.instance(row).toDoubleArray()[i.classIndex()];
    }
}

From source file:FFNN.MultiplePerceptron.java

/**
 * Trains the perceptron: for each of {@code itteration} epochs, runs a
 * forward pass per instance (hidden layer then output layer), then delegates
 * to calculateError / updateBobot for the backward step. Finally dumps the
 * learned values, errors and weights of both layers to stdout.
 *
 * @param i the training instances
 */
@Override
public void buildClassifier(Instances i) {
    for (int epoch = 0; epoch < itteration; epoch++) {
        for (int row = 0; row < i.numInstances(); row++) {
            // input vector: 1.0 bias followed by every non-class attribute value
            ArrayList<Double> inputs = new ArrayList<>();
            inputs.add(1.0);
            for (int att = 0; att < i.numAttributes() - 1; att++) {
                inputs.add(i.get(row).value(att));
            }

            // forward pass through the hidden layer; node 0 is the bias
            // neuron whose output stays fixed at 1.0
            ArrayList<Double> hiddenOutputs = new ArrayList<>();
            hiddenOutputs.add(1.0);
            listNodeHidden.get(0).setValue(1.0);
            for (int h = 1; h < listNodeHidden.size(); h++) {
                double value = listNodeHidden.get(h).output(inputs);
                listNodeHidden.get(h).setValue(value);
                hiddenOutputs.add(value);
            }

            // forward pass through the output layer
            for (int o = 0; o < listNodeOutput.size(); o++) {
                listNodeOutput.get(o).setValue(listNodeOutput.get(o).output(hiddenOutputs));
            }

            // backward step
            calculateError(row);
            updateBobot(i.instance(row));
        }
    }

    // debug dump of the trained network
    for (int idx = 0; idx < listNodeHidden.size(); idx++) {
        System.out.println("Hidden value " + listNodeHidden.get(idx).getValue());
        System.out.println("Hidden error " + listNodeHidden.get(idx).getError());
        for (int w = 0; w < listNodeHidden.get(idx).getWeightSize(); w++) {
            System.out.println("Hidden weight" + listNodeHidden.get(idx).getWeightFromList(w));
        }
    }
    System.out.println();
    for (int idx = 0; idx < listNodeOutput.size(); idx++) {
        System.out.println("Output value " + listNodeOutput.get(idx).getValue());
        System.out.println("Output error " + listNodeOutput.get(idx).getError());
        for (int w = 0; w < listNodeOutput.get(idx).getWeightSize(); w++) {
            System.out.println("Output weight" + listNodeOutput.get(idx).getWeightFromList(w));
        }
    }
}

From source file:FinalMineria.Reconocimiento.java

/**
 * Processes requests for both HTTP <code>GET</code> and <code>POST</code>
 * methods./*  w w  w  .jav  a2 s  .  c  o m*/
 *
 * @param request servlet request
 * @param response servlet response
 * @throws ServletException if a servlet-specific error occurs
 * @throws IOException if an I/O error occurs
 */
protected void processRequest(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException, Exception {

    String accion = request.getParameter("accion");
    BufferedReader br = null;
    String ruta = request.getServletContext().getRealPath("/Recursos");
    br = new BufferedReader(new FileReader(ruta + "/nombres.txt"));
    linea = br.readLine();
    br.close();
    if ("Detener".equals(accion)) {
        grabar.finish();
        try {
            Thread.sleep(4000);
        } catch (InterruptedException ex) {
            Logger.getLogger(GrabarAudio.class.getName()).log(Level.SEVERE, null, ex);
        }
        String comando = "cmd /c " + request.getServletContext().getRealPath("/Recursos/OpenSmile")
                + "\\SMILExtract_Release.exe -C " + request.getServletContext().getRealPath("/Recursos/config")
                + "\\IS12_speaker_trait.conf -I " + request.getServletContext().getRealPath("/Recursos/audios")
                + "\\prueba.wav -O " + request.getServletContext().getRealPath("/Recursos/arff")
                + "\\prueba.arff -classes {" + linea + "} -classlabel ?";
        Process proceso = Runtime.getRuntime().exec(comando);
        proceso.waitFor();
        Instances prueba, conocimiento;
        try (BufferedReader archivoBase = new BufferedReader(new FileReader(
                request.getServletContext().getRealPath("/Recursos/arff") + "\\baseDatos.arff"))) {
            conocimiento = new Instances(archivoBase);
        }
        try (BufferedReader archivoPrueba = new BufferedReader(
                new FileReader(request.getServletContext().getRealPath("/Recursos/arff") + "\\prueba.arff"))) {
            prueba = new Instances(archivoPrueba);
        }

        conocimiento.deleteStringAttributes();
        conocimiento.setClassIndex(981);
        prueba.deleteStringAttributes();
        prueba.setClassIndex(981);
        Classifier clasificadorModelo = (Classifier) new NaiveBayes();
        clasificadorModelo.buildClassifier(conocimiento);
        double valorP = clasificadorModelo.classifyInstance(prueba.instance(prueba.numInstances() - 1));
        String prediccion = prueba.classAttribute().value((int) valorP);
        System.out.println(prediccion);
        request.setAttribute("prediccion", prediccion);
        RequestDispatcher dispatcher = request.getRequestDispatcher("./Hablante.jsp");
        dispatcher.forward(request, response);
    } else if ("Grabar".equals(accion)) {
        ruta = request.getServletContext().getRealPath("/Recursos/audios");
        grabar = new Grabador(ruta + "\\" + "prueba");
        Thread stopper = new Thread(new Runnable() {
            public void run() {
                try {
                    Thread.sleep(tiempo);
                } catch (InterruptedException ex) {
                    ex.printStackTrace();
                }
                grabar.finish();
            }
        });

        stopper.start();

        // start recording
        grabar.start();
        response.sendRedirect("./grabar.jsp");
    }
}

From source file:fr.loria.synalp.jtrans.phonetiseur.Classifieurs.java

License:Open Source License

/**
 * Pushes every instance of {@code instances} through {@code filtre} in batch
 * mode and collects the filtered output into a fresh dataset.
 *
 * @param filtre    the Weka filter to apply (its input format is set here)
 * @param instances the dataset to filter
 * @return a new Instances object in the filter's output format
 * @throws Exception if the filter rejects the input format or an instance
 */
private Instances appliquerFiltre(Filter filtre, Instances instances) throws Exception {
    filtre.setInputFormat(instances);
    for (int idx = 0; idx < instances.numInstances(); idx++) {
        filtre.input(instances.instance(idx));
    }
    filtre.batchFinished();

    // drain the filter's output queue into a dataset of the output format
    Instances resultat = filtre.getOutputFormat();
    Instance sortie;
    while ((sortie = filtre.output()) != null) {
        resultat.add(sortie);
    }
    return resultat;
}

From source file:fr.loria.synalp.jtrans.phonetiseur.Classifieurs.java

License:Open Source License

/**
 * Evaluates {@code res} on the ARFF test file and returns its accuracy as a
 * percentage. The same filter used at training time is applied to the test
 * data first (essential for the attribute spaces to match).
 *
 * @param res             trained classifier; if null, -1 is returned
 * @param fichierTestARFF path of the ARFF test file
 * @param filtre          filter to apply before classification
 * @return percentage of correctly classified instances, or -1 if res is null
 * @throws Exception if loading, filtering or classification fails
 */
private double tester(Classifier res, String fichierTestARFF, Filter filtre) throws Exception {
    if (res == null) {
        System.out.println("===============>" + fichierTestARFF);
        return -1;
    }

    DataSource source = new DataSource(fichierTestARFF);
    Instances instances = source.getDataSet();
    // denominator is the PRE-filter instance count, as in the original
    double nbTotal = instances.numInstances();
    instances.setClassIndex(instances.numAttributes() - 1);
    // essential: test data must go through the same filter as training data
    instances = appliquerFiltre(filtre, instances);

    double nbOk = 0;
    for (int idx = 0; idx < instances.numInstances(); idx++) {
        double predite = res.classifyInstance(instances.instance(idx));
        if (predite == instances.instance(idx).classValue()) {
            nbOk++;
        }
    }
    return nbOk / nbTotal * 100;
}

From source file:fr.unice.i3s.rockflows.experiments.main.IntermediateExecutor.java

/**
 * Checks that every training fold of both a 4-fold and a 10-fold
 * cross-validation of {@code data} contains at least {@code min} instances.
 *
 * @param data the dataset to split
 * @param min  minimum acceptable training-fold size
 * @return true if all folds of both CV schemes meet the minimum
 */
private boolean checkMinInstances(Instances data, int min) {
    for (int folds : new int[] { 4, 10 }) {
        for (int fold = 0; fold < folds; fold++) {
            if (data.trainCV(folds, fold).numInstances() < min) {
                return false;
            }
        }
    }
    return true;
}

From source file:function.PercentageSplit.java

/**
 * Evaluates {@code cls} with a sequential 80/20 train/test split of
 * {@code data} (no shuffling) and prints the evaluation summary to stdout.
 *
 * @param data the full dataset to split
 * @param cls  the (already built) classifier to evaluate
 * @throws Exception if evaluation fails
 */
public static void percentageSplit(Instances data, Classifier cls) throws Exception {
    int total = data.numInstances();
    int trainSize = (int) Math.round(total * 0.8);
    Instances train = new Instances(data, 0, trainSize);
    Instances test = new Instances(data, trainSize, total - trainSize);

    Evaluation eval = new Evaluation(train);
    eval.evaluateModel(cls, test);
    System.out.println(eval.toSummaryString());
}

From source file:function.PercentageSplit.java

/**
 * Evaluates {@code cls} with a sequential 80/20 train/test split of
 * {@code data} (no shuffling) and returns the percent of correct predictions.
 *
 * @param data the full dataset to split
 * @param cls  the (already built) classifier to evaluate
 * @return percentage of correctly classified test instances
 * @throws Exception if evaluation fails
 */
public static double percentageSplitRate(Instances data, Classifier cls) throws Exception {
    int total = data.numInstances();
    int trainSize = (int) Math.round(total * 0.8);
    Instances train = new Instances(data, 0, trainSize);
    Instances test = new Instances(data, trainSize, total - trainSize);

    Evaluation eval = new Evaluation(train);
    eval.evaluateModel(cls, test);
    return eval.pctCorrect();
}