Example usage for weka.classifiers.functions MultilayerPerceptron MultilayerPerceptron

List of usage examples for weka.classifiers.functions MultilayerPerceptron MultilayerPerceptron

Introduction

On this page you can find example usage for weka.classifiers.functions MultilayerPerceptron MultilayerPerceptron.

Prototype

public MultilayerPerceptron() 

Source Link

Document

The constructor.

Usage

From source file:ClassifierBuilder.java

/**
 * Builds a {@code MyClassifier} wrapper for the algorithm with the given
 * display name. Recognised names: "Decision Table Majority",
 * "Logistic Regression", "Multi Layer Perceptron", "Naive Baesian",
 * "Random Forest". Any other name yields a wrapper with no classifier set.
 *
 * @param name display name of the classification algorithm
 * @return a wrapper carrying the matching Weka classifier (or none)
 */
public static MyClassifier buildClassifier(String name) {
    MyClassifier wrapper = new MyClassifier(name);
    if (name.equals("Decision Table Majority")) {
        wrapper.setClassifier(new DecisionTable());
    } else if (name.equals("Logistic Regression")) {
        wrapper.setClassifier(new Logistic());
    } else if (name.equals("Multi Layer Perceptron")) {
        wrapper.setClassifier(new MultilayerPerceptron());
    } else if (name.equals("Naive Baesian")) {
        wrapper.setClassifier(new NaiveBayes());
    } else if (name.equals("Random Forest")) {
        wrapper.setClassifier(new RandomForest());
    }
    return wrapper;
}

From source file:CopiaSeg3.java

/**
 * Loads the breast-cancer ARFF file, does a single 75/25 train/test split,
 * evaluates each model, and then classifies one hand-built instance.
 *
 * @param args unused
 * @throws Exception if the data file cannot be read or a model fails to build
 */
public static void main(String[] args) throws Exception {

    BufferedReader datafile = readDataFile("breast-cancer-wisconsin.arff");

    Instances data = new Instances(datafile);
    data.setClassIndex(data.numAttributes() - 1);

    // Choose the split ratio (4 => 75% train, 25% test).
    Instances[] split = split(data, 4);

    Instances trainingSplits = split[0];
    Instances testingSplits = split[1];

    // Classifiers to evaluate; add more to the array as needed.
    Classifier[] models = { new MultilayerPerceptron()
            //, new J48()
    };

    for (int j = 0; j < models.length; j++) {

        // Collect this model's predictions on the test split.
        FastVector predictions = new FastVector();

        Evaluation validation = simpleClassify(models[j], trainingSplits, testingSplits);
        predictions.appendElements(validation.predictions());

        // Summary of the trained model.
        System.out.println(models[j].toString());

        double accuracy = calculateAccuracy(predictions);

        System.out.println(models[j].getClass().getSimpleName() + " Accuracy: "
                + String.format("%.2f%%", accuracy) + "\n=====================");

        // Build an unlabeled instance and attach it to the training set so it
        // inherits the attribute definitions, then fill the nine predictor
        // values by attribute index.
        // FIX: the original cast predictions.elementAt(i) to Attribute, but the
        // predictions vector holds Prediction objects, which would throw a
        // ClassCastException at runtime.
        // NOTE(review): the dataset has numAttributes() == predictors + class;
        // DenseInstance(9) leaves no slot for the class attribute — confirm this
        // matches the ARFF file's attribute count.
        Instance iUse = new DenseInstance(9);
        iUse.setDataset(trainingSplits);
        double[] values = { 4, 8, 8, 5, 4, 5, 10, 4, 1 };
        for (int k = 0; k < values.length; k++) {
            iUse.setValue(trainingSplits.attribute(k), values[k]);
        }

        // Class-membership distribution for the new instance.
        // fDistribution[0]: probability of the first class value,
        // fDistribution[1]: probability of the second class value.
        double[] fDistribution = models[j].distributionForInstance(iUse);

        System.out.println("Probabilidad positivo: " + fDistribution[0]);
        System.out.println("Probabilidad negativo: " + fDistribution[1]);
    }

}

From source file:MLP.java

/**
 * Trains a multilayer perceptron on C:\new.arff and prints an evaluation
 * summary against the same file. Any failure is logged to stderr.
 */
MLP() {

    try {
        // Train and test read the same ARFF file via two readers.
        FileReader trainingReader = new FileReader("C:\\new.arff");
        FileReader testReader = new FileReader("C:\\new.arff");

        Instances trainingSet = new Instances(trainingReader);
        Instances testSet = new Instances(testReader);
        // The class attribute is the last one in the file.
        trainingSet.setClassIndex(trainingSet.numAttributes() - 1);
        testSet.setClassIndex(testSet.numAttributes() - 1);

        // Configure the network: learning rate 0.3, momentum 0.2,
        // 500 epochs, validation-set size 0, seed 0, validation
        // threshold 20, and 4 hidden units.
        MultilayerPerceptron network = new MultilayerPerceptron();
        network.setOptions(Utils.splitOptions("-L 0.3 -M 0.2 -N 500 -V 0 -S 0 -E 20 -H 4"));
        network.buildClassifier(trainingSet);

        // Evaluate on the test set and print the summary.
        Evaluation evaluation = new Evaluation(trainingSet);
        evaluation.evaluateModel(network, testSet);
        System.out.println(evaluation.toSummaryString("\nResults\n======\n", false));

        trainingReader.close();
        testReader.close();

    } catch (Exception ex) {
        ex.printStackTrace();
    }

}

From source file:anndl.Anndl.java

/**
 * Parses an ANNDL model description from the given stream, trains a
 * multilayer perceptron according to it, and serializes the trained
 * network to "&lt;name&gt;.model".
 *
 * @param input stream containing the ANNDL model source
 * @throws Exception if parsing, data loading, or training fails
 */
private static void buildModel(InputStream input) throws Exception {
    // Lex and parse the ANNDL description.
    ANNDLLexer lexer = new ANNDLLexer(new ANTLRInputStream(input));
    ANNDLParser parser = new ANNDLParser(new CommonTokenStream(lexer));
    ParseTree tree = parser.model();

    // Walk the parse tree into a classifier description.
    ModelVisitor visitor = new ModelVisitor();
    ModelClassifier model = (ModelClassifier) visitor.visit(tree);
    model.extracthidden();

    System.out.println("Membaca File Training...");
    DataSource trainingSource = new DataSource(model.filetraining);
    Instances trainingData = trainingSource.getDataSet();
    // Default the class attribute to the last column when unspecified.
    if (trainingData.classIndex() == -1) {
        trainingData.setClassIndex(trainingData.numAttributes() - 1);
    }

    // Configure the network from the parsed model parameters.
    System.out.println("Melakukan konfigurasi ANN ... ");
    MultilayerPerceptron mlp = new MultilayerPerceptron();
    mlp.setLearningRate(model.learningrate);
    mlp.setMomentum(model.momentum);
    mlp.setTrainingTime(model.epoch);
    mlp.setHiddenLayers(model.hidden);

    System.out.println("Melakukan Training data ...");
    mlp.buildClassifier(trainingData);

    // Persist the trained network under the model's declared name.
    Debug.saveToFile(model.namamodel + ".model", mlp);

    System.out.println("\n~~ .. ~~ .. ~~ .. ~~ .. ~~ .. ~~ .. ~~ .. ~~ .. ~~ ..");
    System.out.println("Model ANN Berhasil Diciptakan dengan nama file : " + model.namamodel + ".model");
    System.out.println("~~ .. ~~ .. ~~ .. ~~ .. ~~ .. ~~ .. ~~ .. ~~ .. ~~ .. \n");

}

From source file:cezeri.feature.selection.FeatureSelectionInfluence.java

/**
 * Runs the feature-influence analysis on a fixed ARFF file using the
 * REPTree model and prints the most discriminative features.
 *
 * @param args unused
 * @throws Exception if the data file cannot be read or analysis fails
 */
public static void main(String[] args) throws Exception {
    String filePath = "C:\\Users\\BAP1\\Google Drive\\DataSet\\Weka_Files\\dental_florisis\\kayac_dental_2.arff";
    // Candidate models; only the REPTree (index 2) is analysed here.
    Classifier[] models = { new MultilayerPerceptron(), new Bagging(), new REPTree() };
    Influence[] discriminative = getMostDiscriminativeFeature(filePath, models[2]);
    System.out.println("Most Disciriminative Features are");
    for (Influence feature : discriminative) {
        System.out.println(feature.attributeName + "=" + feature.infVal);
    }

}

From source file:Clases.RedNeuronal.RedNeuronal.java

/**
 * Classifies a learning outcome from (score, time, errors) with a multilayer
 * perceptron trained on a fixed ARFF file, then sets the result fields
 * (resultado, imgResultado, imgREDneuronal) from the predicted class label.
 *
 * Heuristics encoded in the training data (per the original author's notes):
 * score >= 200 means learned; time <= 240 s (4 minutes) means learned;
 * errors <= 3 means learned.
 *
 * @param puntaje raw score achieved by the learner
 * @param tiempo  elapsed time (presumably seconds -- TODO confirm against caller)
 * @param error   number of errors made
 * @throws Exception if the ARFF file cannot be read or training fails
 */
public void redNeuronal(int puntaje, int tiempo, int error) throws Exception {
    // Discretize the three raw inputs with the helper mappers.
    String[] dato = { obtnerPuntaje(puntaje), obtenerTiempo(tiempo), obtenerErrores(error) };

    // Training data loaded from a hard-coded absolute path.
    ConverterUtils.DataSource con = new ConverterUtils.DataSource(
            "C:\\Users\\USUARIO\\Documents\\SILVIIS\\10 Modulo\\2.ANTEPROYECTOS DE TESIS\\Proyecto\\Aplicacion\\redeAprendizaje.arff");
    //        ConverterUtils.DataSource con = new ConverterUtils.DataSource("E:\\Unl\\10 Modulo\\2.ANTEPROYECTOS DE TESIS\\Proyecto\\Aplicacion\\redeAprendizaje.arff");

    Instances instances = con.getDataSet();
    System.out.println(instances);
    // The class attribute is the last one in the dataset.
    instances.setClassIndex(instances.numAttributes() - 1);

    // Train the perceptron on the full dataset (no train/test split).
    MultilayerPerceptron mp = new MultilayerPerceptron();
    mp.buildClassifier(instances);

    // Resubstitution evaluation: the model is evaluated on its own training data.
    Evaluation evalucion = new Evaluation(instances);
    evalucion.evaluateModel(mp, instances);
    System.out.println(evalucion.toSummaryString());
    System.out.println(evalucion.toMatrixString());

    // Scan the training rows for one matching the discretized inputs and
    // keep the class the model predicts for that row.
    String datosEntrada = null;
    String datosSalida = "no se puede predecir";
    for (int i = 0; i < instances.numInstances(); i++) {
        double predecido = mp.classifyInstance(instances.instance(i));
        datosEntrada = dato[0] + " " + dato[1] + " " + dato[2];
        if ((int) instances.instance(i).value(0) == Integer.parseInt(dato[0])
                && (int) instances.instance(i).value(1) == Integer.parseInt(dato[1])
                && (int) instances.instance(i).value(2) == Integer.parseInt(dato[2])) {
            datosSalida = instances.classAttribute().value((int) predecido);
        }
    }
    System.out.println("DATOS DE ENTRADA: " + datosEntrada);
    System.out.println("SALIDA PREDECIDA: " + datosSalida);

    // Map the predicted class label ("0".."7") to a user-facing message
    // and the images shown in the UI.
    switch (datosSalida) {
    case "0":
        resultado = "Excelente ha aprendido";
        imgResultado = "Excelente.jpg";
        imgREDneuronal = "0.png";
        System.out.println("Excelente ha aprendido");
        break;
    case "1":
        resultado = "Disminuir Errores";
        imgResultado = "Bueno.jpg";
        imgREDneuronal = "1.png";
        System.out.println("Disminuir Errores");
        break;
    case "2":
        resultado = "Disminuir Tiempo";
        imgResultado = "Bueno.jpg";
        imgREDneuronal = "2.png";
        System.out.println("Disminuir Tiempo");
        break;
    case "3":
        resultado = "Disminuir Errores y tiempo";
        imgResultado = "Bueno.jpg";
        imgREDneuronal = "3.png";
        System.out.println("Disminuir Errores y tiempo");
        break;
    case "4":
        resultado = "Subir Puntaje";
        imgResultado = "pensando.jpg";
        imgREDneuronal = "4.png";
        System.out.println("Subir Puntaje");
        break;
    case "5":
        resultado = "Subir Puntaje y disminuir Errores";
        imgResultado = "pensando.jpg";
        imgREDneuronal = "5.png";
        System.out.println("Subir Puntaje y disminuir Errores");
        break;
    case "6":
        resultado = "Subir Puntaje y disminuir Tiempo";
        imgResultado = "pensando.jpg";
        imgREDneuronal = "6.png";
        System.out.println("Subir Puntaje y disminuir Tiempo");
        break;
    case "7":
        resultado = "Ponle mas Empeo";
        imgResultado = "pensando.jpg";
        imgREDneuronal = "7.png";
        System.out.println("Ponle mas Empeo");
        break;
    default:
        // Unknown label: ask the user to verify the inputs.
        resultado = "Verifique entradas, no se puede predecir";
        imgResultado = "Error.jpg";
        System.out.println("Verifique entradas, no se puede predecir");
        break;
    }
}

From source file:clasificador.Perceptron.java

/**
 * Trains a multilayer perceptron on a fixed ARFF training file, prints a
 * resubstitution evaluation, then evaluates on a separate test file and
 * prints the per-instance predicted class values. Errors are logged.
 */
public void perceptron_multicapa() {
    try {
        // Training instances; class attribute is the last column.
        ConverterUtils.DataSource converU = new ConverterUtils.DataSource(
                "C:\\Users\\Kathy\\Documents\\tutorial perl\\libro.arff");
        Instances instancias = converU.getDataSet();
        instancias.setClassIndex(instancias.numAttributes() - 1);

        // Test instances; class attribute is the last column.
        ConverterUtils.DataSource convertest = new ConverterUtils.DataSource(
                "C:\\Users\\Kathy\\Documents\\tutorial perl\\libro5.arff");
        Instances testInstance = convertest.getDataSet();
        testInstance.setClassIndex(testInstance.numAttributes() - 1);

        // Build the classifier with default options.
        MultilayerPerceptron perceptron = new MultilayerPerceptron();
        perceptron.buildClassifier(instancias);

        // Resubstitution evaluation on the training data.
        Evaluation ev = new Evaluation(instancias);
        ev.evaluateModel(perceptron, instancias);
        System.out.println("\n\nENTRENAMIENTO DEL MODELO PERCEPTRN MULTICAPA\n\n");
        System.out.println(ev.toSummaryString("_____RESULTADO_____", true));
        System.out.println(ev.toMatrixString("_____Matriz confusion___"));

        // Evaluate ONCE on the test set and keep the predicted class values.
        // FIX: the original re-ran ev.evaluateModel(perceptron, testInstance)
        // in the loop condition and in the loop body, re-evaluating the whole
        // test set on every iteration and accumulating duplicate statistics
        // into the Evaluation object.
        double[] testPredictions = ev.evaluateModel(perceptron, testInstance);
        System.out.println("\n\nTEST DEL MODELO PERCEPTRN MULTICAPA\n\n");
        System.out.println(ev.toSummaryString("_____RESULTADO_____", true));
        System.out.println(ev.toMatrixString("_____Matriz confusion___"));

        // Predicted class value (as a double index) for each test instance.
        for (int i = 0; i < testPredictions.length; i++) {
            System.out.println("Se clasifica como:  " + testPredictions[i]);
        }

    }

    catch (Exception ex) {
        Logger.getLogger(Perceptron.class.getName()).log(Level.SEVERE, null, ex);
    }

}

From source file:clasificador.RedNeuronal.java

/**
 * Trains a multilayer perceptron on the bundled libro.arff file using the
 * caller-supplied Weka option string, saves the model to "TrainMLP.train",
 * and prints a resubstitution evaluation. Errors are logged.
 *
 * @param paramNN Weka option string for MultilayerPerceptron
 *                (e.g. "-L 0.3 -M 0.2 -N 500 -H 4")
 */
public void Entrenamiento(String paramNN) {
    try {
        // Training data; the class attribute is the last column (controlled setting).
        FileReader trainReader = new FileReader(
                new File(System.getProperty("user.dir") + "\\src\\clasificador\\archivos\\libro.arff"));
        Instances trainInstance = new Instances(trainReader);
        trainInstance.setClassIndex(trainInstance.numAttributes() - 1);

        // Build the network from the caller-supplied option string.
        MultilayerPerceptron mlp = new MultilayerPerceptron();
        mlp.setOptions(Utils.splitOptions(paramNN));
        mlp.buildClassifier(trainInstance);

        // Persist the trained network so later runs only need TrainMLP.train.
        Debug.saveToFile("TrainMLP.train", mlp);

        // Resubstitution evaluation on the training data.
        // FIX: removed an unused SerializedClassifier that was constructed and
        // pointed at TrainMLP.train but never used by the evaluation below.
        Evaluation evaluarEntrenamiento = new Evaluation(trainInstance);
        evaluarEntrenamiento.evaluateModel(mlp, trainInstance);
        System.out.println(evaluarEntrenamiento.toSummaryString("resultado", false));
        System.out.println(evaluarEntrenamiento.toMatrixString("*****************Matriz de confusion*******"));
        trainReader.close();
    } catch (Exception ex) {
        // The original caught FileNotFoundException, IOException, and Exception
        // separately with identical logging; one handler is equivalent.
        Logger.getLogger(RedNeuronal.class.getName()).log(Level.SEVERE, null, ex);
    }

}

From source file:core.classification.Classifiers.java

License:Open Source License

/**
 * Private constructor for the <code>Classifiers</code> object.
 * @param train whether to train the classifiers now ({@code true}) or load
 *              previously saved models from disk ({@code false})
 */
private Classifiers(boolean train) {
    // Component classifiers: a Bayes net for SCA, perceptrons for SCB and
    // the three SCC stages, a cost-sensitive wrapper for RC, and a J48
    // decision tree for the yes/no classifier.
    SCA = new BayesNet();
    SCB = new MultilayerPerceptron();
    SCC1 = new MultilayerPerceptron();
    SCC2 = new MultilayerPerceptron();
    SCC3 = new MultilayerPerceptron();

    RC = new CostSensitiveClassifier();
    YNC = new J48();

    if (train) {
        // Train each classifier group independently so one failure does not
        // prevent the others from training.
        try {
            this.trainSC();
        } catch (Exception e) {
            System.out.println("The system encountered the following error while training SC:");
            e.printStackTrace();
        }
        try {
            this.trainRC();
        } catch (Exception e) {
            System.out.println("The system encountered the following error while training RC:");
            e.printStackTrace();
        }
        try {
            this.trainYNC();
        } catch (Exception e) {
            System.out.println("The system encountered the following error while training YNC:");
            e.printStackTrace();
        }
    } else {
        // Load previously serialized models from disk instead of training.
        try {
            readSC("SCA.model", "SCB.model", "SCC1.model", "SCC2.model", "SCC3.model");
            readRC("RC.model");
            readYNC("YNC.model");
        } catch (Exception e) {
            System.out.println("Error while reading the classifiers: ");
            e.printStackTrace();
        }
    }

    // Structure creation: nominal label sets shared by the datasets below.
    FastVector labels = new FastVector();
    labels.addElement("0");
    labels.addElement("1");
    labels.addElement("2");
    labels.addElement("3");
    labels.addElement("4");

    // Class labels 1-4.
    FastVector clabels = new FastVector();
    clabels.addElement("1");
    clabels.addElement("2");
    clabels.addElement("3");
    clabels.addElement("4");

    // Relation labels 0-4.
    FastVector clabels2 = new FastVector();
    clabels2.addElement("0");
    clabels2.addElement("1");
    clabels2.addElement("2");
    clabels2.addElement("3");
    clabels2.addElement("4");

    // Yes/no labels.
    FastVector clabels3 = new FastVector();
    clabels3.addElement("Y");
    clabels3.addElement("N");

    // Dataset structure for SCA: RATIO feature + CLASS label.
    FastVector attrs = new FastVector();
    attrs.addElement(new Attribute("RATIO"));
    attrs.addElement(new Attribute("CLASS", clabels));
    dataStructSCA = new Instances("SCA-STRUCT", attrs, 0);
    dataStructSCA.setClassIndex(1);

    // Dataset structure for SCB: H2, D2, DX features + parent class + CLASS.
    FastVector attrsB = new FastVector();
    attrsB.addElement(new Attribute("H2"));
    attrsB.addElement(new Attribute("D2"));
    attrsB.addElement(new Attribute("DX"));
    attrsB.addElement(new Attribute("PCLASS", clabels));
    attrsB.addElement(new Attribute("CLASS", clabels));
    dataStructSCB = new Instances("SCB-STRUCT", attrsB, 0);
    dataStructSCB.setClassIndex(4);

    // Dataset structures for the three SCC stages; same shape, different
    // feature prefixes (L*, E*, S*).
    FastVector attrsC1 = new FastVector();
    FastVector attrsC2 = new FastVector();
    FastVector attrsC3 = new FastVector();
    attrsC1.addElement(new Attribute("LH"));
    attrsC1.addElement(new Attribute("LD"));
    attrsC1.addElement(new Attribute("LDX"));
    attrsC1.addElement(new Attribute("LCLASS", clabels));
    attrsC1.addElement(new Attribute("CLASS", clabels));

    attrsC2.addElement(new Attribute("EH"));
    attrsC2.addElement(new Attribute("ED"));
    attrsC2.addElement(new Attribute("EDX"));
    attrsC2.addElement(new Attribute("ECLASS", clabels));
    attrsC2.addElement(new Attribute("CLASS", clabels));

    attrsC3.addElement(new Attribute("SH"));
    attrsC3.addElement(new Attribute("SD"));
    attrsC3.addElement(new Attribute("SDX"));
    attrsC3.addElement(new Attribute("SCLASS", clabels));
    attrsC3.addElement(new Attribute("CLASS", clabels));

    dataStructSCC1 = new Instances("SCC1-STRUCT", attrsC1, 0);
    dataStructSCC1.setClassIndex(4);

    dataStructSCC2 = new Instances("SCC2-STRUCT", attrsC2, 0);
    dataStructSCC2.setClassIndex(4);

    dataStructSCC3 = new Instances("SCC3-STRUCT", attrsC3, 0);
    dataStructSCC3.setClassIndex(4);

    // Dataset structure for RC: geometry features + two class attributes,
    // with RELID (the relation id) as the class to predict.
    FastVector attrs2 = new FastVector();
    attrs2.addElement(new Attribute("H2"));
    attrs2.addElement(new Attribute("D2"));
    attrs2.addElement(new Attribute("DX"));
    attrs2.addElement(new Attribute("CLASS", clabels));
    attrs2.addElement(new Attribute("PCLASS", clabels));
    attrs2.addElement(new Attribute("RELID", clabels2));
    dataStructRC = new Instances("RC-STRUCT", attrs2, 0);
    dataStructRC.setClassIndex(5);

    // Dataset structure for YNC: parent/child classes plus geometry, with
    // the Y/N attribute as the class to predict.
    FastVector attrs3 = new FastVector();
    attrs3.addElement(new Attribute("PCLASS", clabels));
    attrs3.addElement(new Attribute("CCLASS", clabels));
    attrs3.addElement(new Attribute("RAREA"));
    attrs3.addElement(new Attribute("H"));
    attrs3.addElement(new Attribute("D"));
    attrs3.addElement(new Attribute("V"));
    attrs3.addElement(new Attribute("YN", clabels3));
    dataStructYC = new Instances("YC-STRUCT", attrs3, 0);
    dataStructYC.setClassIndex(6);
}

From source file:core.Core.java

/**
 * Trains several classifiers on the powerpuffgirls ARFF file, classifies
 * the features extracted from an image with each of them, tallies votes
 * for the two characters, and returns an HTML report.
 *
 * @return HTML fragment with each classifier's distribution and the verdict
 * @throws Exception if the data cannot be loaded or a classifier fails
 */
public String run() throws Exception {
    ConverterUtils.DataSource source = new ConverterUtils.DataSource("src/files/powerpuffgirls.arff");

    // Candidate classifiers keyed by display name.
    HashMap<String, Classifier> hash = new HashMap<>();
    hash.put("J48", new J48());
    hash.put("NaiveBayes", new NaiveBayes());
    hash.put("IBk=1", new IBk(1));
    hash.put("IBk=3", new IBk(3));
    hash.put("MultilayerPerceptron", new MultilayerPerceptron());
    hash.put("LibSVM", new LibSVM());

    Instances ins = source.getDataSet();
    // The class attribute is the fifth column (index 4).
    ins.setClassIndex(4);

    // Extract the image features once: they are identical for every
    // classifier. FIX: the original called classifyImage() on every loop
    // iteration (assumed deterministic — confirm it has no per-call side
    // effects before relying on this).
    float[] features = classifyImage();

    StringBuilder sb = new StringBuilder();

    int blossom = 0;
    int bubbles = 0;

    for (Map.Entry<String, Classifier> entry : hash.entrySet()) {
        Classifier c = entry.getValue();
        c.buildClassifier(ins);

        // Build the unlabeled instance carrying the four feature values.
        Instance test = new Instance(5);
        test.setDataset(ins);
        test.setValue(0, features[0]);
        test.setValue(1, features[1]);
        test.setValue(2, features[2]);
        test.setValue(3, features[3]);

        double prob[] = c.distributionForInstance(test);

        sb.append("<em>");
        sb.append(entry.getKey());
        sb.append(":</em>");
        sb.append("<br/>");

        for (int i = 0; i < prob.length; i++) {
            String value = test.classAttribute().value(i);

            // Count a vote only when the rounded probability clears the cut.
            if (getRoundedValue(prob[i]) >= CUT_NOTE) {
                if (getClassValue(value))
                    blossom++;
                else
                    bubbles++;
            }

            sb.append(getClassName(value));
            sb.append(": ");
            sb.append("<strong>");
            sb.append(getRoundedValue(prob[i]) < CUT_NOTE ? "Rejeitado!" : getValueFormatted(prob[i]));
            sb.append("</strong>");
            sb.append("&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;");
        }

        sb.append("<br/>");

        System.out.println("blossom: " + blossom);
        System.out.println("bubbles: " + bubbles);
        System.out.println("=================\n");
    }

    // Majority vote decides the verdict. (The original reset blossom/bubbles
    // to 0 here, but they are locals about to go out of scope — dead code.)
    sb.append(blossom > bubbles ? "<h3> a Florzinha!</h3>" : "<h3> a Lindinha!</h3>");

    return sb.toString();
}