List of usage examples for weka.classifiers.functions.MultilayerPerceptron.buildClassifier
@Override public void buildClassifier(Instances i) throws Exception
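In every example below, buildClassifier is called on an Instances object whose class index has already been set. As a minimal sketch of the call (the file name data.arff and the last-attribute class layout are illustrative assumptions, not taken from any example below):

    import weka.classifiers.functions.MultilayerPerceptron;
    import weka.core.Instances;
    import weka.core.converters.ConverterUtils.DataSource;

    public class MinimalBuild {
        public static void main(String[] args) throws Exception {
            // Load a dataset; assume the last attribute holds the class label.
            Instances data = new DataSource("data.arff").getDataSet();
            data.setClassIndex(data.numAttributes() - 1);

            MultilayerPerceptron mlp = new MultilayerPerceptron();
            mlp.buildClassifier(data); // trains the network with Weka's default settings
            System.out.println(mlp);   // dumps the learned network structure and weights
        }
    }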
From source file:MLP.java
MLP() {
    try {
        FileReader trainreader = new FileReader("C:\\new.arff");
        FileReader testreader = new FileReader("C:\\new.arff");
        Instances train = new Instances(trainreader);
        Instances test = new Instances(testreader);
        train.setClassIndex(train.numAttributes() - 1);
        test.setClassIndex(test.numAttributes() - 1);

        MultilayerPerceptron mlp = new MultilayerPerceptron();
        mlp.setOptions(Utils.splitOptions("-L 0.3 -M 0.2 -N 500 -V 0 -S 0 -E 20 -H 4"));
        mlp.buildClassifier(train);

        Evaluation eval = new Evaluation(train);
        eval.evaluateModel(mlp, test);
        System.out.println(eval.toSummaryString("\nResults\n======\n", false));

        trainreader.close();
        testreader.close();
    } catch (Exception ex) {
        ex.printStackTrace();
    }
}
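The option string -L 0.3 -M 0.2 -N 500 -V 0 -S 0 -E 20 -H 4 can also be expressed through individual setters. A sketch of the equivalent configuration, based on Weka's documented MultilayerPerceptron options:

    MultilayerPerceptron mlp = new MultilayerPerceptron();
    mlp.setLearningRate(0.3);       // -L: learning rate
    mlp.setMomentum(0.2);           // -M: momentum
    mlp.setTrainingTime(500);       // -N: number of training epochs
    mlp.setValidationSetSize(0);    // -V: validation set size (0 = train on all data)
    mlp.setSeed(0);                 // -S: random seed for the initial weights
    mlp.setValidationThreshold(20); // -E: consecutive validation errors allowed before stopping
    mlp.setHiddenLayers("4");       // -H: one hidden layer with 4 nodes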
From source file:anndl.Anndl.java
private static void buildModel(InputStream input) throws Exception {
    ANNDLLexer lexer = new ANNDLLexer(new ANTLRInputStream(input));
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    ANNDLParser parser = new ANNDLParser(tokens);
    ParseTree tree = parser.model();

    ModelVisitor visitor = new ModelVisitor();
    ModelClassifier themodel = (ModelClassifier) visitor.visit(tree);
    //themodel.PrintInfo();
    themodel.extracthidden();

    System.out.println("Reading the training file...");
    DataSource trainingsoure = new DataSource(themodel.filetraining);
    Instances trainingdata = trainingsoure.getDataSet();
    if (trainingdata.classIndex() == -1) {
        trainingdata.setClassIndex(trainingdata.numAttributes() - 1);
    }

    System.out.println("Configuring the ANN...");
    MultilayerPerceptron mlp = new MultilayerPerceptron();
    mlp.setLearningRate(themodel.learningrate);
    mlp.setMomentum(themodel.momentum);
    mlp.setTrainingTime(themodel.epoch);
    mlp.setHiddenLayers(themodel.hidden);

    System.out.println("Training on the data...");
    mlp.buildClassifier(trainingdata);
    Debug.saveToFile(themodel.namamodel + ".model", mlp);

    System.out.println("\n~~ .. ~~ .. ~~ .. ~~ .. ~~ .. ~~ .. ~~ .. ~~ .. ~~ ..");
    System.out.println("ANN model successfully created as file: " + themodel.namamodel + ".model");
    System.out.println("~~ .. ~~ .. ~~ .. ~~ .. ~~ .. ~~ .. ~~ .. ~~ .. ~~ .. \n");
}
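A model serialized with Debug.saveToFile can be restored later with its counterpart, Debug.readFromFile. A sketch of the load-back (the file name model.model and the variable someInstance are placeholders):

    // Reload the serialized perceptron and use it for prediction.
    MultilayerPerceptron restored =
            (MultilayerPerceptron) weka.core.Debug.readFromFile("model.model");
    double label = restored.classifyInstance(someInstance); // someInstance: a weka.core.Instance compatible with the training header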
From source file:Clases.RedNeuronal.RedNeuronal.java
public void redNeuronal(int puntaje, int tiempo, int error) throws Exception {
    // if score >= 200, count as learned
    // if time <= 240 (4 minutes), count as learned
    // if errors <= 3, count as learned
    String[] dato = { obtnerPuntaje(puntaje), obtenerTiempo(tiempo), obtenerErrores(error) };

    ConverterUtils.DataSource con = new ConverterUtils.DataSource(
            "C:\\Users\\USUARIO\\Documents\\SILVIIS\\10 Modulo\\2.ANTEPROYECTOS DE TESIS\\Proyecto\\Aplicacion\\redeAprendizaje.arff");
    // ConverterUtils.DataSource con = new ConverterUtils.DataSource("E:\\Unl\\10 Modulo\\2.ANTEPROYECTOS DE TESIS\\Proyecto\\Aplicacion\\redeAprendizaje.arff");
    Instances instances = con.getDataSet();
    System.out.println(instances);
    instances.setClassIndex(instances.numAttributes() - 1);

    MultilayerPerceptron mp = new MultilayerPerceptron();
    mp.buildClassifier(instances);

    Evaluation evalucion = new Evaluation(instances);
    evalucion.evaluateModel(mp, instances);
    System.out.println(evalucion.toSummaryString());
    System.out.println(evalucion.toMatrixString());

    String datosEntrada = null;
    String datosSalida = "no se puede predecir";
    // Find the instance matching the three input values and take its predicted class.
    for (int i = 0; i < instances.numInstances(); i++) {
        double predecido = mp.classifyInstance(instances.instance(i));
        datosEntrada = dato[0] + " " + dato[1] + " " + dato[2];
        if ((int) instances.instance(i).value(0) == Integer.parseInt(dato[0])
                && (int) instances.instance(i).value(1) == Integer.parseInt(dato[1])
                && (int) instances.instance(i).value(2) == Integer.parseInt(dato[2])) {
            datosSalida = instances.classAttribute().value((int) predecido);
        }
    }
    System.out.println("DATOS DE ENTRADA: " + datosEntrada);
    System.out.println("SALIDA PREDECIDA: " + datosSalida);

    switch (datosSalida) {
    case "0":
        resultado = "Excelente ha aprendido";
        imgResultado = "Excelente.jpg";
        imgREDneuronal = "0.png";
        System.out.println("Excelente ha aprendido");
        break;
    case "1":
        resultado = "Disminuir Errores";
        imgResultado = "Bueno.jpg";
        imgREDneuronal = "1.png";
        System.out.println("Disminuir Errores");
        break;
    case "2":
        resultado = "Disminuir Tiempo";
        imgResultado = "Bueno.jpg";
        imgREDneuronal = "2.png";
        System.out.println("Disminuir Tiempo");
        break;
    case "3":
        resultado = "Disminuir Errores y tiempo";
        imgResultado = "Bueno.jpg";
        imgREDneuronal = "3.png";
        System.out.println("Disminuir Errores y tiempo");
        break;
    case "4":
        resultado = "Subir Puntaje";
        imgResultado = "pensando.jpg";
        imgREDneuronal = "4.png";
        System.out.println("Subir Puntaje");
        break;
    case "5":
        resultado = "Subir Puntaje y disminuir Errores";
        imgResultado = "pensando.jpg";
        imgREDneuronal = "5.png";
        System.out.println("Subir Puntaje y disminuir Errores");
        break;
    case "6":
        resultado = "Subir Puntaje y disminuir Tiempo";
        imgResultado = "pensando.jpg";
        imgREDneuronal = "6.png";
        System.out.println("Subir Puntaje y disminuir Tiempo");
        break;
    case "7":
        resultado = "Ponle mas Empeño";
        imgResultado = "pensando.jpg";
        imgREDneuronal = "7.png";
        System.out.println("Ponle mas Empeño");
        break;
    default:
        resultado = "Verifique entradas, no se puede predecir";
        imgResultado = "Error.jpg";
        System.out.println("Verifique entradas, no se puede predecir");
        break;
    }
}
From source file:clasificador.Perceptron.java
public void perceptron_multicapa() {
    try {
        // Instances for training the classifier
        ConverterUtils.DataSource converU = new ConverterUtils.DataSource(
                "C:\\Users\\Kathy\\Documents\\tutorial perl\\libro.arff");
        Instances instancias = converU.getDataSet();
        instancias.setClassIndex(instancias.numAttributes() - 1);

        // Instances for testing the model
        ConverterUtils.DataSource convertest = new ConverterUtils.DataSource(
                "C:\\Users\\Kathy\\Documents\\tutorial perl\\libro5.arff");
        Instances testInstance = convertest.getDataSet();
        testInstance.setClassIndex(testInstance.numAttributes() - 1);

        // Build the classifier
        MultilayerPerceptron perceptron = new MultilayerPerceptron();
        perceptron.buildClassifier(instancias);

        // Evaluate on the training data
        Evaluation ev = new Evaluation(instancias);
        ev.evaluateModel(perceptron, instancias);
        System.out.println("\n\nENTRENAMIENTO DEL MODELO PERCEPTRÓN MULTICAPA\n\n");
        System.out.println(ev.toSummaryString("_____RESULTADO_____", true));
        System.out.println(ev.toMatrixString("_____Matriz confusion___"));

        // Evaluate on the test data. evaluateModel returns one prediction per
        // instance, so call it once and reuse the array rather than
        // re-evaluating the model on every loop iteration.
        double[] predicciones = ev.evaluateModel(perceptron, testInstance);
        System.out.println("\n\nTEST DEL MODELO PERCEPTRÓN MULTICAPA\n\n");
        System.out.println(ev.toSummaryString("_____RESULTADO_____", true));
        System.out.println(ev.toMatrixString("_____Matriz confusion___"));

        // Show the predicted values
        for (int i = 0; i < predicciones.length; i++) {
            System.out.println("Se clasifica como: " + predicciones[i]);
        }
    } catch (Exception ex) {
        Logger.getLogger(Perceptron.class.getName()).log(Level.SEVERE, null, ex);
    }
}
From source file:clasificador.RedNeuronal.java
public void Entrenamiento(String paramNN) {
    try {
        // Train the neural network using the given parameter string.
        FileReader trainReader = new FileReader(
                new File(System.getProperty("user.dir") + "\\src\\clasificador\\archivos\\libro.arff"));
        //FileReader trainReader = new FileReader("path goes here");

        // Load the file into an Instances object; the class labels are the
        // last attribute, hence numAttributes() - 1.
        Instances trainInstance = new Instances(trainReader);
        trainInstance.setClassIndex(trainInstance.numAttributes() - 1);

        // Build the multilayer perceptron; setOptions accepts the whole
        // configuration in Weka's standard option-string format.
        MultilayerPerceptron mlp = new MultilayerPerceptron();
        mlp.setOptions(Utils.splitOptions(paramNN));
        mlp.buildClassifier(trainInstance);

        // Save the trained MLP to a file; later runs only need TrainMLP.
        Debug.saveToFile("TrainMLP.train", mlp);
        SerializedClassifier sc = new SerializedClassifier();
        sc.setModelFile(new File("TrainMLP.train"));

        // Evaluate the training run.
        Evaluation evaluarEntrenamiento = new Evaluation(trainInstance);
        evaluarEntrenamiento.evaluateModel(mlp, trainInstance);
        System.out.println(evaluarEntrenamiento.toSummaryString("resultado", false));
        System.out.println(evaluarEntrenamiento.toMatrixString("*****************Matriz de confusion*******"));

        trainReader.close();
    } catch (FileNotFoundException ex) {
        Logger.getLogger(RedNeuronal.class.getName()).log(Level.SEVERE, null, ex);
    } catch (IOException ex) {
        Logger.getLogger(RedNeuronal.class.getName()).log(Level.SEVERE, null, ex);
    } catch (Exception ex) {
        Logger.getLogger(RedNeuronal.class.getName()).log(Level.SEVERE, null, ex);
    }
}
From source file:cs.man.ac.uk.predict.Predictor.java
License:Open Source License
public static void makePredictionsEnsembleNew(String trainPath, String testPath, String resultPath) {
    System.out.println("Training set: " + trainPath);
    System.out.println("Test set: " + testPath);

    /**
     * The ensemble classifiers. This is a heterogeneous ensemble.
     */
    J48 learner1 = new J48();
    SMO learner2 = new SMO();
    NaiveBayes learner3 = new NaiveBayes();
    MultilayerPerceptron learner5 = new MultilayerPerceptron();

    System.out.println("Training Ensemble.");
    long startTime = System.nanoTime();

    try {
        BufferedReader reader = new BufferedReader(new FileReader(trainPath));
        Instances data = new Instances(reader);
        data.setClassIndex(data.numAttributes() - 1);
        System.out.println("Training data length: " + data.numInstances());

        learner1.buildClassifier(data);
        learner2.buildClassifier(data);
        learner3.buildClassifier(data);
        learner5.buildClassifier(data);

        long endTime = System.nanoTime();
        long nanoseconds = endTime - startTime;
        double seconds = (double) nanoseconds / 1000000000.0;
        System.out.println("Training Ensemble completed in " + nanoseconds + " (ns) or " + seconds + " (s).");
    } catch (IOException e) {
        System.out.println("Could not train Ensemble classifier IOException on training data file.");
    } catch (Exception e) {
        System.out.println("Could not train Ensemble classifier Exception building model.");
    }

    try {
        String line = "";
        BufferedReader in = null;

        // Open a second reader on the test file and skip past the ARFF header
        // so that `in` is positioned at the start of the data section.
        try {
            in = new BufferedReader(new FileReader(testPath));
            while ((line = in.readLine()) != null) {
                if (line.toLowerCase().contains("@data"))
                    break;
            }
        } catch (Exception e) {
            // ignore; `in` stays null and the raw lines are simply unavailable
        }

        // A different ARFF loader used here (compared to above) as
        // the ARFF file may be extremely large. In which case the whole
        // file cannot be read in. Instead it is read in incrementally.
        ArffLoader loader = new ArffLoader();
        loader.setFile(new File(testPath));

        Instances data = loader.getStructure();
        data.setClassIndex(data.numAttributes() - 1);

        System.out.println("Ensemble Classifier is ready.");
        System.out.println("Testing on all instances available.");

        startTime = System.nanoTime();
        int instanceNumber = 0;

        // label instances
        Instance current;
        while ((current = loader.getNextInstance(data)) != null) {
            instanceNumber += 1;
            line = in.readLine();

            double classification1 = learner1.classifyInstance(current);
            double classification2 = learner2.classifyInstance(current);
            double classification3 = learner3.classifyInstance(current);
            double classification5 = learner5.classifyInstance(current);

            // All classifiers must agree. This is a very primitive ensemble strategy!
            if (classification1 == 1 && classification2 == 1 && classification3 == 1 && classification5 == 1) {
                if (line != null) {
                    //System.out.println("Instance: " + instanceNumber + "\t" + line);
                    //System.in.read();
                }
                Writer.append(resultPath, instanceNumber + "\n");
            }
        }

        in.close();
        System.out.println("Test set instances: " + instanceNumber);

        long endTime = System.nanoTime();
        long duration = endTime - startTime;
        double seconds = (double) duration / 1000000000.0;
        System.out.println("Testing Ensemble completed in " + duration + " (ns) or " + seconds + " (s).");
    } catch (Exception e) {
        System.out.println("Could not test Ensemble classifier due to an error.");
    }
}
From source file:cs.man.ac.uk.predict.Predictor.java
License:Open Source License
public static void makePredictionsEnsembleStream(String trainPath, String testPath, String resultPath) {
    System.out.println("Training set: " + trainPath);
    System.out.println("Test set: " + testPath);

    /**
     * The ensemble classifiers. This is a heterogeneous ensemble.
     */
    J48 learner1 = new J48();
    SMO learner2 = new SMO();
    NaiveBayes learner3 = new NaiveBayes();
    MultilayerPerceptron learner5 = new MultilayerPerceptron();

    System.out.println("Training Ensemble.");
    long startTime = System.nanoTime();

    try {
        BufferedReader reader = new BufferedReader(new FileReader(trainPath));
        Instances data = new Instances(reader);
        data.setClassIndex(data.numAttributes() - 1);
        System.out.println("Training data length: " + data.numInstances());

        learner1.buildClassifier(data);
        learner2.buildClassifier(data);
        learner3.buildClassifier(data);
        learner5.buildClassifier(data);

        long endTime = System.nanoTime();
        long nanoseconds = endTime - startTime;
        double seconds = (double) nanoseconds / 1000000000.0;
        System.out.println("Training Ensemble completed in " + nanoseconds + " (ns) or " + seconds + " (s).");
    } catch (IOException e) {
        System.out.println("Could not train Ensemble classifier IOException on training data file.");
    } catch (Exception e) {
        System.out.println("Could not train Ensemble classifier Exception building model.");
    }

    try {
        // A different ARFF loader used here (compared to above) as
        // the ARFF file may be extremely large. In which case the whole
        // file cannot be read in. Instead it is read in incrementally.
        ArffLoader loader = new ArffLoader();
        loader.setFile(new File(testPath));

        Instances data = loader.getStructure();
        data.setClassIndex(data.numAttributes() - 1);

        System.out.println("Ensemble Classifier is ready.");
        System.out.println("Testing on all instances available.");

        startTime = System.nanoTime();
        int instanceNumber = 0;

        // label instances
        Instance current;
        while ((current = loader.getNextInstance(data)) != null) {
            instanceNumber += 1;

            double classification1 = learner1.classifyInstance(current);
            double classification2 = learner2.classifyInstance(current);
            double classification3 = learner3.classifyInstance(current);
            double classification5 = learner5.classifyInstance(current);

            // All classifiers must agree. This is a very primitive ensemble strategy!
            if (classification1 == 1 && classification2 == 1 && classification3 == 1 && classification5 == 1) {
                Writer.append(resultPath, instanceNumber + "\n");
            }
        }

        System.out.println("Test set instances: " + instanceNumber);

        long endTime = System.nanoTime();
        long duration = endTime - startTime;
        double seconds = (double) duration / 1000000000.0;
        System.out.println("Testing Ensemble completed in " + duration + " (ns) or " + seconds + " (s).");
    } catch (Exception e) {
        System.out.println("Could not test Ensemble classifier due to an error.");
    }
}
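As the comment in both methods notes, requiring unanimous agreement is a primitive combination rule. A sketch of a simple majority vote over the same four learners (not part of the original source) could replace the agreement check:

    int votes = 0;
    if (learner1.classifyInstance(current) == 1) votes++;
    if (learner2.classifyInstance(current) == 1) votes++;
    if (learner3.classifyInstance(current) == 1) votes++;
    if (learner5.classifyInstance(current) == 1) votes++;
    if (votes >= 3) { // at least 3 of the 4 learners predict the positive class
        Writer.append(resultPath, instanceNumber + "\n");
    }

Weka also ships a meta-classifier, weka.classifiers.meta.Vote, that implements this and other combination rules directly.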
From source file:cyber009.main.UDALNeuralNetwork.java
public static void main(String[] args) {
    UDALNeuralNetwork udal = new UDALNeuralNetwork(0.014013);
    Statistics statis = new Statistics(udal.v);
    long timeStart = 0, timeEnd = 0;

    for (int f = 2; f <= 2; f++) {
        udal.initUDAL(4, 5000);
        udal.activeLearning(0, 5000);
        udal.arraytoInstances();
        udal.ann.weightReset();
        timeStart = System.currentTimeMillis();

        MultilayerPerceptron wekaNN = new MultilayerPerceptron();
        wekaNN.setAutoBuild(true);
        //wekaNN.setGUI(true);
        try {
            wekaNN.buildClassifier(udal.dataSet);
            Evaluation eval = new Evaluation(udal.dataSet);
            System.out.println(wekaNN.toString());
            eval.crossValidateModel(wekaNN, udal.dataSet, 4999, new Random(System.currentTimeMillis()));
            System.out.println(wekaNN.toString());
            System.out.println(eval.toClassDetailsString());
            // udal.ann.gradientDescent(10000L, 3, 100);
            // for (Double target : udal.v.CLASSES) {
            //     statis.calMVMuSigma(target);
            //     System.out.println(udal.v.N_DATA_IN_CLASS.get(target));
            //     System.out.println(statis.mu.get(target));
            //     System.out.println(statis.sigma.get(target));
            // }
            // for (int d = 0; d < udal.v.D; d++) {
            //     if (udal.v.LABEL[d] == false) {
            //         double[][] val = new double[udal.v.N - 1][1];
            //         for (int n = 1; n < udal.v.N; n++) {
            //             val[n - 1][0] = udal.v.X[d][n];
            //             // System.out.print(udal.v.X[d][n] + " ");
            //             // System.out.println(val[n - 1][0]);
            //         }
            //         Matrix mVal = new Matrix(val);
            //         double pp = 0.0D;
            //         for (Double target : udal.v.CLASSES) {
            //             // System.out.println("-----------------------\nClass:" + target);
            //             pp += statis.posteriorDistribution(target, mVal);
            //             System.out.println("conditional: Entropy: "
            //                     + statis.conditionalEntropy(target, mVal, d));
            //         }
            //         System.out.print("Sum posterior:" + pp + " for " + new Matrix(val).transpose());
            //     }
            // }
            // System.out.println("-----------------------");
            // timeEnd = System.currentTimeMillis();
            // System.out.println("feature #:" + udal.v.N + " time:(" + (timeEnd - timeStart) + ")");
            // udal.v.showResult();
        } catch (Exception ex) {
            Logger.getLogger(UDALNeuralNetwork.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
}
From source file:es.bsc.autonomic.powermodeller.tools.classifiers.MultilayerPerceptronClassifier.java
License:Apache License
@Override
protected Classifier buildClassifier(DataSet training_ds) {
    logger.debug("Building MultilayerPerceptron classifier.");
    MultilayerPerceptron model;

    // Get the independent variable index
    String independent = training_ds.getIndependent();
    if (independent == null)
        throw new WekaWrapperException("Independent variable is not set in dataset.");

    try {
        // Read all the instances in the file (ARFF, CSV, XRFF, ...)
        ConverterUtils.DataSource source = new ConverterUtils.DataSource(training_ds.getFilePath());
        Instances instances = source.getDataSet();

        // Set the independent variable (powerWatts).
        instances.setClassIndex(instances.attribute(independent).index());

        // Builds a regression model for the given data.
        model = new MultilayerPerceptron();
        model.setHiddenLayers("4");
        model.setTrainingTime(20);

        // Build the MultilayerPerceptron model
        model.buildClassifier(instances);
    } catch (WekaWrapperException e) {
        logger.error("Error while creating MultilayerPerceptron classifier.", e);
        throw new WekaWrapperException("Error while creating MultilayerPerceptron classifier.");
    } catch (Exception e) {
        logger.error("Error while applying MultilayerPerceptron to data set instances.", e);
        throw new WekaWrapperException("Error while applying MultilayerPerceptron to data set instances.");
    }

    return model;
}
From source file:mlp.MLP.java
/**
 * Build a multilayer perceptron using the given parameters and the training set.
 *
 * @param learningRate  the learning rate for the training
 * @param numberEpochs  number of training epochs
 * @param numberNeurons number of neurons in the hidden layer
 * @param trainingSet   the training set
 * @return the trained multilayer perceptron
 * @throws Exception if the classifier cannot be built
 */
public static MultilayerPerceptron buildMLP(double learningRate, int numberEpochs, int numberNeurons,
        Instances trainingSet) throws Exception {
    MultilayerPerceptron mlp = new MultilayerPerceptron();

    // set parameters
    mlp.setLearningRate(learningRate);
    mlp.setTrainingTime(numberEpochs);
    mlp.setHiddenLayers("" + numberNeurons);

    // build multilayer perceptron
    mlp.buildClassifier(trainingSet);
    return mlp;
}
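A possible call site for this helper (the file name train.arff and the parameter values are illustrative only, not from the original source):

    Instances train = new ConverterUtils.DataSource("train.arff").getDataSet();
    train.setClassIndex(train.numAttributes() - 1);

    // learning rate 0.1, 200 epochs, 8 hidden neurons
    MultilayerPerceptron model = MLP.buildMLP(0.1, 200, 8, train);
    System.out.println(model);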