Usage examples for weka.classifiers.functions.MultilayerPerceptron#setHiddenLayers(String)
public void setHiddenLayers(String h)
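The argument is a comma-separated list of hidden layer sizes, e.g. "5,3" for two hidden layers with 5 and 3 nodes. Weka also accepts the wildcards a = (attributes + classes) / 2, i = attributes, o = classes, t = attributes + classes, and "0" for no hidden layer. Before the collected examples, here is a minimal self-contained sketch; the file name data.arff and the choice of the last attribute as class are assumptions, not taken from the examples below.

import weka.classifiers.functions.MultilayerPerceptron;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class SetHiddenLayersDemo {
    public static void main(String[] args) throws Exception {
        // Assumed input file; any ARFF/CSV readable by DataSource works.
        Instances data = new DataSource("data.arff").getDataSet();
        data.setClassIndex(data.numAttributes() - 1);

        MultilayerPerceptron mlp = new MultilayerPerceptron();
        // Two hidden layers with 5 and 3 nodes.
        mlp.setHiddenLayers("5,3");
        // Alternatively a wildcard, e.g. one hidden layer sized automatically:
        // mlp.setHiddenLayers("a");
        mlp.buildClassifier(data);
    }
}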
From source file: anndl.Anndl.java
private static void buildModel(InputStream input) throws Exception {
    ANNDLLexer lexer = new ANNDLLexer(new ANTLRInputStream(input));
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    ANNDLParser parser = new ANNDLParser(tokens);
    ParseTree tree = parser.model();

    ModelVisitor visitor = new ModelVisitor();
    ModelClassifier themodel = (ModelClassifier) visitor.visit(tree);
    //themodel.PrintInfo();
    themodel.extracthidden();

    System.out.println("Reading training file...");
    DataSource trainingsoure = new DataSource(themodel.filetraining);
    Instances trainingdata = trainingsoure.getDataSet();
    if (trainingdata.classIndex() == -1) {
        trainingdata.setClassIndex(trainingdata.numAttributes() - 1);
    }

    System.out.println("Configuring the ANN...");
    MultilayerPerceptron mlp = new MultilayerPerceptron();
    mlp.setLearningRate(themodel.learningrate);
    mlp.setMomentum(themodel.momentum);
    mlp.setTrainingTime(themodel.epoch);
    mlp.setHiddenLayers(themodel.hidden);

    System.out.println("Training on the data...");
    mlp.buildClassifier(trainingdata);
    Debug.saveToFile(themodel.namamodel + ".model", mlp);

    System.out.println("\n~~ .. ~~ .. ~~ .. ~~ .. ~~ .. ~~ .. ~~ .. ~~ .. ~~ ..");
    System.out.println("ANN model successfully created with file name: " + themodel.namamodel + ".model");
    System.out.println("~~ .. ~~ .. ~~ .. ~~ .. ~~ .. ~~ .. ~~ .. ~~ .. ~~ .. \n");
}
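The example above persists the trained network with Debug.saveToFile. As a hedged follow-up sketch, a model saved that way could later be reloaded and applied to new data; the file names mymodel.model and test.arff are placeholders, not from the original code.

// Reload a previously saved MultilayerPerceptron and classify new instances.
MultilayerPerceptron mlp = (MultilayerPerceptron) weka.core.SerializationHelper.read("mymodel.model");
Instances testdata = new DataSource("test.arff").getDataSet();
testdata.setClassIndex(testdata.numAttributes() - 1);
for (int i = 0; i < testdata.numInstances(); i++) {
    double label = mlp.classifyInstance(testdata.instance(i));
    System.out.println(testdata.classAttribute().value((int) label));
}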
From source file: es.bsc.autonomic.powermodeller.tools.classifiers.MultilayerPerceptronClassifier.java
License: Apache License
@Override
protected Classifier buildClassifier(DataSet training_ds) {
    logger.debug("Building MultilayerPerceptron classifier.");
    MultilayerPerceptron model;

    // Get the independent variable index
    String independent = training_ds.getIndependent();
    if (independent == null)
        throw new WekaWrapperException("Independent variable is not set in dataset.");

    try {
        // Read all the instances in the file (ARFF, CSV, XRFF, ...)
        ConverterUtils.DataSource source = new ConverterUtils.DataSource(training_ds.getFilePath());
        Instances instances = source.getDataSet();

        // Set the independent variable (powerWatts).
        instances.setClassIndex(instances.attribute(independent).index());

        // Build a regression model for the given data: a single hidden layer
        // with 4 nodes, trained for 20 epochs.
        model = new weka.classifiers.functions.MultilayerPerceptron();
        model.setHiddenLayers("4");
        model.setTrainingTime(20);

        model.buildClassifier(instances);
    } catch (WekaWrapperException e) {
        logger.error("Error while creating MultilayerPerceptron classifier.", e);
        throw new WekaWrapperException("Error while creating MultilayerPerceptron classifier.");
    } catch (Exception e) {
        logger.error("Error while applying MultilayerPerceptron to data set instances.", e);
        throw new WekaWrapperException("Error while applying MultilayerPerceptron to data set instances.");
    }
    return model;
}
From source file: mlp.MLP.java
/**
 * Build a multilayer perceptron using the given parameters and the training set.
 *
 * @param learningRate  the learning rate for the training
 * @param numberEpochs  number of training epochs
 * @param numberNeurons number of neurons in the hidden layer
 * @param trainingSet   the training set
 * @return the trained MultilayerPerceptron
 * @throws Exception if the classifier cannot be built
 */
public static MultilayerPerceptron buildMLP(double learningRate, int numberEpochs, int numberNeurons,
        Instances trainingSet) throws Exception {
    MultilayerPerceptron mlp = new MultilayerPerceptron();

    // set parameters
    mlp.setLearningRate(learningRate);
    mlp.setTrainingTime(numberEpochs);
    mlp.setHiddenLayers("" + numberNeurons);

    // build multilayer perceptron
    mlp.buildClassifier(trainingSet);
    return mlp;
}
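A possible call site for this helper, as a sketch; the file name train.arff and the parameter values (learning rate 0.3, 500 epochs, 8 hidden neurons) are illustrative assumptions, not from the original project.

// Load training data and delegate construction to buildMLP.
Instances trainingSet = new DataSource("train.arff").getDataSet();
trainingSet.setClassIndex(trainingSet.numAttributes() - 1);
MultilayerPerceptron mlp = MLP.buildMLP(0.3, 500, 8, trainingSet);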
From source file: predictor.Predictor.java
public static void multilayerPerceptron() throws Exception {
    DataSource train = new DataSource(configuration.getWorkspace() + "train_common.arff");
    DataSource test = new DataSource(configuration.getWorkspace() + "test_common.arff");
    Instances trainInstances = train.getDataSet();
    Instances testInstances = test.getDataSet();

    // the last attribute is the class attribute
    trainInstances.setClassIndex(trainInstances.numAttributes() - 1);
    testInstances.setClassIndex(testInstances.numAttributes() - 1);

    // configure the network before building it
    MultilayerPerceptron mlp = new MultilayerPerceptron();
    mlp.setHiddenLayers(configuration.getHiddenLayers());
    mlp.setLearningRate(configuration.getLearningRate());
    mlp.setTrainingTime(configuration.getEpocs());
    mlp.setMomentum(configuration.getMomentum());

    // train classifier
    mlp.buildClassifier(trainInstances);

    // optionally persist the trained model and reload it later:
    // weka.core.SerializationHelper.write("/some/where/nBayes.model", mlp);
    // Classifier cls = (Classifier) weka.core.SerializationHelper.read("/some/where/nBayes.model");

    // evaluate classifier and print some statistics
    Evaluation eval = new Evaluation(trainInstances);
    eval.evaluateModel(mlp, testInstances);
    System.out.println(eval.toSummaryString());
}