Example usage for weka.classifiers.functions MultilayerPerceptron setTrainingTime


Introduction

On this page you can find example usage for weka.classifiers.functions MultilayerPerceptron setTrainingTime.

Prototype

public void setTrainingTime(int n) 

Document

Set the number of training epochs to perform.
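Before the project examples below, here is a minimal, self-contained sketch of how setTrainingTime is typically called. The dataset path ("iris.arff") and the epoch count are illustrative placeholders, not values taken from the examples on this page.

import weka.classifiers.functions.MultilayerPerceptron;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class SetTrainingTimeExample {
    public static void main(String[] args) throws Exception {
        // Load a dataset; the path is a placeholder.
        Instances data = new DataSource("iris.arff").getDataSet();
        data.setClassIndex(data.numAttributes() - 1);

        MultilayerPerceptron mlp = new MultilayerPerceptron();
        // Train for 500 epochs instead of the default; the value is illustrative.
        mlp.setTrainingTime(500);
        mlp.buildClassifier(data);
        System.out.println(mlp);
    }
}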

Usage

From source file:anndl.Anndl.java

private static void buildModel(InputStream input) throws Exception {
    ANNDLLexer lexer = new ANNDLLexer(new ANTLRInputStream(input));
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    ANNDLParser parser = new ANNDLParser(tokens);
    ParseTree tree = parser.model();

    ModelVisitor visitor = new ModelVisitor();

    ModelClassifier themodel = (ModelClassifier) visitor.visit(tree);
    //themodel.PrintInfo();
    themodel.extracthidden();

    System.out.println("Membaca File Training...");
    DataSource trainingsoure = new DataSource(themodel.filetraining);
    Instances trainingdata = trainingsoure.getDataSet();
    if (trainingdata.classIndex() == -1) {
        trainingdata.setClassIndex(trainingdata.numAttributes() - 1);
    }

    System.out.println("Melakukan konfigurasi ANN ... ");
    MultilayerPerceptron mlp = new MultilayerPerceptron();
    mlp.setLearningRate(themodel.learningrate);
    mlp.setMomentum(themodel.momentum);
    mlp.setTrainingTime(themodel.epoch);
    mlp.setHiddenLayers(themodel.hidden);

    System.out.println("Melakukan Training data ...");
    mlp.buildClassifier(trainingdata);

    Debug.saveToFile(themodel.namamodel + ".model", mlp);

    System.out.println("\n~~ .. ~~ .. ~~ .. ~~ .. ~~ .. ~~ .. ~~ .. ~~ .. ~~ ..");
    System.out.println("Model ANN Berhasil Diciptakan dengan nama file : " + themodel.namamodel + ".model");
    System.out.println("~~ .. ~~ .. ~~ .. ~~ .. ~~ .. ~~ .. ~~ .. ~~ .. ~~ .. \n");

}
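The example above persists the trained network with Debug.saveToFile, which serializes the model to disk. As a follow-up sketch, the saved file can be read back with weka.core.SerializationHelper; the file name below is a placeholder for whatever themodel.namamodel + ".model" resolves to.

import weka.classifiers.functions.MultilayerPerceptron;
import weka.core.SerializationHelper;

public class LoadSavedModel {
    public static void main(String[] args) throws Exception {
        // Placeholder file name for the model written by Debug.saveToFile above.
        MultilayerPerceptron restored =
                (MultilayerPerceptron) SerializationHelper.read("mymodel.model");
        // The restored classifier can now be used, e.g. via classifyInstance(...).
        System.out.println(restored);
    }
}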

From source file:es.bsc.autonomic.powermodeller.tools.classifiers.MultilayerPerceptronClassifier.java

License:Apache License

@Override
protected Classifier buildClassifier(DataSet training_ds) {

    logger.debug("Building MultilayerPerceptron classifier.");

    MultilayerPerceptron model;

    // Get the independent variable index
    String independent = training_ds.getIndependent();

    if (independent == null)
        throw new WekaWrapperException("Independent variable is not set in dataset.");

    try {

        // Read all the instances in the file (ARFF, CSV, XRFF, ...)
        ConverterUtils.DataSource source = new ConverterUtils.DataSource(training_ds.getFilePath());
        Instances instances = source.getDataSet();

        // Set the independent variable (powerWatts).
        instances.setClassIndex(instances.attribute(independent).index());

        // Builds a regression model for the given data.
        model = new weka.classifiers.functions.MultilayerPerceptron();
        model.setHiddenLayers("4");
        model.setTrainingTime(20);

        // Build the MultilayerPerceptron model
        model.buildClassifier(instances);

    } catch (WekaWrapperException e) {
        logger.error("Error while creating MultilayerPerceptron classifier.", e);
        throw new WekaWrapperException("Error while creating MultilayerPerceptron classifier.");

    } catch (Exception e) {
        logger.error("Error while applying MultilayerPerceptron to data set instances.", e);
        throw new WekaWrapperException("Error while applying MultilayerPerceptron to data set instances.");
    }

    return model;
}
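Once buildClassifier returns, the model can predict the independent variable (powerWatts in this example) for new instances. A minimal sketch, assuming a test ARFF file with the same attributes; the path is a placeholder.

import weka.classifiers.Classifier;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class PredictPower {
    public static void predict(Classifier model) throws Exception {
        // "test.arff" is a placeholder; "powerWatts" matches the independent variable above.
        Instances test = new DataSource("test.arff").getDataSet();
        test.setClassIndex(test.attribute("powerWatts").index());
        double predicted = model.classifyInstance(test.instance(0));
        System.out.println("Predicted power: " + predicted);
    }
}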

From source file:mlp.MLP.java

/**
 * build a multilayer perceptron using the given parameters and the training
 * set
 *
 * @param learningRate the learning rate for the training
 * @param numberEpochs number of training epochs
 * @param numberNeurons number of neurons in the hidden layer
 * @param trainingSet the training set
 * @return the trained multilayer perceptron
 * @throws Exception if the classifier cannot be built
 */
public static MultilayerPerceptron buildMLP(double learningRate, int numberEpochs, int numberNeurons,
        Instances trainingSet) throws Exception {
    MultilayerPerceptron mlp = new MultilayerPerceptron();
    //set parameters
    mlp.setLearningRate(learningRate);
    mlp.setTrainingTime(numberEpochs);
    mlp.setHiddenLayers("" + numberNeurons);
    //build multilayer perceptron
    mlp.buildClassifier(trainingSet);
    return mlp;
}
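A short usage sketch for buildMLP; the ARFF path and the parameter values are illustrative, and the method is assumed to be called on the mlp.MLP class shown above.

import mlp.MLP;
import weka.classifiers.functions.MultilayerPerceptron;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class BuildMLPDemo {
    public static void main(String[] args) throws Exception {
        // "train.arff" is a placeholder; the last attribute is treated as the class.
        Instances train = new DataSource("train.arff").getDataSet();
        train.setClassIndex(train.numAttributes() - 1);

        // learning rate 0.3, 500 epochs, 5 hidden neurons (illustrative values)
        MultilayerPerceptron mlp = MLP.buildMLP(0.3, 500, 5, train);
        System.out.println(mlp);
    }
}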

From source file:predictor.Predictor.java

public static void multilayerPerceptron() throws Exception {

    DataSource train = new DataSource(configuration.getWorkspace() + "train_common.arff");
    DataSource test = new DataSource(configuration.getWorkspace() + "test_common.arff");

    Instances trainInstances = train.getDataSet();
    Instances testInstances = test.getDataSet();

    // the last attribute is the class attribute
    trainInstances.setClassIndex(trainInstances.numAttributes() - 1);
    testInstances.setClassIndex(testInstances.numAttributes() - 1);
    //        Classifier cModel = (Classifier)new MultilayerPerceptron();  
    //        cModel.buildClassifier(trainInstances);  
    //
    //        weka.core.SerializationHelper.write("/some/where/nBayes.model", cModel);
    //
    //        Classifier cls = (Classifier) weka.core.SerializationHelper.read("/some/where/nBayes.model");
    //
    //        // Test the model
    //        Evaluation eTest = new Evaluation(trainInstances);
    //        eTest.evaluateModel(cls, testInstances);

    MultilayerPerceptron mlp = new MultilayerPerceptron();
    // configure the network before building it so the settings take effect
    mlp.setHiddenLayers(configuration.getHiddenLayers());
    mlp.setLearningRate(configuration.getLearningRate());
    mlp.setTrainingTime(configuration.getEpocs());
    mlp.setMomentum(configuration.getMomentum());

    // train classifier
    mlp.buildClassifier(trainInstances);

    // evaluate classifier and print some statistics
    Evaluation eval = new Evaluation(trainInstances);
    eval.evaluateModel(mlp, testInstances);

    System.out.println(eval.toSummaryString());
}