Example usage for weka.classifiers.meta AdditiveRegression setNumIterations

Introduction

On this page you can find example usage of weka.classifiers.meta AdditiveRegression setNumIterations.

Prototype

public void setNumIterations(int numIterations) 

Document

Sets the number of iterations to perform (for AdditiveRegression, the number of boosting iterations).
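
The usage examples below are method bodies taken from a larger job class. As a minimal, self-contained sketch (not part of the original examples; the ARFF path is a placeholder), setNumIterations can be exercised like this:

import weka.classifiers.meta.AdditiveRegression;
import weka.classifiers.trees.DecisionStump;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class AdditiveRegressionDemo {
    public static void main(String[] args) throws Exception {
        // load a dataset with a numeric class attribute (placeholder path)
        Instances data = DataSource.read("/path/to/regression.arff");
        data.setClassIndex(data.numAttributes() - 1);

        // boost decision stumps; setNumIterations controls the number of boosting rounds
        AdditiveRegression ar = new AdditiveRegression();
        ar.setClassifier(new DecisionStump());
        ar.setNumIterations(100);

        ar.buildClassifier(data);
        System.out.println(ar);
    }
}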

Usage

From source file: jjj.asap.sas.models1.job.BuildRegressionModels.java

License: Open Source License

@Override
protected void run() throws Exception {

    // validate args
    if (!Bucket.isBucket("datasets", inputBucket)) {
        throw new FileNotFoundException(inputBucket);
    }
    if (!Bucket.isBucket("models", outputBucket)) {
        throw new FileNotFoundException(outputBucket);
    }

    // create prototype classifiers
    List<Classifier> models = new ArrayList<Classifier>();

    LinearRegression m5 = new LinearRegression();
    m5.setAttributeSelectionMethod(M5);

    LinearRegression lr = new LinearRegression();
    lr.setAttributeSelectionMethod(NONE);

    RandomSubSpace rss = new RandomSubSpace();
    rss.setClassifier(lr);
    rss.setNumIterations(30);

    // AdditiveRegression prototypes: setNumIterations() sets the number of boosting rounds
    AdditiveRegression boostedStumps = new AdditiveRegression();
    boostedStumps.setClassifier(new DecisionStump());
    boostedStumps.setNumIterations(1000);

    AdditiveRegression boostedTrees = new AdditiveRegression();
    boostedTrees.setClassifier(new REPTree());
    boostedTrees.setNumIterations(100);

    models.add(m5);
    models.add(boostedStumps);
    models.add(boostedTrees);
    models.add(rss);

    // init multi-threading
    Job.startService();
    final Queue<Future<Object>> queue = new LinkedList<Future<Object>>();

    // get the input from the bucket
    List<String> names = Bucket.getBucketItems("datasets", this.inputBucket);
    for (String dsn : names) {

        for (Classifier model : models) {

            String tag = null;
            if (model instanceof SingleClassifierEnhancer) {
                tag = model.getClass().getSimpleName() + "-"
                        + ((SingleClassifierEnhancer) model).getClassifier().getClass().getSimpleName();
            } else {
                tag = model.getClass().getSimpleName();
            }

            queue.add(Job.submit(new RegressionModelBuilder(dsn, tag, AbstractClassifier.makeCopy(model),
                    this.outputBucket)));
        }
    }

    // wait on complete
    Progress progress = new Progress(queue.size(), this.getClass().getSimpleName());
    while (!queue.isEmpty()) {
        try {
            queue.remove().get();
        } catch (Exception e) {
            Job.log("ERROR", e.toString());
        }
        progress.tick();
    }
    progress.done();
    Job.stopService();

}
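
In this job the value passed to setNumIterations is matched to the strength of the base learner: 1000 boosting iterations for the weak DecisionStump versus 100 for the stronger REPTree, while the RandomSubSpace ensemble uses 30 iterations of its own.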

From source file: jjj.asap.sas.models1.job.RGramModels.java

License: Open Source License

@Override
protected void run() throws Exception {

    // validate args
    if (!Bucket.isBucket("datasets", inputBucket)) {
        throw new FileNotFoundException(inputBucket);
    }
    if (!Bucket.isBucket("models", outputBucket)) {
        throw new FileNotFoundException(outputBucket);
    }

    // create prototype classifiers
    List<Classifier> models = new ArrayList<Classifier>();

    //SGD sgd = new SGD();
    //sgd.setDontNormalize(true);
    //sgd.setLossFunction(new SelectedTag(SGD.SQUAREDLOSS,SGD.TAGS_SELECTION));

    LinearRegression m5 = new LinearRegression();
    m5.setAttributeSelectionMethod(M5);

    //models.add(sgd);
    models.add(m5);

    LinearRegression lr = new LinearRegression();
    lr.setAttributeSelectionMethod(NONE);

    RandomSubSpace rss = new RandomSubSpace();
    rss.setClassifier(lr);
    rss.setNumIterations(30);

    models.add(rss);

    // boosted models: setNumIterations() again controls the number of boosting rounds
    AdditiveRegression boostedStumps = new AdditiveRegression();
    boostedStumps.setClassifier(new DecisionStump());
    boostedStumps.setNumIterations(1000);

    AdditiveRegression boostedTrees = new AdditiveRegression();
    boostedTrees.setClassifier(new REPTree());
    boostedTrees.setNumIterations(100);

    models.add(boostedStumps);
    models.add(boostedTrees);

    models.add(new PLSClassifier());

    // init multi-threading
    Job.startService();
    final Queue<Future<Object>> queue = new LinkedList<Future<Object>>();

    // get the input from the bucket
    List<String> names = Bucket.getBucketItems("datasets", this.inputBucket);
    for (String dsn : names) {

        for (Classifier model : models) {

            String tag = null;
            if (model instanceof SingleClassifierEnhancer) {
                tag = model.getClass().getSimpleName() + "-"
                        + ((SingleClassifierEnhancer) model).getClassifier().getClass().getSimpleName();
            } else {
                tag = model.getClass().getSimpleName();
            }

            queue.add(Job.submit(new RegressionModelBuilder(dsn, tag, AbstractClassifier.makeCopy(model),
                    this.outputBucket)));
        }
    }

    // wait on complete
    Progress progress = new Progress(queue.size(), this.getClass().getSimpleName());
    while (!queue.isEmpty()) {
        try {
            queue.remove().get();
        } catch (Exception e) {
            Job.log("ERROR", e.toString());
        }
        progress.tick();
    }
    progress.done();
    Job.stopService();

}
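
This job builds the same AdditiveRegression prototypes as the previous example (1000 iterations over DecisionStump, 100 over REPTree) alongside the RandomSubSpace ensemble, a PLSClassifier, and a commented-out SGD configuration, then dispatches each dataset/model pair to the thread pool in the same way.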