Example usage for weka.classifiers.trees RandomForest RandomForest

List of usage examples for weka.classifiers.trees RandomForest RandomForest

Introduction

In this page you can find the example usage for weka.classifiers.trees RandomForest RandomForest.

Prototype

public RandomForest() 

Source Link

Document

Constructor that sets the base classifier for bagging to RandomTree and the default number of iterations to 100.

Usage

From source file:org.knime.knip.suise.node.boundarymodel.BoundaryModel.java

License:Open Source License

public BoundaryModel() {
    RandomForest r = new RandomForest();
    // r.setNumExecutionSlots(Runtime.getRuntime().availableProcessors());
    m_wekaClassifier = r;/* w  w  w. j a v a 2s . co m*/
    // m_wekaClassifier = new J48();
}

From source file:org.knime.knip.suise.node.boundarymodel.BoundaryModelNodeModel.java

License:Open Source License

/**
 * Builds the settings model that persists the user's Weka classifier choice,
 * defaulting to a {@link RandomForest}.
 *
 * @return a fresh {@code SettingsModelWekaClassifier} keyed as "weka_classifier"
 */
static SettingsModelWekaClassifier createClassifierModel() {
    final RandomForest defaultClassifier = new RandomForest();
    return new SettingsModelWekaClassifier("weka_classifier", defaultClassifier);
}

From source file:org.knime.knip.suise.node.pixclassmodel.PixClassModelNodeModel.java

License:Open Source License

/**
 * Builds the settings model backing the classifier selection dialog,
 * defaulting to a {@link RandomForest}.
 *
 * @return a fresh {@code SettingsModelWekaClassifier} keyed as "weka_classifier"
 */
static final SettingsModelWekaClassifier createClassifierSelectionModel() {
    final RandomForest defaultClassifier = new RandomForest();
    return new SettingsModelWekaClassifier("weka_classifier", defaultClassifier);
}

From source file:org.kramerlab.mlcbmad.classifier.MLCBMaD.java

License:Open Source License

/**
 * Runs a grid search over the MLC-BMaD decomposition parameters t and k,
 * 3-fold cross-validating a RandomForest-backed classifier on the dataset
 * named by the {@code -dataset} command-line option.
 *
 * @param args command-line arguments; must contain {@code -dataset <base>}
 *             where {@code <base>.arff} and {@code <base>.xml} exist
 * @throws Exception if the dataset cannot be loaded or evaluation fails
 */
public static void main(String[] args) throws Exception {
    String datasetbase = Utils.getOption("dataset", args);

    MultiLabelInstances dataset = new MultiLabelInstances(datasetbase + ".arff", datasetbase + ".xml");

    RandomForest rf = new RandomForest();

    // Drive the loop with an integer and derive t from it: repeatedly
    // subtracting 0.1 from a double accumulates rounding error, so the
    // original loop passed drifted values (e.g. 0.7000000000000001) to setT
    // instead of the intended exact tenths 0.9 .. 0.1.
    for (int ti = 9; ti >= 1; ti--) {
        double t = ti / 10.0;
        for (int k = dataset.getLabelIndices().length - 1; k >= 2; k--) {
            MLCBMaD mlcbmad = new MLCBMaD(rf);
            mlcbmad.setDebug(true);

            mlcbmad.setK(k);
            mlcbmad.setT(t);

            Evaluator eval = new Evaluator();
            MultipleEvaluation res = eval.crossValidate(mlcbmad, dataset, 3);
            System.out.println("\n======\nt=" + t + "\nk=" + k + "\n" + res.toString());
        }
    }
}

From source file:oxis.yologp.YOLogPDescriptor.java

License:Open Source License

/**
 * Train a model, erase the other one//from  w ww .  ja  va2  s. com
 *
 * @param String name of the model to save
 */
public void train(String name) throws Exception {

    compute();

    Instances instances = buildDataset();

    model = new RandomForest();

    Map<Object, Object> properties;
    for (DrugStruct drugStruct : listDrug) {

        if (drugStruct.drug.getProperty("flag")) {
            properties = drugStruct.drug.getProperties();
            Instance instance = new DenseInstance(instances.numAttributes()); //28 + 1024
            instance.setDataset(instances);
            for (Object propKey : properties.keySet()) {
                if (!(propKey.equals("hash") || propKey.equals("flag") || propKey.equals("smiles"))) {
                    try {
                        instance.setValue(instances.attribute(propKey.toString()),
                                Double.parseDouble(properties.get(propKey).toString()));
                    } catch (NullPointerException ex) {
                        Logger.getLogger(YOLogPDescriptor.class.getName()).log(Level.WARNING,
                                "Property not used: {0}", propKey.toString());
                    }
                }
            }
            instance.setClassValue(drugStruct.getLogP());
            instances.add(instance);
        }
    }
    model.setNumFeatures(200);
    model.setNumTrees(400);
    model.setMaxDepth(0);
    model.buildClassifier(instances);

    weka.core.SerializationHelper.write(path + name, model);
}

From source file:qa.qcri.nadeef.core.utils.classification.RandomForestClassifier.java

License:Open Source License

public RandomForestClassifier(ExecutionContext executionContext, Schema tableSchema,
        List<String> permittedAttributes, Column newValueColumn, int forestSize)
        throws NadeefDatabaseException {
    super(executionContext, tableSchema, permittedAttributes, newValueColumn);
    this.forestSize = forestSize;

    // initialize the model
    this.classifier = new RandomForest();
    ((RandomForest) this.classifier).setNumTrees(forestSize);
}

From source file:recsys.BuildModel.java

/**
 * Trains a RandomForest on the pre-computed item feature file and serializes
 * the resulting model to disk.
 *
 * <p>Loads {@code train_feature.arff} from a location chosen by
 * {@code StaticVariables.own_training}, removes attributes 1, 2 and 4,
 * resamples towards a uniform class distribution, trains the classifier, and
 * writes it to {@code train.model}.
 *
 * @param args command-line arguments (unused)
 * @throws Exception if loading, filtering, training, or serialization fails
 */
public static void main(String args[]) throws Exception {

    // Select the data/model root depending on the configured training mode.
    int own_training = StaticVariables.own_training;
    DataSource sourceTrain;
    if (own_training == 1)
        sourceTrain = new DataSource("D://own_training//item//feature data//train_feature.arff");
    else
        sourceTrain = new DataSource("E://recsys//item//feature data//train_feature.arff");

    Instances train = sourceTrain.getDataSet();

    // Drop attributes 1, 2 and 4 before training.
    String[] options = new String[2];
    options[0] = "-R"; // attribute index range to remove
    options[1] = "1,2,4";

    Remove remove = new Remove();
    remove.setOptions(options);
    remove.setInputFormat(train); // must be called AFTER setting options
    Instances newData = Filter.useFilter(train, remove);
    System.out.println("number of attributes " + newData.numAttributes());

    System.out.println(newData.firstInstance());

    // Default the class attribute to the last one if the ARFF didn't set it.
    if (newData.classIndex() == -1) {
        newData.setClassIndex(newData.numAttributes() - 1);
    }

    // Resample with full bias (-B 1.0) towards a uniform class distribution.
    Resample sampler = new Resample();
    String Fliteroptions = "-B 1.0";
    sampler.setOptions(weka.core.Utils.splitOptions(Fliteroptions));
    sampler.setRandomSeed((int) System.currentTimeMillis());
    sampler.setInputFormat(newData);
    newData = Filter.useFilter(newData, sampler);

    RandomForest cls = new RandomForest();
    cls.buildClassifier(newData);

    // Serialize the trained model. try-with-resources guarantees the stream
    // is flushed and closed even if writeObject throws — the original code
    // leaked the stream on any exception between open and close.
    String modelPath = own_training == 1
            ? "D://own_training//item//model//train.model"
            : "E://recsys//item//model//train.model";
    try (ObjectOutputStream oos = new ObjectOutputStream(new FileOutputStream(modelPath))) {
        oos.writeObject(cls);
    }
}