List of usage examples for weka.classifiers.trees.RandomForest

Constructor: public RandomForest()
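A minimal end-to-end sketch of the default constructor in use. The ARFF path is a placeholder, and the snippet assumes the class attribute is the last one:

import java.io.BufferedReader;
import java.io.FileReader;
import weka.classifiers.trees.RandomForest;
import weka.core.Instances;

public class RandomForestQuickStart {
    public static void main(String[] args) throws Exception {
        // Load a training set; "data/train.arff" is a placeholder path.
        Instances data = new Instances(new BufferedReader(new FileReader("data/train.arff")));
        data.setClassIndex(data.numAttributes() - 1);

        // Default constructor; options (number of trees, seed, ...) can be set afterwards.
        RandomForest forest = new RandomForest();
        forest.buildClassifier(data);

        // Predict the class of the first training instance.
        double label = forest.classifyInstance(data.instance(0));
        System.out.println("Predicted: " + data.classAttribute().value((int) label));
    }
}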
From source file:ClassifierBuilder.java
public static MyClassifier buildClassifier(String name) {
    MyClassifier toReturn = new MyClassifier(name);
    switch (name) {
    case "Decision Table Majority":
        toReturn.setClassifier(new DecisionTable());
        break;
    case "Logistic Regression":
        toReturn.setClassifier(new Logistic());
        break;
    case "Multi Layer Perceptron":
        toReturn.setClassifier(new MultilayerPerceptron());
        break;
    case "Naive Bayesian":
        toReturn.setClassifier(new NaiveBayes());
        break;
    case "Random Forest":
        toReturn.setClassifier(new RandomForest());
        break;
    default:
        break;
    }
    return toReturn;
}
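A hypothetical call site for this factory, assuming MyClassifier wraps the chosen Weka Classifier:

MyClassifier mc = ClassifierBuilder.buildClassifier("Random Forest");
// mc now wraps a weka.classifiers.trees.RandomForest instance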
From source file:at.aictopic1.sentimentanalysis.machinelearning.impl.RandomForestClassifier.java
/**
 * Sets the classifier.
 */
@Override
protected void setClassifier() {
    // Use a random forest as the underlying classifier.
    this.usedClassifier = new RandomForest();
    // .. other options
    this.fcClassifier.setClassifier(this.usedClassifier);
}
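The fcClassifier field is presumably a weka.classifiers.meta.FilteredClassifier. A minimal sketch of that wiring, with StringToWordVector as an assumed filter choice for text input:

import weka.classifiers.meta.FilteredClassifier;
import weka.classifiers.trees.RandomForest;
import weka.filters.unsupervised.attribute.StringToWordVector;

public class FilteredForestSketch {
    // Wraps a RandomForest behind a text-vectorizing filter.
    public static FilteredClassifier build() {
        FilteredClassifier fc = new FilteredClassifier();
        fc.setFilter(new StringToWordVector()); // turn string attributes into word-count features
        fc.setClassifier(new RandomForest());
        return fc;
    }
}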
From source file:au.edu.usyd.it.yangpy.sampling.BPSO.java
License:Open Source License
/**
 * The target function in fitness form.
 *
 * @return the average classification accuracy across the five classifiers
 */
public double ensembleClassify() {
    double fitnessValue = 0.0;
    double classifiersScore = 0.0;

    // Load the modified data set.
    try {
        Instances reducedSet = new Instances(new BufferedReader(new FileReader("reduced.arff")));
        reducedSet.setClassIndex(reducedSet.numAttributes() - 1);

        // Calculate the evaluation value using each classifier in turn.
        if (verbose) {
            System.out.println();
            System.out.println(" |----------J4.8-----------|");
            System.out.println(" |       |       |");
        }
        J48 tree = new J48();
        classifiersScore = classify(tree, reducedSet, internalTest);
        fitnessValue += classifiersScore;

        if (verbose) {
            System.out.println();
            System.out.println(" |-----3NearestNeighbor----|");
            System.out.println(" |       |       |");
        }
        IBk nn3 = new IBk(3);
        classifiersScore = classify(nn3, reducedSet, internalTest);
        fitnessValue += classifiersScore;

        if (verbose) {
            System.out.println();
            System.out.println(" |--------NaiveBayes-------|");
            System.out.println(" |       |       |");
        }
        NaiveBayes nb = new NaiveBayes();
        classifiersScore = classify(nb, reducedSet, internalTest);
        fitnessValue += classifiersScore;

        if (verbose) {
            System.out.println();
            System.out.println(" |-------RandomForest------|");
            System.out.println(" |       |       |");
        }
        RandomForest rf5 = new RandomForest();
        rf5.setNumTrees(5);
        classifiersScore = classify(rf5, reducedSet, internalTest);
        fitnessValue += classifiersScore;

        if (verbose) {
            System.out.println();
            System.out.println(" |---------Logistic--------|");
            System.out.println(" |       |       |");
        }
        Logistic log = new Logistic();
        classifiersScore = classify(log, reducedSet, internalTest);
        fitnessValue += classifiersScore;
    } catch (IOException ioe) {
        ioe.printStackTrace();
    }

    // Average the accuracy over the five classifiers.
    fitnessValue /= 5;

    if (verbose) {
        System.out.println();
        System.out.println("Fitness: " + fitnessValue);
        System.out.println("---------------------------------------------------");
    }

    return fitnessValue;
}
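The classify(...) helper is not part of this excerpt. A plausible reconstruction, assuming it trains on the first data set and returns the accuracy measured on the second:

import weka.classifiers.Classifier;
import weka.classifiers.Evaluation;
import weka.core.Instances;

// Hypothetical version of the classify(...) helper used above.
public double classify(Classifier c, Instances train, Instances test) throws Exception {
    c.buildClassifier(train);
    Evaluation eval = new Evaluation(train);
    eval.evaluateModel(c, test);
    return eval.pctCorrect(); // percentage of correctly classified instances
}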
From source file:boa.aggregators.RandomForestAggregator.java
License:Apache License
/** {@inheritDoc} */
@Override
public void finish() throws IOException, InterruptedException {
    int numOfAttributes = this.getVectorSize();
    List<Attribute> attributes = new ArrayList<Attribute>();
    FastVector fvAttributes = new FastVector(numOfAttributes);

    for (int i = 0; i < numOfAttributes; i++) {
        attributes.add(new Attribute("Attribute" + i));
        fvAttributes.addElement(attributes.get(i));
    }

    Instances trainingSet = new Instances("RandomForest", fvAttributes, 1);
    trainingSet.setClassIndex(numOfAttributes - 1);

    for (List<Double> vector : this.vectors.values()) {
        Instance instance = new Instance(numOfAttributes);
        for (int i = 0; i < vector.size(); i++) {
            instance.setValue((Attribute) fvAttributes.elementAt(i), vector.get(i));
        }
        trainingSet.add(instance);
    }

    try {
        this.model = new RandomForest();
        this.model.setOptions(options);
        this.model.buildClassifier(trainingSet);
    } catch (Exception ex) {
        // Training errors are silently swallowed here; consider logging them instead.
    }

    this.saveModel(this.model);
}
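The options array is populated elsewhere in the aggregator. A hedged example of building such an array with weka.core.Utils, where the flag values are purely illustrative:

String[] options = weka.core.Utils.splitOptions("-I 10 -K 0 -S 1");
// -I: number of trees, -K: attributes tried per split (0 = Weka's default), -S: random seed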
From source file:classifier.SellerClassifier.java
public void rebuildModel(String dataset) {
    try {
        myInstances = startFeatureExtraction(loadData(dataset));
        myClassifier = new RandomForest();
        // Build the model and persist both the classifier and the filter.
        myClassifier.buildClassifier(myInstances);
        SerializationHelper.write(modelPath, myClassifier);
        SerializationHelper.write(refPath, myFilter);
    } catch (Exception ex) {
        Logger.getLogger(SellerClassifier.class.getName()).log(Level.SEVERE, null, ex);
    }
}
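The counterpart to this persistence step is deserialization. A sketch of loading the model back, assuming modelPath points at the file written above:

import weka.classifiers.trees.RandomForest;
import weka.core.SerializationHelper;

// Restore the classifier written by rebuildModel(...).
RandomForest restored = (RandomForest) SerializationHelper.read(modelPath);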
From source file:com.edwardraff.WekaMNIST.java
License:Open Source License
public static void main(String[] args) throws Exception {
    String folder = args[0];
    String trainPath = folder + "MNISTtrain.arff";
    String testPath = folder + "MNISTtest.arff";

    System.out.println("Weka Timings");
    Instances mnistTrainWeka = new Instances(new BufferedReader(new FileReader(new File(trainPath))));
    mnistTrainWeka.setClassIndex(mnistTrainWeka.numAttributes() - 1);
    Instances mnistTestWeka = new Instances(new BufferedReader(new FileReader(new File(testPath))));
    mnistTestWeka.setClassIndex(mnistTestWeka.numAttributes() - 1);

    // Normalize attribute ranges into [0, 1].
    Normalize normalizeFilter = new Normalize();
    normalizeFilter.setInputFormat(mnistTrainWeka);
    mnistTestWeka = Normalize.useFilter(mnistTestWeka, normalizeFilter);
    mnistTrainWeka = Normalize.useFilter(mnistTrainWeka, normalizeFilter);

    long start, end;

    System.out.println("RBF SVM (Full Cache)");
    SMO smo = new SMO();
    smo.setKernel(new RBFKernel(mnistTrainWeka, 0 /* 0 causes Weka to cache the whole matrix */, 0.015625));
    smo.setC(8.0);
    smo.setBuildLogisticModels(false);
    evalModel(smo, mnistTrainWeka, mnistTestWeka);

    System.out.println("RBF SVM (No Cache)");
    smo = new SMO();
    smo.setKernel(new RBFKernel(mnistTrainWeka, 1, 0.015625));
    smo.setC(8.0);
    smo.setBuildLogisticModels(false);
    evalModel(smo, mnistTrainWeka, mnistTestWeka);

    System.out.println("Decision Tree C45");
    J48 wekaC45 = new J48();
    wekaC45.setUseLaplace(false);
    wekaC45.setCollapseTree(false);
    wekaC45.setUnpruned(true);
    wekaC45.setMinNumObj(2);
    wekaC45.setUseMDLcorrection(true);
    evalModel(wekaC45, mnistTrainWeka, mnistTestWeka);

    System.out.println("Random Forest 50 trees");
    // Weka uses different defaults, so make sure both use the published heuristic.
    int featuresToUse = (int) Math.sqrt(28 * 28);
    RandomForest wekaRF = new RandomForest();
    wekaRF.setNumExecutionSlots(1);
    wekaRF.setMaxDepth(0 /* 0 for unlimited */);
    wekaRF.setNumFeatures(featuresToUse);
    wekaRF.setNumTrees(50);
    evalModel(wekaRF, mnistTrainWeka, mnistTestWeka);

    System.out.println("1-NN (brute)");
    IBk wekaNN = new IBk(1);
    wekaNN.setNearestNeighbourSearchAlgorithm(new LinearNNSearch());
    wekaNN.setCrossValidate(false);
    evalModel(wekaNN, mnistTrainWeka, mnistTestWeka);

    System.out.println("1-NN (Ball Tree)");
    wekaNN = new IBk(1);
    wekaNN.setNearestNeighbourSearchAlgorithm(new BallTree());
    wekaNN.setCrossValidate(false);
    evalModel(wekaNN, mnistTrainWeka, mnistTestWeka);

    System.out.println("1-NN (Cover Tree)");
    wekaNN = new IBk(1);
    wekaNN.setNearestNeighbourSearchAlgorithm(new CoverTree());
    wekaNN.setCrossValidate(false);
    evalModel(wekaNN, mnistTrainWeka, mnistTestWeka);

    System.out.println("Logistic Regression LBFGS lambda = 1e-4");
    Logistic logisticLBFGS = new Logistic();
    logisticLBFGS.setRidge(1e-4);
    logisticLBFGS.setMaxIts(500);
    evalModel(logisticLBFGS, mnistTrainWeka, mnistTestWeka);

    System.out.println("k-means (Lloyd)");
    int origClassIndex = mnistTrainWeka.classIndex();
    mnistTrainWeka.setClassIndex(-1);
    mnistTrainWeka.deleteAttributeAt(origClassIndex);
    {
        long totalTime = 0;
        for (int i = 0; i < 10; i++) {
            SimpleKMeans wekaKMeans = new SimpleKMeans();
            wekaKMeans.setNumClusters(10);
            wekaKMeans.setNumExecutionSlots(1);
            wekaKMeans.setFastDistanceCalc(true);
            start = System.currentTimeMillis();
            wekaKMeans.buildClusterer(mnistTrainWeka);
            end = System.currentTimeMillis();
            totalTime += (end - start);
        }
        System.out.println("\tClustering took: " + (totalTime / 10.0) / 1000.0 + " on average");
    }
}
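The evalModel(...) helper is not included in this excerpt. A plausible sketch, assuming it times training and reports test-set accuracy through Weka's Evaluation class:

// Hypothetical reconstruction of evalModel(...): times training and prints accuracy.
private static void evalModel(Classifier model, Instances train, Instances test) throws Exception {
    long start = System.currentTimeMillis();
    model.buildClassifier(train);
    long end = System.currentTimeMillis();
    Evaluation eval = new Evaluation(train);
    eval.evaluateModel(model, test);
    System.out.println("\tTrained in " + (end - start) / 1000.0 + " s, accuracy " + eval.pctCorrect() + " %");
}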
From source file:com.rokittech.ml.server.utils.MLUtils.java
License:Open Source License
public static Classifier getClassifier(String mlAlgorithm) {
    notEmpty(mlAlgorithm);

    Classifier classifier;
    switch (mlAlgorithm.toUpperCase()) {
    case "J48": {
        classifier = new J48();
        break;
    }
    case "IBK": {
        classifier = new IBk();
        break;
    }
    case "NAIVE_BAYES": {
        classifier = new NaiveBayes();
        break;
    }
    case "RANDOM_TREE": {
        classifier = new RandomTree();
        break;
    }
    case "RANDOM_FOREST": {
        classifier = new RandomForest();
        break;
    }
    case "BOOSTING": {
        // Note: this returns the DecisionStump base learner rather than a
        // boosting meta-classifier such as AdaBoostM1.
        classifier = new DecisionStump();
        break;
    }
    case "BAGGING": {
        classifier = new Bagging();
        break;
    }
    default:
        throw new UnsupportedOperationException("Classifier " + mlAlgorithm + " is not supported.");
    }
    return classifier;
}
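A hypothetical call site; the toUpperCase() call makes the lookup case-insensitive:

Classifier rf = MLUtils.getClassifier("random_forest");
// rf is a weka.classifiers.trees.RandomForest with default settings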
From source file:cz.vse.fis.keg.entityclassifier.core.salience.EntitySaliencer.java
License:Open Source License
private void trainModel() {
    BufferedReader reader = null;
    try {
        URL fileURL = THDController.getInstance().getClass().getResource(Settings.SALIENCE_DATASET);
        File arffFile = new File(fileURL.getFile());
        reader = new BufferedReader(new FileReader(arffFile));

        Instances data = new Instances(reader);
        data.setClassIndex(data.numAttributes() - 1);

        // classifier = new NaiveBayes();
        classifier = new RandomForest();

        // Train the classifier.
        classifier.buildClassifier(data);
        System.out.println("Model was successfully trained.");
    } catch (FileNotFoundException ex) {
        Logger.getLogger(EntitySaliencer.class.getName()).log(Level.SEVERE, null, ex);
    } catch (IOException ex) {
        Logger.getLogger(EntitySaliencer.class.getName()).log(Level.SEVERE, null, ex);
    } catch (Exception ex) {
        Logger.getLogger(EntitySaliencer.class.getName()).log(Level.SEVERE, null, ex);
    } finally {
        try {
            // Guard against a null reader when the dataset could not be opened.
            if (reader != null) {
                reader.close();
            }
        } catch (IOException ex) {
            Logger.getLogger(EntitySaliencer.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
}
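Once trained, the model would typically be applied through distributionForInstance. A sketch, assuming instance is a feature vector compatible with the training header and that the second class value means "salient":

// Class-probability distribution for one candidate entity.
double[] dist = classifier.distributionForInstance(instance);
double salienceProbability = dist[1]; // assumption: index 1 is the "salient" class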
From source file:de.fub.maps.project.detector.model.inference.impl.RandomForestInferenceModel.java
License:Open Source License
@Override
protected Classifier createClassifier() {
    classifier = new RandomForest();
    return classifier;
}
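A hedged variant for when non-default settings are wanted before the classifier is handed back; the values are illustrative:

@Override
protected Classifier createClassifier() {
    RandomForest rf = new RandomForest();
    rf.setNumTrees(100);        // illustrative; older Weka releases default to 10
    rf.setNumExecutionSlots(4); // grow trees on 4 threads
    classifier = rf;
    return classifier;
}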
From source file:de.tudarmstadt.ukp.dkpro.spelling.experiments.hoo2012.featureextraction.AllFeaturesExtractor.java
License:Apache License
private Classifier getClassifier() throws Exception {
    Classifier cl = null;

    // Build and evaluate classifier.
    // The options given correspond to the default settings in the WEKA GUI.
    if (classifier.equals("smo")) {
        SMO smo = new SMO();
        smo.setOptions(Utils.splitOptions(
                "-C 1.0 -L 0.001 -P 1.0E-12 -N 0 -V -1 -W 1 -K \"weka.classifiers.functions.supportVector.PolyKernel -C 250007 -E 1.0\""));
        cl = smo;
    } else if (classifier.equals("j48")) {
        J48 j48 = new J48();
        j48.setOptions(new String[] { "-C", "0.25", "-M", "2" });
        cl = j48;
    } else if (classifier.equals("naivebayes")) {
        cl = new NaiveBayes();
    } else if (classifier.equals("randomforest")) {
        RandomForest rf = new RandomForest();
        rf.setOptions(Utils.splitOptions("-I 10 -K 0 -S 1"));
        cl = rf;
    }

    return cl;
}
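The "-I 10 -K 0 -S 1" options for RandomForest map onto explicit setters; an equivalent setter-based configuration:

RandomForest rf = new RandomForest();
rf.setNumTrees(10);   // -I: number of trees
rf.setNumFeatures(0); // -K: attributes per split (0 = Weka's default heuristic)
rf.setSeed(1);        // -S: random seed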