List of usage examples for weka.classifiers.trees.RandomTree.setKValue
public void setKValue(int k)
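Before the sourced examples below, here is a minimal, self-contained sketch of the call in isolation (the file name iris.arff and the class name RandomTreeKValueDemo are placeholders, not taken from any of the examples). It trains a single RandomTree and passes 0 to setKValue, so Weka falls back to its default number of randomly chosen attributes per node, roughly log2 of the number of predictors plus 1:

import weka.classifiers.trees.RandomTree;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class RandomTreeKValueDemo {
    public static void main(String[] args) throws Exception {
        // Load a dataset; the path is a placeholder for any ARFF file.
        Instances data = DataSource.read("iris.arff");
        data.setClassIndex(data.numAttributes() - 1);

        RandomTree tree = new RandomTree();
        // K = number of randomly chosen attributes considered at each node;
        // 0 tells Weka to use its default (about log2(#predictors) + 1).
        tree.setKValue(0);
        tree.setSeed(1);
        tree.buildClassifier(data);

        System.out.println(tree);
    }
}

Passing a positive value instead fixes the number of attributes sampled at each split; the Random Forest style examples below use it that way, deriving K from the number of features.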
From source file:com.walmart.productgenome.matching.models.EMSRandomForest.java
License:Open Source License
/**
 * Builds a classifier for a set of instances.
 *
 * @param data the instances to train the classifier with
 * @throws Exception if something goes wrong
 */
public void buildClassifier(Instances data) throws Exception {

    // can classifier handle the data?
    getCapabilities().testWithFail(data);

    // remove instances with missing class
    data = new Instances(data);
    data.deleteWithMissingClass();

    m_bagger = new Bagging();
    RandomTree rTree = new RandomTree();

    // set up the random tree options
    m_KValue = m_numFeatures;
    if (m_KValue < 1)
        m_KValue = (int) Utils.log2(data.numAttributes()) + 1;
    rTree.setKValue(m_KValue);
    rTree.setMaxDepth(getMaxDepth());

    // set up the bagger and build the forest
    m_bagger.setClassifier(rTree);
    m_bagger.setSeed(m_randomSeed);
    m_bagger.setNumIterations(m_numTrees);
    m_bagger.setCalcOutOfBag(true);
    m_bagger.setNumExecutionSlots(m_numExecutionSlots);
    m_bagger.buildClassifier(data);
}
From source file:controller.MineroControler.java
public String clasificardorArbolAleat(String atributo) {
    BufferedReader breader = null;
    Instances datos = null;

    breader = new BufferedReader(fuente_arff);
    try {
        datos = new Instances(breader);
        Attribute atr = datos.attribute(atributo);
        datos.setClass(atr);
        //datos.setClassIndex(0);
    } catch (IOException ex) {
        System.err.println("Problemas al intentar cargar los datos"); // problem trying to load the data
        return null;
    }

    // RandomTree: constructs a tree that considers K randomly chosen attributes at each node.
    RandomTree arbol = new RandomTree();
    try {
        arbol.setNumFolds(100);
        arbol.setKValue(0);   // 0 = use Weka's default K (about log2(#predictors) + 1)
        arbol.setMinNum(1);
        arbol.setMaxDepth(0); // 0 = unlimited depth
        arbol.setSeed(1);
        arbol.buildClassifier(datos);
    } catch (Exception ex) {
        // problem running the classification algorithm
        System.err.println("Problemas al ejecutar algoritmo de clasificacion" + ex.getLocalizedMessage());
    }
    return arbol.toString();
}
From source file:KFST.featureSelection.embedded.TreeBasedMethods.DecisionTreeBasedMethod.java
License:Open Source License
/**
 * {@inheritDoc }
 */
@Override
protected String buildClassifier(Instances dataTrain) {
    try {
        if (TREE_TYPE == TreeType.C45) {
            J48 decisionTreeC45 = new J48();
            decisionTreeC45.setConfidenceFactor((float) confidenceValue);
            decisionTreeC45.setMinNumObj(minNumSampleInLeaf);
            decisionTreeC45.buildClassifier(dataTrain);
            return decisionTreeC45.toString();
        } else if (TREE_TYPE == TreeType.RANDOM_TREE) {
            RandomTree decisionTreeRandomTree = new RandomTree();
            decisionTreeRandomTree.setKValue(randomTreeKValue);
            decisionTreeRandomTree.setMaxDepth(randomTreeMaxDepth);
            decisionTreeRandomTree.setMinNum(randomTreeMinNum);
            decisionTreeRandomTree.setMinVarianceProp(randomTreeMinVarianceProp);
            decisionTreeRandomTree.buildClassifier(dataTrain);
            return decisionTreeRandomTree.toString();
        }
    } catch (Exception ex) {
        Logger.getLogger(DecisionTreeBasedMethod.class.getName()).log(Level.SEVERE, null, ex);
    }
    return "";
}
From source file:learning.DMRandomForest.java
License:Open Source License
/**
 * Builds a classifier for a set of instances.
 *
 * @param data the instances to train the classifier with
 * @throws Exception if something goes wrong
 */
public void buildClassifier(Instances data) throws Exception {

    // can classifier handle the data?
    getCapabilities().testWithFail(data);

    // remove instances with missing class
    data = new Instances(data);
    data.deleteWithMissingClass();

    m_bagger = new DMBagging();
    RandomTree rTree = new RandomTree();

    // set up the random tree options
    m_KValue = m_numFeatures;
    if (m_KValue < 1)
        m_KValue = (int) Utils.log2(data.numAttributes()) + 1;
    rTree.setKValue(m_KValue);
    rTree.setMaxDepth(getMaxDepth());

    // set up the bagger and build the forest
    m_bagger.setClassifier(rTree);
    m_bagger.setSeed(m_randomSeed);
    m_bagger.setNumIterations(m_numTrees);
    m_bagger.setCalcOutOfBag(true);
    m_bagger.buildClassifier(data);
}
From source file:org.openml.webapplication.fantail.dc.landmarking.RandomTreeBasedLandmarker2.java
License:Open Source License
public Map<String, Double> characterize(Instances data) {
    int seed = m_Seed;
    Random r = new Random(seed);
    int numFolds = m_NumFolds;

    double score1 = 0.5;
    double score2 = 0.5;
    double score3 = 0.5;
    double score4 = 0.5;
    double score5 = 0.5;
    double score6 = 0.5;

    weka.classifiers.trees.RandomTree cls = new weka.classifiers.trees.RandomTree();
    cls.setSeed(r.nextInt());
    cls.setKValue(m_K);
    // cls.setMaxDepth(1);
    try {
        weka.classifiers.Evaluation eval = new weka.classifiers.Evaluation(data);
        eval.crossValidateModel(cls, data, numFolds, new java.util.Random(1));
        score1 = eval.pctIncorrect();
        score2 = eval.kappa();
    } catch (Exception e) {
        e.printStackTrace();
    }

    // re-seed the same tree and evaluate again
    // cls = new weka.classifiers.trees.RandomTree();
    cls.setSeed(r.nextInt());
    cls.setKValue(m_K);
    // cls.setMaxDepth(2);
    try {
        weka.classifiers.Evaluation eval = new weka.classifiers.Evaluation(data);
        eval.crossValidateModel(cls, data, numFolds, new java.util.Random(1));
        score3 = eval.pctIncorrect();
        score4 = eval.kappa();
    } catch (Exception e) {
        e.printStackTrace();
    }

    // cls = new weka.classifiers.trees.RandomTree();
    cls.setSeed(r.nextInt());
    cls.setKValue(m_K);
    // cls.setMaxDepth(3);
    try {
        weka.classifiers.Evaluation eval = new weka.classifiers.Evaluation(data);
        eval.crossValidateModel(cls, data, numFolds, new java.util.Random(1));
        score5 = eval.pctIncorrect();
        score6 = eval.kappa();
    } catch (Exception e) {
        e.printStackTrace();
    }

    // the last two runs are evaluated but their results are not recorded
    // cls = new weka.classifiers.trees.RandomTree();
    cls.setSeed(r.nextInt());
    cls.setKValue(m_K);
    // cls.setMaxDepth(4);
    try {
        weka.classifiers.Evaluation eval = new weka.classifiers.Evaluation(data);
        eval.crossValidateModel(cls, data, numFolds, new java.util.Random(1));
    } catch (Exception e) {
        e.printStackTrace();
    }

    // cls = new weka.classifiers.trees.RandomTree();
    cls.setSeed(r.nextInt());
    cls.setKValue(m_K);
    // cls.setMaxDepth(5);
    try {
        weka.classifiers.Evaluation eval = new weka.classifiers.Evaluation(data);
        eval.crossValidateModel(cls, data, numFolds, new java.util.Random(1));
    } catch (Exception e) {
        e.printStackTrace();
    }

    Map<String, Double> qualities = new HashMap<String, Double>();
    qualities.put(ids[0], score1);
    qualities.put(ids[1], score2);
    qualities.put(ids[2], score3);
    qualities.put(ids[3], score4);
    qualities.put(ids[4], score5);
    qualities.put(ids[5], score6);
    return qualities;
}