Usage examples for weka.classifiers.Classifier.buildClassifier(Instances)
public abstract void buildClassifier(Instances data) throws Exception;
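Before the project-specific examples below, here is a minimal, self-contained sketch of the typical call pattern, assuming a placeholder ARFF path and Weka's stock J48 learner (this sketch is not taken from any of the sources listed here):

    import weka.classifiers.Classifier;
    import weka.classifiers.trees.J48;
    import weka.core.Instances;
    import weka.core.converters.ConverterUtils.DataSource;

    public class BuildClassifierExample {
        public static void main(String[] args) throws Exception {
            // Load the dataset; the path is a placeholder.
            Instances data = new DataSource("data/weather.nominal.arff").getDataSet();
            // buildClassifier requires the class attribute to be set.
            data.setClassIndex(data.numAttributes() - 1);

            Classifier cls = new J48();   // any concrete Classifier works here
            cls.buildClassifier(data);    // learn the model from the training instances
            System.out.println(cls);
        }
    }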
From source file: myID3.MyId3.java

    public static void main(String[] args) throws IOException, Exception {
        Weka a = new Weka();
        a.setTraining("weather.nominal.arff");
        Classifier b = new MyId3();
        b.buildClassifier(a.getM_Training());
        System.out.println(b.toString());
    }
From source file: naivebayes.NBRun.java

    public static void main(String[] args) throws Exception {
        System.out.println("Naive Bayes Classifier");
        Instances data = TucilWeka.readDataSet("C:\\Program Files\\Weka-3-8\\data\\mush_test.arff");
        Scanner scan = new Scanner(System.in);
        Classifier cls;
        Instances train = TucilWeka.readDataSet("C:\\Program Files\\Weka-3-8\\data\\mush.arff");
        System.out.println("Read or create model? r/c");
        if (scan.next().equals("c")) {
            cls = new NBTubesAI();
            cls.buildClassifier(train);
            TucilWeka.saveModel(train);
        } else {
            cls = (NBTubesAI) TucilWeka.readModel();
        }
        Evaluation eval = new Evaluation(data);
        System.out.println("10 fold cross validate or Full train? c/f");
        if (scan.next().equals("c")) {
            int fold = 10;
            for (int i = 0; i < data.numAttributes(); i++) {
                System.out.println(i + ". " + data.attribute(i));
            }
            eval.crossValidateModel(cls, data, fold, new Random(1));
        } else {
            for (int i = 0; i < data.numAttributes(); i++) {
                System.out.println(i + ". " + data.attribute(i));
            }
            data.deleteWithMissingClass();
            try {
                eval.evaluateModel(cls, data);
            } catch (java.lang.Exception ex) {
                eval.crossValidateModel(cls, data, 11, new Random(1));
            }
        }
        // Classifier cls = new NBTubesAI();
        // cls.buildClassifier(data);
        System.out.println("Evaluation results: ");
        System.out.println(eval.toSummaryString());
        System.out.println(eval.toMatrixString());
        System.out.println(eval.toClassDetailsString());
    }
From source file: naivebayes.TucilWeka.java

    public static Evaluation crossValidation(Instances data) {
        // 10-fold cross validation
        Evaluation eval = null;
        try {
            eval = new Evaluation(data);
            Classifier cls = new NBTubesAI();
            if (cls == null) {
                System.out.println("MODEL CANNOT BE USED");
            } else {
                System.out.println("MODEL IS USED");
            }
            cls.buildClassifier(data);
            // crossValidateModel parameters:
            // 1 = the classifier (here NBTubesAI)
            // 2 = the Instances data
            // 3 = number of folds
            // 4 = randomizer (seed)
            eval.crossValidateModel(cls, data, 10, new Random(1));
        } catch (Exception ex) {
            Logger.getLogger(TucilWeka.class.getName()).log(Level.SEVERE, null, ex);
        }
        return eval;
    }
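For comparison, the same cross-validation pattern with Weka's stock NaiveBayes instead of the project-specific NBTubesAI might look like this (a minimal sketch; the dataset path and class-index choice are assumptions):

    import java.util.Random;
    import weka.classifiers.Evaluation;
    import weka.classifiers.bayes.NaiveBayes;
    import weka.core.Instances;
    import weka.core.converters.ConverterUtils.DataSource;

    public class NaiveBayesCrossValidation {
        public static void main(String[] args) throws Exception {
            // Load a dataset; the path is a placeholder for illustration.
            Instances data = new DataSource("data/mushroom.arff").getDataSet();
            // Assume the class attribute is the last one.
            data.setClassIndex(data.numAttributes() - 1);

            NaiveBayes nb = new NaiveBayes();
            Evaluation eval = new Evaluation(data);
            // crossValidateModel copies and rebuilds the classifier for each fold,
            // so an explicit buildClassifier call is not required beforehand.
            eval.crossValidateModel(nb, data, 10, new Random(1));
            System.out.println(eval.toSummaryString());
        }
    }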
From source file: naivebayes.TucilWeka.java

    public static Evaluation fullTraining(Instances data) {
        // train and evaluate on the full dataset (no hold-out split)
        Evaluation eval = null;
        Instances train = new Instances(data);
        Instances test = new Instances(data);
        try {
            Classifier classify = new NBTubesAI();
            // build the classifier from the training data
            classify.buildClassifier(train);
            eval = new Evaluation(train);
            eval.evaluateModel(classify, test);
        } catch (Exception ex) {
            Logger.getLogger(TucilWeka.class.getName()).log(Level.SEVERE, null, ex);
        }
        return eval;
    }
From source file: naivebayes.TucilWeka.java

    public static Classifier getClassifier(Instances data) {
        Classifier classify = new NBTubesAI();
        try {
            classify.buildClassifier(data);
        } catch (Exception ex) {
            Logger.getLogger(TucilWeka.class.getName()).log(Level.SEVERE, null, ex);
        }
        return classify;
    }
From source file: net.sf.bddbddb.order.WekaInterface.java
License: LGPL

    public static Classifier buildClassifier(String cClassName, Instances data) {
        // Build the classifier by instantiating the named class reflectively.
        Classifier classifier = null;
        try {
            long time = System.currentTimeMillis();
            classifier = (Classifier) Class.forName(cClassName).newInstance();
            classifier.buildClassifier(data);
            if (FindBestDomainOrder.TRACE > 1)
                System.out.println("Classifier " + cClassName + " took "
                        + (System.currentTimeMillis() - time) + " ms to build.");
            if (FindBestDomainOrder.TRACE > 2)
                System.out.println(classifier);
        } catch (Exception x) {
            FindBestDomainOrder.out.println(cClassName + ": " + x.getLocalizedMessage());
            return null;
        }
        return classifier;
    }
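A hypothetical call to this reflection-based helper could look as follows (the class name string and the data variable are assumptions for illustration):

    // Assumes 'data' is an Instances object whose class index has been set.
    Classifier c = WekaInterface.buildClassifier("weka.classifiers.trees.J48", data);
    if (c == null) {
        System.out.println("Classifier could not be built; see the trace output for the cause.");
    }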
From source file: net.sf.jclal.classifier.WekaComitteClassifier.java
License: Open Source License

    /**
     * Constructs the learning model from the dataset.
     *
     * @param instances the instances to use
     * @throws Exception the exception that may be thrown during training
     */
    @Override
    public void buildClassifier(IDataset instances) throws Exception {
        for (Classifier classifier : classifiers) {
            classifier.buildClassifier(instances.getDataset());
        }
    }
From source file: neuralnetwork.NeuralNetwork.java

    /**
     * @param args the command line arguments
     * @throws java.lang.Exception
     */
    public static void main(String[] args) throws Exception {
        ConverterUtils.DataSource source;
        source = new ConverterUtils.DataSource("C:\\Users\\Harvey\\Documents\\iris.csv");
        Instances data = source.getDataSet();
        if (data.classIndex() == -1) {
            data.setClassIndex(data.numAttributes() - 1);
        }
        data.randomize(new Debug.Random(1));

        // split the data into two disjoint sets with RemovePercentage
        RemovePercentage trainFilter = new RemovePercentage();
        trainFilter.setPercentage(70);
        trainFilter.setInputFormat(data);
        Instances train = Filter.useFilter(data, trainFilter);
        trainFilter.setInvertSelection(true);
        trainFilter.setInputFormat(data);
        Instances test = Filter.useFilter(data, trainFilter);

        // initialize the Standardize filter on the training split only,
        // then apply it to both the training and the test split
        Standardize filter = new Standardize();
        filter.setInputFormat(train);
        Instances newTrain = Filter.useFilter(train, filter);
        Instances newTest = Filter.useFilter(test, filter);

        Classifier nNet = new NeuralNet();
        nNet.buildClassifier(newTrain);

        Evaluation eval = new Evaluation(newTrain);
        eval.evaluateModel(nNet, newTest);
        System.out.println(eval.toSummaryString("\nResults\n-------------\n", false));
    }
From source file: newsclassifier.NewsClassifier.java

    public void CrossValidation(Classifier cls, int n) throws Exception {
        data.setClassIndex(0);
        Evaluation eval = new Evaluation(data);
        cls.buildClassifier(data);
        eval.crossValidateModel(cls, data, n, new Random(1));
        System.out.println(eval.toSummaryString("Results", false));
        //System.out.println(eval.toClassDetailsString());
        //System.out.println(eval.toMatrixString());
    }
From source file: nl.bioinf.roelen.thema11.classifier_tools.BoundaryClassifier.java
License: Open Source License

    /**
     * Method to build a classifier.
     * @param fileLocation the ARFF file our attributes are in
     * @param method the method to use for building our classifier
     * @return the classifier object that was built
     */
    public static Classifier build(String fileLocation, String method) {
        // init classifier object
        Classifier classifier = null;
        try {
            // get data
            ConverterUtils.DataSource source = new ConverterUtils.DataSource(fileLocation);
            Instances data = source.getDataSet();
            // remove the name and position entries, these are not important for classifying
            data.deleteAttributeAt(data.numAttributes() - 2);
            data.deleteAttributeAt(data.numAttributes() - 2);
            data.setClassIndex(data.numAttributes() - 1);
            // see what method was given (case labels must match the upper-cased name)
            switch (method.toUpperCase()) {
            case "J48":
                // build an unpruned J48 tree; -U is a J48-specific option
                classifier = new J48();
                classifier.setOptions(new String[] { "-U" });
                break;
            case "ONER":
                // build OneR classifier
                classifier = new OneR();
                break;
            case "ZEROR":
                // build (useless) ZeroR classifier
                classifier = new ZeroR();
                break;
            default:
                // default is building OneR
                classifier = new OneR();
                break;
            }
            // build that thing
            classifier.buildClassifier(data);
        } catch (Exception ex) {
            Logger.getLogger(BoundaryClassifier.class.getName()).log(Level.SEVERE, null, ex);
        }
        return classifier;
    }
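For illustration, a possible caller of this helper might look like the following (the ARFF path and the surrounding main method are assumptions, not part of the original source):

    public static void main(String[] args) {
        // Hypothetical call; "boundaries.arff" is a placeholder path.
        Classifier cls = BoundaryClassifier.build("boundaries.arff", "J48");
        if (cls != null) {
            System.out.println(cls);
        }
    }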