List of usage examples for the weka.classifiers.lazy.IBk constructor
public IBk(int k)
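The constructor sets the number of nearest neighbours consulted at classification time. A minimal, self-contained sketch of typical usage (the ARFF path and k = 3 are placeholders; it assumes the class is the last attribute):

import java.util.Random;
import weka.classifiers.Evaluation;
import weka.classifiers.lazy.IBk;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class IBkExample {
    public static void main(String[] args) throws Exception {
        Instances data = DataSource.read("data.arff"); // placeholder path
        data.setClassIndex(data.numAttributes() - 1);  // class = last attribute

        IBk ibk = new IBk(3); // consult the 3 nearest neighbours
        Evaluation eval = new Evaluation(data);
        eval.crossValidateModel(ibk, data, 10, new Random(1)); // 10-fold CV
        System.out.println(eval.toSummaryString());
    }
}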
From source file:development.GoodHonoursPrediction.java
public static void main(String[] args) {
    Instances data = ClassifierTools.loadData("C:\\Admin\\Perfomance Analysis\\GoodHonsClassification");
    RandomForest rf = new RandomForest();
    double[][] a = ClassifierTools.crossValidationWithStats(rf, data, data.numInstances());
    System.out.println(" Random forest LOOCV accuracy =" + a[0][0]);
    J48 tree = new J48();
    a = ClassifierTools.crossValidationWithStats(tree, data, data.numInstances());
    System.out.println(" C4.5 LOOCV accuracy =" + a[0][0]);
    IBk knn = new IBk(11);
    knn.setCrossValidate(true); // hold-one-out selection of the best k between 1 and 11
    a = ClassifierTools.crossValidationWithStats(knn, data, data.numInstances());
    System.out.println(" KNN LOOCV accuracy =" + a[0][0]);
    NaiveBayes nb = new NaiveBayes();
    a = ClassifierTools.crossValidationWithStats(nb, data, data.numInstances());
    System.out.println(" Naive Bayes LOOCV accuracy =" + a[0][0]);
    /*
    try {
        tree.buildClassifier(data);
        System.out.println(" Tree =" + tree);
        Classifier cls = new J48();
        Evaluation eval = new Evaluation(data);
        Random rand = new Random(1); // using seed = 1
        int folds = data.numInstances();
        eval.crossValidateModel(cls, data, folds, rand);
        System.out.println(eval.toSummaryString());
        tree.getTechnicalInformation();
    } catch (Exception ex) {
        Logger.getLogger(GoodHonoursPrediction.class.getName()).log(Level.SEVERE, null, ex);
    }
    */
}
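ClassifierTools is a project-specific helper from the same repository. For readers without it, the KNN line of this experiment can be reproduced with Weka's own Evaluation class; a minimal sketch, assuming the same data loaded from an ARFF file (the path is a placeholder):

Instances data = DataSource.read("GoodHonsClassification.arff"); // placeholder path
data.setClassIndex(data.numAttributes() - 1);
IBk knn = new IBk(11);
knn.setCrossValidate(true); // hold-one-out selection of the best k in 1..11
Evaluation eval = new Evaluation(data);
eval.crossValidateModel(knn, data, data.numInstances(), new Random(1)); // folds = n gives LOOCV
System.out.println(" KNN LOOCV accuracy =" + eval.pctCorrect() / 100.0);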
From source file:examples.Pair.java
License: Open Source License
/**
 * @param args the command line arguments
 */
public static void main(String[] args) throws Exception {
    if (args.length != 1) {
        System.out.println("Requires path to the dataset as the first and only argument");
        return;
    }
    final String datasetPath = args[0];
    // Create classifiers
    MultiStageCascading msc = new MultiStageCascading();
    J48 classifier1 = new J48();
    IBk knn = new IBk(3);
    // Set sequence of classifiers
    msc.setClassifiers(new Classifier[] { classifier1, new NBTree() });
    msc.setDebug(true);
    // Set a classifier that will classify an instance that is not classified by all other classifiers
    msc.setLastClassifier(knn);
    // First classifier will have confidence threshold 0.95 and the second one 0.97
    msc.setConfidenceThresholds("0.95,0.97");
    // 80% of instances in training set will be randomly selected to train j-th classifier
    msc.setPercentTrainingInstances(0.8);
    Instances dataset = DataSource.read(datasetPath);
    dataset.setClassIndex(dataset.numAttributes() - 1);
    // Create test and training sets
    Pair<Instances, Instances> sets = seprateTestAndTrainingSets(dataset, 0.7);
    Instances trainingSet = sets.getFirst();
    Instances testSet = sets.getSecond();
    // Build cascade classifier
    msc.buildClassifier(trainingSet);
    // Evaluate created classifier
    Evaluation eval = new Evaluation(trainingSet);
    eval.evaluateModel(msc, testSet);
    System.out.println(eval.toSummaryString("\nResults\n\n", false));
}
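The split helper seprateTestAndTrainingSets is defined elsewhere in Pair.java and not shown here. A hypothetical sketch of what it plausibly does, using only Weka's Instances copy constructor (the fixed seed and the body are assumptions, not the original implementation):

static Pair<Instances, Instances> seprateTestAndTrainingSets(Instances dataset, double trainFraction) {
    Instances shuffled = new Instances(dataset);
    shuffled.randomize(new java.util.Random(1)); // assumed seed for repeatability
    int trainSize = (int) Math.round(shuffled.numInstances() * trainFraction);
    Instances train = new Instances(shuffled, 0, trainSize);
    Instances test = new Instances(shuffled, trainSize, shuffled.numInstances() - trainSize);
    return new Pair<>(train, test);
}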
From source file:hurtowniedanych.FXMLController.java
public void trainAndTestKNN() throws FileNotFoundException, IOException, Exception {
    InstanceQuery instanceQuery = new InstanceQuery();
    instanceQuery.setUsername("postgres");
    instanceQuery.setPassword("szupek");
    instanceQuery.setCustomPropsFile(new File("./src/data/DatabaseUtils.props")); // PostgreSQL settings file
    String query = "select ks.wydawnictwo, ks.gatunek, kl.miasto\n"
            + "from zakupy z, ksiazki ks, klienci kl\n"
            + "where ks.id_ksiazka = z.id_ksiazka and kl.id_klient = z.id_klient";
    instanceQuery.setQuery(query);
    Instances data = instanceQuery.retrieveInstances();
    data.setClassIndex(data.numAttributes() - 1);
    data.randomize(new Random());

    double percent = 70.0;
    int trainSize = (int) Math.round(data.numInstances() * percent / 100);
    int testSize = data.numInstances() - trainSize;
    Instances trainData = new Instances(data, 0, trainSize);
    Instances testData = new Instances(data, trainSize, testSize);

    int lSasiadow = Integer.parseInt(textFieldKnn.getText());
    System.out.println(lSasiadow);

    IBk ibk = new IBk(lSasiadow);
    // Configure the distance function
    EuclideanDistance euclidean = new EuclideanDistance(); // Euclidean
    ManhattanDistance manhatan = new ManhattanDistance();  // city-block
    LinearNNSearch linearNN = new LinearNNSearch();
    if (comboboxOdleglosc.getSelectionModel().getSelectedItem().equals("Manhatan")) {
        linearNN.setDistanceFunction(manhatan);
    } else {
        linearNN.setDistanceFunction(euclidean);
    }
    ibk.setNearestNeighbourSearchAlgorithm(linearNN); // set the neighbour search strategy

    // Build the classifier
    ibk.buildClassifier(trainData);
    Evaluation eval = new Evaluation(trainData);
    eval.evaluateModel(ibk, testData);

    spr.setVisible(true);
    labelKnn.setVisible(true);
    labelOdleglosc.setVisible(true);
    labelKnn.setText(textFieldKnn.getText());
    labelOdleglosc.setText(comboboxOdleglosc.getSelectionModel().getSelectedItem().toString());
    spr.setText(eval.toSummaryString("Wynik:", true));
}
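The IBk-specific step above is routing a custom DistanceFunction through the nearest-neighbour search. Stripped of the GUI and database plumbing, that wiring reduces to a few lines; a sketch assuming a pre-loaded Instances object named train with its class index already set:

IBk ibk = new IBk(5); // the k value here is arbitrary
LinearNNSearch search = new LinearNNSearch();
search.setDistanceFunction(new ManhattanDistance()); // IBk's default is Euclidean
ibk.setNearestNeighbourSearchAlgorithm(search);
ibk.buildClassifier(train);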
From source file:mulan.examples.TrainTestExperiment.java
License: Open Source License
public static void main(String[] args) {
    String[] methodsToCompare = { "HOMER", "BR", "CLR", "MLkNN", "MC-Copy", "IncludeLabels", "MC-Ignore",
            "RAkEL", "LP", "MLStacking" };
    try {
        String path = Utils.getOption("path", args);             // e.g. -path dataset/
        String filestem = Utils.getOption("filestem", args);     // e.g. -filestem emotions
        String percentage = Utils.getOption("percentage", args); // e.g. -percentage 50 (for 50%)
        System.out.println("Loading the dataset");
        MultiLabelInstances mlDataSet = new MultiLabelInstances(path + filestem + ".arff",
                path + filestem + ".xml");

        // split the data set into train and test
        Instances dataSet = mlDataSet.getDataSet();
        // dataSet.randomize(new Random(1));
        RemovePercentage rmvp = new RemovePercentage();
        rmvp.setInvertSelection(true);
        rmvp.setPercentage(Double.parseDouble(percentage));
        rmvp.setInputFormat(dataSet);
        Instances trainDataSet = Filter.useFilter(dataSet, rmvp);

        rmvp = new RemovePercentage();
        rmvp.setPercentage(Double.parseDouble(percentage));
        rmvp.setInputFormat(dataSet);
        Instances testDataSet = Filter.useFilter(dataSet, rmvp);

        MultiLabelInstances train = new MultiLabelInstances(trainDataSet, path + filestem + ".xml");
        MultiLabelInstances test = new MultiLabelInstances(testDataSet, path + filestem + ".xml");

        Evaluator eval = new Evaluator();
        Evaluation results;

        for (int i = 0; i < methodsToCompare.length; i++) {
            if (methodsToCompare[i].equals("BR")) {
                System.out.println(methodsToCompare[i]);
                Classifier brClassifier = new NaiveBayes();
                BinaryRelevance br = new BinaryRelevance(brClassifier);
                br.setDebug(true);
                br.build(train);
                results = eval.evaluate(br, test);
                System.out.println(results);
            }
            if (methodsToCompare[i].equals("LP")) {
                System.out.println(methodsToCompare[i]);
                Classifier lpBaseClassifier = new J48();
                LabelPowerset lp = new LabelPowerset(lpBaseClassifier);
                lp.setDebug(true);
                lp.build(train);
                results = eval.evaluate(lp, test);
                System.out.println(results);
            }
            if (methodsToCompare[i].equals("CLR")) {
                System.out.println(methodsToCompare[i]);
                Classifier clrClassifier = new J48();
                CalibratedLabelRanking clr = new CalibratedLabelRanking(clrClassifier);
                clr.setDebug(true);
                clr.build(train);
                results = eval.evaluate(clr, test);
                System.out.println(results);
            }
            if (methodsToCompare[i].equals("RAkEL")) {
                System.out.println(methodsToCompare[i]);
                MultiLabelLearner lp = new LabelPowerset(new J48());
                RAkEL rakel = new RAkEL(lp);
                rakel.setDebug(true);
                rakel.build(train);
                results = eval.evaluate(rakel, test);
                System.out.println(results);
            }
            if (methodsToCompare[i].equals("MC-Copy")) {
                System.out.println(methodsToCompare[i]);
                Classifier mclClassifier = new J48();
                MultiClassTransformation mcTrans = new Copy();
                MultiClassLearner mcl = new MultiClassLearner(mclClassifier, mcTrans);
                mcl.setDebug(true);
                mcl.build(train);
                results = eval.evaluate(mcl, test);
                System.out.println(results);
            }
            if (methodsToCompare[i].equals("MC-Ignore")) {
                System.out.println(methodsToCompare[i]);
                Classifier mclClassifier = new J48();
                MultiClassTransformation mcTrans = new Ignore();
                MultiClassLearner mcl = new MultiClassLearner(mclClassifier, mcTrans);
                mcl.build(train);
                results = eval.evaluate(mcl, test);
                System.out.println(results);
            }
            if (methodsToCompare[i].equals("IncludeLabels")) {
                System.out.println(methodsToCompare[i]);
                Classifier ilClassifier = new J48();
                IncludeLabelsClassifier il = new IncludeLabelsClassifier(ilClassifier);
                il.setDebug(true);
                il.build(train);
                results = eval.evaluate(il, test);
                System.out.println(results);
            }
            if (methodsToCompare[i].equals("MLkNN")) {
                System.out.println(methodsToCompare[i]);
                int numOfNeighbors = 10;
                double smooth = 1.0;
                MLkNN mlknn = new MLkNN(numOfNeighbors, smooth);
                mlknn.setDebug(true);
                mlknn.build(train);
                results = eval.evaluate(mlknn, test);
                System.out.println(results);
            }
            if (methodsToCompare[i].equals("HMC")) {
                System.out.println(methodsToCompare[i]);
                Classifier baseClassifier = new J48();
                LabelPowerset lp = new LabelPowerset(baseClassifier);
                RAkEL rakel = new RAkEL(lp);
                HMC hmc = new HMC(rakel);
                hmc.build(train);
                results = eval.evaluate(hmc, test);
                System.out.println(results);
            }
            if (methodsToCompare[i].equals("HOMER")) {
                System.out.println(methodsToCompare[i]);
                Classifier baseClassifier = new SMO();
                CalibratedLabelRanking learner = new CalibratedLabelRanking(baseClassifier);
                learner.setDebug(true);
                HOMER homer = new HOMER(learner, 3, HierarchyBuilder.Method.Random);
                homer.setDebug(true);
                homer.build(train);
                results = eval.evaluate(homer, test);
                System.out.println(results);
            }
            if (methodsToCompare[i].equals("MLStacking")) {
                System.out.println(methodsToCompare[i]);
                int numOfNeighbors = 10;
                Classifier baseClassifier = new IBk(numOfNeighbors);
                Classifier metaClassifier = new Logistic();
                MultiLabelStacking mls = new MultiLabelStacking(baseClassifier, metaClassifier);
                mls.setMetaPercentage(1.0);
                mls.setDebug(true);
                mls.build(train);
                results = eval.evaluate(mls, test);
                System.out.println(results);
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}
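In the MLStacking branch, IBk(10) serves as the per-label base learner under mulan's MultiLabelStacking. The same IBk configuration can also be expressed through Weka's option strings rather than setters; a short sketch (-K sets k, -X enables hold-one-out selection of k, -I weights votes by inverse distance):

IBk ibk = new IBk();
ibk.setOptions(weka.core.Utils.splitOptions("-K 10 -X -I"));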
From source file:trabfs.machineLeaningFrameWork.core.AvaliadordeSolucao.java
public double avalia(Solucao s) {
    double precision = 0.0, c;
    try {
        // build the index vector of the selected attributes
        int[] toremove = makeIndex(s);
        // remove the attributes that were not selected
        Remove remove = new Remove();
        remove.setAttributeIndicesArray(toremove);
        remove.setInvertSelection(true);
        remove.setInputFormat(p.getInstances());
        Instances subproblema = Filter.useFilter(p.getInstances(), remove);
        subproblema.setClassIndex(subproblema.numAttributes() - 1);

        // classify and collect the result
        Random rand = new Random(1); // create seeded number generator
        IBk clf = new IBk(K);
        //SimpleNaiveBayes clf = new SimpleNaiveBayes();
        //NaiveBayesSimple clf = new NaiveBayesSimple();
        //clf.buildClassifier(subproblema);
        Evaluation eval = new Evaluation(subproblema);
        eval.crossValidateModel(clf, subproblema, nfolds, rand);
        precision = (double) eval.correct() / subproblema.numInstances();
        calls++;
    } catch (Exception ex) {
        Logger.getLogger(AvaliadordeSolucao.class.getName()).log(Level.SEVERE, null, ex);
    }
    s.setQuality(precision);
    if (precision > this.best) {
        this.best = precision;
    }
    evolucao.add(this.best);
    return s.getQuality();
}
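The fitness computed above is plain cross-validated accuracy. For unweighted data with no missing class values, Weka already exposes the same quantity directly, so an equivalent line would be:

precision = eval.pctCorrect() / 100.0; // same as eval.correct() / subproblema.numInstances()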
From source file:trabfs.machineLeaningFrameWork.search.misc.BlackBox.java
public double startBlackBox(Problema p) {
    try {
        Random r = new Random();
        // create the solution evaluator
        AvaliadordeSolucao ads = new AvaliadordeSolucao(p);
        // configure the search
        GeneticSearch gs = new GeneticSearch();
        gs.setSeed(r.nextInt());
        // configure the subset evaluator
        WrapperSubsetEval wse = new WrapperSubsetEval();
        // configure the classifier
        IBk ibk = new IBk(3);
        // configure attribute selection
        AttributeSelection as = new AttributeSelection();
        // wire the components together
        wse.setClassifier(ibk);
        as.setSearch(gs);
        as.setEvaluator(wse);
        // run the method and collect the solution
        Solucao s = new Solucao(p.getNumAtributos() - 1);
        s.initZero();
        as.SelectAttributes(p.getInstances());
        for (int i = 0; i < as.selectedAttributes().length - 1; i++) {
            s.set(as.selectedAttributes()[i], 1);
        }
        ads.avalia(s);
        return s.getQuality();
    } catch (Exception ex) {
        Logger.getLogger(BlackBox.class.getName()).log(Level.SEVERE, null, ex);
    }
    return 0.0;
}
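Stripped of the project-specific Problema/Solucao plumbing, the Weka portion of this wrapper-selection pattern is self-contained. A minimal sketch, assuming an Instances object named data with its class index set; note that selectedAttributes() returns the class index as its last element, which is why the loop above stops at length - 1:

import weka.attributeSelection.AttributeSelection;
import weka.attributeSelection.GeneticSearch;
import weka.attributeSelection.WrapperSubsetEval;
import weka.classifiers.lazy.IBk;

// GeneticSearch proposes attribute subsets; WrapperSubsetEval scores each
// subset by cross-validating an IBk classifier on it.
WrapperSubsetEval evaluator = new WrapperSubsetEval();
evaluator.setClassifier(new IBk(3));
AttributeSelection selector = new AttributeSelection();
selector.setEvaluator(evaluator);
selector.setSearch(new GeneticSearch());
selector.SelectAttributes(data);
int[] chosen = selector.selectedAttributes(); // selected indices, class index last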