List of usage examples for weka.classifiers.lazy IBk setKNN
public void setKNN(int k)
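Before the full examples, here is a minimal, self-contained sketch of calling setKNN on a stock WEKA IBk classifier. The dataset path iris.arff and the choice of k = 5 are placeholders for illustration, not values taken from the examples below.

import weka.classifiers.lazy.IBk;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class IBkSetKnnExample {
    public static void main(String[] args) throws Exception {
        // Load a dataset; "iris.arff" is a placeholder path.
        Instances data = DataSource.read("iris.arff");
        data.setClassIndex(data.numAttributes() - 1);

        IBk knn = new IBk();
        knn.setKNN(5);            // use the 5 nearest neighbours
        knn.buildClassifier(data);

        System.out.println(knn);  // print the configured classifier
    }
}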
From source file: jjj.asap.sas.models1.job.BuildCosineModels.java
License: Open Source License
@Override
protected void run() throws Exception {
    // validate args
    if (!Bucket.isBucket("datasets", inputBucket)) {
        throw new FileNotFoundException(inputBucket);
    }
    if (!Bucket.isBucket("models", outputBucket)) {
        throw new FileNotFoundException(outputBucket);
    }
    // init multi-threading
    Job.startService();
    final Queue<Future<Object>> queue = new LinkedList<Future<Object>>();
    // get the input from the bucket
    List<String> names = Bucket.getBucketItems("datasets", this.inputBucket);
    for (String dsn : names) {
        int essaySet = Contest.getEssaySet(dsn);
        // choose k based on the essay set
        int k = -1;
        switch (essaySet) {
        case 3:
            k = 13;
            break;
        case 5:
        case 7:
            k = 55;
            break;
        case 2:
        case 6:
        case 10:
            k = 21;
            break;
        case 1:
        case 4:
        case 8:
        case 9:
            k = 34;
            break;
        }
        if (k == -1) {
            throw new IllegalArgumentException("no k defined for " + essaySet);
        }
        // nearest-neighbour search over a cosine distance
        LinearNNSearch search = new LinearNNSearch();
        search.setDistanceFunction(new CosineDistance());
        search.setSkipIdentical(false);
        IBk knn = new IBk();
        knn.setKNN(k);
        knn.setDistanceWeighting(INVERSE);
        knn.setNearestNeighbourSearchAlgorithm(search);
        queue.add(Job.submit(new ModelBuilder(dsn, "KNN-" + k, knn, this.outputBucket)));
    }
    // wait on completion
    Progress progress = new Progress(queue.size(), this.getClass().getSimpleName());
    while (!queue.isEmpty()) {
        try {
            queue.remove().get();
        } catch (Exception e) {
            Job.log("ERROR", e.toString());
            e.printStackTrace(System.err);
        }
        progress.tick();
    }
    progress.done();
    Job.stopService();
}
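The example above pairs setKNN with a project-specific CosineDistance and an INVERSE weighting constant from the surrounding codebase. A rough standalone sketch using only stock WEKA classes might look like the following; EuclideanDistance stands in for the custom distance function, the dataset path and k = 21 are placeholders, and new SelectedTag(IBk.WEIGHT_INVERSE, IBk.TAGS_WEIGHTING) is the plain-WEKA way to request inverse-distance weighting.

import java.util.Random;

import weka.classifiers.Evaluation;
import weka.classifiers.lazy.IBk;
import weka.core.EuclideanDistance;
import weka.core.Instances;
import weka.core.SelectedTag;
import weka.core.converters.ConverterUtils.DataSource;
import weka.core.neighboursearch.LinearNNSearch;

public class InverseWeightedKnnSketch {
    public static void main(String[] args) throws Exception {
        // "dataset.arff" is a placeholder; the original job reads its datasets from a bucket.
        Instances data = DataSource.read("dataset.arff");
        data.setClassIndex(data.numAttributes() - 1);

        // EuclideanDistance stands in for the project-specific CosineDistance.
        LinearNNSearch search = new LinearNNSearch();
        search.setDistanceFunction(new EuclideanDistance());
        search.setSkipIdentical(false);

        IBk knn = new IBk();
        knn.setKNN(21);
        // Stock-WEKA equivalent of the INVERSE constant used in the example above.
        knn.setDistanceWeighting(new SelectedTag(IBk.WEIGHT_INVERSE, IBk.TAGS_WEIGHTING));
        knn.setNearestNeighbourSearchAlgorithm(search);

        Evaluation eval = new Evaluation(data);
        eval.crossValidateModel(knn, data, 10, new Random(1));
        System.out.println(eval.toSummaryString());
    }
}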
From source file: tcc.FeatureExtraction.java
public void knn() throws IOException {
    // parse the CSV feature file and save it as ARFF
    CSVLoader loader = new CSVLoader();
    loader.setSource(new File("/root/TCC/Resultados/Parte 4 - Novos Casos/TamuraHaralickMomentos.csv"));
    Instances inst = loader.getDataSet();
    ArffSaver saver = new ArffSaver();
    saver.setInstances(inst);
    saver.setFile(new File("/root/TCC/Resultados/Parte 4 - Novos Casos/TamuraHaralickMomentos.arff"));
    saver.setDestination(new File("/root/TCC/Resultados/Parte 4 - Novos Casos/TamuraHaralickMomentos.arff"));
    saver.writeBatch();

    BufferedReader reader = new BufferedReader(
            new FileReader("/root/TCC/Resultados/Parte 4 - Novos Casos/TamuraHaralickMomentos.arff"));
    Instances data = new Instances(reader);
    reader.close();
    data.setClassIndex(data.numAttributes() - 1);

    // normalize the attributes
    try {
        Normalize norm = new Normalize();
        norm.setInputFormat(data);
        data = Filter.useFilter(data, norm);
    } catch (Exception ex) {
        Logger.getLogger(FeatureExtraction.class.getName()).log(Level.SEVERE, null, ex);
    }

    File csv = new File("/root/TCC/Resultados/knn.csv");
    FileWriter fw = new FileWriter(csv);
    BufferedWriter bw = new BufferedWriter(fw);

    // sweep k from 1 to 50, cross-validate each classifier, and record its AUC
    for (int i = 1; i < 51; i++) {
        // instantiate the classifier
        IBk knn = new IBk();
        knn.setKNN(i);
        try {
            knn.buildClassifier(data);
            Evaluation eval = new Evaluation(data);
            //System.out.println(eval.toSummaryString("\nResults\n======\n", false));
            eval.crossValidateModel(knn, data, 10, new Random(1));
            double auc = eval.areaUnderROC(1);
            System.out.println(auc);
            bw.write(Double.toString(auc));
            bw.newLine();
        } catch (Exception ex) {
            Logger.getLogger(FeatureExtraction.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
    bw.close();
}
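The loop above searches k = 1..50 by hand and writes each AUC to a CSV file. As an alternative, IBk can choose k itself via its built-in hold-one-out cross-validation; the sketch below shows that option, with features.arff as a placeholder for the ARFF file produced above.

import java.util.Random;

import weka.classifiers.Evaluation;
import weka.classifiers.lazy.IBk;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class IBkAutoKSketch {
    public static void main(String[] args) throws Exception {
        // "features.arff" is a placeholder for the ARFF file produced above.
        Instances data = DataSource.read("features.arff");
        data.setClassIndex(data.numAttributes() - 1);

        IBk knn = new IBk();
        knn.setKNN(50);             // upper bound on k, mirroring the 1..50 sweep above
        knn.setCrossValidate(true); // IBk selects the best k <= 50 by hold-one-out CV at prediction time

        Evaluation eval = new Evaluation(data);
        eval.crossValidateModel(knn, data, 10, new Random(1));
        System.out.println("AUC (class index 1): " + eval.areaUnderROC(1));
    }
}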