List of usage examples for weka.core Instances numAttributes
public int numAttributes()
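Before the examples, a minimal sketch of the idiom nearly all of them share (the class name and the ARFF file name are placeholders, not from any of the sources below): numAttributes() counts every attribute, class included, so numAttributes() - 1 is the index of the last attribute, which these examples use as the class index.

import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class NumAttributesDemo {
    public static void main(String[] args) throws Exception {
        // "iris.arff" is a placeholder; point this at any ARFF file.
        Instances data = DataSource.read("iris.arff");
        System.out.println("Attributes (including class): " + data.numAttributes());
        // Common idiom: treat the last attribute as the class.
        if (data.classIndex() == -1) {
            data.setClassIndex(data.numAttributes() - 1);
        }
    }
}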
From source file:A_MachineLearning.java
private void jButton7ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton7ActionPerformed
    Instances data;
    try {
        data = new Instances(new BufferedReader(new FileReader(this.file2 + ".arff")));

        // Append a nominal "target" attribute as the last attribute.
        Instances newData = new Instances(data);
        Add filter = new Add();
        filter.setAttributeIndex("last");
        filter.setNominalLabels("rods,punctua,networks");
        filter.setAttributeName("target");
        filter.setInputFormat(newData);
        newData = Filter.useFilter(newData, filter);
        System.out.print(newData);

        Vector<String> vec = new Vector<>();
        newData.setClassIndex(newData.numAttributes() - 1);
        // Header compatibility check (note: it compares newData with itself, so it always passes).
        if (!newData.equalHeaders(newData)) {
            throw new IllegalArgumentException("Train and test are not compatible!");
        }

        // Load the serialized model from the classpath and classify every instance.
        URL urlToModel = this.getClass().getResource("/" + "Final.model");
        InputStream stream = urlToModel.openStream();
        Classifier cls = (Classifier) weka.core.SerializationHelper.read(stream);
        System.out.println("PROVANT MODEL.classifyInstance");
        for (int i = 0; i < newData.numInstances(); i++) {
            double pred = cls.classifyInstance(newData.instance(i));
            double[] dist = cls.distributionForInstance(newData.instance(i));
            System.out.print((i + 1) + " - ");
            System.out.print(newData.classAttribute().value((int) pred) + " - ");
            System.out.println(Utils.arrayToString(dist));
            vec.add(newData.classAttribute().value((int) pred));
        }

        // Tally the predicted morphology labels.
        int p = 0, n = 0, r = 0;
        for (String label : vec) {
            if ("rods".equals(label)) r++;
            if ("punctua".equals(label)) p++;
            if ("networks".equals(label)) n++;
        }

        // Write the full prediction list once, after counting.
        try (PrintWriter out = new PrintWriter(this.file2 + "_morphology.txt")) {
            out.println(vec);
        } catch (Exception ex) {
            ex.printStackTrace();
        }

        System.out.println("VECTOR-> punctua: " + p + ", rods: " + r + ", networks: " + n);
        IJ.showMessage("Your file: " + this.file2 + ".arff"
                + "\nhas been analysed; it is composed of -> punctua: " + p
                + ", rods: " + r + ", networks: " + n);
        txtresults1.setText("Your file: " + this.file2 + ".arff"
                + "\nhas been analysed; it is composed of: \npunctua: " + p
                + ", rods: " + r + ", networks: " + n);
    } catch (IOException ex) {
        Logger.getLogger(MachinLearningInterface.class.getName()).log(Level.SEVERE, null, ex);
    } catch (Exception ex) {
        Logger.getLogger(MachinLearningInterface.class.getName()).log(Level.SEVERE, null, ex);
    }
    IJ.showMessage("analysing complete");
}
From source file:homemadeWEKA.java
public static Instances loadData(String filename) {
    Instances data = null;
    try {
        data = DataSource.read(filename);
        // If the file format does not define a class attribute, default to the last one.
        if (data.classIndex() == -1) {
            data.setClassIndex(data.numAttributes() - 1);
        }
    } catch (Exception ex) {
        Logger.getLogger(homemadeWEKA.class.getName()).log(Level.SEVERE, null, ex);
    }
    return data;
}
From source file:ClassificationClass.java
public Evaluation cls_svm(Instances data) {
    Evaluation eval = null;
    try {
        data.setClassIndex(data.numAttributes() - 1);
        Classifier classifier = new SMO();
        classifier.buildClassifier(data);
        eval = new Evaluation(data);
        eval.evaluateModel(classifier, data);
    } catch (Exception ex) {
        Logger.getLogger(ClassificationClass.class.getName()).log(Level.SEVERE, null, ex);
    }
    return eval;
}
From source file:ClassificationClass.java
public Evaluation cls_knn(Instances data) {
    Evaluation eval = null;
    try {
        data.setClassIndex(data.numAttributes() - 1);
        Classifier classifier = new IBk();
        classifier.buildClassifier(data);
        eval = new Evaluation(data);
        eval.evaluateModel(classifier, data);
        System.out.println(eval.weightedFMeasure());
    } catch (Exception ex) {
        Logger.getLogger(ClassificationClass.class.getName()).log(Level.SEVERE, null, ex);
    }
    return eval;
}
From source file:ClassificationClass.java
public Evaluation cls_naivebayes(Instances data) {
    Evaluation eval = null;
    try {
        data.setClassIndex(data.numAttributes() - 1);
        Classifier classifier = new NaiveBayes();
        classifier.buildClassifier(data);
        eval = new Evaluation(data);
        eval.evaluateModel(classifier, data);
        System.out.println(eval.toSummaryString());
    } catch (Exception ex) {
        Logger.getLogger(ClassificationClass.class.getName()).log(Level.SEVERE, null, ex);
    }
    return eval;
}
From source file:ClassificationClass.java
public Evaluation cls_c4_5(Instances data) {
    Evaluation eval = null;
    try {
        data.setClassIndex(data.numAttributes() - 1);
        Classifier classifier = new J48();
        classifier.buildClassifier(data);
        eval = new Evaluation(data);
        eval.evaluateModel(classifier, data);
        System.out.println(eval.toSummaryString());
    } catch (Exception ex) {
        Logger.getLogger(ClassificationClass.class.getName()).log(Level.SEVERE, null, ex);
    }
    return eval;
}
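Note that the four ClassificationClass methods above evaluate each classifier on the same data it was trained on, which yields an optimistic accuracy estimate. A minimal sketch of the usual alternative, 10-fold cross-validation with Weka's Evaluation API (the wrapper class and the choice of J48 are illustrative, not from the original source):

import java.util.Random;
import weka.classifiers.Evaluation;
import weka.classifiers.trees.J48;
import weka.core.Instances;

public class CrossValidationSketch {
    // Builds an Evaluation by 10-fold cross-validation instead of resubstitution.
    public static Evaluation crossValidate(Instances data) throws Exception {
        data.setClassIndex(data.numAttributes() - 1);
        Evaluation eval = new Evaluation(data);
        eval.crossValidateModel(new J48(), data, 10, new Random(1));
        System.out.println(eval.toSummaryString());
        return eval;
    }
}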
From source file:RunExhaustiveSearch.java
License: Open Source License
public static void main(String[] arg) throws Exception {
    // Load data.
    System.out.println("\nLoading sample file...");
    DataSource source = new DataSource(arg[0]);
    Instances data = source.getDataSet();
    if (data.classIndex() == -1)
        data.setClassIndex(data.numAttributes() - 1);

    int n = Integer.parseInt(arg[1]);
    System.out.println("Instance with " + n + " features!");
    System.out.println("\nRunning ES algorithm with CFS cost function...");

    // Run feature selection algorithm ES using CFS cost function.
    runAttributeSelection(data, n);
}
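The runAttributeSelection helper is not included in the snippet above. A hedged sketch of what such a helper might look like with Weka's attribute-selection API, pairing CfsSubsetEval with ExhaustiveSearch as the surrounding comments suggest (the class name and method body are assumptions, not the original author's code):

import weka.attributeSelection.AttributeSelection;
import weka.attributeSelection.CfsSubsetEval;
import weka.attributeSelection.ExhaustiveSearch;
import weka.core.Instances;
import weka.core.Utils;

public class AttributeSelectionSketch {
    // Illustrative stand-in for the helper referenced above.
    public static void runAttributeSelection(Instances data, int n) throws Exception {
        AttributeSelection selector = new AttributeSelection();
        selector.setEvaluator(new CfsSubsetEval());  // CFS merit function
        selector.setSearch(new ExhaustiveSearch());  // exhaustive subset search
        selector.SelectAttributes(data);
        // n is kept only to match the call site; CFS decides the subset size itself.
        System.out.println("Selected attribute indices: "
                + Utils.arrayToString(selector.selectedAttributes()));
    }
}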
From source file:classificationPLugin.java
private void ClassifyActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_ClassifyActionPerformed
    this.name = txtdirecotry2.getText();
    System.out.println(this.name);

    // Convert the selected CSV file to ARFF.
    try {
        CSVLoader loader = new CSVLoader();
        loader.setSource(new File(this.name));
        Instances data = loader.getDataSet();
        System.out.println(data);
        String arffile = this.name + ".arff";
        System.out.println(arffile);
        ArffSaver saver = new ArffSaver();
        saver.setInstances(data);
        saver.setFile(new File(arffile));
        saver.writeBatch();
    } catch (IOException ex) {
        Logger.getLogger(MachinLearningInterface.class.getName()).log(Level.SEVERE, null, ex);
    }

    // Show the generated ARFF file in the text component.
    try {
        FileReader reader = new FileReader(this.name + ".arff");
        BufferedReader br = new BufferedReader(reader);
        instance.read(br, null);
        br.close();
        instance.requestFocus();
    } catch (Exception e2) {
        System.out.println(e2);
    }

    Instances data;
    try {
        data = new Instances(new BufferedReader(new FileReader(this.name + ".arff")));

        // Append a nominal "target" attribute as the last attribute.
        Instances newData = new Instances(data);
        Add filter = new Add();
        filter.setAttributeIndex("last");
        filter.setNominalLabels("rods,punctua,networks");
        filter.setAttributeName("target");
        filter.setInputFormat(newData);
        newData = Filter.useFilter(newData, filter);
        System.out.print(newData);

        Vector<String> vec = new Vector<>();
        newData.setClassIndex(newData.numAttributes() - 1);
        // Header compatibility check (note: it compares newData with itself, so it always passes).
        if (!newData.equalHeaders(newData)) {
            throw new IllegalArgumentException("Train and test are not compatible!");
        }

        // Load the serialized model from the classpath and classify every instance.
        URL urlToModel = this.getClass().getResource("/" + "Final.model");
        InputStream stream = urlToModel.openStream();
        Classifier cls = (Classifier) weka.core.SerializationHelper.read(stream);
        System.out.println("PROVANT MODEL.classifyInstance");
        for (int i = 0; i < newData.numInstances(); i++) {
            double pred = cls.classifyInstance(newData.instance(i));
            double[] dist = cls.distributionForInstance(newData.instance(i));
            System.out.print((i + 1) + " - ");
            System.out.print(newData.classAttribute().value((int) pred) + " - ");
            System.out.println(Utils.arrayToString(dist));
            vec.add(newData.classAttribute().value((int) pred));
        }

        // Tally the predicted morphology labels.
        int p = 0, n = 0, r = 0;
        for (String label : vec) {
            if ("rods".equals(label)) r++;
            if ("punctua".equals(label)) p++;
            if ("networks".equals(label)) n++;
        }

        // Write the full prediction list once, after counting.
        try (PrintWriter out = new PrintWriter(this.name + "_morphology.txt")) {
            out.println(vec);
        } catch (Exception ex) {
            ex.printStackTrace();
        }

        System.out.println("VECTOR-> punctua: " + p + ", rods: " + r + ", networks: " + n);
        IJ.showMessage("Your file: " + this.name + ".arff"
                + "\nhas been analysed; it is composed of -> \npunctua: " + p
                + ", rods: " + r + ", networks: " + n);
        classi.setText("Your file: " + this.name + ".arff"
                + "\nhas been analysed; it is composed of: \npunctua: " + p
                + ", rods: " + r + ", networks: " + n);
    } catch (IOException ex) {
        Logger.getLogger(MachinLearningInterface.class.getName()).log(Level.SEVERE, null, ex);
    } catch (Exception ex) {
        Logger.getLogger(MachinLearningInterface.class.getName()).log(Level.SEVERE, null, ex);
    }

    // ImageJ cleanup: clear results and close any leftover windows.
    IJ.run("Clear Results");
    IJ.run("Close All", "");
    if (WindowManager.getFrame("Results") != null) {
        IJ.selectWindow("Results");
        IJ.run("Close");
    }
    if (WindowManager.getFrame("Summary") != null) {
        IJ.selectWindow("Summary");
        IJ.run("Close");
    }
    if (WindowManager.getFrame("ROI Manager") != null) {
        IJ.selectWindow("ROI Manager");
        IJ.run("Close");
    }
    IJ.run("Close All", "");
}
From source file:CopiaSeg3.java
public static void main(String[] args) throws Exception {
    BufferedReader datafile = readDataFile("breast-cancer-wisconsin.arff");
    Instances data = new Instances(datafile);
    data.setClassIndex(data.numAttributes() - 1);

    // Choose the number of partitions for validation (4 = 75% train, 25% test).
    Instances[] split = split(data, 4);

    // Separate the sets into training and testing arrays.
    Instances trainingSplits = split[0];
    Instances testingSplits = split[1];

    // Choose a set of classifiers.
    Classifier[] models = {
        new MultilayerPerceptron()
        //, new J48()
        //, ...
    };

    // Run each classifier.
    for (int j = 0; j < models.length; j++) {
        // Collect every group of predictions for the current model in a FastVector.
        FastVector predictions = new FastVector();

        // For each training-testing split pair, train and test the classifier.
        Evaluation validation = simpleClassify(models[j], trainingSplits, testingSplits);
        predictions.appendElements(validation.predictions());

        // Uncomment to see the summary for each training-testing pair.
        System.out.println(models[j].toString());

        // Calculate the overall accuracy of the current classifier on all splits.
        double accuracy = calculateAccuracy(predictions);

        // Print the current classifier's name and accuracy.
        System.out.println(models[j].getClass().getSimpleName() + " Accuracy: "
                + String.format("%.2f%%", accuracy) + "\n=====================");

        // Step 4: use the classifier.
        // For real-world applications, actually using the classifier is the goal.
        // Build an instance (named iUse) and attach it to the training set so it
        // inherits the dataset's attribute definitions; the class value stays missing.
        Instance iUse = new DenseInstance(trainingSplits.numAttributes());
        iUse.setDataset(trainingSplits);
        double[] featureValues = { 4, 8, 8, 5, 4, 5, 10, 4, 1 };
        for (int a = 0; a < featureValues.length; a++) {
            iUse.setValue(a, featureValues[a]);
        }

        // Get the likelihood of each class: fDistribution[0] is the probability of
        // the first class value, fDistribution[1] that of the second.
        double[] fDistribution = models[j].distributionForInstance(iUse);
        System.out.println("Probability positive: " + fDistribution[0]);
        System.out.println("Probability negative: " + fDistribution[1]);
    }
}
From source file:MLP.java
MLP() {
    try {
        FileReader trainreader = new FileReader("C:\\new.arff");
        FileReader testreader = new FileReader("C:\\new.arff");
        Instances train = new Instances(trainreader);
        Instances test = new Instances(testreader);
        train.setClassIndex(train.numAttributes() - 1);
        test.setClassIndex(test.numAttributes() - 1);

        // -L learning rate, -M momentum, -N training epochs, -H hidden-layer sizes.
        MultilayerPerceptron mlp = new MultilayerPerceptron();
        mlp.setOptions(Utils.splitOptions("-L 0.3 -M 0.2 -N 500 -V 0 -S 0 -E 20 -H 4"));
        mlp.buildClassifier(train);

        Evaluation eval = new Evaluation(train);
        eval.evaluateModel(mlp, test);
        System.out.println(eval.toSummaryString("\nResults\n======\n", false));

        trainreader.close();
        testreader.close();
    } catch (Exception ex) {
        ex.printStackTrace();
    }
}