Usage examples for weka.core.Instances.setClassIndex
public void setClassIndex(int classIndex)
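Before the per-project examples below, here is a minimal, self-contained sketch of the typical pattern (the file name "iris.arff" is a placeholder, not from any of the sources):

import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class SetClassIndexDemo {
    public static void main(String[] args) throws Exception {
        // Load any ARFF/CSV file; "iris.arff" is a placeholder.
        Instances data = DataSource.read("iris.arff");
        // ARFF files do not store a class index, so it must be set explicitly;
        // by convention the last attribute is used as the class.
        if (data.classIndex() == -1)
            data.setClassIndex(data.numAttributes() - 1);
        System.out.println("Class attribute: " + data.classAttribute().name());
    }
}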
From source file:ClassificationClass.java
public Evaluation cls_naivebayes(Instances data) {
    Evaluation eval = null;
    try {
        Classifier classifier;
        PreparingSteps preparingSteps = new PreparingSteps();
        data.setClassIndex(data.numAttributes() - 1);
        classifier = new NaiveBayes();
        classifier.buildClassifier(data);
        eval = new Evaluation(data);
        eval.evaluateModel(classifier, data);
        System.out.println(eval.toSummaryString());
    } catch (Exception ex) {
        Logger.getLogger(ClassificationClass.class.getName()).log(Level.SEVERE, null, ex);
    }
    return eval;
}
From source file:ClassificationClass.java
public Evaluation cls_c4_5(Instances data) {
    Evaluation eval = null;
    try {
        Classifier classifier;
        PreparingSteps preparingSteps = new PreparingSteps();
        data.setClassIndex(data.numAttributes() - 1);
        classifier = new J48();
        classifier.buildClassifier(data);
        eval = new Evaluation(data);
        eval.evaluateModel(classifier, data);
        System.out.println(eval.toSummaryString());
    } catch (Exception ex) {
        Logger.getLogger(ClassificationClass.class.getName()).log(Level.SEVERE, null, ex);
    }
    return eval;
}
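Note that both methods above train and evaluate on the same data, which overstates accuracy. A minimal sketch of a cross-validated variant (the method name and the 10-fold choice are assumptions, not part of the original source):

public Evaluation cls_naivebayes_cv(Instances data) {
    Evaluation eval = null;
    try {
        data.setClassIndex(data.numAttributes() - 1);
        Classifier classifier = new NaiveBayes();
        eval = new Evaluation(data);
        // 10-fold cross-validation instead of evaluating on the training data.
        eval.crossValidateModel(classifier, data, 10, new java.util.Random(1));
        System.out.println(eval.toSummaryString());
    } catch (Exception ex) {
        Logger.getLogger(ClassificationClass.class.getName()).log(Level.SEVERE, null, ex);
    }
    return eval;
}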
From source file:RunExhaustiveSearch.java
License:Open Source License
public static void main(String[] arg) throws Exception {
    // Load data.
    System.out.println("\nLoading sample file...");
    DataSource source = new DataSource(arg[0]);
    Instances data = source.getDataSet();
    if (data.classIndex() == -1)
        data.setClassIndex(data.numAttributes() - 1);

    int n = Integer.parseInt(arg[1]);
    System.out.println("Instance with " + n + " features!");

    System.out.println("\nRunning ES algorithm with CFS cost function...");
    // Run feature selection algorithm ES using CFS cost function.
    // runAttributeSelection(data, n);
}
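The runAttributeSelection helper is not included in this listing. A plausible sketch of what it might look like, assuming Weka's ExhaustiveSearch and CfsSubsetEval from the weka.attributeSelection package (ExhaustiveSearch ships with older Weka releases or as an optional package in newer ones; the method body and the unused n parameter are assumptions):

import weka.attributeSelection.AttributeSelection;
import weka.attributeSelection.CfsSubsetEval;
import weka.attributeSelection.ExhaustiveSearch;
import weka.core.Instances;
import weka.core.Utils;

public class RunExhaustiveSearchSketch {
    // Hypothetical helper: exhaustively searches attribute subsets,
    // scoring each subset with the CFS merit function.
    protected static void runAttributeSelection(Instances data, int n) throws Exception {
        AttributeSelection attsel = new AttributeSelection();
        attsel.setEvaluator(new CfsSubsetEval());  // CFS cost function
        attsel.setSearch(new ExhaustiveSearch());  // exhaustive subset search
        attsel.SelectAttributes(data);
        int[] indices = attsel.selectedAttributes();
        // n (the expected feature count) is printed by the caller; it is unused here.
        System.out.println("Selected attribute indices: " + Utils.arrayToString(indices));
    }
}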
From source file:classificationPLugin.java
private void ClassifyActionPerformed(java.awt.event.ActionEvent evt) {
    this.name = txtdirecotry2.getText();
    System.out.println(this.name);

    // Load the CSV and save it as ARFF.
    try {
        CSVLoader loader = new CSVLoader();
        loader.setSource(new File(this.name));
        Instances data = loader.getDataSet();
        System.out.println(data);
        String arffile = this.name + ".arff";
        System.out.println(arffile);
        ArffSaver saver = new ArffSaver();
        saver.setInstances(data);
        saver.setFile(new File(arffile));
        saver.writeBatch();
    } catch (IOException ex) {
        Logger.getLogger(MachinLearningInterface.class.getName()).log(Level.SEVERE, null, ex);
    }

    // Show the ARFF file in the text component.
    try {
        FileReader reader = new FileReader(this.name + ".arff");
        BufferedReader br = new BufferedReader(reader);
        instance.read(br, null);
        br.close();
        instance.requestFocus();
    } catch (Exception e2) {
        System.out.println(e2);
    }

    Instances data;
    try {
        data = new Instances(new BufferedReader(new FileReader(this.name + ".arff")));

        // Append a nominal class attribute and make it the class index.
        Instances newData = new Instances(data);
        Add filter = new Add();
        filter.setAttributeIndex("last");
        filter.setNominalLabels("rods,punctua,networks");
        filter.setAttributeName("target");
        filter.setInputFormat(newData);
        newData = Filter.useFilter(newData, filter);
        System.out.print(newData);

        Vector<String> vec = new Vector<>();
        newData.setClassIndex(newData.numAttributes() - 1);
        // Note: this compares newData's header with itself, so it can never fail as written.
        if (!newData.equalHeaders(newData)) {
            throw new IllegalArgumentException("Train and test are not compatible!");
        }

        // Load the serialized model from the classpath and classify every instance.
        URL urlToModel = this.getClass().getResource("/" + "Final.model");
        InputStream stream = urlToModel.openStream();
        Classifier cls = (Classifier) weka.core.SerializationHelper.read(stream);
        System.out.println("PROVANT MODEL.classifyInstance");
        for (int i = 0; i < newData.numInstances(); i++) {
            double pred = cls.classifyInstance(newData.instance(i));
            double[] dist = cls.distributionForInstance(newData.instance(i));
            System.out.print((i + 1) + " - ");
            System.out.print(newData.classAttribute().value((int) pred) + " - ");
            System.out.println(Utils.arrayToString(dist));
            vec.add(newData.classAttribute().value((int) pred));
        }

        // Count the predicted labels.
        int p = 0, n = 0, r = 0;
        for (String label : vec) {
            if ("rods".equals(label)) r++;
            if ("punctua".equals(label)) p++;
            if ("networks".equals(label)) n++;
        }

        // Write the predictions once, after counting (the original re-created
        // the PrintWriter inside the counting loop, rewriting the file each pass).
        try (PrintWriter out = new PrintWriter(this.name + "_morphology.txt")) {
            out.println(vec);
        } catch (Exception ex) {
            ex.printStackTrace();
        }

        System.out.println("VECTOR-> punctua: " + p + ", rods: " + r + ", networks: " + n);
        IJ.showMessage("Your file:" + this.name + ".arff"
                + "\nhas been analysed, and it is composed by-> \npunctua: " + p
                + ", rods: " + r + ", networks: " + n);
        classi.setText("Your file:" + this.name + ".arff"
                + "\nhas been analysed, and it is composed by: \npunctua: " + p
                + ", rods: " + r + ", networks: " + n);
    } catch (IOException ex) {
        Logger.getLogger(MachinLearningInterface.class.getName()).log(Level.SEVERE, null, ex);
    } catch (Exception ex) {
        Logger.getLogger(MachinLearningInterface.class.getName()).log(Level.SEVERE, null, ex);
    }

    // Tidy up the ImageJ windows.
    IJ.run("Clear Results");
    IJ.run("Close All", "");
    if (WindowManager.getFrame("Results") != null) {
        IJ.selectWindow("Results");
        IJ.run("Close");
    }
    if (WindowManager.getFrame("Summary") != null) {
        IJ.selectWindow("Summary");
        IJ.run("Close");
    }
    if (WindowManager.getFrame("ROI Manager") != null) {
        IJ.selectWindow("ROI Manager");
        IJ.run("Close");
    }
    IJ.run("Close All", "roiManager");
    IJ.run("Close All", "");
}
From source file:CopiaSeg3.java
public static void main(String[] args) throws Exception {
    BufferedReader datafile = readDataFile("breast-cancer-wisconsin.arff");

    Instances data = new Instances(datafile);
    data.setClassIndex(data.numAttributes() - 1);

    // Choose the number of partitions for validation (4 = 75% train, 25% test).
    Instances[] split = split(data, 4);

    // Separate the sets into the training and testing arrays.
    Instances trainingSplits = split[0];
    Instances testingSplits = split[1];

    // Choose a set of classifiers.
    Classifier[] models = { new MultilayerPerceptron()
            //, new J48()
            //, ...
    };

    // Run each classifier.
    for (int j = 0; j < models.length; j++) {
        // Collect every group of predictions for the current model in a FastVector.
        FastVector predictions = new FastVector();

        // For each training-testing split pair, train and test the classifier.
        Evaluation validation = simpleClassify(models[j], trainingSplits, testingSplits);
        predictions.appendElements(validation.predictions());

        // Print the summary for each training-testing pair.
        System.out.println(models[j].toString());

        // Calculate the overall accuracy of the current classifier on all splits.
        double accuracy = calculateAccuracy(predictions);

        // Print the current classifier's name and accuracy.
        System.out.println(models[j].getClass().getSimpleName() + " Accuracy: "
                + String.format("%.2f%%", accuracy) + "\n=====================");

        // Step 4: use the classifier.
        // For real-world applications, the actual use of the classifier is the
        // ultimate goal. Build an instance (named iUse) and attach it to the
        // training set so it inherits the dataset description.
        Instance iUse = new DenseInstance(9);
        iUse.setDataset(trainingSplits);
        // (The original code cast elements of `predictions` to Attribute, but that
        // vector holds Prediction objects; setting values via the dataset's
        // attributes is what was intended.)
        iUse.setValue(trainingSplits.attribute(0), 4);
        iUse.setValue(trainingSplits.attribute(1), 8);
        iUse.setValue(trainingSplits.attribute(2), 8);
        iUse.setValue(trainingSplits.attribute(3), 5);
        iUse.setValue(trainingSplits.attribute(4), 4);
        iUse.setValue(trainingSplits.attribute(5), 5);
        iUse.setValue(trainingSplits.attribute(6), 10);
        iUse.setValue(trainingSplits.attribute(7), 4);
        iUse.setValue(trainingSplits.attribute(8), 1);

        // Get the likelihood of each class:
        // fDistribution[0] is the probability of being positive,
        // fDistribution[1] the probability of being negative.
        double[] fDistribution = models[j].distributionForInstance(iUse);
        System.out.println("Positive probability: " + fDistribution[0]);
        System.out.println("Negative probability: " + fDistribution[1]);
    }
}
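The helpers readDataFile, split, simpleClassify and calculateAccuracy are not part of this listing. A plausible sketch of simpleClassify matching the call above (this is an assumption, not the original implementation; it uses weka.classifiers.Classifier, weka.classifiers.Evaluation and weka.core.Instances):

public static Evaluation simpleClassify(Classifier model, Instances trainingSet, Instances testingSet)
        throws Exception {
    Evaluation validation = new Evaluation(trainingSet);
    model.buildClassifier(trainingSet);          // train on the training split
    validation.evaluateModel(model, testingSet); // evaluate on the held-out split
    return validation;
}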
From source file:MLP.java
MLP() {
    try {
        FileReader trainreader = new FileReader("C:\\new.arff");
        FileReader testreader = new FileReader("C:\\new.arff");
        Instances train = new Instances(trainreader);
        Instances test = new Instances(testreader);
        train.setClassIndex(train.numAttributes() - 1);
        test.setClassIndex(test.numAttributes() - 1);

        MultilayerPerceptron mlp = new MultilayerPerceptron();
        mlp.setOptions(Utils.splitOptions("-L 0.3 -M 0.2 -N 500 -V 0 -S 0 -E 20 -H 4"));
        mlp.buildClassifier(train);

        Evaluation eval = new Evaluation(train);
        eval.evaluateModel(mlp, test);
        System.out.println(eval.toSummaryString("\nResults\n======\n", false));

        trainreader.close();
        testreader.close();
    } catch (Exception ex) {
        ex.printStackTrace();
    }
}
From source file:LabeledItemSet.java
License:Open Source License
/**
 * Splits the class attribute away. Depending on the invert flag, either the
 * instances without the class attribute or only the class attribute of all
 * instances are returned.
 *
 * @param instances the instances
 * @param invert flag; if true only the class attribute remains, otherwise the
 *        class attribute is the only attribute that is deleted
 * @throws Exception if the instances cannot be split
 * @return instances without the class attribute, or instances with only the class attribute
 */
public static Instances divide(Instances instances, boolean invert) throws Exception {
    Instances newInstances = new Instances(instances);
    if (instances.classIndex() < 0)
        throw new Exception("For class association rule mining a class attribute has to be specified.");
    if (invert) {
        for (int i = 0; i < newInstances.numAttributes(); i++) {
            if (i != newInstances.classIndex()) {
                newInstances.deleteAttributeAt(i);
                i--; // deletion shifts the remaining attributes left
            }
        }
        return newInstances;
    } else {
        newInstances.setClassIndex(-1); // unset the class so it can be deleted
        newInstances.deleteAttributeAt(instances.classIndex());
        return newInstances;
    }
}
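A minimal sketch of how divide might be called (the dataset name is a placeholder): given a dataset whose class attribute is set, divide(data, false) strips the class column, while divide(data, true) keeps only it. Note the setClassIndex(-1) call above, which unsets the class index so the class attribute becomes deletable.

import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class DivideDemo {
    public static void main(String[] args) throws Exception {
        // Hypothetical input; class association rule mining requires a class attribute.
        Instances data = DataSource.read("weather.nominal.arff");
        data.setClassIndex(data.numAttributes() - 1);

        Instances itemsOnly = LabeledItemSet.divide(data, false); // everything except the class
        Instances classOnly = LabeledItemSet.divide(data, true);  // only the class attribute

        System.out.println("Items header:\n" + itemsOnly.toSummaryString());
        System.out.println("Class header:\n" + classOnly.toSummaryString());
    }
}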
From source file:dialog1.java
private void jButton1ActionPerformed(java.awt.event.ActionEvent evt) {
    // Load the CSV and save it as ARFF.
    try {
        CSVLoader loader = new CSVLoader();
        loader.setSource(new File(txtfilename.getText() + "_complete.csv"));
        Instances data = loader.getDataSet();
        System.out.println(data);
        String arffile = this.name3 + ".arff";
        System.out.println(arffile);
        ArffSaver saver = new ArffSaver();
        saver.setInstances(data);
        saver.setFile(new File(arffile));
        saver.writeBatch();
    } catch (IOException ex) {
        Logger.getLogger(MachinLearningInterface.class.getName()).log(Level.SEVERE, null, ex);
    }

    Instances data;
    try {
        data = new Instances(new BufferedReader(new FileReader(this.name3 + ".arff")));

        // Append a nominal class attribute and make it the class index.
        Instances newData = new Instances(data);
        Add filter = new Add();
        filter.setAttributeIndex("last");
        filter.setNominalLabels("rods,punctua,networks");
        filter.setAttributeName("target");
        filter.setInputFormat(newData);
        newData = Filter.useFilter(newData, filter);
        System.out.print(newData);

        Vector<String> vec = new Vector<>();
        newData.setClassIndex(newData.numAttributes() - 1);
        // Note: this compares newData's header with itself, so it can never fail as written.
        if (!newData.equalHeaders(newData)) {
            throw new IllegalArgumentException("Train and test are not compatible!");
        }

        // Load the serialized model from the classpath and classify every instance.
        InputStream stream = this.getClass().getResourceAsStream("/" + "Final.model");
        Classifier cls = (Classifier) weka.core.SerializationHelper.read(stream);
        System.out.println("PROVANT MODEL.classifyInstance");
        for (int i = 0; i < newData.numInstances(); i++) {
            double pred = cls.classifyInstance(newData.instance(i));
            double[] dist = cls.distributionForInstance(newData.instance(i));
            System.out.print((i + 1) + " - ");
            System.out.print(newData.classAttribute().value((int) pred) + " - ");
            System.out.println(Utils.arrayToString(dist));
            vec.add(newData.classAttribute().value((int) pred));
        }

        // Count the predicted labels.
        int p = 0, n = 0, r = 0;
        for (String label : vec) {
            if ("rods".equals(label)) r++;
            if ("punctua".equals(label)) p++;
            if ("networks".equals(label)) n++;
        }

        // Write the predictions once, after counting (the original re-created
        // the PrintWriter inside the counting loop, rewriting the file each pass).
        try (PrintWriter out = new PrintWriter(this.name3 + "_morphology.txt")) {
            out.println(vec);
        } catch (Exception ex) {
            ex.printStackTrace();
        }

        System.out.println("VECTOR-> punctua: " + p + ", rods: " + r + ", networks: " + n);
        IJ.showMessage("Your file:" + this.name3 + ".arff"
                + "\nhas been analysed, and it is composed by-> punctua: " + p
                + ", rods: " + r + ", networks: " + n);
    } catch (IOException ex) {
        Logger.getLogger(MachinLearningInterface.class.getName()).log(Level.SEVERE, null, ex);
    } catch (Exception ex) {
        Logger.getLogger(MachinLearningInterface.class.getName()).log(Level.SEVERE, null, ex);
    }

    // Tidy up the ImageJ windows, then close the dialog.
    IJ.run("Clear Results");
    IJ.run("Close All", "");
    if (WindowManager.getFrame("Results") != null) {
        IJ.selectWindow("Results");
        IJ.run("Close");
    }
    if (WindowManager.getFrame("Summary") != null) {
        IJ.selectWindow("Summary");
        IJ.run("Close");
    }
    if (WindowManager.getFrame("ROI Manager") != null) {
        IJ.selectWindow("ROI Manager");
        IJ.run("Close");
    }
    IJ.run("Close All", "roiManager");
    IJ.run("Close All", "");
    setVisible(false);
    dispose();
}
From source file:MultiClassClassifier.java
License:Open Source License
/**
 * Builds the classifiers.
 *
 * @param insts the training data
 * @throws Exception if a classifier can't be built
 */
public void buildClassifier(Instances insts) throws Exception {
    Instances newInsts;

    // Can the classifier handle the data?
    getCapabilities().testWithFail(insts);

    // Remove instances with a missing class.
    insts = new Instances(insts);
    insts.deleteWithMissingClass();

    if (m_Classifier == null) {
        throw new Exception("No base classifier has been set!");
    }
    m_ZeroR = new ZeroR();
    m_ZeroR.buildClassifier(insts);

    m_TwoClassDataset = null;

    int numClassifiers = insts.numClasses();
    if (numClassifiers <= 2) {
        m_Classifiers = Classifier.makeCopies(m_Classifier, 1);
        m_Classifiers[0].buildClassifier(insts);
        m_ClassFilters = null;
    } else if (m_Method == METHOD_1_AGAINST_1) {
        // Generate a FastVector of class-index pairs.
        FastVector pairs = new FastVector();
        for (int i = 0; i < insts.numClasses(); i++) {
            for (int j = 0; j < insts.numClasses(); j++) {
                if (j <= i)
                    continue;
                int[] pair = new int[2];
                pair[0] = i;
                pair[1] = j;
                pairs.addElement(pair);
            }
        }
        numClassifiers = pairs.size();
        m_Classifiers = Classifier.makeCopies(m_Classifier, numClassifiers);
        m_ClassFilters = new Filter[numClassifiers];
        m_SumOfWeights = new double[numClassifiers];

        // Generate the classifiers.
        for (int i = 0; i < numClassifiers; i++) {
            RemoveWithValues classFilter = new RemoveWithValues();
            classFilter.setAttributeIndex("" + (insts.classIndex() + 1));
            classFilter.setModifyHeader(true);
            classFilter.setInvertSelection(true);
            classFilter.setNominalIndicesArr((int[]) pairs.elementAt(i));
            Instances tempInstances = new Instances(insts, 0);
            tempInstances.setClassIndex(-1);
            classFilter.setInputFormat(tempInstances);
            newInsts = Filter.useFilter(insts, classFilter);
            if (newInsts.numInstances() > 0) {
                newInsts.setClassIndex(insts.classIndex());
                m_Classifiers[i].buildClassifier(newInsts);
                m_ClassFilters[i] = classFilter;
                m_SumOfWeights[i] = newInsts.sumOfWeights();
            } else {
                m_Classifiers[i] = null;
                m_ClassFilters[i] = null;
            }
        }

        // Construct a two-class header version of the dataset.
        m_TwoClassDataset = new Instances(insts, 0);
        int classIndex = m_TwoClassDataset.classIndex();
        m_TwoClassDataset.setClassIndex(-1);
        m_TwoClassDataset.deleteAttributeAt(classIndex);
        FastVector classLabels = new FastVector();
        classLabels.addElement("class0");
        classLabels.addElement("class1");
        m_TwoClassDataset.insertAttributeAt(new Attribute("class", classLabels), classIndex);
        m_TwoClassDataset.setClassIndex(classIndex);
    } else {
        // Use error-correcting code style methods.
        Code code = null;
        switch (m_Method) {
        case METHOD_ERROR_EXHAUSTIVE:
            code = new ExhaustiveCode(numClassifiers);
            break;
        case METHOD_ERROR_RANDOM:
            code = new RandomCode(numClassifiers, (int) (numClassifiers * m_RandomWidthFactor), insts);
            break;
        case METHOD_1_AGAINST_ALL:
            code = new StandardCode(numClassifiers);
            break;
        default:
            throw new Exception("Unrecognized correction code type");
        }
        numClassifiers = code.size();
        m_Classifiers = Classifier.makeCopies(m_Classifier, numClassifiers);
        m_ClassFilters = new MakeIndicator[numClassifiers];
        for (int i = 0; i < m_Classifiers.length; i++) {
            m_ClassFilters[i] = new MakeIndicator();
            MakeIndicator classFilter = (MakeIndicator) m_ClassFilters[i];
            classFilter.setAttributeIndex("" + (insts.classIndex() + 1));
            classFilter.setValueIndices(code.getIndices(i));
            classFilter.setNumeric(false);
            classFilter.setInputFormat(insts);
            newInsts = Filter.useFilter(insts, m_ClassFilters[i]);
            m_Classifiers[i].buildClassifier(newInsts);
        }
    }
    m_ClassAttribute = insts.classAttribute();
}
From source file:task2.java
/**
 * Processes requests for both HTTP <code>GET</code> and <code>POST</code> methods.
 *
 * @param request servlet request
 * @param response servlet response
 * @throws ServletException if a servlet-specific error occurs
 * @throws IOException if an I/O error occurs
 */
protected void processRequest(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    response.setContentType("text/html;charset=UTF-8");
    try (PrintWriter out = response.getWriter()) {
        out.println("<!DOCTYPE html>");
        out.println("<html>");
        out.println("<head>");
        out.println("<title>Servlet selection</title>");
        out.println("</head>");
        out.println("<body>");

        // Load the CSV and save it as ARFF.
        CSVLoader loader = new CSVLoader();
        loader.setSource(new File("C:/Users//Raguvinoth/Desktop/5339.csv"));
        Instances data = loader.getDataSet();
        ArffSaver saver = new ArffSaver();
        saver.setInstances(data);
        // The original code had a stray escaped quote at the start of this path.
        saver.setFile(new File("C:/Users/Raguvinoth/Desktop/5339_converted.arff"));
        saver.writeBatch();

        BufferedReader reader = new BufferedReader(
                new FileReader("C://Users//Raguvinoth//Desktop//weka1//5339_nominal.arff"));
        Instances data1 = new Instances(reader);
        if (data1.classIndex() == -1)
            data1.setClassIndex(data1.numAttributes() - 14);

        // 1. meta-classifier
        // useClassifier(data);

        // 2. AttributeSelector
        try {
            AttributeSelection attsel = new AttributeSelection();
            GreedyStepwise search = new GreedyStepwise();
            CfsSubsetEval eval = new CfsSubsetEval();
            attsel.setEvaluator(eval);
            attsel.setSearch(search);
            // Presumably the nominal dataset (data1) was intended here; the
            // original passed the raw CSV data, whose class index is never set.
            attsel.SelectAttributes(data1);
            int[] indices = attsel.selectedAttributes();
            System.out.println("selected attribute indices:\n" + Utils.arrayToString(indices));

            System.out.println("\n 4. Linear-Regression on above selected attributes");
            long time1 = System.currentTimeMillis();
            long sec1 = time1 / 1000;

            BufferedReader reader1 = new BufferedReader(
                    new FileReader("C://Users//Raguvinoth//Desktop//weka1//5339_linear2.arff"));
            Instances data2 = new Instances(reader1);
            data2.setClassIndex(0);
            LinearRegression lr = new LinearRegression();
            lr.buildClassifier(data2);
            System.out.println(lr.toString());

            long time2 = System.currentTimeMillis();
            long sec2 = time2 / 1000;
            long timeTaken = sec2 - sec1;
            System.out.println("Total time taken for building the model: " + timeTaken + " seconds");

            // Guard against fewer than five selected attributes.
            for (int i = 0; i < Math.min(5, indices.length); i++) {
                out.println("<p>" + "selected attribute indices: " + indices[i] + "</p>");
            }
            out.println("<p>" + "\n 4. Linear-Regression on above selected attributes" + "</p>");
            out.println("<p>" + lr.toString() + "</p>");
            out.println("<p>" + "Total time taken for building the model: " + timeTaken + " seconds" + "</p>");
            out.println("</body>");
            out.println("</html>");
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}