List of usage examples for weka.core Instances numAttributes
public int numAttributes()
From source file:experimentalclassifier.ExperimentalClassifier.java
/** * @param args the command line arguments *//*from w ww. j a v a2s.c om*/ public static void main(String[] args) throws Exception { DataSource source = new DataSource("data/iris.csv"); Instances data = source.getDataSet(); if (data.classIndex() == -1) { data.setClassIndex(data.numAttributes() - 1); } data.randomize(new Random()); String[] options = weka.core.Utils.splitOptions("-P 30"); RemovePercentage remove = new RemovePercentage(); remove.setOptions(options); remove.setInputFormat(data); Instances train = Filter.useFilter(data, remove); remove.setInvertSelection(true); remove.setInputFormat(data); Instances test = Filter.useFilter(data, remove); Classifier classifier = new HardCodedClassifier(); classifier.buildClassifier(train);//Currently, this does nothing Evaluation eval = new Evaluation(train); eval.evaluateModel(classifier, test); System.out.println(eval.toSummaryString("\nResults\n======\n", false)); }
From source file:explorer.ChordalysisModelling.java
License:Open Source License
public void buildModelNoExplore(Instances dataset) { this.nbInstances = dataset.numInstances(); this.dataset = dataset; int[] variables = new int[dataset.numAttributes()]; int[] nbValuesForAttribute = new int[variables.length]; for (int i = 0; i < variables.length; i++) { variables[i] = i;//w ww . j a va 2s . c o m nbValuesForAttribute[i] = dataset.numDistinctValues(i); } this.lattice = new Lattice(dataset); this.entropyComputer = new EntropyComputer(dataset.numInstances(), this.lattice); this.scorer = new GraphActionScorerPValue(nbInstances, entropyComputer); this.bestModel = new DecomposableModel(variables, nbValuesForAttribute); this.pq = new MyPriorityQueue(variables.length, bestModel, scorer); for (int i = 0; i < variables.length; i++) { for (int j = i + 1; j < variables.length; j++) { pq.enableEdge(i, j); } } }
From source file:explorer.ChordalysisModelling.java
License:Open Source License
public void buildModelNoExplore(Instances dataset, ArffReader loader) throws IOException { this.dataset = dataset; int[] variables = new int[dataset.numAttributes()]; int[] nbValuesForAttribute = new int[variables.length]; for (int i = 0; i < variables.length; i++) { variables[i] = i;//from w w w . ja v a 2 s . co m nbValuesForAttribute[i] = dataset.numDistinctValues(i); } this.lattice = new Lattice(dataset, loader); this.nbInstances = this.lattice.getNbInstances(); this.entropyComputer = new EntropyComputer(nbInstances, this.lattice); this.scorer = new GraphActionScorerPValue(nbInstances, entropyComputer); this.bestModel = new DecomposableModel(variables, nbValuesForAttribute); this.pq = new MyPriorityQueue(variables.length, bestModel, scorer); for (int i = 0; i < variables.length; i++) { for (int j = i + 1; j < variables.length; j++) { pq.enableEdge(i, j); } } }
From source file:explorer.ChordalysisModellingMML.java
License:Open Source License
/**
 * Initialises the MML modelling state for the given data set without any
 * exploration: builds the lattice, the message-length computer and scorer,
 * the starting decomposable model, and a priority queue with every
 * pairwise edge enabled.
 *
 * @param dataset the instances to model
 */
public void buildModelNoExplore(Instances dataset) {
    this.nbInstances = dataset.numInstances();
    this.dataset = dataset;

    final int nbVariables = dataset.numAttributes();
    int[] variables = new int[nbVariables];
    int[] nbValuesForAttribute = new int[nbVariables];
    for (int v = 0; v < nbVariables; v++) {
        variables[v] = v;
        nbValuesForAttribute[v] = dataset.numDistinctValues(v);
    }

    this.lattice = new Lattice(dataset);
    this.computer = new MessageLengthFactorialComputer(dataset.numInstances(), this.lattice);
    this.scorer = new GraphActionScorerMML(nbInstances, computer);
    this.bestModel = new DecomposableModel(variables, nbValuesForAttribute);
    this.pq = new MyPriorityQueue(nbVariables, bestModel, scorer);

    // Enable every undirected edge (i, j) with i < j.
    for (int i = 0; i < nbVariables; i++) {
        for (int j = i + 1; j < nbVariables; j++) {
            pq.enableEdge(i, j);
        }
    }
}
From source file:explorer.ChordalysisModellingMML.java
License:Open Source License
public void buildModelNoExplore(Instances dataset, ArffReader loader) throws IOException { this.dataset = dataset; int[] variables = new int[dataset.numAttributes()]; int[] nbValuesForAttribute = new int[variables.length]; for (int i = 0; i < variables.length; i++) { variables[i] = i;//from w w w . j a va 2 s . c o m nbValuesForAttribute[i] = dataset.numDistinctValues(i); } this.lattice = new Lattice(dataset, loader); this.nbInstances = this.lattice.getNbInstances(); this.computer = new MessageLengthFactorialComputer(nbInstances, this.lattice); this.scorer = new GraphActionScorerMML(nbInstances, computer); this.bestModel = new DecomposableModel(variables, nbValuesForAttribute); this.pq = new MyPriorityQueue(variables.length, bestModel, scorer); for (int i = 0; i < variables.length; i++) { for (int j = i + 1; j < variables.length; j++) { pq.enableEdge(i, j); } } }
From source file:expshell.ExpShell.java
/** * @param args the command line arguments * @throws java.lang.Exception/*from w w w . j a v a 2s . c o m*/ */ public static void main(String[] args) throws Exception { String file = "C:\\Users\\YH Jonathan Kwok\\Documents\\NetBeansProjects\\ExpShell\\src\\expshell\\iris.csv"; DataSource source = new DataSource(file); Instances data = source.getDataSet(); if (data.classIndex() == -1) data.setClassIndex(data.numAttributes() - 1); //Randomize it data.randomize(new Random(1)); RemovePercentage rp = new RemovePercentage(); rp.setPercentage(70); rp.setInputFormat(data); Instances training = Filter.useFilter(data, rp); rp.setInvertSelection(true); rp.setInputFormat(data); Instances test = Filter.useFilter(data, rp); //standardize the data Standardize filter = new Standardize(); filter.setInputFormat(training); Instances newTest = Filter.useFilter(test, filter); Instances newTraining = Filter.useFilter(training, filter); //Part 5 - Now it's a knn Classifier knn = new NeuralClassifier(); knn.buildClassifier(newTraining); Evaluation eval = new Evaluation(newTraining); eval.evaluateModel(knn, newTest); System.out.println(eval.toSummaryString("***** Overall results: *****", false)); }
From source file:expshell.NeuralClassifier.java
@Override public void buildClassifier(Instances i) throws Exception { List<Integer> numNodes = new ArrayList<Integer>(); //numNodes.add(5); //numNodes.add(6); numNodes.add(i.numClasses());//from w w w. j a va 2s. co m nn = new NeuralNetwork(numNodes.size(), i.numAttributes() - 1, numNodes); for (int j = 0; j < i.numInstances(); j++) { System.out.println(nn.run(i.instance(j))); } }
From source file:eyetracker.MLPProcessor.java
public MLPProcessor() { try {// ww w . j a v a 2s . c om FileReader fr = new FileReader("trainingData.arff"); Instances training = new Instances(fr); training.setClassIndex(training.numAttributes() - 1); mlp = new MultilayerPerceptron(); mlp.setOptions(Utils.splitOptions("-L 0.3 -M 0.2 -N 500 -V 0 -S 0 -E 20 -H 5")); mlp.buildClassifier(training); FileReader tr = new FileReader("trainingData.arff"); Instances testdata = new Instances(tr); inst = testdata; testdata.setClassIndex(testdata.numAttributes() - 1); Evaluation eval = new Evaluation(training); eval.evaluateModel(mlp, testdata); System.out.println(eval.toSummaryString("\nResults\n*******\n", false)); tr.close(); fr.close(); } catch (FileNotFoundException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } catch (Exception e) { e.printStackTrace(); } }
From source file:facebookpostpuller.PostModel.java
public static void convertToArff(File file) throws Exception { FastVector atts;/*from www . j a v a2 s . c om*/ FastVector attVals; Instances data; double[] vals; file = new File(file + ".arff"); atts = new FastVector(); atts.addElement(new Attribute(("name"), (FastVector) null)); // 5/27/2014 atts.addElement(new Attribute(("message"), (FastVector) null)); attVals = new FastVector(); attVals.addElement("13-17"); attVals.addElement("18-24"); attVals.addElement("25-34"); attVals.addElement("35-44"); attVals.addElement("45-54"); atts.addElement(new Attribute("age-group", attVals)); data = new Instances("predict_age", atts, 0); Iterator it = posts.entrySet().iterator(); while (it.hasNext()) { Map.Entry pairs = (Map.Entry) it.next(); vals = new double[data.numAttributes()]; User user = (User) pairs.getValue(); String name = user.getName(); // 5/27/2014 String message = ((Post) (pairs.getKey())).getMessage(); Preprocess pre = new Preprocess(); message = pre.emoticons(message); message = pre.emoji(message); message = pre.url(message); //StringFilter filter = new StringFilter(message); vals[0] = data.attribute(0).addStringValue(name); // 5/27/2014 vals[1] = data.attribute(1).addStringValue(message); int age = calculateAge(user.getBirthdayAsDate()); if (age >= 13 && age <= 17) { vals[2] = attVals.indexOf("13-17"); } else if (age >= 18 && age <= 24) { vals[2] = attVals.indexOf("18-24"); } else if (age >= 25 && age <= 34) { vals[2] = attVals.indexOf("25-34"); } else if (age >= 35 && age <= 44) { vals[2] = attVals.indexOf("35-44"); } else if (age >= 45) { // Modified 6/11/2014 vals[2] = attVals.indexOf("45-54"); } data.add(new Instance(1.0, vals)); it.remove(); } ArffSaver saver = new ArffSaver(); saver.setInstances(data); saver.setFile(file); saver.writeBatch(); }
From source file:facebookpostpuller.PostModelBACKUP.java
public static void convertToArff(File file) throws Exception { FastVector atts;/*from www.ja va 2s. c o m*/ FastVector attVals; Instances data; double[] vals; file = new File(file + ".arff"); atts = new FastVector(); atts.addElement(new Attribute(("name"), (FastVector) null)); // 5/27/2014 atts.addElement(new Attribute(("message"), (FastVector) null)); attVals = new FastVector(); attVals.addElement("13-17"); attVals.addElement("18-24"); attVals.addElement("25-34"); attVals.addElement("35-44"); attVals.addElement("45-54"); atts.addElement(new Attribute("age-group", attVals)); data = new Instances("predict_age", atts, 0); Iterator it = posts.entrySet().iterator(); while (it.hasNext()) { Map.Entry pairs = (Map.Entry) it.next(); vals = new double[data.numAttributes()]; User user = (User) pairs.getValue(); String name = user.getName(); // 5/27/2014 String message = ((Post) (pairs.getKey())).getMessage(); //StringFilter filter = new StringFilter(message); vals[0] = data.attribute(0).addStringValue(name); // 5/27/2014 vals[1] = data.attribute(1).addStringValue(message); int age = calculateAge(user.getBirthdayAsDate()); if (age >= 13 && age <= 17) { vals[2] = attVals.indexOf("13-17"); } else if (age >= 18 && age <= 24) { vals[2] = attVals.indexOf("18-24"); } else if (age >= 25 && age <= 34) { vals[2] = attVals.indexOf("25-34"); } else if (age >= 35 && age <= 44) { vals[2] = attVals.indexOf("35-44"); } else if (age >= 45 && age <= 54) { vals[2] = attVals.indexOf("45-54"); } data.add(new Instance(1.0, vals)); it.remove(); } ArffSaver saver = new ArffSaver(); saver.setInstances(data); saver.setFile(file); saver.writeBatch(); }