List of usage examples for weka.core Instances setClassIndex
public void setClassIndex(int classIndex)
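Before the per-project examples below, a minimal, self-contained sketch of the usual call pattern. This is not taken from any of the listed projects; the file name "iris.arff" is only a placeholder, and the last attribute is assumed to be the class:

import java.io.BufferedReader;
import java.io.FileReader;

import weka.core.Instances;

public class SetClassIndexSketch {
    public static void main(String[] args) throws Exception {
        // load a dataset from an ARFF file (path is illustrative)
        BufferedReader reader = new BufferedReader(new FileReader("iris.arff"));
        Instances data = new Instances(reader);
        reader.close();

        // ARFF does not record which attribute is the class, so set it
        // explicitly if it has not been set yet; here the last attribute
        // is assumed to be the class
        if (data.classIndex() == -1) {
            data.setClassIndex(data.numAttributes() - 1);
        }

        System.out.println("Class attribute: " + data.classAttribute().name());
    }
}

Passing -1 to setClassIndex marks the dataset as having no class attribute, which the MetaMultilabelGenerator example below relies on.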
From source file: mlpoc.MLPOC.java
public static Evaluation crossValidate(String filename) {
    Evaluation eval = null;
    try {
        // loads data and set class index
        BufferedReader br = new BufferedReader(new FileReader(filename));
        Instances data = new Instances(br);
        br.close();
        /*File csv = new File(filename);
        CSVLoader loader = new CSVLoader();
        loader.setSource(csv);
        Instances data = loader.getDataSet();*/
        data.setClassIndex(data.numAttributes() - 1);

        // classifier
        String[] tmpOptions;
        String classname = "weka.classifiers.trees.J48 -C 0.25";
        tmpOptions = classname.split(" ");
        classname = "weka.classifiers.trees.J48";
        tmpOptions[0] = "";
        Classifier cls = (Classifier) Utils.forName(Classifier.class, classname, tmpOptions);

        // other options
        int seed = 2;
        int folds = 10;

        // randomize data
        Random rand = new Random(seed);
        Instances randData = new Instances(data);
        randData.randomize(rand);
        if (randData.classAttribute().isNominal())
            randData.stratify(folds);

        // perform cross-validation
        eval = new Evaluation(randData);
        for (int n = 0; n < folds; n++) {
            Instances train = randData.trainCV(folds, n);
            Instances test = randData.testCV(folds, n);
            // the above code is used by the StratifiedRemoveFolds filter, the
            // code below by the Explorer/Experimenter:
            // Instances train = randData.trainCV(folds, n, rand);

            // build and evaluate classifier
            Classifier clsCopy = Classifier.makeCopy(cls);
            clsCopy.buildClassifier(train);
            eval.evaluateModel(clsCopy, test);
        }

        // output evaluation
        System.out.println();
        System.out.println("=== Setup ===");
        System.out.println("Classifier: " + cls.getClass().getName() + " " + Utils.joinOptions(cls.getOptions()));
        System.out.println("Dataset: " + data.relationName());
        System.out.println("Folds: " + folds);
        System.out.println("Seed: " + seed);
        System.out.println();
        System.out.println(eval.toSummaryString("Summary for testing", true));
        System.out.println("Correctly Classified Instances: " + eval.correct());
        System.out.println("Percentage of Correctly Classified Instances: " + eval.pctCorrect());
        System.out.println("InCorrectly Classified Instances: " + eval.incorrect());
        System.out.println("Percentage of InCorrectly Classified Instances: " + eval.pctIncorrect());
    } catch (Exception ex) {
        System.err.println(ex.getMessage());
    }
    return eval;
}
From source file: moa.classifiers.novelClass.AbstractNovelClassClassifier.java
License: Apache License
final public static Instances augmentInstances(Instances datum) {
    ArrayList<Attribute> attInfo = new ArrayList<>(datum.numAttributes());
    for (int aIdx = 0; aIdx < datum.numAttributes(); aIdx++) {
        Attribute a = datum.attribute(aIdx).copy(datum.attribute(aIdx).name());
        if ((aIdx == datum.classIndex()) && (a.indexOfValue(NOVEL_LABEL_STR) < 0)) { // only if we don't already have these
            List<String> values = new ArrayList<>(a.numValues() + 2);
            for (int i = 0; i < a.numValues(); ++i) {
                values.add(a.value(i));
            }
            values.add(OUTLIER_LABEL_STR);
            values.add(NOVEL_LABEL_STR);
            a = new Attribute(a.name(), values, a.getMetadata());
        }
        attInfo.add(a);
    }
    String relationshipName = NOVEL_CLASS_INSTANCE_RELATIONSHIP_TYPE + "-" + datum.relationName();
    Instances ret = new Instances(relationshipName, attInfo, 1);
    ret.setClassIndex(datum.classIndex());
    return ret;
}
From source file: moa.classifiers.rules.GeRules.java
License: Open Source License
public static void main(String[] args) throws Exception {

    //ArffFileStream arffFileStream = new ArffFileStream("resources/UCI_KDD/nominal/cmc.arff", -1);

    // read the ARFF file the WEKA way
    DataSource source = new DataSource("data/cmc.arff");

    // stream generator
    RandomTreeGenerator treeGenerator = new RandomTreeGenerator();
    treeGenerator.numClassesOption.setValue(5);
    treeGenerator.numNumericsOption.setValue(0);
    treeGenerator.prepareForUse();

    // GeRules classifier
    GeRules gErules = new GeRules();
    gErules.prepareForUse();

    // load data into an instances set
    Instances data = source.getDataSet();

    // setting class attribute if the data format does not provide this information
    // For example, the XRFF format saves the class attribute information as well
    if (data.classIndex() == -1)
        data.setClassIndex(data.numAttributes() - 1);

    // train the GeRules classifier on every instance
    //hoeffdingRules.learnRules(Collections.list(data.enumerateInstances()));
    for (Instance instance : Collections.list(data.enumerateInstances())) {
        gErules.trainOnInstanceImpl(instance);
        gErules.correctlyClassifies(instance);
    }

    Instance anInstance = Collections.list(data.enumerateInstances()).get(10);
    System.out.println(anInstance);

    for (Rule aRule : gErules.RulesCoveredInstance(anInstance)) {
        System.out.println(aRule.printRule());
    }

    for (Rule aRule : gErules.rulesList) {
        System.out.println(aRule.printRule());
    }
}
From source file: moa.clusterers.AmidstClusteringAlgorithm.java
License: Apache License
/**
 * Returns the data set.
 *
 * @param numatt  an {@code int} that represents the number of attributes.
 * @param numclus an {@code int} that represents the number of clusters.
 * @return {@link Instances} object that represents the data set.
 */
private Instances getDataset(int numatt, int numclus) {
    FastVector attributes = new FastVector();
    for (int i = 0; i < numatt; i++) {
        attributes.addElement(new Attribute("att" + (i + 1)));
    }
    if (numclus > 0) {
        FastVector classLabels = new FastVector();
        for (int i = 0; i < numclus; i++) {
            classLabels.addElement("class" + (i + 1));
        }
        attributes.addElement(new Attribute("class", classLabels));
    }
    Instances myDataset = new Instances("horizon", attributes, 0);
    if (numclus > 0) {
        myDataset.setClassIndex(myDataset.numAttributes() - 1);
    }
    return myDataset;
}
From source file: moa.reduction.bayes.IncrInfoThAttributeEval.java
License: Open Source License
/**
 * Updates an information gain attribute evaluator. Discretizes all
 * attributes that are numeric.
 *
 * @param inst the training instance used to update the evaluator
 * @throws Exception if the evaluator has not been generated successfully
 */
public void updateEvaluator(Instance inst) throws Exception {
    if (counts == null) {
        // can the evaluator handle the data?
        weka.core.Instance winst = new weka.core.DenseInstance(inst.weight(), inst.toDoubleArray());
        ArrayList<Attribute> list = new ArrayList<Attribute>();
        //ArrayList<Attribute> list = Collections.list(winst.enumerateAttributes());
        //list.add(winst.classAttribute());
        for (int i = 0; i < inst.numAttributes(); i++)
            list.add(new Attribute(inst.attribute(i).name(), i));
        weka.core.Instances data = new weka.core.Instances("single", list, 1);
        data.setClassIndex(inst.classIndex());
        data.add(winst);
        //getCapabilities().testWithFail(data);
        classIndex = inst.classIndex();
        counts = (HashMap<Key, Float>[]) new HashMap[inst.numAttributes()];
        for (int i = 0; i < counts.length; i++)
            counts[i] = new HashMap<Key, Float>();
    }
    for (int i = 0; i < inst.numValues(); i++) {
        if (inst.index(i) != classIndex) {
            Key key = new Key((float) inst.valueSparse(i), (float) inst.classValue());
            Float cval = (float) (counts[inst.index(i)].getOrDefault(key, 0.0f) + inst.weight());
            counts[inst.index(i)].put(key, cval);
        }
    }
    updated = true;
}
From source file: moa.reduction.core.ReductionClassifier.java
License: Open Source License
private Instance performFS(Instance rinst) {
    // Feature selection process performed beforehand
    weka.core.Instance winst = new weka.core.DenseInstance(rinst.weight(), rinst.toDoubleArray());
    if (fselector != null) {
        if (fselector.isUpdated() && totalCount % winSizeOption.getValue() == 0) {
            fselector.applySelection();
            selector = new AttributeSelection();
            Ranker ranker = new Ranker();
            ranker.setNumToSelect(Math.min(numFeaturesOption.getValue(), winst.numAttributes() - 1));
            selector.setEvaluator((ASEvaluation) fselector);
            selector.setSearch(ranker);
            ArrayList<Attribute> list = new ArrayList<Attribute>();
            //ArrayList<Attribute> list = Collections.list(winst.enumerateAttributes());
            //list.add(winst.classAttribute());
            for (int i = 0; i < rinst.numAttributes(); i++)
                list.add(new Attribute(rinst.attribute(i).name(), i));
            weka.core.Instances single = new weka.core.Instances("single", list, 1);
            single.setClassIndex(rinst.classIndex());
            single.add(winst);
            try {
                selector.SelectAttributes(single);
                System.out.println("Selected features: " + selector.toResultsString());
                selectedFeatures.clear();
                for (int att : selector.selectedAttributes())
                    selectedFeatures.add(att);
                WekaToSamoaInstanceConverter convWS = new WekaToSamoaInstanceConverter();
                return convWS.samoaInstance(selector.reduceDimensionality(winst));
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }
    return rinst;
}
From source file: moa.streams.generators.multilabel.MetaMultilabelGenerator.java
License: Open Source License
/**
 * GenerateMultilabelHeader.
 *
 * @param si single-label Instances
 */
protected MultilabelInstancesHeader generateMultilabelHeader(Instances si) {
    Instances mi = new Instances(si, 0, 0);
    mi.setClassIndex(-1);
    mi.deleteAttributeAt(mi.numAttributes() - 1);
    FastVector bfv = new FastVector();
    bfv.addElement("0");
    bfv.addElement("1");
    for (int i = 0; i < this.m_L; i++) {
        mi.insertAttributeAt(new Attribute("class" + i, bfv), i);
    }
    this.multilabelStreamTemplate = mi;
    this.multilabelStreamTemplate.setRelationName("SYN_Z" + this.labelCardinalityOption.getValue() + "L"
            + this.m_L + "X" + m_A + "S" + metaRandomSeedOption.getValue() + ": -C " + this.m_L);
    this.multilabelStreamTemplate.setClassIndex(this.m_L);
    return new MultilabelInstancesHeader(multilabelStreamTemplate, m_L);
}
From source file: moa.tud.ke.patching.AdaptivePatchingAdwin.java
/**
 * Modify the instances and insert into them the class which the base
 * classifier had them classified as.
 *
 * @return
 */
private Instances addBaseClassToInstances(Instances origInstances) {
    Instances moddedInstances = new Instances(origInstances); // deep copy
    double predictedClass = 0;

    // create new attribute
    try {
        moddedInstances = copyClassAttribute(moddedInstances, "baseLabel", 1); // what is attribute 1 here becomes index 0
        moddedInstances.setClassIndex(origInstances.classIndex() + 1);
    } catch (Exception e) {
        System.err.println("Error while copying class Attribute for baseLabel");
        System.err.println(e.getMessage());
    }

    Iterator inst = origInstances.iterator();
    int index = 0;
    while (inst.hasNext()) {
        weka.core.Instance a = (weka.core.Instance) inst.next();
        weka.core.Instance target = moddedInstances.instance(index);

        predictedClass = 0;
        try {
            predictedClass = this.baseClassifier.classifyInstance(a); // note: this must stay the "base" classifier!
        } catch (Exception e) {
            System.err.println("Error while classifying instance in addBaseClassToInstances");
            System.err.println(a);
            System.err.println(e.getMessage());
        }

        target.setValue(0, predictedClass); // index 0 is attribute 1
        index++;
    }

    return moddedInstances;
}
From source file: moa.tud.ke.patching.AdaptivePatchingAdwin.java
public static Instances changeClassToWrongRight(Instances instances) throws Exception {

    int whichAttribute = instances.classIndex();
    // System.out.println(instances.classAttribute().toString());

    Add filter = new Add();
    //filter.setAttributeIndex("" + (whichAttribute + 1));
    filter.setAttributeName("newClass");
    String newNominalLabels = "wrong,right";
    filter.setNominalLabels(newNominalLabels);
    filter.setInputFormat(instances);
    instances = Filter.useFilter(instances, filter);

    Iterator inst = instances.iterator();
    int index = 0;
    while (inst.hasNext()) {
        weka.core.Instance a = (weka.core.Instance) inst.next();
        a.setValue((whichAttribute + 1), a.classValue());
        index++;
    }

    Remove rmfilter = new Remove();
    rmfilter.setAttributeIndices("" + (instances.classIndex() + 1));
    rmfilter.setInputFormat(instances);
    instances = Filter.useFilter(instances, rmfilter);

    instances.setClassIndex(instances.numAttributes() - 1);
    // System.out.println(instances.classAttribute().toString());

    return instances;
}
From source file: model.clasification.klasifikacijaIstanca.java
public static void main(String[] args) throws Exception {

    // load data
    DataSource loader = new DataSource(fileName);
    Instances data = loader.getDataSet();
    data.setClassIndex(data.numAttributes() - 1);

    // Create the Naive Bayes classifier
    NaiveBayes bayesClsf = new NaiveBayes();
    bayesClsf.buildClassifier(data);

    // output generated model
    // System.out.println(bayesClsf);

    // Test the model with the original set
    Evaluation eval = new Evaluation(data);
    eval.evaluateModel(bayesClsf, data);

    // Print the result as in the Weka Explorer
    String strSummary = eval.toSummaryString();
    // System.out.println("=== Evaluation on training set ===");
    // System.out.println("=== Summary ===");
    // System.out.println(strSummary);

    // Get the confusion matrix
    System.out.println(eval.toMatrixString());
}