List of usage examples for weka.core Instance setClassValue
public void setClassValue(double value);
public void setClassValue(String value);
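Before the source-file examples below, here is a minimal, self-contained sketch showing both overloads in isolation. It assumes the Weka 3.7+ API (weka.core.DenseInstance); the attribute names ("temperature", "play") and the dataset name are made up for the demo. Note that setClassValue only works once the instance has a dataset with a class index set.

import java.util.ArrayList;
import java.util.Arrays;

import weka.core.Attribute;
import weka.core.DenseInstance;
import weka.core.Instance;
import weka.core.Instances;

public class SetClassValueDemo {
    public static void main(String[] args) {
        // One numeric attribute plus a nominal class attribute (names are hypothetical).
        Attribute temperature = new Attribute("temperature");
        Attribute playClass = new Attribute("play", new ArrayList<>(Arrays.asList("yes", "no")));
        ArrayList<Attribute> attributes = new ArrayList<>(Arrays.asList(temperature, playClass));

        Instances dataset = new Instances("demo", attributes, 0);
        dataset.setClassIndex(dataset.numAttributes() - 1);

        Instance inst = new DenseInstance(dataset.numAttributes());
        inst.setDataset(dataset);            // required so the instance knows its class attribute
        inst.setValue(temperature, 21.5);

        inst.setClassValue("yes");           // String overload: sets the label of a nominal class
        inst.setClassValue(0);               // double overload: 0 is the index of "yes" here

        dataset.add(inst);
        System.out.println(dataset);
    }
}

Calling either overload before setDataset(...) (or without a class index) typically fails with an unassigned-dataset/unassigned-class exception, which is why most of the examples below call setDataset or construct the instance from an Instances object first.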
From source file:adams.flow.transformer.WekaClassifying.java
License:Open Source License
/**
 * Processes the instance and generates the output token.
 *
 * @param inst		the instance to process
 * @return		the generated output token (e.g., container)
 * @throws Exception	if processing fails
 */
@Override
protected Token processInstance(Instance inst) throws Exception {
    Token result;
    WekaPredictionContainer cont;
    List<String> rangeChecks;
    String rangeCheck;
    AbstainingClassifier abstain;
    double classification;
    double[] distribution;

    synchronized (m_Model) {
        // does the classifier support range checks?
        rangeCheck = null;
        if (m_Model instanceof RangeCheckClassifier) {
            rangeChecks = ((RangeCheckClassifier) m_Model).checkRangeForInstance(inst);
            if (rangeChecks.size() > 0)
                rangeCheck = Utils.flatten(rangeChecks, "\n");
        }
        if (inst.classAttribute().isNumeric()) {
            classification = m_Model.classifyInstance(inst);
            distribution = new double[] { classification };
        } else {
            distribution = m_Model.distributionForInstance(inst);
            classification = StatUtils.maxIndex(distribution);
            if (distribution[(int) Math.round(classification)] == 0)
                classification = weka.core.Utils.missingValue();
        }
        cont = new WekaPredictionContainer(inst, classification, distribution, rangeCheck);
        // abstaining classifier?
        if (m_Model instanceof AbstainingClassifier) {
            abstain = (AbstainingClassifier) m_Model;
            if (abstain.canAbstain()) {
                if (inst.classAttribute().isNumeric()) {
                    classification = abstain.getAbstentionClassification(inst);
                    distribution = new double[] { classification };
                } else {
                    distribution = abstain.getAbstentionDistribution(inst);
                    classification = StatUtils.maxIndex(distribution);
                    if (distribution[(int) Math.round(classification)] == 0)
                        classification = weka.core.Utils.missingValue();
                }
                cont.setValue(WekaPredictionContainer.VALUE_ABSTENTION_CLASSIFICATION, classification);
                if (inst.classAttribute().isNominal() && !weka.core.Utils.isMissingValue(classification))
                    cont.setValue(WekaPredictionContainer.VALUE_ABSTENTION_CLASSIFICATION_LABEL,
                            inst.classAttribute().value((int) Math.round(classification)));
                cont.setValue(WekaPredictionContainer.VALUE_ABSTENTION_DISTRIBUTION, distribution);
            }
        }
    }

    if (m_OutputInstance) {
        inst = (Instance) ((Instance) cont.getValue(WekaPredictionContainer.VALUE_INSTANCE)).copy();
        inst.setClassValue((Double) cont.getValue(WekaPredictionContainer.VALUE_CLASSIFICATION));
        result = new Token(inst);
    } else {
        result = new Token((WekaPredictionContainer) cont);
    }

    return result;
}
From source file:ant.Game.java
private char keyFromWeka(int classifier) {
    char key = W;
    Instance i = new Instance(m_ant.getNumAttributes() + 1);
    i.setDataset(m_data);
    for (int j = 0; j < i.numAttributes() - 1; ++j) {
        i.setValue(new Attribute(Integer.toString(j), j), see(j));
    }
    i.setClassValue(1);
    double p = -1;
    try {
        p = m_wrapper10x1.classifyInstance(i);
    } catch (Exception ex) {
        Logger.getLogger(Game.class.getName()).log(Level.SEVERE, null, ex);
    }
    switch ((int) p) {
    case 0:
        key = W;
        break;
    case 1:
        key = A;
        break;
    case 2:
        key = S;
        break;
    case 3:
        key = D;
        break;
    default:
        System.err.println("Unexpected classifier output!");
        break;
    }
    return key;
}
From source file:at.tuflowgraphy.semanticapps.semdroid.DalvikBaseAnalyzer.java
License:Apache License
public Instances getWekaInstances() {
    Instances instances = null;
    List<DActivationPatternPackage> dActivationPatternPackages = mAnalysisChain.getFinalLayers().get(0)
            .getResultAnalysisPackage().getActivationPatternPackages();
    int counter = 0;
    for (DActivationPatternPackage dActivationPatternPackage : dActivationPatternPackages) {
        if (counter > 0) {
            throw new RuntimeException("More than one DActivationPatternPackage found!");
        }
        counter++;
        DActivationPattern activationPatternTemp = dActivationPatternPackage.getActivationPatterns().get(0);
        FastVector fvWekaAttributes = new FastVector(activationPatternTemp.getRawPattern().length);
        for (int j = 0; j < activationPatternTemp.getRawPattern().length; j++) {
            Attribute attribute = new Attribute(j + "");
            fvWekaAttributes.addElement(attribute);
        }
        Set<String> labelSet = getLabelSet(dActivationPatternPackage);
        FastVector classValues = new FastVector(labelSet.size());
        for (String label : labelSet) {
            classValues.addElement(label);
        }
        Attribute classAttribute = new Attribute("Class", classValues);
        fvWekaAttributes.addElement(classAttribute);
        instances = new Instances(mAnalysisConfig.getApplicationAnalysisName(), fvWekaAttributes,
                dActivationPatternPackage.getActivationPatterns().size());
        instances.setClassIndex(instances.numAttributes() - 1);
        for (int i = 0; i < dActivationPatternPackage.getActivationPatterns().size(); i++) {
            DActivationPattern activationPattern = dActivationPatternPackage.getActivationPatterns().get(i);
            Instance instance = new Instance(fvWekaAttributes.size());
            for (int j = 0; j < activationPattern.getRawPattern().length; j++) {
                instance.setValue((Attribute) fvWekaAttributes.elementAt(j),
                        activationPattern.getRawPattern()[j]);
            }
            instance.setDataset(instances);
            DSimpleStringMetaData metadata = (DSimpleStringMetaData) activationPattern.getMetaData();
            List<String> keys = metadata.getMetaDataKeys();
            for (int k = 0; k < keys.size(); k++) {
                if (keys.get(k).equals(DalvikInputPlugin.TAG_LABEL)) {
                    String label = metadata.getMetaDataEntries().get(k);
                    instance.setClassValue(label);
                    break;
                }
            }
            instances.add(instance);
        }
    }
    return instances;
}
From source file:at.tuflowgraphy.semanticapps.semdroid.utils.ArffHelper.java
License:Apache License
public Instances getWekaInstances(AnalysisChain analysisChain, String name) {
    Instances instances = null;
    List<DActivationPatternPackage> dActivationPatternPackages = analysisChain.getFinalLayers().get(0)
            .getResultAnalysisPackage().getActivationPatternPackages();
    int counter = 0;
    for (DActivationPatternPackage dActivationPatternPackage : dActivationPatternPackages) {
        if (counter > 0) {
            // String resultFileName = arffFile.getName();
            // String newName = resultFileName.split("_")[0];
            // int index = resultFileName.indexOf("_");
            // newName += "-MISSING-" + counter + "-" + resultFileName.substring(index);
            // arffFileToWriteTo = new File(arffFile.getParentFile(), newName);
            System.err.println("ERROR: Multiple activation pattern packages found! Should not happen...");
        }
        counter++;
        DActivationPattern activationPatternTemp = dActivationPatternPackage.getActivationPatterns().get(0);
        FastVector fvWekaAttributes = new FastVector(activationPatternTemp.getRawPattern().length);
        for (int j = 0; j < activationPatternTemp.getRawPattern().length; j++) {
            Attribute attribute = new Attribute(j + "");
            fvWekaAttributes.addElement(attribute);
        }
        Set<String> labelSet = getLabelSet(dActivationPatternPackage);
        FastVector classValues = new FastVector(labelSet.size());
        for (String label : labelSet) {
            classValues.addElement(label);
        }
        Attribute classAttribute = new Attribute("Class", classValues);
        fvWekaAttributes.addElement(classAttribute);
        instances = new Instances(name, fvWekaAttributes,
                dActivationPatternPackage.getActivationPatterns().size());
        instances.setClassIndex(instances.numAttributes() - 1);
        for (int i = 0; i < dActivationPatternPackage.getActivationPatterns().size(); i++) {
            DActivationPattern activationPattern = dActivationPatternPackage.getActivationPatterns().get(i);
            Instance instance = new Instance(fvWekaAttributes.size());
            for (int j = 0; j < activationPattern.getRawPattern().length; j++) {
                instance.setValue((Attribute) fvWekaAttributes.elementAt(j),
                        activationPattern.getRawPattern()[j]);
            }
            instance.setDataset(instances);
            DSimpleStringMetaData metadata = (DSimpleStringMetaData) activationPattern.getMetaData();
            List<String> keys = metadata.getMetaDataKeys();
            for (int k = 0; k < keys.size(); k++) {
                if (keys.get(k).equals(DalvikInputPlugin.TAG_LABEL)) {
                    String label = metadata.getMetaDataEntries().get(k);
                    // TODO: dynamically add new labels to instances so that the getLabelSet for-loop is not required
                    // System.out.println(label);
                    // if (!labelSet.contains(label)) {
                    //     labelSet.add(label);
                    //     // classValues.addElement(label);
                    //     classAttribute.addStringValue(label);
                    //     instances.attribute(instances.classIndex()).addValue(label);
                    //     System.out.println("ADDED " + label);
                    // }
                    instance.setClassValue(label); // TODO: only first class value used
                    break;
                }
            }
            instances.add(instance);
        }
    }
    return instances;
}
From source file:cn.ict.zyq.bestConf.COMT2.Branch2.java
License:Open Source License
public Instance maxPoint(Instances dataset) throws Exception {
    Instance max = new DenseInstance(dataset.numAttributes());
    max.setDataset(dataset);

    double[] combinedCoefs = null;
    int len = 0;
    for (PreConstructedLinearModel model : linearModelList) {
        // initialization
        if (combinedCoefs == null) {
            len = model.coefficients().length;
            combinedCoefs = new double[len];
            for (int i = 0; i < len; i++)
                combinedCoefs[i] = 0;
        }
        for (int i = 0; i < len; i++)
            combinedCoefs[i] += model.coefficients()[i];
    }

    // the max value is obtained at the ends of a range
    for (Map.Entry<Attribute, Range<Double>> ent : rangeMap.entrySet()) {
        int attIdx = ent.getKey().index();
        if (combinedCoefs[attIdx] > 0) {
            // use the upper bound
            if (ent.getValue().hasUpperBound())
                max.setValue(attIdx, ent.getValue().upperEndpoint());
        } else if (combinedCoefs[attIdx] < 0) {
            // use the lower bound
            if (ent.getValue().hasLowerBound())
                max.setValue(attIdx, ent.getValue().lowerEndpoint());
        }
    }

    // now we set the predicted value
    double y = 0;
    for (PreConstructedLinearModel model : linearModelList) {
        y += model.classifyInstance(max);
    }
    y /= linearModelList.size();
    max.setClassValue(y);
    return max;
}
From source file:com.dhamacher.sentimentanalysis4tweets.preprocessing.TweetClassifier.java
License:Apache License
/**
 * Updates the model using the given training message.
 *
 * @param message		the message content
 * @param classValue	the class label
 */
public void updateData(String message, String classValue) {
    Instance instance = makeInstance(message, m_Data);
    instance.setClassValue(classValue);
    m_Data.add(instance);
    m_UpToDate = false;
}
From source file:com.yahoo.labs.samoa.instances.SamoaToWekaInstanceConverter.java
License:Apache License
/**
 * Converts a SAMOA instance into a Weka instance.
 *
 * @param inst the instance to convert
 * @return the weka.core.Instance
 */
public weka.core.Instance wekaInstance(Instance inst) {
    weka.core.Instance wekaInstance;
    if (((InstanceImpl) inst).instanceData instanceof SparseInstanceData) {
        InstanceImpl instance = (InstanceImpl) inst;
        SparseInstanceData sparseInstanceData = (SparseInstanceData) instance.instanceData;
        wekaInstance = new weka.core.SparseInstance(instance.weight(), sparseInstanceData.getAttributeValues(),
                sparseInstanceData.getIndexValues(), sparseInstanceData.getNumberAttributes());
        /*if (this.wekaInstanceInformation == null) {
            this.wekaInstanceInformation = this.wekaInstancesInformation(inst.dataset());
        }
        wekaInstance.insertAttributeAt(inst.classIndex());
        wekaInstance.setDataset(wekaInstanceInformation);
        wekaInstance.setClassValue(inst.classValue());
        //wekaInstance.setValueSparse(wekaInstance.numAttributes(), inst.classValue());*/
    } else {
        Instance instance = inst;
        wekaInstance = new weka.core.DenseInstance(instance.weight(), instance.toDoubleArray());
        /*if (this.wekaInstanceInformation == null) {
            this.wekaInstanceInformation = this.wekaInstancesInformation(inst.dataset());
        }
        // We assume that the class is the last attribute. We should handle the case when it is not.
        wekaInstance.insertAttributeAt(inst.classIndex());
        wekaInstance.setDataset(wekaInstanceInformation);
        wekaInstance.setClassValue(inst.classValue());*/
    }
    if (this.wekaInstanceInformation == null) {
        this.wekaInstanceInformation = this.wekaInstancesInformation(inst.dataset());
    }
    //wekaInstance.insertAttributeAt(inst.classIndex());
    wekaInstance.setDataset(wekaInstanceInformation);
    if (inst.numOutputAttributes() == 1) {
        wekaInstance.setClassValue(inst.classValue());
    }
    return wekaInstance;
}
From source file:cyber009.udal.functions.LinearFunction.java
public void syntacticLabelFunction(Instance set) {
    double sum = 0.0D;
    for (int n = 0; n < set.numAttributes() - 1; n++) {
        sum += set.value(n) * coefficients[n];
    }
    if (sum < 0.0D) {
        set.setClassValue("1");
    } else {
        set.setClassValue("0");
    }
}
From source file:cyber009.udal.functions.StatisticalAnalysis.java
/**
 * Computes the conditional entropy over the unlabeled data, given the candidate
 * instance labeled with the target class.
 *
 * @param classifier
 * @param trainingDataSet
 * @param unLabelDataSets
 * @param unLabelSet
 * @param classTarget
 * @return the conditional entropy
 */
public double conditionalEntropy(Classifier classifier, Instances trainingDataSet, Instances unLabelDataSets,
        Instance unLabelSet, double classTarget) {
    double cEnt = 0.0D;
    double entropy = 0.0D;
    unLabelSet.setClassValue(classTarget);
    trainingDataSet.add(trainingDataSet.numInstances(), unLabelSet);
    AttributeStats classStats = trainingDataSet.attributeStats(trainingDataSet.classIndex());
    for (Instance set : unLabelDataSets) {
        if (instanceCMPWithoutClass(set, unLabelSet) == true)
            continue;
        for (int i = 0; i < classStats.nominalCounts.length; i++) {
            double target = new Double(trainingDataSet.attribute(trainingDataSet.classIndex()).value(i));
            set.setClassValue(target);
            entropy = posteriorDistribution(classifier, trainingDataSet, set, classTarget);
            //System.out.println("entropy:" + entropy);
            cEnt += -(entropy) * Math.log10(entropy);
            set.setClassMissing();
        }
    }
    trainingDataSet.remove(trainingDataSet.numInstances() - 1);
    return cEnt;
}
From source file:cyber009.udal.mains.WekaUDAL.java
public void forwardInstanceSelection() {
    double pp = 0.0D;
    AttributeStats classStats = data.labelDataSets.attributeStats(data.labelDataSets.classIndex());
    StatisticalAnalysis sa = new StatisticalAnalysis();
    if (classStats.nominalCounts != null) {
        for (int n = 0; n < data.unLabelDataSets.numInstances(); n++) {
            Instance unLabelSet = data.unLabelDataSets.get(n);
            pp = 0.0D;
            for (int i = 0; i < classStats.nominalCounts.length; i++) {
                double classTarget = new Double(
                        data.labelDataSets.attribute(data.labelDataSets.classIndex()).value(i));
                unLabelSet.setClassValue(classTarget);
                pp += sa.posteriorDistribution(classifier, data.labelDataSets, unLabelSet, classTarget);
                pp *= sa.conditionalEntropy(classifier, data.labelDataSets, data.unLabelDataSets, unLabelSet,
                        classTarget);
                unLabelSet.setClassValue(Double.NaN);
            }
            data.infoFWunLabel.put(n, pp);
        }
    }
}