List of usage examples for weka.core.Instances.add(Instance)
@Override public boolean add(Instance instance)
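Before the project-specific examples below, here is a minimal, self-contained sketch of the usual add(Instance) pattern: declare the attributes, build an empty Instances header, fill a DenseInstance, and append it. It targets the Weka 3.7+ API (weka.core.DenseInstance, java.util.ArrayList); the class, relation, and attribute names are illustrative only and are not taken from any of the source files listed here.

import java.util.ArrayList;

import weka.core.Attribute;
import weka.core.DenseInstance;
import weka.core.Instance;
import weka.core.Instances;

public class InstancesAddSketch {
    public static void main(String[] args) {
        // Two numeric attributes plus a nominal class attribute.
        ArrayList<Attribute> attrs = new ArrayList<Attribute>();
        attrs.add(new Attribute("x1"));
        attrs.add(new Attribute("x2"));
        ArrayList<String> classValues = new ArrayList<String>();
        classValues.add("yes");
        classValues.add("no");
        attrs.add(new Attribute("class", classValues));

        // Empty dataset (capacity 0) that uses the attribute list as its header.
        Instances dataset = new Instances("demo", attrs, 0);
        dataset.setClassIndex(dataset.numAttributes() - 1);

        // Fill one instance and append it; add(Instance) copies the instance
        // and associates the copy with this dataset's header.
        Instance inst = new DenseInstance(dataset.numAttributes());
        inst.setValue(attrs.get(0), 1.5);
        inst.setValue(attrs.get(1), -0.25);
        inst.setValue(attrs.get(2), "yes"); // nominal value resolved via the Attribute itself
        dataset.add(inst);

        System.out.println(dataset);
    }
}

Several of the examples below use the older 3.6 API instead, where weka.core.Instance is a concrete class and attribute lists are built with FastVector; the dataset.add(example) call is used the same way in both.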
From source file:csav2.Weka_additive.java
public void classifyTestSet4(String input) throws Exception {
    String ids = "";
    ReaderWriter rw = new ReaderWriter();

    //ATTRIBUTES
    Attribute attr[] = new Attribute[50];

    //numeric
    attr[0] = new Attribute("Autosentiment");
    attr[1] = new Attribute("PositiveMatch");
    attr[2] = new Attribute("NegativeMatch");
    attr[3] = new Attribute("FW");
    attr[4] = new Attribute("JJ");
    attr[5] = new Attribute("RB");
    attr[6] = new Attribute("RB_JJ");
    attr[7] = new Attribute("amod");
    attr[8] = new Attribute("acomp");
    attr[9] = new Attribute("advmod");

    //class
    FastVector classValue = new FastVector(3);
    classValue.addElement("p");
    classValue.addElement("n");
    classValue.addElement("o");
    attr[10] = new Attribute("answer", classValue);

    FastVector attrs = new FastVector();
    attrs.addElement(attr[0]);
    attrs.addElement(attr[1]);
    attrs.addElement(attr[2]);
    attrs.addElement(attr[3]);
    attrs.addElement(attr[4]);
    attrs.addElement(attr[5]);
    attrs.addElement(attr[6]);
    attrs.addElement(attr[7]);
    attrs.addElement(attr[8]);
    attrs.addElement(attr[9]);
    attrs.addElement(attr[10]);

    // Add Instances
    Instances dataset = new Instances("my_dataset", attrs, 0);

    StringTokenizer tokenizer = new StringTokenizer(input);
    while (tokenizer.hasMoreTokens()) {
        Instance example = new Instance(11);
        for (int j = 0; j < 11; j++) {
            String st = tokenizer.nextToken();
            System.out.println(j + " " + st);
            if (j == 0)
                example.setValue(attr[j], Float.parseFloat(st));
            else if (j == 10)
                example.setValue(attr[j], st);
            else
                example.setValue(attr[j], Integer.parseInt(st));
        }
        ids += tokenizer.nextToken() + "\t";
        dataset.add(example);
    }

    //Save dataset
    String file = "Classifier\\featurefile_additive_test4.arff";
    ArffSaver saver = new ArffSaver();
    saver.setInstances(dataset);
    saver.setFile(new File(file));
    saver.writeBatch();

    //Read dataset
    ArffLoader loader = new ArffLoader();
    loader.setFile(new File(file));
    dataset = loader.getDataSet();

    //Set class index
    dataset.setClassIndex(10);

    //Read classifier back
    String file1 = "Classifier\\classifier_add_asAndpolarwordsAndposAnddep.model";
    InputStream is = new FileInputStream(file1);
    Classifier classifier;
    ObjectInputStream objectInputStream = new ObjectInputStream(is);
    classifier = (Classifier) objectInputStream.readObject();

    //Evaluate
    Instances test = new Instances(dataset, 0, dataset.numInstances());
    test.setClassIndex(10);

    //Do eval
    Evaluation eval = new Evaluation(test); //trainset
    eval.evaluateModel(classifier, test); //testset
    System.out.println(eval.toSummaryString());
    System.out.println("WEIGHTED F-MEASURE:" + eval.weightedFMeasure());
    System.out.println("WEIGHTED PRECISION:" + eval.weightedPrecision());
    System.out.println("WEIGHTED RECALL:" + eval.weightedRecall());

    //output predictions
    String optest = "", val = "";
    StringTokenizer op = new StringTokenizer(ids);
    int count = 0;
    while (op.hasMoreTokens()) {
        double[] prediction = classifier.distributionForInstance(test.instance(count));
        count += 1;
        if (prediction[0] > prediction[1]) {
            if (prediction[0] > prediction[2]) {
                val = "p: " + Double.toString((double) Math.round((prediction[0]) * 1000) / 1000);
            } else {
                val = "o: " + Double.toString((double) Math.round((prediction[2]) * 1000) / 1000);
            }
        } else {
            if (prediction[1] > prediction[2]) {
                val = "n: " + Double.toString((double) Math.round((prediction[1]) * 1000) / 1000);
            } else {
                val = "o: " + Double.toString((double) Math.round((prediction[2]) * 1000) / 1000);
            }
        }
        optest += op.nextToken() + "\t" + val + "\n";
    }
    rw.writeToFile(optest, "Answers_additive_Test4", "txt");
}
From source file:csav2.Weka_additive.java
public void classifyTestSet5(String input) throws Exception {
    String ids = "";
    ReaderWriter rw = new ReaderWriter();

    //ATTRIBUTES
    Attribute attr[] = new Attribute[50];

    //numeric
    attr[0] = new Attribute("Autosentiment");
    attr[1] = new Attribute("PositiveMatch");
    attr[2] = new Attribute("NegativeMatch");
    attr[3] = new Attribute("FW");
    attr[4] = new Attribute("JJ");
    attr[5] = new Attribute("RB");
    attr[6] = new Attribute("RB_JJ");
    attr[7] = new Attribute("amod");
    attr[8] = new Attribute("acomp");
    attr[9] = new Attribute("advmod");
    attr[10] = new Attribute("BLPos");
    attr[11] = new Attribute("BLNeg");

    //class
    FastVector classValue = new FastVector(3);
    classValue.addElement("p");
    classValue.addElement("n");
    classValue.addElement("o");
    attr[12] = new Attribute("answer", classValue);

    FastVector attrs = new FastVector();
    attrs.addElement(attr[0]);
    attrs.addElement(attr[1]);
    attrs.addElement(attr[2]);
    attrs.addElement(attr[3]);
    attrs.addElement(attr[4]);
    attrs.addElement(attr[5]);
    attrs.addElement(attr[6]);
    attrs.addElement(attr[7]);
    attrs.addElement(attr[8]);
    attrs.addElement(attr[9]);
    attrs.addElement(attr[10]);
    attrs.addElement(attr[11]);
    attrs.addElement(attr[12]);

    // Add Instances
    Instances dataset = new Instances("my_dataset", attrs, 0);

    StringTokenizer tokenizer = new StringTokenizer(input);
    while (tokenizer.hasMoreTokens()) {
        Instance example = new Instance(13);
        for (int j = 0; j < 13; j++) {
            String st = tokenizer.nextToken();
            System.out.println(j + " " + st);
            if (j == 0)
                example.setValue(attr[j], Float.parseFloat(st));
            else if (j == 12)
                example.setValue(attr[j], st);
            else
                example.setValue(attr[j], Integer.parseInt(st));
        }
        ids += tokenizer.nextToken() + "\t";
        dataset.add(example);
    }

    //Save dataset
    String file = "Classifier\\featurefile_additive_test5.arff";
    ArffSaver saver = new ArffSaver();
    saver.setInstances(dataset);
    saver.setFile(new File(file));
    saver.writeBatch();

    //Read dataset
    ArffLoader loader = new ArffLoader();
    loader.setFile(new File(file));
    dataset = loader.getDataSet();

    //Set class index
    dataset.setClassIndex(12);

    //Read classifier back
    String file1 = "Classifier\\classifier_add_asAndpolarwordsAndposAnddepAndbl.model";
    InputStream is = new FileInputStream(file1);
    Classifier classifier;
    ObjectInputStream objectInputStream = new ObjectInputStream(is);
    classifier = (Classifier) objectInputStream.readObject();

    //Evaluate
    Instances test = new Instances(dataset, 0, dataset.numInstances());
    test.setClassIndex(12);

    //Do eval
    Evaluation eval = new Evaluation(test); //trainset
    eval.evaluateModel(classifier, test); //testset
    System.out.println(eval.toSummaryString());
    System.out.println("WEIGHTED F-MEASURE:" + eval.weightedFMeasure());
    System.out.println("WEIGHTED PRECISION:" + eval.weightedPrecision());
    System.out.println("WEIGHTED RECALL:" + eval.weightedRecall());

    //output predictions
    String optest = "", val = "";
    StringTokenizer op = new StringTokenizer(ids);
    int count = 0;
    while (op.hasMoreTokens()) {
        double[] prediction = classifier.distributionForInstance(test.instance(count));
        count += 1;
        if (prediction[0] > prediction[1]) {
            if (prediction[0] > prediction[2]) {
                val = "p: " + Double.toString((double) Math.round((prediction[0]) * 1000) / 1000);
            } else {
                val = "o: " + Double.toString((double) Math.round((prediction[2]) * 1000) / 1000);
            }
        } else {
            if (prediction[1] > prediction[2]) {
                val = "n: " + Double.toString((double) Math.round((prediction[1]) * 1000) / 1000);
            } else {
                val = "o: " + Double.toString((double) Math.round((prediction[2]) * 1000) / 1000);
            }
        }
        optest += op.nextToken() + "\t" + val + "\n";
    }
    rw.writeToFile(optest, "Answers_additive_Test5", "txt");
}
From source file:csav2.Weka_additive.java
public void classifyTestSet6(String input) throws Exception {
    String ids = "";
    ReaderWriter rw = new ReaderWriter();

    //ATTRIBUTES
    Attribute attr[] = new Attribute[50];

    //numeric
    attr[0] = new Attribute("Autosentiment");
    attr[1] = new Attribute("PositiveMatch");
    attr[2] = new Attribute("NegativeMatch");
    attr[3] = new Attribute("FW");
    attr[4] = new Attribute("JJ");
    attr[5] = new Attribute("RB");
    attr[6] = new Attribute("RB_JJ");
    attr[7] = new Attribute("amod");
    attr[8] = new Attribute("acomp");
    attr[9] = new Attribute("advmod");
    attr[10] = new Attribute("BLPos");
    attr[11] = new Attribute("BLNeg");
    attr[12] = new Attribute("VSPos");
    attr[13] = new Attribute("VSNeg");

    //class
    FastVector classValue = new FastVector(3);
    classValue.addElement("p");
    classValue.addElement("n");
    classValue.addElement("o");
    attr[14] = new Attribute("answer", classValue);

    FastVector attrs = new FastVector();
    attrs.addElement(attr[0]);
    attrs.addElement(attr[1]);
    attrs.addElement(attr[2]);
    attrs.addElement(attr[3]);
    attrs.addElement(attr[4]);
    attrs.addElement(attr[5]);
    attrs.addElement(attr[6]);
    attrs.addElement(attr[7]);
    attrs.addElement(attr[8]);
    attrs.addElement(attr[9]);
    attrs.addElement(attr[10]);
    attrs.addElement(attr[11]);
    attrs.addElement(attr[12]);
    attrs.addElement(attr[13]);
    attrs.addElement(attr[14]);

    // Add Instances
    Instances dataset = new Instances("my_dataset", attrs, 0);

    StringTokenizer tokenizer = new StringTokenizer(input);
    while (tokenizer.hasMoreTokens()) {
        Instance example = new Instance(15);
        for (int j = 0; j < 15; j++) {
            String st = tokenizer.nextToken();
            System.out.println(j + " " + st);
            if (j == 0)
                example.setValue(attr[j], Float.parseFloat(st));
            else if (j == 14)
                example.setValue(attr[j], st);
            else
                example.setValue(attr[j], Integer.parseInt(st));
        }
        ids += tokenizer.nextToken() + "\t";
        dataset.add(example);
    }

    //Save dataset
    String file = "Classifier\\featurefile_additive_test6.arff";
    ArffSaver saver = new ArffSaver();
    saver.setInstances(dataset);
    saver.setFile(new File(file));
    saver.writeBatch();

    //Read dataset
    ArffLoader loader = new ArffLoader();
    loader.setFile(new File(file));
    dataset = loader.getDataSet();

    //Set class index
    dataset.setClassIndex(14);

    //Read classifier back
    String file1 = "Classifier\\classifier_asAndpolarwordsAndposAnddepAndblAndvs.model";
    InputStream is = new FileInputStream(file1);
    Classifier classifier;
    ObjectInputStream objectInputStream = new ObjectInputStream(is);
    classifier = (Classifier) objectInputStream.readObject();

    //Evaluate
    Instances test = new Instances(dataset, 0, dataset.numInstances());
    test.setClassIndex(14);

    //Do eval
    Evaluation eval = new Evaluation(test); //trainset
    eval.evaluateModel(classifier, test); //testset
    System.out.println(eval.toSummaryString());
    System.out.println("WEIGHTED F-MEASURE:" + eval.weightedFMeasure());
    System.out.println("WEIGHTED PRECISION:" + eval.weightedPrecision());
    System.out.println("WEIGHTED RECALL:" + eval.weightedRecall());

    //output predictions
    String optest = "", val = "";
    StringTokenizer op = new StringTokenizer(ids);
    int count = 0;
    while (op.hasMoreTokens()) {
        double[] prediction = classifier.distributionForInstance(test.instance(count));
        count += 1;
        if (prediction[0] > prediction[1]) {
            if (prediction[0] > prediction[2]) {
                val = "p: " + Double.toString((double) Math.round((prediction[0]) * 1000) / 1000);
            } else {
                val = "o: " + Double.toString((double) Math.round((prediction[2]) * 1000) / 1000);
            }
        } else {
            if (prediction[1] > prediction[2]) {
                val = "n: " + Double.toString((double) Math.round((prediction[1]) * 1000) / 1000);
            } else {
                val = "o: " + Double.toString((double) Math.round((prediction[2]) * 1000) / 1000);
            }
        }
        optest += op.nextToken() + "\t" + val + "\n";
    }
    rw.writeToFile(optest, "Answers_additive_Test6", "txt");
}
From source file:cyber009.main.MainSyntacticData.java
public static void main(String[] args) {
    Random r = new Random(System.currentTimeMillis());
    Variable v = new Variable();
    long timeStart = 0, timeEnd = 0;
    ANN ann = new ANN(v, 0.014013);
    for (int f = 2; f <= 2; f++) {
        v.N = f;
        v.D = 4000;
        v.threshold = 0.0;
        cyber009.function.LinearFunction func = new cyber009.function.LinearFunction(v.N);
        v.X = new double[v.D][];
        v.TARGET = new double[v.D];
        v.WEIGHT = new double[v.N + 1];
        for (int d = 0; d < v.D; d++) {
            v.X[d] = new double[v.N + 1];
            v.X[d][0] = 1.0;
            for (int n = 1; n <= v.N; n++) {
                v.X[d][n] = r.nextGaussian();
            }
            v.TARGET[d] = func.syntacticFunction(v.X[d], v.threshold);
        }
        //v.showAll();
        //Lib.Utility.writeCSVDataSet("data/syn_data_x_"+v.N+"_d_"+v.D+".csv", v);
        List<Attribute> atts = new ArrayList<>();
        Attribute[] att = new Attribute[v.N + 2];
        for (int i = 0; i <= v.N; i++) {
            att[i] = new Attribute("X" + i);
            atts.add(att[i]);
        }
        List<String> classValus = new ArrayList<>();
        classValus.add("1.0");
        classValus.add("0.0");
        att[v.N + 1] = new Attribute("class", classValus);
        atts.add(att[v.N + 1]);
        Instances dataSet = new Instances("Syn Data", (ArrayList<Attribute>) atts, v.D);
        for (int d = 0; d < v.D; d++) {
            Instance ins = new DenseInstance(v.N + 2);
            for (int i = 0; i <= v.N; i++) {
                ins.setValue(atts.get(i), v.X[d][i]);
            }
            ins.setValue(atts.get(v.N + 1), v.TARGET[d]);
            dataSet.add(ins);
        }
        //System.out.println(dataSet);
        PlotData2D p2D = new PlotData2D(dataSet);
        p2D.setPlotName("Syn data");
        VisualizePanel vp = new VisualizePanel();
        vp.setName("Show Data");
        try {
            vp.addPlot(p2D);
            JFrame frame = new JFrame("Show Data");
            frame.setSize(600, 600);
            frame.setVisible(true);
            frame.getContentPane().setLayout(new BorderLayout());
            frame.getContentPane().add(vp, BorderLayout.CENTER);
            frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
            frame.setVisible(true);
            func.showCoefficients();
        } catch (Exception ex) {
            Logger.getLogger(MainSyntacticData.class.getName()).log(Level.SEVERE, null, ex);
        }
        ann.weightReset();
        timeStart = System.currentTimeMillis();
        ann.gradientDescent(10000L, 2, v.D);
        timeEnd = System.currentTimeMillis();
        //v.showTable();
        //v.showWEIGHT();
        System.out.println("feature #:" + v.N + " time:(" + (timeEnd - timeStart) + ")");
        v.showResult();
        //func.showCoefficients();
    }
}
From source file:cyber009.main.UDAL.java
public void showData() {
    List<Attribute> atts = new ArrayList<>();
    Attribute[] att = new Attribute[v.N + 2];
    for (int i = 0; i <= v.N; i++) {
        att[i] = new Attribute("X" + i);
        atts.add(att[i]);
    }
    List<String> classValus = new ArrayList<>();
    classValus.add("1.0");
    classValus.add("0.0");
    att[v.N + 1] = new Attribute("class", classValus);
    atts.add(att[v.N + 1]);
    Instances dataSet = new Instances("Syn Data", (ArrayList<Attribute>) atts, v.D);
    for (int d = 0; d < v.D; d++) {
        Instance ins = new DenseInstance(v.N + 2);
        for (int i = 0; i <= v.N; i++) {
            ins.setValue(atts.get(i), v.X[d][i]);
        }
        ins.setValue(atts.get(v.N + 1), v.TARGET[d]);
        dataSet.add(ins);
    }
    //System.out.println(dataSet);
    PlotData2D p2D = new PlotData2D(dataSet);
    p2D.setPlotName("Syn data");
    VisualizePanel vp = new VisualizePanel();
    vp.setName("Show Data");
    try {
        vp.addPlot(p2D);
        JFrame frame = new JFrame("Show Data");
        frame.setSize(600, 600);
        frame.setVisible(true);
        frame.getContentPane().setLayout(new BorderLayout());
        frame.getContentPane().add(vp, BorderLayout.CENTER);
        frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        frame.setVisible(true);
        func.showCoefficients();
    } catch (Exception ex) {
        Logger.getLogger(MainSyntacticData.class.getName()).log(Level.SEVERE, null, ex);
    }
}
From source file:cyber009.udal.mains.WekaUDAL.java
@SuppressWarnings("unchecked") public void updateLabelDataSet() { int count = 0; Instances temp = new Instances(data.unLabelDataSets, 0, 0); data.infoFWunLabel = (HashMap<Integer, Double>) Utilitys.sortByValue((Map) data.infoFWunLabel); for (Map.Entry<Integer, Double> entrySet : data.infoFWunLabel.entrySet()) { int index = entrySet.getKey(); if (count < data.N_FL) { // System.out.println(index + " : " // +entrySet.getValue() + " : " // + data.unLabelDataSets.instance(index).toString()); func.syntacticLabelFunction(data.unLabelDataSets.get(index)); data.labelDataSets.add(data.unLabelDataSets.get(index)); } else {//from w ww . j a va 2 s.c o m temp.add(data.unLabelDataSets.get(index)); } count++; } data.infoFWunLabel.clear(); data.unLabelDataSets.clear(); data.unLabelDataSets.addAll(temp); //System.out.println("------------------------------------------"); }
From source file:cz.vse.fis.keg.entityclassifier.core.salience.EntitySaliencer.java
License:Open Source License
public void computeSalience(List<Entity> entities) {
    try {
        if (!initialized) {
            initialize();
            initialized = true;
        }
        ArrayList<SEntity> processedEntities = new ArrayList<SEntity>();
        for (Entity e : entities) {
            SEntity entityMention = new SEntity();
            entityMention.setBeginIndex(e.getStartOffset().intValue());
            entityMention.setEntityType(e.getEntityType());
            ArrayList<Type> types = e.getTypes();
            ArrayList<String> loggedURIs = new ArrayList<String>();
            if (types != null) {
                for (Type t : types) {
                    String entityURI = t.getEntityURI();
                    if (!loggedURIs.contains(entityURI)) {
                        loggedURIs.add(entityURI);
                        entityMention.getUrls().add(entityURI);
                    }
                }
            }
            boolean entityAlreadyLogged = false;
            for (SEntity sEntity : processedEntities) {
                boolean isThisEntitySame = false;
                ArrayList<String> entityURIs1 = sEntity.getUrls();
                ArrayList<String> entityURIs2 = entityMention.getUrls();
                for (String eURI1 : entityURIs1) {
                    for (String eURI2 : entityURIs2) {
                        if (!entityAlreadyLogged) {
                            if (eURI1.equals(eURI2)) {
                                entityAlreadyLogged = true;
                                isThisEntitySame = true;
                                sEntity.setNumOccurrences(sEntity.getNumOccurrences() + 1);
                            }
                        }
                    }
                }
                if (isThisEntitySame) {
                    for (String uri : entityMention.getUrls()) {
                        if (!sEntity.getUrls().contains(uri)) {
                            sEntity.getUrls().add(uri);
                        }
                    }
                }
            }
            // Entity seen for first time in the document.
            if (!entityAlreadyLogged) {
                entityMention.setNumOccurrences(1);
                processedEntities.add(entityMention);
            }
        }

        // Preparing the test data container.
        FastVector attributes = new FastVector(6);
        attributes.add(new Attribute("beginIndex"));
        attributes.add(new Attribute("numUniqueEntitiesInDoc"));
        attributes.add(new Attribute("numOfOccurrencesOfEntityInDoc"));
        attributes.add(new Attribute("numOfEntityMentionsInDoc"));

        FastVector entityTypeNominalAttVal = new FastVector(2);
        entityTypeNominalAttVal.addElement("named_entity");
        entityTypeNominalAttVal.addElement("common_entity");
        Attribute entityTypeAtt = new Attribute("type", entityTypeNominalAttVal);
        attributes.add(entityTypeAtt);

        FastVector classNominalAttVal = new FastVector(3);
        classNominalAttVal.addElement("not_salient");
        classNominalAttVal.addElement("less_salient");
        classNominalAttVal.addElement("most_salient");
        Attribute classAtt = new Attribute("class", classNominalAttVal);
        attributes.add(classAtt);

        Instances evalData = new Instances("MyRelation", attributes, 0);
        evalData.setClassIndex(evalData.numAttributes() - 1);

        for (int i = 0; i < processedEntities.size(); i++) {
            String entityType = "";
            if (processedEntities.get(i).getEntityType().equals("named entity")) {
                entityType = "named_entity";
            } else if (processedEntities.get(i).getEntityType().equals("common entity")) {
                entityType = "common_entity";
            } else {
            }
            Instance inst = new DenseInstance(6);
            inst.setValue(evalData.attribute(0), processedEntities.get(i).getBeginIndex()); // begin index
            inst.setValue(evalData.attribute(1), processedEntities.size()); // num of unique entities in doc
            inst.setValue(evalData.attribute(2), processedEntities.get(i).getNumOccurrences()); // num of entity occurrences in doc
            inst.setValue(evalData.attribute(3), entities.size()); // num of entity mentions in doc
            inst.setValue(evalData.attribute(4), entityType); // type of the entity
            evalData.add(inst);
        }

        for (int i = 0; i < processedEntities.size(); i++) {
            SEntity sEntity = processedEntities.get(i);
            int classIndex = (int) classifier.classifyInstance(evalData.get(i));
            String classLabel = evalData.firstInstance().classAttribute().value(classIndex);
            double pred[] = classifier.distributionForInstance(evalData.get(i));
            double probability = pred[classIndex];
            double salienceScore = pred[1] * 0.5 + pred[2];
            sEntity.setSalienceScore(salienceScore);
            sEntity.setSalienceConfidence(probability);
            sEntity.setSalienceClass(classLabel);
            for (Entity e : entities) {
                ArrayList<Type> types = e.getTypes();
                if (types != null) {
                    for (Type t : types) {
                        if (sEntity.getUrls().contains(t.getEntityURI())) {
                            Salience s = new Salience();
                            s.setClassLabel(classLabel);
                            DecimalFormat df = new DecimalFormat("0.000");
                            double fProbability = df.parse(df.format(probability)).doubleValue();
                            double fSalience = df.parse(df.format(salienceScore)).doubleValue();
                            s.setConfidence(fProbability);
                            s.setScore(fSalience);
                            t.setSalience(s);
                        }
                    }
                }
            }
        }
    } catch (Exception ex) {
        Logger.getLogger(EntitySaliencer.class.getName()).log(Level.SEVERE, null, ex);
    }
}
From source file:data.generation.target.utils.PrincipalComponents.java
License:Open Source License
/**
 * Gets the transformed training data.
 *
 * @return the transformed training data
 * @throws Exception if transformed data can't be returned
 */
public Instances transformedData(Instances data) throws Exception {
    if (m_eigenvalues == null) {
        throw new Exception("Principal components hasn't been built yet");
    }
    Instances output = null;
    if (m_transBackToOriginal) {
        output = new Instances(m_originalSpaceFormat);
    } else {
        output = new Instances(m_transformedFormat);
    }
    for (int i = 0; i < data.numInstances(); i++) {
        Instance converted = convertInstance(data.instance(i));
        output.add(converted);
    }
    return output;
}
From source file:de.fub.maps.project.detector.model.inference.processhandler.CrossValidationProcessHandler.java
License:Open Source License
@Override
protected void handle() {
    Collection<Attribute> attributeList = getInferenceModel().getAttributes();
    Instances trainingSet = new Instances("Classes", new ArrayList<Attribute>(attributeList), 9);
    trainingSet.setClassIndex(0);
    HashMap<String, HashSet<TrackSegment>> dataset = getInferenceModel().getInput().getTrainingsSet();
    for (Entry<String, HashSet<TrackSegment>> entry : dataset.entrySet()) {
        for (TrackSegment trackSegment : entry.getValue()) {
            Instance instance = getInstance(entry.getKey(), trackSegment);
            trainingSet.add(instance);
        }
    }
    assert trainingSet.numInstances() > 0 : "Training set is empty and has no instances"; //NO18N
    evaluate(trainingSet);
}
From source file:de.fub.maps.project.detector.model.inference.processhandler.InferenceDataProcessHandler.java
License:Open Source License
@Override
protected void handle() {
    clearResults();
    Classifier classifier = getInferenceModel().getClassifier();
    HashSet<TrackSegment> inferenceDataSet = getInferenceDataSet();
    Collection<Attribute> attributeList = getInferenceModel().getAttributes();
    if (!attributeList.isEmpty()) {
        Set<String> keySet = getInferenceModel().getInput().getTrainingsSet().keySet();
        setClassesToView(keySet);

        Instances unlabeledInstances = new Instances("Unlabeld Tracks", new ArrayList<Attribute>(attributeList), 0); //NO18N
        unlabeledInstances.setClassIndex(0);

        ArrayList<TrackSegment> segmentList = new ArrayList<TrackSegment>();
        for (TrackSegment segment : inferenceDataSet) {
            Instance instance = getInstance(segment);
            unlabeledInstances.add(instance);
            segmentList.add(segment);
        }

        // create copy
        Instances labeledInstances = new Instances(unlabeledInstances);

        for (int index = 0; index < labeledInstances.numInstances(); index++) {
            try {
                Instance instance = labeledInstances.instance(index);

                // classify instance
                double classifyed = classifier.classifyInstance(instance);
                instance.setClassValue(classifyed);

                // get class label
                String value = unlabeledInstances.classAttribute().value((int) classifyed);
                if (index < segmentList.size()) {
                    instanceToTrackSegmentMap.put(instance, segmentList.get(index));
                }

                // put label and instance to result map
                put(value, instance);
            } catch (Exception ex) {
                Exceptions.printStackTrace(ex);
            }
        }

        // update view
        updateVisualRepresentation();

        // update result set of the inferenceModel
        for (Entry<String, List<Instance>> entry : resultMap.entrySet()) {
            HashSet<TrackSegment> trackSegmentList = new HashSet<TrackSegment>();
            for (Instance instance : entry.getValue()) {
                TrackSegment trackSegment = instanceToTrackSegmentMap.get(instance);
                if (trackSegment != null) {
                    trackSegmentList.add(trackSegment);
                }
            }
            // only those classes are put into the result data set, which are not empty
            if (!trackSegmentList.isEmpty()) {
                getInferenceModel().getResult().put(entry.getKey(), trackSegmentList);
            }
        }
    } else {
        throw new InferenceModelClassifyException(MessageFormat
                .format("No attributes available. Attribute list length == {0}", attributeList.size()));
    }
    resultMap.clear();
    instanceToTrackSegmentMap.clear();
}