List of usage examples for weka.classifiers.Classifier.classifyInstance
public double classifyInstance(Instance instance) throws Exception;
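Before the project examples below, here is a minimal self-contained sketch of the usual call pattern. The dataset file name (data.arff) and the choice of J48 are illustrative assumptions only; any trained Classifier is used the same way.

import weka.classifiers.Classifier;
import weka.classifiers.trees.J48;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class ClassifyInstanceSketch {
    public static void main(String[] args) throws Exception {
        // Load a dataset and mark the last attribute as the class (file name is a placeholder).
        Instances data = DataSource.read("data.arff");
        data.setClassIndex(data.numAttributes() - 1);

        // Train any Classifier implementation; J48 is chosen here only for illustration.
        Classifier classifier = new J48();
        classifier.buildClassifier(data);

        // classifyInstance returns the index of the predicted label for a nominal class
        // attribute, or the predicted value for a numeric (regression) target.
        for (int i = 0; i < data.numInstances(); i++) {
            Instance instance = data.instance(i);
            double prediction = classifier.classifyInstance(instance);
            System.out.println("Instance " + i + ": " + prediction);
        }
    }
}

For a nominal class, data.classAttribute().value((int) prediction) maps the returned index back to the label; the graphrat examples below use exactly this idiom (masterSet.classAttribute().value((int) result)).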
From source file:org.jaqpot.algorithms.resource.WekaMLR.java
License:Open Source License
@POST
@Path("prediction")
public Response prediction(PredictionRequest request) {
    try {
        if (request.getDataset().getDataEntry().isEmpty()
                || request.getDataset().getDataEntry().get(0).getValues().isEmpty()) {
            return Response.status(Response.Status.BAD_REQUEST)
                    .entity("Dataset is empty. Cannot train model on empty dataset.").build();
        }
        String base64Model = (String) request.getRawModel();
        byte[] modelBytes = Base64.getDecoder().decode(base64Model);
        ByteArrayInputStream bais = new ByteArrayInputStream(modelBytes);
        ObjectInput in = new ObjectInputStream(bais);
        WekaModel model = (WekaModel) in.readObject();
        Classifier classifier = model.getClassifier();

        Instances data = InstanceUtils.createFromDataset(request.getDataset());
        List<String> additionalInfo = (List) request.getAdditionalInfo();
        String dependentFeature = additionalInfo.get(0);
        String dependentFeatureName = additionalInfo.get(1);
        data.insertAttributeAt(new Attribute(dependentFeature), data.numAttributes());

        List<LinkedHashMap<String, Object>> predictions = new ArrayList<>();
        for (int i = 0; i < data.numInstances(); i++) {
            Instance instance = data.instance(i);
            try {
                double prediction = classifier.classifyInstance(instance);
                LinkedHashMap<String, Object> predictionMap = new LinkedHashMap<>();
                predictionMap.put("Weka MLR prediction of " + dependentFeatureName, prediction);
                predictions.add(predictionMap);
            } catch (Exception ex) {
                Logger.getLogger(WekaMLR.class.getName()).log(Level.SEVERE, null, ex);
                return Response.status(Response.Status.BAD_REQUEST)
                        .entity("Error while getting predictions. " + ex.getMessage()).build();
            }
        }

        PredictionResponse response = new PredictionResponse();
        response.setPredictions(predictions);
        return Response.ok(response).build();
    } catch (Exception ex) {
        Logger.getLogger(WekaMLR.class.getName()).log(Level.SEVERE, null, ex);
        return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(ex.getMessage()).build();
    }
}
From source file:org.jaqpot.algorithms.resource.WekaPLS.java
License:Open Source License
@POST
@Path("prediction")
public Response prediction(PredictionRequest request) {
    try {
        if (request.getDataset().getDataEntry().isEmpty()
                || request.getDataset().getDataEntry().get(0).getValues().isEmpty()) {
            return Response.status(Response.Status.BAD_REQUEST)
                    .entity("Dataset is empty. Cannot make predictions on empty dataset.").build();
        }
        String base64Model = (String) request.getRawModel();
        byte[] modelBytes = Base64.getDecoder().decode(base64Model);
        ByteArrayInputStream bais = new ByteArrayInputStream(modelBytes);
        ObjectInput in = new ObjectInputStream(bais);
        WekaModel model = (WekaModel) in.readObject();
        Classifier classifier = model.getClassifier();

        Instances data = InstanceUtils.createFromDataset(request.getDataset());
        String dependentFeature = (String) request.getAdditionalInfo();
        data.insertAttributeAt(new Attribute(dependentFeature), data.numAttributes());
        data.setClass(data.attribute(dependentFeature));

        List<LinkedHashMap<String, Object>> predictions = new ArrayList<>();
        for (int i = 0; i < data.numInstances(); i++) {
            Instance instance = data.instance(i);
            try {
                double prediction = classifier.classifyInstance(instance);
                LinkedHashMap<String, Object> predictionMap = new LinkedHashMap<>();
                predictionMap.put("Weka PLS prediction of " + dependentFeature, prediction);
                predictions.add(predictionMap);
            } catch (Exception ex) {
                Logger.getLogger(WekaMLR.class.getName()).log(Level.SEVERE, null, ex);
                return Response.status(Response.Status.BAD_REQUEST)
                        .entity("Error while getting predictions. " + ex.getMessage()).build();
            }
        }

        PredictionResponse response = new PredictionResponse();
        response.setPredictions(predictions);
        return Response.ok(response).build();
    } catch (IOException | ClassNotFoundException ex) {
        Logger.getLogger(WekaSVM.class.getName()).log(Level.SEVERE, null, ex);
        return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(ex.getMessage()).build();
    }
}
From source file:org.jaqpot.algorithms.resource.WekaRBF.java
License:Open Source License
@POST
@Path("prediction")
public Response prediction(PredictionRequest request) {
    try {
        if (request.getDataset().getDataEntry().isEmpty()
                || request.getDataset().getDataEntry().get(0).getValues().isEmpty()) {
            return Response.status(Response.Status.BAD_REQUEST)
                    .entity("Dataset is empty. Cannot make predictions on empty dataset.").build();
        }
        String base64Model = (String) request.getRawModel();
        byte[] modelBytes = Base64.getDecoder().decode(base64Model);
        ByteArrayInputStream bais = new ByteArrayInputStream(modelBytes);
        ObjectInput in = new ObjectInputStream(bais);
        WekaModel model = (WekaModel) in.readObject();
        Classifier classifier = model.getClassifier();

        Instances data = InstanceUtils.createFromDataset(request.getDataset());
        String dependentFeature = (String) request.getAdditionalInfo();
        data.insertAttributeAt(new Attribute(dependentFeature), data.numAttributes());
        data.setClass(data.attribute(dependentFeature));

        List<LinkedHashMap<String, Object>> predictions = new ArrayList<>();
        for (int i = 0; i < data.numInstances(); i++) {
            Instance instance = data.instance(i);
            try {
                double prediction = classifier.classifyInstance(instance);
                LinkedHashMap<String, Object> predictionMap = new LinkedHashMap<>();
                predictionMap.put("Weka RBF prediction of " + dependentFeature, prediction);
                predictions.add(predictionMap);
            } catch (Exception ex) {
                Logger.getLogger(WekaMLR.class.getName()).log(Level.SEVERE, null, ex);
                return Response.status(Response.Status.BAD_REQUEST)
                        .entity("Error while getting predictions. " + ex.getMessage()).build();
            }
        }

        PredictionResponse response = new PredictionResponse();
        response.setPredictions(predictions);
        return Response.ok(response).build();
    } catch (IOException | ClassNotFoundException ex) {
        Logger.getLogger(WekaMLR.class.getName()).log(Level.SEVERE, null, ex);
        return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(ex.getMessage()).build();
    }
}
From source file:org.jaqpot.algorithms.resource.WekaSVM.java
License:Open Source License
@POST
@Path("prediction")
public Response prediction(PredictionRequest request) {
    try {
        if (request.getDataset().getDataEntry().isEmpty()
                || request.getDataset().getDataEntry().get(0).getValues().isEmpty()) {
            return Response.status(Response.Status.BAD_REQUEST)
                    .entity("Dataset is empty. Cannot make predictions on empty dataset.").build();
        }
        String base64Model = (String) request.getRawModel();
        byte[] modelBytes = Base64.getDecoder().decode(base64Model);
        ByteArrayInputStream bais = new ByteArrayInputStream(modelBytes);
        ObjectInput in = new ObjectInputStream(bais);
        WekaModel model = (WekaModel) in.readObject();
        Classifier classifier = model.getClassifier();

        Instances data = InstanceUtils.createFromDataset(request.getDataset());
        String dependentFeature = (String) request.getAdditionalInfo();
        data.insertAttributeAt(new Attribute(dependentFeature), data.numAttributes());
        data.setClass(data.attribute(dependentFeature));

        List<LinkedHashMap<String, Object>> predictions = new ArrayList<>();
        for (int i = 0; i < data.numInstances(); i++) {
            Instance instance = data.instance(i);
            try {
                double prediction = classifier.classifyInstance(instance);
                LinkedHashMap<String, Object> predictionMap = new LinkedHashMap<>();
                predictionMap.put("Weka SVM prediction of " + dependentFeature, prediction);
                predictions.add(predictionMap);
            } catch (Exception ex) {
                Logger.getLogger(WekaMLR.class.getName()).log(Level.SEVERE, null, ex);
                return Response.status(Response.Status.BAD_REQUEST)
                        .entity("Error while getting predictions. " + ex.getMessage()).build();
            }
        }

        PredictionResponse response = new PredictionResponse();
        response.setPredictions(predictions);
        return Response.ok(response).build();
    } catch (Exception ex) {
        Logger.getLogger(WekaSVM.class.getName()).log(Level.SEVERE, null, ex);
        return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(ex.getMessage()).build();
    }
}
From source file:org.mcennis.graphrat.algorithm.machinelearning.ClassifyPerActor.java
License:Open Source License
public void execute(Graph g) {
    // construct the queries to be used
    ActorByMode groundMode = (ActorByMode) ActorQueryFactory.newInstance().create("ActorByMode");
    groundMode.buildQuery((String) parameter.get("GroundMode").get(), ".*", false);
    ActorByMode targetMode = (ActorByMode) ActorQueryFactory.newInstance().create("ActorByMode");
    targetMode.buildQuery((String) parameter.get("TargetMode").get(), ".*", false);

    // build a list of new artists
    TreeSet<Actor> artists = new TreeSet<Actor>();
    artists.addAll(AlgorithmMacros.filterActor(parameter, g, targetMode.execute(g, artists, null)));

    // collect the instance variables from the properties to be the
    for (Actor i : artists) {
        Property classifierProperty = i.getProperty(
                AlgorithmMacros.getSourceID(parameter, g, (String) parameter.get("ClassifierProperty").get()));
        if (!classifierProperty.getValue().isEmpty()) {
            Classifier classifier = (Classifier) classifierProperty.getValue().get(0);
            TreeSet<Actor> artist = new TreeSet<Actor>();
            artist.add(i);
            Iterator<Actor> users = AlgorithmMacros.filterActor(parameter, g, groundMode, null, null);
            Instances dataSet = null;
            boolean firstRun = true;
            while (users.hasNext()) {
                TreeSet<Actor> user = new TreeSet<Actor>();
                user.add(users.next());
                Property property = user.first().getProperty(AlgorithmMacros.getSourceID(parameter, g,
                        (String) parameter.get("SourceProperty").get()));
                if (property.getPropertyClass().getName().contentEquals(Instance.class.getName())) {
                    List values = property.getValue();
                    if (!values.isEmpty()) {
                        // get the existing instance
                        Instance object = (Instance) values.get(0);
                        if (firstRun) {
                            firstRun = false;
                            Instances current = object.dataset();
                            FastVector attributes = new FastVector();
                            for (int j = 0; j < current.numAttributes(); ++j) {
                                attributes.addElement(current.attribute(j));
                            }
                            Attribute classValue = new Attribute(i.getID());
                            attributes.addElement(classValue);
                            dataSet = new Instances(i.getID(), attributes, 1000);
                            dataSet.setClassIndex(dataSet.numAttributes() - 1);
                        }
                        // for every artist, create a temporary artist classifier
                        double[] content = new double[object.numAttributes() + 1];
                        for (int j = 0; j < object.numAttributes() + 1; ++j) {
                            content[j] = object.value(j);
                        }
                        Instance base = new Instance(1.0, content);
                        try {
                            double strength = classifier.classifyInstance(base);
                            if ((!Double.isNaN(strength)) && (strength != 0.0)) {
                                Link link = LinkFactory.newInstance()
                                        .create((String) parameter.get("Relation").get());
                                if ((LinkEnd) parameter.get("LinkEnd").get() == LinkEnd.SOURCE) {
                                    link.set(user.first(), strength, artist.first());
                                } else {
                                    link.set(artist.first(), strength, user.first());
                                }
                                g.add(link);
                            }
                        } catch (Exception ex) {
                            Logger.getLogger(ClassifyPerActor.class.getName()).log(Level.SEVERE, null, ex);
                        }
                    }
                }
            }
        }
    }
}
From source file:org.mcennis.graphrat.algorithm.machinelearning.ClassifySingleAttribute.java
License:Open Source License
public void execute(Graph g) {
    // construct the queries to be used
    ActorByMode groundMode = (ActorByMode) ActorQueryFactory.newInstance().create("ActorByMode");
    groundMode.buildQuery((String) parameter.get("GroundMode").get(), ".*", false);
    ActorByMode targetMode = (ActorByMode) ActorQueryFactory.newInstance().create("ActorByMode");
    targetMode.buildQuery((String) parameter.get("TargetMode").get(), ".*", false);
    LinkByRelation groundTruth = (LinkByRelation) LinkQueryFactory.newInstance().create("LinkByRelation");
    groundTruth.buildQuery((String) parameter.get("Relation").get(), false);

    // build a list of new artists
    TreeSet<Actor> artists = new TreeSet<Actor>();
    artists.addAll(AlgorithmMacros.filterActor(parameter, g, targetMode.execute(g, artists, null)));

    // collect the instance variables from the properties to be the
    Property classifierProperty = g.getProperty(
            AlgorithmMacros.getSourceID(parameter, g, (String) parameter.get("ClassifierProperty").get()));
    if (!classifierProperty.getValue().isEmpty()) {
        Classifier classifier = (Classifier) classifierProperty.getValue().get(0);
        Iterator<Actor> users = AlgorithmMacros.filterActor(parameter, g, groundMode, null, null);
        Instances dataSet = null;
        boolean firstEntry = true;
        while (users.hasNext()) {
            TreeSet<Actor> user = new TreeSet<Actor>();
            user.add(users.next());
            Property property = user.first().getProperty(
                    AlgorithmMacros.getSourceID(parameter, g, (String) parameter.get("SourceProperty").get()));
            if (property.getPropertyClass().getName().contentEquals(Instance.class.getName())) {
                List values = property.getValue();
                if (!values.isEmpty()) {
                    // get the existing instance
                    Instance object = (Instance) values.get(0);
                    if (firstEntry) {
                        firstEntry = false;
                        Instances current = object.dataset();
                        FastVector attributes = new FastVector();
                        for (int j = 0; j < current.numAttributes(); ++j) {
                            attributes.addElement(current.attribute(j));
                        }
                        FastVector targetNames = new FastVector();
                        Iterator<Actor> artistIt = targetMode.executeIterator(g, null, null);
                        while (artistIt.hasNext()) {
                            targetNames.addElement(artistIt.next().getID());
                        }
                        Attribute classValue = new Attribute("TargetID", targetNames);
                        attributes.addElement(classValue);
                        dataSet = new Instances("Training", attributes, 1000);
                        dataSet.setClassIndex(dataSet.numAttributes() - 1);
                    }
                    // for every artist, create a temporary artist classifier
                    double[] content = new double[object.numAttributes() + 1];
                    for (int j = 0; j < object.numAttributes() + 1; ++j) {
                        content[j] = object.value(j);
                    }
                    Instance base = new Instance(1.0, content);
                    try {
                        double strength = classifier.classifyInstance(base);
                        if (!Double.isNaN(strength)) {
                            String id = dataSet.classAttribute().value((int) strength);
                            Actor target = g.getActor((String) parameter.get("TargetMode").get(), id);
                            Link link = LinkFactory.newInstance()
                                    .create((String) parameter.get("Relation").get());
                            if ((LinkEnd) parameter.get("LinkEnd").get() == LinkEnd.SOURCE) {
                                link.set(user.first(), strength, target);
                            } else {
                                link.set(target, strength, user.first());
                            }
                            g.add(link);
                        }
                    } catch (Exception ex) {
                        Logger.getLogger(ClassifyPerActor.class.getName()).log(Level.SEVERE, null, ex);
                    }
                }
            }
        }
    }
}
From source file:org.mcennis.graphrat.algorithm.machinelearning.MultiInstanceSVM.java
License:Open Source License
protected void evaluateClassifier(Classifier classifier, Instances dataSet, Graph g, Actor toBePredicted)
        throws Exception {
    if (user != null) {
        for (int i = 0; i < user.length; ++i) {
            int total = 0;
            int setTrue = 0;
            // evaluate all propositionalized instances and evaluate the results
            Link[] interests = g.getLinkBySource((String) parameter[4].getValue(), user[i]);
            if (i % 100 == 0) {
                Logger.getLogger(MultiInstanceSVM.class.getName()).log(Level.FINER, "Evaluating for user " + i);
            }
            if (interests != null) {
                for (int j = 0; j < interests.length; ++j) {
                    Link[] music = g.getLink((String) parameter[5].getValue(), user[i],
                            interests[j].getDestination());
                    if (music != null) {
                        Link[] given = g.getLinkBySource((String) parameter[3].getValue(),
                                interests[j].getDestination());
                        if (given != null) {
                            Instances evaluateData = dataSet.stringFreeStructure();
                            double[] values = new double[artists.length + 3];
                            java.util.Arrays.fill(values, 0.0);
                            values[0] = interests[j].getStrength();
                            values[1] = music[0].getStrength();
                            for (int k = 0; k < given.length; ++k) {
                                values[java.util.Arrays.binarySearch(artists, given[k].getDestination()) + 2] = 1.0;
                            }
                            Instance instance = new SparseInstance(artists.length + 3, values);
                            instance.setDataset(evaluateData);
                            double result = classifier.classifyInstance(instance);
                            if (result == 1.0) {
                                setTrue++;
                            }
                            total++;
                        }
                    }
                }
            }
            boolean evaluate = evaluateResult(setTrue, total);
            if (evaluate) {
                Properties props = new Properties();
                props.setProperty("LinkType", (String) parameter[9].getValue());
                Link derived = LinkFactory.newInstance().create(props);
                derived.set(user[i], 1.0, toBePredicted);
                g.add(derived);
            }
            // if ((g.getLink((String) parameter[3].getValue(), user[i], toBePredicted) != null) && (evaluate)) {
            //     correctlyClassified[i]++;
            // }
            // if (evaluate) {
            //     totalClassified[i]++;
            // }
        }
    }
}
From source file:org.mcennis.graphrat.algorithm.machinelearning.SVM.java
License:Open Source License
protected void evaluateClassifier(Classifier classifier, Instances dataSet, Graph g, Actor toBePredicted)
        throws Exception {
    if (user != null) {
        for (int i = 0; i < user.length; ++i) {
            // evaluate all propositionalized instances and evaluate the results
            if (i % 100 == 0) {
                // System.out.println("Evaluating for user " + i);
            }
            Link[] given = g.getLinkBySource((String) parameter[3].getValue(), user[i]);
            if (given != null) {
                Instances evaluateData = dataSet.stringFreeStructure();
                double[] values = new double[artists.length + 3];
                java.util.Arrays.fill(values, 0.0);
                for (int k = 0; k < given.length; ++k) {
                    if (given[k].getDestination().equals(toBePredicted)) {
                        values[java.util.Arrays.binarySearch(artists, given[k].getDestination())] = Double.NaN;
                    } else {
                        values[java.util.Arrays.binarySearch(artists, given[k].getDestination())] = 1.0;
                    }
                }
                Instance instance = new SparseInstance(artists.length + 3, values);
                instance.setDataset(evaluateData);
                double result = classifier.classifyInstance(instance);
                if (result == 1.0) {
                    Properties props = new Properties();
                    props.setProperty("LinkType", (String) parameter[9].getValue());
                    Link derived = LinkFactory.newInstance().create(props);
                    derived.set(user[i], 1.0, toBePredicted);
                    g.add(derived);
                }
            }
        }
    }
}
From source file:org.mcennis.graphrat.algorithm.machinelearning.WekaClassifierMultiAttribute.java
License:Open Source License
@Override
public void execute(Graph g) {
    Actor[] source = g.getActor((String) parameter[1].getValue());
    if (source != null) {
        // create the attributes for each artist
        FastVector sourceTypes = new FastVector();
        Actor[] dest = g.getActor((String) parameter[3].getValue());
        if (dest != null) {
            // create the Instances set backing this object
            Instances masterSet = null;
            Instance[] trainingData = new Instance[source.length];
            for (int i = 0; i < source.length; ++i) {
                // First, acquire the instance objects for each actor
                Property p = null;
                if ((Boolean) parameter[10].getValue()) {
                    p = source[i].getProperty((String) parameter[2].getValue() + g.getID());
                } else {
                    p = source[i].getProperty((String) parameter[2].getValue());
                }
                if (p != null) {
                    Object[] values = p.getValue();
                    if (values.length > 0) {
                        sourceTypes.addElement(source[i].getID());
                        trainingData[i] = (Instance) ((Instance) values[0]).copy();
                        // assume that this Instance has a backing dataset
                        // that contains all Instance objects to be tested
                        if (masterSet == null) {
                            masterSet = new Instances(trainingData[i].dataset(), source.length);
                        }
                        masterSet.add(trainingData[i]);
                        sourceTypes.addElement(source[i].getID());
                    } else {
                        trainingData[i] = null;
                        Logger.getLogger(WekaClassifierMultiAttribute.class.getName()).log(Level.WARNING,
                                "Actor " + source[i].getType() + ":" + source[i].getID()
                                        + " does not have an Instance value of property ID " + p.getType());
                    }
                } else {
                    trainingData[i] = null;
                    Logger.getLogger(WekaClassifierMultiAttribute.class.getName()).log(Level.WARNING,
                            "Actor " + source[i].getType() + ":" + source[i].getID()
                                    + " does not have a property of ID " + p.getType());
                }
            }
            Vector<Attribute> destVector = new Vector<Attribute>();
            for (int i = 0; i < dest.length; ++i) {
                FastVector type = new FastVector();
                type.addElement("false");
                type.addElement("true");
                Attribute tmp = new Attribute(dest[i].getID(), type);
                destVector.add(tmp);
                masterSet.insertAttributeAt(tmp, masterSet.numAttributes());
            }
            Attribute sourceID = new Attribute("sourceID", sourceTypes);
            masterSet.insertAttributeAt(sourceID, masterSet.numAttributes());
            // set ground truth for evaluation
            for (int i = 0; i < masterSet.numInstances(); ++i) {
                Instance inst = masterSet.instance(i);
                Actor user = g.getActor((String) parameter[i].getValue(),
                        sourceID.value((int) inst.value(sourceID)));
                if (user != null) {
                    for (int j = 0; j < dest.length; ++j) {
                        if (g.getLink((String) parameter[4].getValue(), user, dest[j]) != null) {
                            inst.setValue(sourceID, "true");
                        } else {
                            if ((Boolean) parameter[9].getValue()) {
                                inst.setValue(sourceID, "false");
                            } else {
                                inst.setValue(sourceID, Double.NaN);
                            }
                        }
                    }
                } else {
                    Logger.getLogger(WekaClassifierMultiAttribute.class.getName()).log(Level.SEVERE,
                            "Actor " + sourceID.value((int) inst.value(sourceID)) + " does not exist in graph");
                }
            }
            // perform cross fold evaluation of each classifier in turn
            String[] opts = ((String) parameter[9].getValue()).split("\\s+");
            Properties props = new Properties();
            if ((Boolean) parameter[11].getValue()) {
                props.setProperty("LinkType", (String) parameter[5].getValue() + g.getID());
            } else {
                props.setProperty("LinkType", (String) parameter[5].getValue());
            }
            props.setProperty("LinkClass", "Basic");
            try {
                for (int destCount = 0; destCount < dest.length; ++destCount) {
                    masterSet.setClass(destVector.get(destCount));
                    for (int i = 0; i < (Integer) parameter[8].getValue(); ++i) {
                        Instances test = masterSet.testCV((Integer) parameter[8].getValue(), i);
                        Instances train = masterSet.testCV((Integer) parameter[8].getValue(), i);
                        Classifier classifier = (Classifier) ((Class) parameter[7].getValue()).newInstance();
                        classifier.setOptions(opts);
                        classifier.buildClassifier(train);
                        for (int j = 0; j < test.numInstances(); ++j) {
                            String sourceName = sourceID.value((int) test.instance(j).value(sourceID));
                            double result = classifier.classifyInstance(test.instance(j));
                            String predicted = masterSet.classAttribute().value((int) result);
                            Link derived = LinkFactory.newInstance().create(props);
                            derived.set(g.getActor((String) parameter[2].getValue(), sourceName), 1.0,
                                    g.getActor((String) parameter[3].getValue(), predicted));
                            g.add(derived);
                        }
                    }
                }
            } catch (InstantiationException ex) {
                Logger.getLogger(WekaClassifierMultiAttribute.class.getName()).log(Level.SEVERE, null, ex);
            } catch (IllegalAccessException ex) {
                Logger.getLogger(WekaClassifierMultiAttribute.class.getName()).log(Level.SEVERE, null, ex);
            } catch (Exception ex) {
                Logger.getLogger(WekaClassifierMultiAttribute.class.getName()).log(Level.SEVERE, null, ex);
            }
        } else { // dest == null
            Logger.getLogger(WekaClassifierMultiAttribute.class.getName()).log(Level.WARNING,
                    "Ground truth mode '" + (String) parameter[3].getValue() + "' has no actors");
        }
    } else { // source == null
        Logger.getLogger(WekaClassifierMultiAttribute.class.getName()).log(Level.WARNING,
                "Source mode '" + (String) parameter[2].getValue() + "' has no actors");
    }
}
From source file:org.mcennis.graphrat.algorithm.machinelearning.WekaClassifierOneAttribute.java
License:Open Source License
@Override
public void execute(Graph g) {
    Actor[] source = g.getActor((String) parameter[1].getValue());
    if (source != null) {
        // create the Instance sets for each actor
        FastVector classTypes = new FastVector();
        FastVector sourceTypes = new FastVector();
        Actor[] dest = g.getActor((String) parameter[3].getValue());
        if (dest != null) {
            for (int i = 0; i < dest.length; ++i) {
                classTypes.addElement(dest[i].getID());
            }
            Attribute classAttribute = new Attribute((String) parameter[5].getValue(), classTypes);
            Instance[] trainingData = new Instance[source.length];
            Instances masterSet = null;
            for (int i = 0; i < source.length; ++i) {
                // First, acquire the instance objects for each actor
                Property p = null;
                if ((Boolean) parameter[9].getValue()) {
                    p = source[i].getProperty((String) parameter[2].getValue() + g.getID());
                } else {
                    p = source[i].getProperty((String) parameter[2].getValue());
                }
                if (p != null) {
                    Object[] values = p.getValue();
                    if (values.length > 0) {
                        sourceTypes.addElement(source[i].getID());
                        trainingData[i] = (Instance) ((Instance) values[0]).copy();
                        // assume that this Instance has a backing dataset
                        // that contains all Instance objects to be tested
                        if (masterSet == null) {
                            masterSet = new Instances(trainingData[i].dataset(), source.length);
                        }
                        masterSet.add(trainingData[i]);
                    } else {
                        trainingData[i] = null;
                        Logger.getLogger(WekaClassifierOneAttribute.class.getName()).log(Level.WARNING,
                                "Actor " + source[i].getType() + ":" + source[i].getID()
                                        + " does not have an Instance value of property ID " + p.getType());
                    }
                } else {
                    trainingData[i] = null;
                    Logger.getLogger(WekaClassifierOneAttribute.class.getName()).log(Level.WARNING,
                            "Actor " + source[i].getType() + ":" + source[i].getID()
                                    + " does not have a property of ID " + p.getType());
                }
            }
            // for every actor, fix the instance
            Attribute sourceID = new Attribute("sourceID", sourceTypes);
            masterSet.insertAttributeAt(sourceID, masterSet.numAttributes());
            masterSet.insertAttributeAt(classAttribute, masterSet.numAttributes());
            masterSet.setClass(classAttribute);
            for (int i = 0; i < source.length; ++i) {
                if (trainingData[i] != null) {
                    trainingData[i].setValue(sourceID, source[i].getID());
                    Link[] link = g.getLinkBySource((String) parameter[4].getValue(), source[i]);
                    if (link == null) {
                        trainingData[i].setClassValue(Double.NaN);
                    } else {
                        trainingData[i].setClassValue(link[0].getDestination().getID());
                    }
                }
            }
            String[] opts = ((String) parameter[7].getValue()).split("\\s+");
            Properties props = new Properties();
            if ((Boolean) parameter[10].getValue()) {
                props.setProperty("LinkType", (String) parameter[5].getValue() + g.getID());
            } else {
                props.setProperty("LinkType", (String) parameter[5].getValue());
            }
            props.setProperty("LinkClass", "Basic");
            try {
                for (int i = 0; i < (Integer) parameter[8].getValue(); ++i) {
                    Instances test = masterSet.testCV((Integer) parameter[8].getValue(), i);
                    Instances train = masterSet.testCV((Integer) parameter[8].getValue(), i);
                    Classifier classifier = (Classifier) ((Class) parameter[6].getValue()).newInstance();
                    classifier.setOptions(opts);
                    classifier.buildClassifier(train);
                    for (int j = 0; j < test.numInstances(); ++j) {
                        String sourceName = sourceID.value((int) test.instance(j).value(sourceID));
                        double result = classifier.classifyInstance(test.instance(j));
                        String predicted = masterSet.classAttribute().value((int) result);
                        Link derived = LinkFactory.newInstance().create(props);
                        derived.set(g.getActor((String) parameter[2].getValue(), sourceName), 1.0,
                                g.getActor((String) parameter[3].getValue(), predicted));
                        g.add(derived);
                    }
                }
            } catch (InstantiationException ex) {
                Logger.getLogger(WekaClassifierOneAttribute.class.getName()).log(Level.SEVERE, null, ex);
            } catch (IllegalAccessException ex) {
                Logger.getLogger(WekaClassifierOneAttribute.class.getName()).log(Level.SEVERE, null, ex);
            } catch (Exception ex) {
                Logger.getLogger(WekaClassifierOneAttribute.class.getName()).log(Level.SEVERE, null, ex);
            }
        } else { // dest == null
            Logger.getLogger(WekaClassifierOneAttribute.class.getName()).log(Level.WARNING,
                    "Ground truth mode '" + (String) parameter[3].getValue() + "' has no actors");
        }
    } else { // source == null
        Logger.getLogger(WekaClassifierOneAttribute.class.getName()).log(Level.WARNING,
                "Source mode '" + (String) parameter[2].getValue() + "' has no actors");
    }
}
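The graphrat examples above target the pre-3.7 Weka API, where weka.core.Instance is a concrete class and nominal attributes are declared through FastVector. For reference, a rough sketch of the same construct-and-classify step against the Weka 3.7+ API, where DenseInstance and ArrayList take their place; the attribute names, class values, and the already-trained classifier passed in are assumptions for illustration only.

import java.util.ArrayList;
import weka.classifiers.Classifier;
import weka.core.Attribute;
import weka.core.DenseInstance;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.Utils;

public class ModernApiSketch {
    // Classify one hand-built instance with a classifier trained elsewhere.
    public static double classify(Classifier classifier) throws Exception {
        // The header must match the structure the classifier was trained on (assumed here).
        ArrayList<Attribute> attributes = new ArrayList<>();
        attributes.add(new Attribute("feature1"));
        attributes.add(new Attribute("feature2"));
        ArrayList<String> classValues = new ArrayList<>();
        classValues.add("false");
        classValues.add("true");
        attributes.add(new Attribute("class", classValues));
        Instances header = new Instances("dataset", attributes, 0);
        header.setClassIndex(header.numAttributes() - 1);

        // DenseInstance replaces the pre-3.7 "new Instance(weight, values)" constructor;
        // the class slot is left missing, as it is unknown at prediction time.
        double[] values = { 1.0, 2.5, Utils.missingValue() };
        Instance instance = new DenseInstance(1.0, values);
        instance.setDataset(header);

        return classifier.classifyInstance(instance);
    }
}

When per-class probabilities are needed instead of a single predicted value, distributionForInstance(Instance) is the companion call on Classifier.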