Example usage for weka.core Instances add

List of usage examples for weka.core Instances add

Introduction

This page lists example usages of weka.core.Instances.add.

Prototype

@Override
public boolean add(Instance instance) 

Document

Adds one instance to the end of the set.
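Before the project-specific examples below, here is a minimal, self-contained sketch of the call (assuming Weka 3.7 or later, where DenseInstance is available; the dataset name, attribute names, and values are purely illustrative):

import java.util.ArrayList;

import weka.core.Attribute;
import weka.core.DenseInstance;
import weka.core.Instance;
import weka.core.Instances;

public class InstancesAddExample {
    public static void main(String[] args) {
        // declare two numeric attributes (names are illustrative)
        ArrayList<Attribute> atts = new ArrayList<Attribute>();
        atts.add(new Attribute("x"));
        atts.add(new Attribute("y"));

        // empty dataset with an initial capacity of 10
        Instances data = new Instances("example", atts, 10);

        // add(Instance) copies the instance and appends it to the end of the set
        Instance inst = new DenseInstance(1.0, new double[] { 1.5, 2.5 });
        data.add(inst);

        System.out.println(data.numInstances()); // 1
    }
}

Most of the examples below also call setDataset on an instance before setting a class value, since an instance needs a dataset reference to resolve its class index.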

Usage

From source file:org.knime.knip.suise.node.boundarymodel.contourdata.WekaContourDataClassifier.java

License:Open Source License

/**
 * {@inheritDoc}
 */
@Override
public void buildClassifier(ContourDataGrid cDataGrid, VectorDataList bgData) throws Exception {
    m_cDataSelection.extractContourData(cDataGrid);
    m_tmpVec = new double[cDataGrid.numFeatures()];
    m_tmpInstance = new DenseInstance(1.0, m_tmpVec);

    if (m_clusterer != null) {
        //cluster contour data to create different models
        ArrayList<Attribute> attInfo = new ArrayList<Attribute>(m_cDataSelection.numFeatures());
        for (int i = 0; i < m_cDataSelection.numFeatures(); i++) {
            attInfo.add(new Attribute("" + i));
        }
        Instances instances = new Instances("contour_data", attInfo, m_cDataSelection.numVectors());
        for (int i = 0; i < m_cDataSelection.numVectors(); i++) {
            instances.add(new DenseInstance(m_cDataSelection.weight(i), m_cDataSelection.getVector(i)));
        }
        m_clusterer.buildClusterer(instances);

        for (int i = 0; i < m_cDataSelection.numVectors(); i++) {
            m_cDataSelection.setClusterIdx(i, m_clusterer.clusterInstance(
                    new DenseInstance(m_cDataSelection.weight(i), m_cDataSelection.getVector(i))));

        }
    }

    Instances data = initDataset(m_cDataSelection.numFeatures(),
            m_cDataSelection.numClusters() + bgData.numClusters(),
            m_cDataSelection.numVectors() + bgData.numVectors());

    m_numContourClusters = m_cDataSelection.numClusters();

    // positive training samples
    for (int n = 0; n < m_cDataSelection.numVectors(); n++) {
        if (m_cDataSelection.weight(n) > 0) {
            addInstance(m_cDataSelection.getVector(n), m_cDataSelection.weight(n),
                    m_cDataSelection.getClusterIdx(n), data);
        } else {
            // if the weight is 0, add the corresponding instance to the negative training samples
            addInstance(m_cDataSelection.getVector(n), 1,
                    m_cDataSelection.numClusters() + bgData.getClusterIdx(n), data);
        }
    }

    // negative training samples from background
    for (int n = 0; n < bgData.numVectors(); n++) {
        if (bgData.weight(n) > 0) {
            addInstance(bgData.getVector(n), 1, m_cDataSelection.numClusters() + bgData.getClusterIdx(n), data);
        }
    }

    // negative training samples from the data grid
    for (double[] vec : m_cDataSelection.nonContourVectors()) {
        addInstance(vec, 1, m_cDataSelection.numClusters(), data);
    }

    m_classifier.buildClassifier(data);
    m_tmpInstance.setDataset(data);
    m_data = data;

}

From source file:org.knime.knip.suise.node.boundarymodel.contourdata.WekaContourDataClassifier.java

License:Open Source License

private void addInstance(double[] vec, double weight, double classValue, Instances dataset) {
    DenseInstance inst = new DenseInstance(weight, vec);
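    // append a missing-value slot so the instance matches the dataset's attribute count; the class value is set below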
    inst.insertAttributeAt(inst.numAttributes());
    inst.setDataset(dataset);
    inst.setClassValue(classValue);
    dataset.add(inst);
}

From source file:org.knime.knip.suise.node.boundarymodel.contourdata.WekaMIContourDataClassifier.java

License:Open Source License

/**
 * {@inheritDoc}
 */
@Override
public void buildClassifier(ContourDataGrid cData, VectorDataList bgData) throws Exception {

    // transform input data to weka mi-instances
    m_data = initDataset(cData.numFeatures(), 2, cData.totalLength() + bgData.numVectors(), cData.width());

    for (int r = 0; r < cData.totalLength(); r++) {
        Instances bagData = new Instances(m_data.attribute(1).relation(), cData.width());
        for (int c = 0; c < cData.width(); c++) {
            int vecIdx = cData.getVectorIdx(c, r);
            Instance inst = new DenseInstance(cData.weight(vecIdx), cData.getVector(vecIdx));
            inst.setDataset(bagData);
            bagData.add(inst);
        }
        int value = m_data.attribute(1).addRelation(bagData);
        Instance newBag = new DenseInstance(3);
        newBag.setValue(0, r); // bag id
        newBag.setValue(2, 1); // class attribute
        newBag.setValue(1, value);
        newBag.setWeight(1);
        newBag.setDataset(m_data);
        m_data.add(newBag);
    }

    for (int i = 0; i < bgData.numVectors(); i++) {
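        // each background vector becomes its own single-instance bag with class value 0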
        Instances bagData = new Instances(m_data.attribute(1).relation(), cData.width());
        Instance inst = new DenseInstance(bgData.weight(i), bgData.getVector(i));
        inst.setDataset(bagData);
        bagData.add(inst);
        int value = m_data.attribute(1).addRelation(bagData);
        Instance newBag = new DenseInstance(3);
        newBag.setValue(0, cData.totalLength() + i);
        newBag.setValue(2, 0);
        newBag.setValue(1, value);
        newBag.setWeight(1);
        newBag.setDataset(m_data);
        m_data.add(newBag);
    }

    m_classifier.buildClassifier(m_data);
}

From source file:org.knime.knip.suise.node.boundarymodel.contourdata.WekaMIContourDataClassifier.java

License:Open Source License

/**
 * {@inheritDoc}
 */
@Override
public double contourProbability(double[] inst) throws Exception {
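    // wrap the query vector in a single-instance bag and return the classifier's probability for class 1 (contour)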
    Instances bagData = new Instances(m_data.attribute(1).relation(), 1);
    Instance i = new DenseInstance(1, inst);
    i.setDataset(bagData);

    bagData.add(i);

    Instance bag = new DenseInstance(3);
    bag.setDataset(m_data);
    int val = bag.attribute(1).addRelation(bagData);
    bag.setValue(1, val);

    return m_classifier.distributionForInstance(bag)[1];

}

From source file:org.knime.knip.suise.ops.BuildTrainingData.java

License:Open Source License

/**
 * {@inheritDoc}
 */
@Override
public Instances compute(RandomAccessibleInterval<LabelingType<L>> lab, Img<T> img, Instances r) {
    Random rand = new Random();

    double[] extent = new double[lab.numDimensions()];
    for (int d = 0; d < m_dimIndices.length; d++) {
        extent[m_dimIndices[d]] = lab.max(m_dimIndices[d]);
    }
    RectangleRegionOfInterest roi = new RectangleRegionOfInterest(new double[lab.numDimensions()], extent);

    Cursor<LabelingType<L>> labCur = roi.getIterableIntervalOverROI(lab).localizingCursor();
    OutOfBounds<T> imgRA = new OutOfBoundsBorder<T>(img);

    LabelRegions<L> regions = new LabelRegions<L>(lab);
    // get the class distributions
    Map<L, Double> classDistr = null;
    if (m_balanceInstancePerClass) {
        long sum = 0;
        long area;
        Collection<L> labels = regions.getExistingLabels();
        classDistr = new HashMap<L, Double>(labels.size());
        for (L label : labels) {
            area = regions.getLabelRegion(label).size();
            sum += area;
            classDistr.put(label, new Double(area));
        }
        // determine the new sampling rate for each class individually
        double instancesPerClass = (double) sum / (double) labels.size();
        for (L label : labels) {
            Double sampleRate = instancesPerClass / classDistr.get(label) * m_samplingRate;
            classDistr.put(label, sampleRate);
        }
    }

    long[] tmpPos = new long[imgRA.numDimensions()];
    while (labCur.hasNext()) {
        labCur.fwd();
        for (int d = 0; d < m_dimIndices.length; d++) {
            imgRA.setPosition(labCur.getLongPosition(m_dimIndices[d]), m_dimIndices[d]);
            if (imgRA.isOutOfBounds()) {
                imgRA.localize(tmpPos);
                NodeLogger.getLogger(getClass()).warn("Labeling reaches beyond the feature image. Position "
                        + Arrays.toString(tmpPos) + " skipped.");
                continue;
            }

        }
        if (!labCur.get().isEmpty()) {

            if (m_balanceInstancePerClass) {
                if (rand.nextDouble() >= classDistr.get(labCur.get().iterator().next())) {
                    continue;
                }
            } else {
                if (rand.nextDouble() >= m_samplingRate) {
                    continue;
                }
            }

            double[] featVec = new double[(int) img.dimension(m_featDim)];
            for (int f = 0; f < img.dimension(m_featDim); f++) {
                imgRA.setPosition(f, m_featDim);
                featVec[f] = imgRA.get().getRealDouble();
            }
            for (L classLabel : labCur.get()) {
                Instance instance = new DenseInstance(1.0, featVec);
                instance.insertAttributeAt(instance.numAttributes());
                instance.setDataset(r);
                instance.setClassValue(classLabel.toString());

                r.add(instance);

            }
        }
    }
    return r;
}

From source file:org.knowrob.knowrob_sim_games.MongoSimGames.java

License:Open Source License

/**
 * Return the PCA of the 3d points
 */
public PrincipalComponents GetPCA(List<Point3d> points, boolean center_data) {

    // pca
    PrincipalComponents pca = new PrincipalComponents();

    // Create the x y z attributes
    FastVector atts = new FastVector();

    Attribute x = new Attribute("x");
    Attribute y = new Attribute("y");
    Attribute z = new Attribute("z");

    atts.addElement(x);
    atts.addElement(y);
    atts.addElement(z);

    // Create instances
    Instances points_dataset = new Instances("PointsPCA", atts, points.size());

    // iterate through all the points
    for (int i = 0; i < points.size(); i++) {

        // new instance of 3 values
        Instance inst = new SparseInstance(3);

        // get pos point
        Point3d pos = points.get(i);

        // Set instance's values for the attributes x, y, z
        inst.setValue(x, pos.x);
        inst.setValue(y, pos.y);
        inst.setValue(z, pos.z);

        // add instance to dataset
        points_dataset.add(inst);
    }

    // center data
    pca.setCenterData(center_data);

    try {
        // build evaluator
        pca.buildEvaluator(points_dataset);

    } catch (java.lang.Exception e) {
        e.printStackTrace();
    }
    //      System.out.println(points_dataset.toSummaryString());
    //      System.out.println(pca.toString());      
    return pca;
}

From source file:org.mcennis.graphrat.algorithm.clustering.WekaClassifierClusterer.java

License:Open Source License

@Override
public void execute(Graph g) {

    ActorByMode mode = (ActorByMode) ActorQueryFactory.newInstance().create("ActorByMode");
    mode.buildQuery((String) parameter.get("GroundMode").get(), ".*", false);

    try {

        Clusterer clusterer = (Clusterer) ((Class) parameter.get("Clusterer").get()).newInstance();
        String[] options = ((String) parameter.get("Options").get()).split("\\s+");

        ((OptionHandler) clusterer).setOptions(options);

        Iterator<Actor> actor = AlgorithmMacros.filterActor(parameter, g, mode, null, null);
        Instances dataSet = null;
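        // the dataset header is built lazily from the first instance encountered below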
        while (actor.hasNext()) {
            Actor a = actor.next();
            Property property = a.getProperty(
                    AlgorithmMacros.getSourceID(parameter, g, (String) parameter.get("SourceProperty").get()));

            if (!property.getValue().isEmpty()) {

                Instance value = (Instance) property.getValue().get(0);

                if ((dataSet == null) && (value.dataset() != null)) {
                    FastVector attributes = new FastVector();
                    for (int i = 0; i < value.dataset().numAttributes(); ++i) {
                        attributes.addElement(value.dataset().attribute(i));
                    }
                    dataSet = new Instances("Clustering", attributes, 1000);
                } else if ((dataSet == null)) {
                    FastVector attributes = new FastVector();
                    for (int i = 0; i < value.numAttributes(); ++i) {
                        Attribute element = new Attribute(Integer.toString(i));
                        attributes.addElement(element);
                    }
                    dataSet = new Instances("Clustering", attributes, 1000);
                }
                dataSet.add(value);
            }

        }
        clusterer.buildClusterer(dataSet);
        actor = AlgorithmMacros.filterActor(parameter, g, mode, null, null);
        HashMap<Integer, Graph> clusters = new HashMap<Integer, Graph>();
        while (actor.hasNext()) {
            Actor a = actor.next();
            Property property = a.getProperty(
                    AlgorithmMacros.getSourceID(parameter, g, (String) parameter.get("SourceProperty").get()));
            if (!property.getValue().isEmpty()) {

                Instance instance = (Instance) property.getValue().get(0);
                int cluster = -1;

                try {

                    cluster = clusterer.clusterInstance(instance);
                    if (!clusters.containsKey(cluster)) {
                        Graph graph = GraphFactory.newInstance().create(AlgorithmMacros.getDestID(parameter, g,
                                (String) parameter.get("GraphID").get() + cluster), parameter);
                        clusters.put(cluster, graph);
                    }
                    clusters.get(cluster).add(a);
                } catch (Exception ex) {

                    Logger.getLogger(WekaClassifierClusterer.class.getName()).log(Level.SEVERE,
                            "ClusterInstance on clusterer failed", ex);

                }

                Property clusterProperty = PropertyFactory.newInstance().create("BasicProperty", AlgorithmMacros
                        .getDestID(parameter, g, (String) parameter.get("DestinationProperty").get()),
                        Integer.class);

                clusterProperty.add(new Integer(cluster));

                a.add(clusterProperty);

            }
        }

        Iterator<Graph> graphIt = clusters.values().iterator();
        while (graphIt.hasNext()) {
            LinkQuery query = (LinkQuery) parameter.get("LinkQuery").get();
            Graph graph = graphIt.next();
            Iterator<Link> link = query.executeIterator(g, graph.getActor(), graph.getActor(), null);
            while (link.hasNext()) {
                graph.add(link.next());
            }
            if ((Boolean) parameter.get("AddContext").get()) {
                TreeSet<Actor> actorSet = new TreeSet<Actor>();
                actorSet.addAll(graph.getActor());
                link = query.executeIterator(g, actorSet, null, null);
                while (link.hasNext()) {
                    Link l = link.next();
                    Actor d = l.getDestination();
                    if (graph.getActor(d.getMode(), d.getID()) == null) {
                        graph.add(d);
                    }
                    if (graph.getLink(l.getRelation(), l.getSource(), l.getDestination()) == null) {
                        graph.add(l);
                    }
                }

                link = query.executeIterator(g, null, actorSet, null);
                while (link.hasNext()) {
                    Link l = link.next();
                    Actor d = l.getSource();
                    if (graph.getActor(d.getMode(), d.getID()) == null) {
                        graph.add(d);
                    }
                    if (graph.getLink(l.getRelation(), l.getSource(), l.getDestination()) == null) {
                        graph.add(l);
                    }
                }
            }
        }

    } catch (InstantiationException ex) {

        Logger.getLogger(WekaClassifierClusterer.class.getName()).log(Level.SEVERE, null, ex);

    } catch (IllegalAccessException ex) {

        Logger.getLogger(WekaClassifierClusterer.class.getName()).log(Level.SEVERE, null, ex);

    } catch (Exception ex) {

        Logger.getLogger(WekaClassifierClusterer.class.getName()).log(Level.SEVERE, null, ex);

    }

}

From source file:org.mcennis.graphrat.algorithm.clustering.WekaProbablisticClusterer.java

License:Open Source License

@Override
public void execute(Graph g) {
    ActorByMode mode = (ActorByMode) ActorQueryFactory.newInstance().create("ActorByMode");
    mode.buildQuery((String) parameter.get("GroundMode").get(), ".*", false);

    try {

        Clusterer clusterer = (Clusterer) ((Class) parameter.get("Clusterer").get()).newInstance();
        String[] options = ((String) parameter.get("Options").get()).split("\\s+");

        ((OptionHandler) clusterer).setOptions(options);

        Iterator<Actor> actor = AlgorithmMacros.filterActor(parameter, g, mode, null, null);
        Instances dataSet = null;
        while (actor.hasNext()) {
            Actor a = actor.next();
            Property property = a.getProperty(
                    AlgorithmMacros.getSourceID(parameter, g, (String) parameter.get("SourceProperty").get()));

            if (!property.getValue().isEmpty()) {

                Instance value = (Instance) property.getValue().get(0);

                if ((dataSet == null) && (value.dataset() != null)) {
                    FastVector attributes = new FastVector();
                    for (int i = 0; i < value.dataset().numAttributes(); ++i) {
                        attributes.addElement(value.dataset().attribute(i));
                    }
                    dataSet = new Instances("Clustering", attributes, 1000);
                } else if ((dataSet == null)) {
                    FastVector attributes = new FastVector();
                    for (int i = 0; i < value.numAttributes(); ++i) {
                        Attribute element = new Attribute(Integer.toString(i));
                        attributes.addElement(element);
                    }
                    dataSet = new Instances("Clustering", attributes, 1000);
                }
                dataSet.add(value);
            }

        }
        clusterer.buildClusterer(dataSet);
        actor = AlgorithmMacros.filterActor(parameter, g, mode, null, null);
        HashMap<Integer, Graph> clusters = new HashMap<Integer, Graph>();
        while (actor.hasNext()) {
            Actor a = actor.next();
            Property property = a.getProperty(
                    AlgorithmMacros.getSourceID(parameter, g, (String) parameter.get("SourceProperty").get()));
            if (!property.getValue().isEmpty()) {

                Instance instance = (Instance) property.getValue().get(0);
                double[] cluster = new double[] {};

                try {

                    cluster = clusterer.distributionForInstance(instance);
                } catch (Exception ex) {

                    Logger.getLogger(WekaClassifierClusterer.class.getName()).log(Level.SEVERE,
                            "ClusterInstance on clusterer failed", ex);

                }

                Property clusterProperty = PropertyFactory.newInstance().create("BasicProperty",
                        AlgorithmMacros.getDestID(parameter, g,
                                (String) parameter.get("DestinationProperty").get()),
                        (new double[] {}).getClass());

                clusterProperty.add(cluster);

                a.add(clusterProperty);

            }
        }

    } catch (InstantiationException ex) {

        Logger.getLogger(WekaClassifierClusterer.class.getName()).log(Level.SEVERE, null, ex);

    } catch (IllegalAccessException ex) {

        Logger.getLogger(WekaClassifierClusterer.class.getName()).log(Level.SEVERE, null, ex);

    } catch (Exception ex) {

        Logger.getLogger(WekaClassifierClusterer.class.getName()).log(Level.SEVERE, null, ex);

    }

}

From source file:org.mcennis.graphrat.algorithm.machinelearning.BuildClassifierPerActor.java

License:Open Source License

public void execute(Graph g) {
    // construct the queries to be used

    ActorByMode groundMode = (ActorByMode) ActorQueryFactory.newInstance().create("ActorByMode");
    groundMode.buildQuery((String) parameter.get("GroundMode").get(), ".*", false);

    ActorByMode targetMode = (ActorByMode) ActorQueryFactory.newInstance().create("ActorByMode");
    targetMode.buildQuery((String) parameter.get("TargetMode").get(), ".*", false);

    LinkByRelation groundTruth = (LinkByRelation) LinkQueryFactory.newInstance().create("LinkByRelation");
    groundTruth.buildQuery((String) parameter.get("Relation").get(), false);

    // build a list of new artists
    TreeSet<Actor> artists = new TreeSet<Actor>();
    artists.addAll(AlgorithmMacros.filterActor(parameter, g, targetMode.execute(g, artists, null)));

    // collect the instance variables from the properties to be the 

    for (Actor i : artists) {
        TreeSet<Actor> artist = new TreeSet<Actor>();
        artist.add(i);
        Classifier classifier = createClassifier();
        Iterator<Actor> users = AlgorithmMacros.filterActor(parameter, g, groundMode, null, null);
        Instances dataSet = null;
        boolean firstRun = true;
        while (users.hasNext()) {
            TreeSet<Actor> user = new TreeSet<Actor>();
            user.add(users.next());
            Property property = user.first().getProperty(
                    AlgorithmMacros.getSourceID(parameter, g, (String) parameter.get("SourceProperty").get()));
            if (property.getPropertyClass().getName().contentEquals(Instance.class.getName())) {
                List values = property.getValue();
                if (!values.isEmpty()) {
                    // get the existing instance
                    Instance object = (Instance) values.get(0);
                    if (firstRun == true) {
                        firstRun = false;
                        Instances current = object.dataset();
                        FastVector attributes = new FastVector();
                        for (int j = 0; j < current.numAttributes(); ++j) {
                            attributes.addElement(current.attribute(j));
                        }
                        Attribute classValue = new Attribute(i.getID());
                        attributes.addElement(classValue);
                        dataSet = new Instances(i.getID(), attributes, 1000);
                        dataSet.setClassIndex(dataSet.numAttributes() - 1);
                    }

                    // for every artist, create a temporary artist classifier
                    double[] content = new double[object.numAttributes() + 1];
                    for (int j = 0; j < object.numAttributes(); ++j) {
                        content[j] = object.value(j);
                    }

                    Iterator<Link> link = null;
                    if ((LinkEnd) parameter.get("LinkEnd").get() == LinkEnd.SOURCE) {
                        link = AlgorithmMacros.filterLink(parameter, g, groundTruth, user, artist, null);
                    } else {
                        link = AlgorithmMacros.filterLink(parameter, g, groundTruth, artist, user, null);
                    }
                    if (link.hasNext()) {
                        content[content.length - 1] = link.next().getStrength();
                    } else if ((Boolean) parameter.get("AbsenceIsMissing").get()) {
                        content[content.length - 1] = Double.NaN;
                    } else {
                        content[content.length - 1] = 0.0;
                    }
                    Instance base = new Instance(1.0, content);
                    base.setDataset(dataSet);
                    dataSet.add(base);
                }
            }
        }
        try {
            classifier.buildClassifier(dataSet);
            Property classifierProperty = PropertyFactory.newInstance().create(
                    AlgorithmMacros.getDestID(parameter, g, (String) parameter.get("ClassifierProperty").get()),
                    (String) parameter.get("ClassifierProperty").getType(), weka.classifiers.Classifier.class);
            classifierProperty.add(classifier);
            i.add(classifierProperty);

            Property instancesProperty = PropertyFactory.newInstance().create(
                    AlgorithmMacros.getDestID(parameter, g, (String) parameter.get("InstancesProperty").get()),
                    (String) parameter.get("InstancesProperty").getType(), weka.core.Instances.class);
            instancesProperty.add(dataSet);
            i.add(instancesProperty);
        } catch (Exception ex) {
            Logger.getLogger(BuildClassifierPerActor.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
}

From source file:org.mcennis.graphrat.algorithm.machinelearning.BuildClassifierSingleAttribute.java

License:Open Source License

public void execute(Graph g) {
    // construct the queries to be used

    ActorByMode groundMode = (ActorByMode) ActorQueryFactory.newInstance().create("ActorByMode");
    groundMode.buildQuery((String) parameter.get("GroundMode").get(), ".*", false);

    ActorByMode targetMode = (ActorByMode) ActorQueryFactory.newInstance().create("ActorByMode");
    targetMode.buildQuery((String) parameter.get("TargetMode").get(), ".*", false);

    LinkByRelation groundTruth = (LinkByRelation) LinkQueryFactory.newInstance().create("LinkByRelation");
    groundTruth.buildQuery((String) parameter.get("Relation").get(), false);

    // build a list of new artists
    TreeSet<Actor> artists = new TreeSet<Actor>();
    artists.addAll(AlgorithmMacros.filterActor(parameter, g, targetMode.execute(g, artists, null)));

    // collect the instance variables from the properties to be the 

    Classifier classifier = createClassifier();
    Iterator<Actor> users = AlgorithmMacros.filterActor(parameter, g, groundMode, null, null);
    Instances dataSet = null;
    boolean firstEntry = true;
    while (users.hasNext()) {
        TreeSet<Actor> user = new TreeSet<Actor>();
        user.add(users.next());
        Property property = user.first().getProperty(
                AlgorithmMacros.getSourceID(parameter, g, (String) parameter.get("SourceProperty").get()));
        if (property.getPropertyClass().getName().contentEquals(Instance.class.getName())) {
            List values = property.getValue();
            if (!values.isEmpty()) {
                // get the existing instance
                Instance object = (Instance) values.get(0);
                if (firstEntry) {
                    firstEntry = false;
                    Instances current = object.dataset();
                    FastVector attributes = new FastVector();
                    for (int j = 0; j < current.numAttributes(); ++j) {
                        attributes.addElement(current.attribute(j));
                    }
                    FastVector targetNames = new FastVector();
                    Iterator<Actor> artistIt = targetMode.executeIterator(g, null, null);
                    while (artistIt.hasNext()) {
                        targetNames.addElement(artistIt.next().getID());
                    }
                    Attribute classValue = new Attribute("TargetID", targetNames);
                    attributes.addElement(classValue);
                    dataSet = new Instances("Training", attributes, 1000);
                    dataSet.setClassIndex(dataSet.numAttributes() - 1);
                }

                // for every artist, create a temporary artist classifier
                double[] content = new double[object.numAttributes() + 1];
                for (int j = 0; j < object.numAttributes(); ++j) {
                    content[j] = object.value(j);
                }

                Iterator<Link> link = null;
                if ((LinkEnd) parameter.get("LinkEnd").get() == LinkEnd.SOURCE) {
                    link = AlgorithmMacros.filterLink(parameter, g, groundTruth, user, null, null);
                } else {
                    link = AlgorithmMacros.filterLink(parameter, g, groundTruth, null, user, null);
                }
                if (link.hasNext()) {
                    double strength = Double.NEGATIVE_INFINITY;
                    Actor target = null;
                    while (link.hasNext()) {
                        Link l = link.next();
                        if (l.getStrength() > strength) {
                            strength = l.getStrength();
                            if ((LinkEnd) parameter.get("LinkEnd").get() == LinkEnd.SOURCE) {
                                target = l.getDestination();
                            } else {
                                target = l.getSource();
                            }
                        }
                    }
                    content[content.length - 1] = dataSet.attribute(dataSet.numAttributes() - 1)
                            .indexOfValue(target.getID());
                } else {
                    content[content.length - 1] = Double.NaN;
                }
                Instance base = new Instance(1.0, content);
                base.setDataset(dataSet);
                dataSet.add(base);
            }
        }
    }
    try {
        classifier.buildClassifier(dataSet);
        Property classifierProperty = PropertyFactory.newInstance().create("BasicProperty",
                AlgorithmMacros.getDestID(parameter, g, (String) parameter.get("ClassifierProperty").get()),
                weka.classifiers.Classifier.class);
        classifierProperty.add(classifier);
        g.add(classifierProperty);

        Property instancesProperty = PropertyFactory.newInstance().create("BasicProperty",
                AlgorithmMacros.getDestID(parameter, g, (String) parameter.get("InstancesProperty").get()),
                weka.core.Instances.class);
        instancesProperty.add(dataSet);
        g.add(instancesProperty);
    } catch (Exception ex) {
        Logger.getLogger(BuildClassifierSingleAttribute.class.getName()).log(Level.SEVERE, null, ex);
    }
}