Example usage for weka.core Instance setClassValue

List of usage examples for weka.core Instance setClassValue

Introduction

On this page you can find example usage for weka.core Instance setClassValue.

Prototype

public void setClassValue(String value);

Document

Sets the class value of an instance to the given value.
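
Before the real-world examples below, the following minimal sketch illustrates how setClassValue(String) is typically called. It is not taken from any of the listed source files; the attribute names and class labels are made up, and it assumes the Weka 3.7+ API (DenseInstance and the ArrayList-based Instances constructor).

import java.util.ArrayList;

import weka.core.Attribute;
import weka.core.DenseInstance;
import weka.core.Instance;
import weka.core.Instances;

public class SetClassValueSketch {

    public static void main(String[] args) {
        // One numeric feature plus a nominal class attribute ("yes"/"no").
        ArrayList<Attribute> attributeInfo = new ArrayList<Attribute>();
        attributeInfo.add(new Attribute("feature"));
        ArrayList<String> classLabels = new ArrayList<String>();
        classLabels.add("yes");
        classLabels.add("no");
        attributeInfo.add(new Attribute("class", classLabels));

        Instances data = new Instances("example", attributeInfo, 0);
        data.setClassIndex(data.numAttributes() - 1);

        Instance instance = new DenseInstance(data.numAttributes());
        // The instance must be attached to a dataset before setClassValue(String)
        // is called, so the label can be resolved against the class attribute.
        instance.setDataset(data);
        instance.setValue(data.attribute("feature"), 1.5);
        instance.setClassValue("yes");
        data.add(instance);
    }
}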

Usage

From source file:org.knime.knip.suise.ops.BuildTrainingData.java

License:Open Source License

/**
 * {@inheritDoc}
 */
@Override
public Instances compute(RandomAccessibleInterval<LabelingType<L>> lab, Img<T> img, Instances r) {
    Random rand = new Random();

    double[] extent = new double[lab.numDimensions()];
    for (int d = 0; d < m_dimIndices.length; d++) {
        extent[m_dimIndices[d]] = lab.max(m_dimIndices[d]);
    }
    RectangleRegionOfInterest roi = new RectangleRegionOfInterest(new double[lab.numDimensions()], extent);

    Cursor<LabelingType<L>> labCur = roi.getIterableIntervalOverROI(lab).localizingCursor();
    OutOfBounds<T> imgRA = new OutOfBoundsBorder<T>(img);

    LabelRegions<L> regions = new LabelRegions<L>(lab);
    // get the class distributions
    Map<L, Double> classDistr = null;
    if (m_balanceInstancePerClass) {
        long sum = 0;
        long area;
        Collection<L> labels = regions.getExistingLabels();
        classDistr = new HashMap<L, Double>(labels.size());
        for (L label : labels) {
            area = regions.getLabelRegion(label).size();
            sum += area;
            classDistr.put(label, new Double(area));
        }
        // determine the new sampling rate for each class individually
        double instancesPerClass = (double) sum / (double) labels.size();
        for (L label : labels) {
            Double sampleRate = instancesPerClass / classDistr.get(label) * m_samplingRate;
            classDistr.put(label, sampleRate);
        }
    }

    long[] tmpPos = new long[imgRA.numDimensions()];
    while (labCur.hasNext()) {
        labCur.fwd();
        for (int d = 0; d < m_dimIndices.length; d++) {
            imgRA.setPosition(labCur.getLongPosition(m_dimIndices[d]), m_dimIndices[d]);
            if (imgRA.isOutOfBounds()) {
                imgRA.localize(tmpPos);
                NodeLogger.getLogger(getClass()).warn("Labeling reaches beyond the feature image. Position "
                        + Arrays.toString(tmpPos) + " skipped.");
                continue;
            }

        }
        if (!labCur.get().isEmpty()) {

            if (m_balanceInstancePerClass) {
                if (rand.nextDouble() >= classDistr.get(labCur.get().iterator().next())) {
                    continue;
                }
            } else {
                if (rand.nextDouble() >= m_samplingRate) {
                    continue;
                }
            }

            double[] featVec = new double[(int) img.dimension(m_featDim)];
            for (int f = 0; f < img.dimension(m_featDim); f++) {
                imgRA.setPosition(f, m_featDim);
                featVec[f] = imgRA.get().getRealDouble();
            }
            for (L classLabel : labCur.get()) {
                Instance instance = new DenseInstance(1.0, featVec);
                instance.insertAttributeAt(instance.numAttributes());
                instance.setDataset(r);
                instance.setClassValue(classLabel.toString());

                r.add(instance);

            }
        }
    }
    return r;
}

From source file:org.mcennis.graphrat.algorithm.machinelearning.MultiInstanceSVM.java

License:Open Source License

protected void addInstances(Graph g, Instances dataSet, Actor artist, int skipCount, int positiveSkipCount) {
    int skipCounter = 0;
    int positiveSkipCounter = 0;
    for (int i = 0; i < user.length; ++i) {
        String result = "false";
        if (g.getLink((String) parameter[3].getValue(), user[i], artist) != null) {
            result = "true";
        }
        Link[] interests = g.getLinkBySource((String) parameter[4].getValue(), user[i]);
        if (interests != null) {
            for (int j = 0; j < interests.length; ++j) {
                Link[] music = g.getLink((String) parameter[5].getValue(), user[i],
                        interests[j].getDestination());
                Link[] given = g.getLinkBySource((String) parameter[3].getValue(),
                        interests[j].getDestination());
                if ((given != null) && (music != null)) {
                    if (((result.contentEquals("true")) && (positiveSkipCounter % positiveSkipCount == 0))
                            || ((result.contentEquals("false")) && (skipCounter % skipCount == 0))) {
                        double[] values = new double[artists.length + 3];
                        java.util.Arrays.fill(values, 0.0);
                        values[0] = interests[j].getStrength();
                        values[1] = music[0].getStrength();
                        for (int k = 0; k < given.length; ++k) {
                            values[java.util.Arrays.binarySearch(artists, given[k].getDestination()) + 2] = 1.0;
                        }
                        if (result.compareTo("true") == 0) {
                            values[values.length - 1] = 1.0;
                        }
                        Instance instance = new SparseInstance(3 + artists.length, values);
                        instance.setDataset(dataSet);
                        instance.setClassValue(result);
                        dataSet.add(instance);
                        //                            System.out.println("Adding instance for user "+i);
                        if (result.contentEquals("false")) {
                            skipCounter++;
                        } else {
                            positiveSkipCounter++;
                        }
                    } else if (result.contentEquals("false")) {
                        skipCounter++;
                    } else {
                        positiveSkipCounter++;
                    }
                }
            }
        }
    }
}

From source file:org.mcennis.graphrat.algorithm.machinelearning.SVM.java

License:Open Source License

protected void addInstances(Graph g, Instances dataSet, Actor artist, int skipCount, int positiveSkipCount) {
    int skipCounter = 0;
    int positiveSkipCounter = 0;
    for (int i = 0; i < user.length; ++i) {
        String result = "false";
        if (g.getLink((String) parameter[3].getValue(), user[i], artist) != null) {
            result = "true";
        }
        Link[] given = g.getLinkBySource((String) parameter[3].getValue(), user[i]);
        if (given != null) {
            if (((result.contentEquals("true")) && (positiveSkipCounter % positiveSkipCount == 0))
                    || ((result.contentEquals("false")) && (skipCounter % skipCount == 0))) {
                double[] values = new double[artists.length + 1];
                java.util.Arrays.fill(values, 0.0);
                for (int k = 0; k < given.length; ++k) {
                    if (given[k].getDestination() == artist) {
                        values[java.util.Arrays.binarySearch(artists, given[k].getDestination())] = Double.NaN;
                    } else {
                        values[java.util.Arrays.binarySearch(artists, given[k].getDestination())] = 1.0;
                    }
                }
                if (result.compareTo("true") == 0) {
                    values[values.length - 1] = 1.0;
                }
                Instance instance = new SparseInstance(1 + artists.length, values);
                instance.setDataset(dataSet);
                instance.setClassValue(result);
                dataSet.add(instance);
                //                            System.out.println("Adding instance for user "+i);
                if (result.contentEquals("false")) {
                    skipCounter++;
                } else {
                    positiveSkipCounter++;
                }
            } else if (result.contentEquals("false")) {
                skipCounter++;
            } else {
                positiveSkipCounter++;
            }
        }
    }
}

From source file:org.processmining.analysis.clusteranalysis.ClusterDecisionAnalyzer.java

License:Open Source License

public Instances getDataInfo() {
    // create attribute information
    FastVector attributeInfo = new FastVector();
    // make attribute
    // clean the relevant attribute list and re-fill based on new selection
    // scope
    for (int i = 0; i < agProfiles.numberOfItems(); i++) {
        if (checks[i].isSelected()) {
            String name = CpnUtils.replaceSpecialCharacters(agProfiles.getItemKey(i));
            Attribute wekaAtt = new Attribute(name);
            attributeInfo.addElement(wekaAtt);
        }
    }
    // for target concept
    FastVector my_nominal_values = new FastVector(clusters.getClusters().size());
    Attribute targetConcept = null;
    for (Cluster aCluster : clusters.getClusters()) {
        my_nominal_values.addElement(aCluster.getName());
    }
    targetConcept = new Attribute("Cluster", my_nominal_values);
    attributeInfo.addElement(targetConcept);
    attributeInfo.trimToSize();

    // learning
    Instances data = new Instances("Clustering", attributeInfo, 0);
    data.setClassIndex(data.numAttributes() - 1);

    for (Cluster aCluster : clusters.getClusters()) {
        String clusterName = aCluster.getName();
        for (Integer i : aCluster.getTraceIndices()) {
            Instance instance0 = new Instance(attributeInfo.size());
            for (int j = 0; j < agProfiles.numberOfItems(); j++) {
                if (checks[j].isSelected()) {
                    String name = CpnUtils.replaceSpecialCharacters(agProfiles.getItemKey(j));
                    Attribute wekaAtt = data.attribute(name);
                    if (wekaAtt != null) {
                        double doubleAttValue = (new Double(agProfiles.getValue(i, j))).doubleValue();
                        instance0.setValue(wekaAtt, doubleAttValue);
                    } else {
                        System.out.println("fail to add");
                    }
                }
            }
            instance0.setDataset(data);
            instance0.setClassValue(clusterName);
            data.add(instance0);
        }
    }
    return data;
}

From source file:org.processmining.analysis.decisionmining.DecisionAnalyser.java

License:Open Source License

/**
 * Analyses the given list of decision points according to the context
 * specified. Furthermore, the context is provided with some visualization
 * of the analysis result.
 * 
 * @param decisionPoints
 *            the list of decision points to be analysed
 * @param log
 *            the log to be analysed
 * @param highLevelPN
 *            the simulation model to export discovered data dependencies
 */
public void analyse(List<DecisionPoint> decisionPoints, DecisionMiningLogReader log, HLPetriNet highLevelPN) {

    Iterator<DecisionPoint> allDecisionPoints = decisionPoints.iterator();
    while (allDecisionPoints.hasNext()) {
        DecisionPoint currentDP = allDecisionPoints.next();

        // initialize the classifying data structure
        initClassifier();

        // create attribute information
        FastVector attributeInfo = currentDP.getContext().getAttributeInfo();

        // create empty data set with attribute information
        Instances data = new Instances(currentDP.getName(), attributeInfo, 0);
        data.setClassIndex(data.numAttributes() - 1);

        // create learning instances
        List<DecisionCategory> allCategories = currentDP.getTargetConcept();
        Iterator<DecisionCategory> categoryIterator = allCategories.iterator();
        while (categoryIterator.hasNext()) {
            DecisionCategory branch = categoryIterator.next();
            // create all instances for one class at once
            ArrayList belongingTraces = log.getTracesInCategory(branch);
            Iterator traceIterator = belongingTraces.iterator();
            while (traceIterator.hasNext()) {
                DecisionMiningLogTrace trace = (DecisionMiningLogTrace) traceIterator.next();
                // one instance per trace
                // (future work: loops may result in multiple instances per
                // trace!)
                Instance instance = trace.makeInstance(data, attributeInfo.size(), branch, log,
                        currentDP.getContext());
                // classify instance
                instance.setClassValue(branch.toString());
                data.add(instance);
            }
        }

        // in case no single learning instance can be provided (as the decision
        // point is never reached, or the decision classes cannot be specified
        // properly) --> do not call the algorithm
        if (data.numInstances() == 0) {
            currentDP.getContext().setResultViewPanel(createMessagePanel("No learning instances available"));
        }
        // actually solve the classification problem
        else {
            try {
                myClassifier.buildClassifier(data);
                // build up result visualization
                currentDP.getContext().setResultViewPanel(createResultVisualization());
                // create evaluation statistics of classifier for the user
                currentDP.getContext().setEvaluationViewPanel(createEvaluationVisualization(data));
                // only derive discovered data dependencies for decision
                // point if tree is not trivial
                if (((J48) myClassifier).measureNumRules() > 0) {
                    // TODO - derive the rules in a
                    // a) classifier-independent way
                    // b) cpn-independent way
                    currentDP.setDataDependencies(((J48) myClassifier).prefix(), highLevelPN);
                }
            } catch (Exception ex) {
                ex.printStackTrace();
                currentDP.getContext().setResultViewPanel(
                        createMessagePanel("Error while solving the classification problem"));
            }
        }
    }
}

From source file:org.processmining.analysis.decisionmining.DecisionAnalyserForAuLdg.java

License:Open Source License

/**
 * Analyses the given list of decision points according to the context
 * specified. Furthermore, the context is provided with some visualization of
 * the analysis result.
 * 
 * @param decisionPoints
 *          the list of decision points to be analysed
 * @param log
 *          the log to be analysed
 * @param highLevelPN
 *          the simulation model to export discovered data dependencies
 */
public void analyse(List<DecisionPointForAuLdg> decisionPoints, DecisionMiningLogReaderForAuLdg log,
        HLPetriNet highLevelPN) {

    Iterator<DecisionPointForAuLdg> allDecisionPoints = decisionPoints.iterator();
    while (allDecisionPoints.hasNext()) {
        DecisionPointForAuLdg currentDP = allDecisionPoints.next();

        // initialize the classifying data structure
        initClassifier();

        // create attribute information
        FastVector attributeInfo = currentDP.getContext().getAttributeInfo();

        // create empty data set with attribute information
        Instances data = new Instances(currentDP.getName(), attributeInfo, 0);
        data.setClassIndex(data.numAttributes() - 1);

        // create learning instances
        List<DecisionCategoryForAuLdg> allCategories = currentDP.getTargetConcept();
        Iterator<DecisionCategoryForAuLdg> categoryIterator = allCategories.iterator();
        while (categoryIterator.hasNext()) {
            DecisionCategoryForAuLdg branch = categoryIterator.next();
            // create all instances for one class at once
            ArrayList belongingTraces = log.getTracesInCategory(branch);
            Iterator traceIterator = belongingTraces.iterator();
            while (traceIterator.hasNext()) {
                DecisionMiningLogTraceForAuLdg trace = (DecisionMiningLogTraceForAuLdg) traceIterator.next();
                // one instance per trace
                // (future work: loops may result in multiple instances per trace!)
                Instance instance = trace.makeInstance(data, attributeInfo.size(), branch, log,
                        currentDP.getContext());
                // classify instance
                instance.setClassValue(branch.toString());
                data.add(instance);
            }
        }

        // in case no single learning instance can be provided (as the decision
        // point is never reached, or the decision classes cannot be specified
        // properly) --> do not call the algorithm
        if (data.numInstances() == 0) {
            currentDP.getContext().setResultViewPanel(createMessagePanel("No learning instances available"));
        }
        // actually solve the classification problem
        else {
            try {
                myClassifier.buildClassifier(data);
                // build up result visualization
                currentDP.getContext().setResultViewPanel(createResultVisualization());
                // create evaluation statistics of classifier for the user
                currentDP.getContext().setEvaluationViewPanel(createEvaluationVisualization(data));
                // only derive discovered data dependencies for decision point if tree
                // is not trivial
                if (((J48) myClassifier).measureNumRules() > 0) {
                    // TODO - derive the rules in a
                    // a) classifier-independent way
                    // b) cpn-independent way
                    currentDP.setDataDependencies(((J48) myClassifier).prefix(), highLevelPN);
                }
            } catch (Exception ex) {
                ex.printStackTrace();
                currentDP.getContext().setResultViewPanel(
                        createMessagePanel("Error while solving the classification problem"));
            }
        }
    }
}

From source file:org.prom5.analysis.clusteranalysis.ClusterDecisionAnalyzer.java

License:Open Source License

public Instances getDataInfo() {
    // create attribute information
    FastVector attributeInfo = new FastVector();
    // make attribute
    // clean the relevant attribute list and re-fill based on new selection scope
    for (int i = 0; i < agProfiles.numberOfItems(); i++) {
        if (checks[i].isSelected()) {
            String name = CpnUtils.replaceSpecialCharacters(agProfiles.getItemKey(i));
            Attribute wekaAtt = new Attribute(name);
            attributeInfo.addElement(wekaAtt);
        }
    }
    // for target concept
    FastVector my_nominal_values = new FastVector(clusters.getClusters().size());
    Attribute targetConcept = null;
    for (Cluster aCluster : clusters.getClusters()) {
        my_nominal_values.addElement(aCluster.getName());
    }
    targetConcept = new Attribute("Cluster", my_nominal_values);
    attributeInfo.addElement(targetConcept);
    attributeInfo.trimToSize();

    // learning
    Instances data = new Instances("Clustering", attributeInfo, 0);
    data.setClassIndex(data.numAttributes() - 1);

    for (Cluster aCluster : clusters.getClusters()) {
        String clusterName = aCluster.getName();
        for (Integer i : aCluster.getTraceIndices()) {
            Instance instance0 = new Instance(attributeInfo.size());
            for (int j = 0; j < agProfiles.numberOfItems(); j++) {
                if (checks[j].isSelected()) {
                    String name = CpnUtils.replaceSpecialCharacters(agProfiles.getItemKey(j));
                    Attribute wekaAtt = data.attribute(name);
                    if (wekaAtt != null) {
                        double doubleAttValue = (new Double(agProfiles.getValue(i, j))).doubleValue();
                        instance0.setValue(wekaAtt, doubleAttValue);
                    } else {
                        System.out.println("fail to add");
                    }
                }
            }
            instance0.setDataset(data);
            instance0.setClassValue(clusterName);
            data.add(instance0);
        }
    }
    return data;
}

From source file:org.prom5.analysis.decisionmining.DecisionAnalyser.java

License:Open Source License

/**
 * Analyses the given list of decision points according to the context specified.
 * Furthermore, the context is provided with some visualization of the analysis result.
 * @param decisionPoints the list of decision points to be analysed
 * @param log the log to be analysed
 * @param highLevelPN the simulation model to export discovered data dependencies
 */
public void analyse(List<DecisionPoint> decisionPoints, DecisionMiningLogReader log, HLPetriNet highLevelPN) {

    Iterator<DecisionPoint> allDecisionPoints = decisionPoints.iterator();
    while (allDecisionPoints.hasNext()) {
        DecisionPoint currentDP = allDecisionPoints.next();

        // initialize the classifying data structure
        initClassifier();

        // create attribute information
        FastVector attributeInfo = currentDP.getContext().getAttributeInfo();

        // create empty data set with attribute information
        Instances data = new Instances(currentDP.getName(), attributeInfo, 0);
        data.setClassIndex(data.numAttributes() - 1);

        // create learning instances
        List<DecisionCategory> allCategories = currentDP.getTargetConcept();
        Iterator<DecisionCategory> categoryIterator = allCategories.iterator();
        while (categoryIterator.hasNext()) {
            DecisionCategory branch = categoryIterator.next();
            // create all instances for one class at once
            ArrayList belongingTraces = log.getTracesInCategory(branch);
            Iterator traceIterator = belongingTraces.iterator();
            while (traceIterator.hasNext()) {
                DecisionMiningLogTrace trace = (DecisionMiningLogTrace) traceIterator.next();
                // one instance per trace
                // (future work: loops may result in multiple instances per trace!)
                Instance instance = trace.makeInstance(data, attributeInfo.size(), branch, log,
                        currentDP.getContext());
                // classify instance
                instance.setClassValue(branch.toString());
                data.add(instance);
            }
        }

        // in case no single learning instance can be provided (as the decision
        // point is never reached, or the decision classes cannot be specified
        // properly) --> do not call the algorithm
        if (data.numInstances() == 0) {
            currentDP.getContext().setResultViewPanel(createMessagePanel("No learning instances available"));
        }
        // actually solve the classification problem
        else {
            try {
                myClassifier.buildClassifier(data);
                // build up result visualization
                currentDP.getContext().setResultViewPanel(createResultVisualization());
                // create evaluation statistics of classifier for the user
                currentDP.getContext().setEvaluationViewPanel(createEvaluationVisualization(data));
                // only derive discovered data dependencies for decision point if tree is not trivial
                if (((J48) myClassifier).measureNumRules() > 0) {
                    // TODO - derive the rules in a
                    // a) classifier-independent way
                    // b) cpn-independent way
                    currentDP.setDataDependencies(((J48) myClassifier).prefix(), highLevelPN);
                }
            } catch (Exception ex) {
                ex.printStackTrace();
                currentDP.getContext().setResultViewPanel(
                        createMessagePanel("Error while solving the classification problem"));
            }
        }
    }
}

From source file:oxis.yologp.YOLogPDescriptor.java

License:Open Source License

/**
 * Predict the LogP.
 *
 */
private void predict() throws Exception {

    Instances instances = buildDataset();

    Map<Object, Object> properties;
    for (DrugStruct drugStruct : listDrug) {

        if (drugStruct.drug.getProperty("flag")) {
            properties = drugStruct.drug.getProperties();
            Instance instance = new DenseInstance(instances.numAttributes()); //28 + 1024
            instance.setDataset(instances);
            for (Object propKey : properties.keySet()) {
                if (!(propKey.equals("hash") || propKey.equals("flag") || propKey.equals("smiles"))) {
                    try {
                        instance.setValue(instances.attribute(propKey.toString()),
                                Double.parseDouble(properties.get(propKey).toString()));
                    } catch (NullPointerException ex) {
                        Logger.getLogger(YOLogPDescriptor.class.getName()).log(Level.WARNING,
                                "Property not used: {0}", propKey.toString());
                    }
                }
            }

            double predicted = model.classifyInstance(instance);
            predicted = Math.round(predicted * 100) / 100.0d;
            instance.setClassValue(predicted);
            instances.add(instance);
            drugStruct.drug.setProperty("predicted", predicted);
        }
    }
}

From source file:oxis.yologp.YOLogPDescriptor.java

License:Open Source License

/**
 * Train a model, erasing any previously trained one.
 *
 * @param name the name of the model file to save
 */
public void train(String name) throws Exception {

    compute();

    Instances instances = buildDataset();

    model = new RandomForest();

    Map<Object, Object> properties;
    for (DrugStruct drugStruct : listDrug) {

        if (drugStruct.drug.getProperty("flag")) {
            properties = drugStruct.drug.getProperties();
            Instance instance = new DenseInstance(instances.numAttributes()); //28 + 1024
            instance.setDataset(instances);
            for (Object propKey : properties.keySet()) {
                if (!(propKey.equals("hash") || propKey.equals("flag") || propKey.equals("smiles"))) {
                    try {
                        instance.setValue(instances.attribute(propKey.toString()),
                                Double.parseDouble(properties.get(propKey).toString()));
                    } catch (NullPointerException ex) {
                        Logger.getLogger(YOLogPDescriptor.class.getName()).log(Level.WARNING,
                                "Property not used: {0}", propKey.toString());
                    }
                }
            }
            instance.setClassValue(drugStruct.getLogP());
            instances.add(instance);
        }
    }
    model.setNumFeatures(200);
    model.setNumTrees(400);
    model.setMaxDepth(0);
    model.buildClassifier(instances);

    weka.core.SerializationHelper.write(path + name, model);
}