Example usage for weka.core Instance setDataset

List of usage examples for weka.core Instance setDataset

Introduction

On this page you can find example usages of weka.core Instance.setDataset.

Prototype

public void setDataset(Instances instances);

Source Link

Document

Sets the reference to the dataset.

Usage

From source file:lineage.LineageClassifier.java

License:Open Source License

/**
 * Classifies a feature vector with this thread's classifier, returning the
 * boolean class (weka class index 1 means {@code true}).
 */
public static final boolean classify(final double[] vector) throws Exception {
    // Fetch — or lazily create — the Operator bound to the calling thread.
    Operator op;
    synchronized (table) { // serialize access to the shared thread->Operator map
        final Thread current = Thread.currentThread();
        op = table.get(current);
        if (op == null) {
            op = new Operator();
            table.put(current, op);
        }
    }

    // Wrap the vector in a weka Instance attached to the training header.
    final Instance instance = new DenseInstance(1, vector);
    instance.setDataset(op.data);

    // The model was trained on a binary class encoded as indices 0 / 1.
    final double predicted = op.c.classifyInstance(instance);
    return ((int) Math.round(predicted)) == 1;
}

From source file:linqs.gaia.model.oc.ncc.WekaClassifier.java

License:Open Source License

/**
 * Converts a decorable item into a Weka instance and adds it to the
 * given dataset.
 *
 * @param instances Weka dataset that supplies the attribute definitions and
 *                  receives the new instance
 * @param di decorable item to convert
 * @param ispredict true if the instance is created for prediction/testing,
 *                  false if for training
 */
private void createInstance(Instances instances, Decorable di, boolean ispredict) {
    double[] instvalues = new double[attinfosize];
    int attindex = 0;

    Schema schema = di.getSchema();
    for (String fid : featureids) {
        FeatureValue fvalue = di.getFeatureValue(fid);
        Attribute a = instances.attribute(attindex);

        Feature f = schema.getFeature(fid);
        if (!(f instanceof CompositeFeature)) {
            // Handle non multi-valued feature: one attribute per feature.
            instvalues[attindex] = this.gaiavalues2weka(f, fid, fvalue, a, ispredict);
            attindex++;
        } else {
            // Handle multi-valued (composite) feature: one attribute per sub-value.
            CompositeFeature mv = (CompositeFeature) f;
            UnmodifiableList<SimplePair<String, CVFeature>> mvfeatures = mv.getFeatures();
            CompositeValue mvvalue = (CompositeValue) di.getFeatureValue(fid);
            UnmodifiableList<FeatureValue> mvfvalues = mvvalue.getFeatureValues();
            int num = mvfvalues.size();
            for (int j = 0; j < num; j++) {
                // NOTE(review): on the first iteration this tests the composite
                // value fetched above; on later iterations it tests the PREVIOUS
                // sub-value, because fvalue is reassigned below — confirm this
                // skip-on-unknown behavior is intended.
                if (fvalue.equals(FeatureValue.UNKNOWN_VALUE)) {
                    attindex++;
                    continue;
                }

                a = instances.attribute(attindex);
                f = mvfeatures.get(j).getSecond();
                fvalue = mvfvalues.get(j);
                instvalues[attindex] = this.gaiavalues2weka(f, fid, fvalue, a, ispredict);
                attindex++;
            }
        }
    }

    // Create a sparse instance of weight 1 with the collected values,
    // attach it to the dataset header, and add it to the dataset.
    Instance inst = new SparseInstance(1, instvalues);
    inst.setDataset(instances);

    instances.add(inst);
}

From source file:lu.lippmann.cdb.ext.hydviga.data.StationsDataProvider.java

License:Open Source License

/**
 * Builds a small dataset for the map view: one row per distinct coordinate,
 * carrying the station name, x/y position, and a selection status class.
 */
private Instances getDataSetForMap(final Collection<String> sel, final Collection<String> usable) {
    final Instances ds = new Instances("ds", new ArrayList<Attribute>(), 0);
    ds.insertAttributeAt(new Attribute("name", new ArrayList<String>(this.coordinatesMap.keySet())),
            ds.numAttributes());
    ds.insertAttributeAt(new Attribute("x"), ds.numAttributes());
    ds.insertAttributeAt(new Attribute("y"), ds.numAttributes());
    ds.insertAttributeAt(
            new Attribute("status",
                    Arrays.asList(new String[] { SELECTED_STATUS, USABLE_STATUS, NOT_USABLE_STATUS })),
            ds.numAttributes());
    ds.setClassIndex(ds.numAttributes() - 1);

    // Coordinate keys ("x-y") of the selected stations.
    final Set<String> selectedKeys = new HashSet<String>();
    for (final String station : sel) {
        final double[] c = coordinatesMap.get(station);
        selectedKeys.add(c[0] + "-" + c[1]);
    }
    // Coordinate keys of the usable stations.
    final Set<String> usableKeys = new HashSet<String>();
    for (final String station : usable) {
        final double[] c = coordinatesMap.get(station);
        usableKeys.add(c[0] + "-" + c[1]);
    }

    // Emit one row per distinct coordinate (first station wins).
    final Set<String> seen = new HashSet<String>();
    for (final Map.Entry<String, double[]> entry : this.coordinatesMap.entrySet()) {
        final double[] xy = entry.getValue();
        final String key = xy[0] + "-" + xy[1];
        if (!seen.add(key)) {
            continue;
        }
        final Instance row = new DenseInstance(1.0d, new double[] { 0d, 0d, 0d, 0d });
        row.setDataset(ds);
        row.setValue(0, entry.getKey());
        row.setValue(1, xy[0]);
        row.setValue(2, xy[1]);
        final String status;
        if (selectedKeys.contains(key)) {
            status = SELECTED_STATUS;
        } else if (usableKeys.contains(key)) {
            status = USABLE_STATUS;
        } else {
            status = NOT_USABLE_STATUS;
        }
        row.setValue(3, status);
        ds.add(row);
    }

    return ds;
}

From source file:lu.lippmann.cdb.ext.hydviga.gaps.GapFillerClassifier.java

License:Open Source License

/**
 * {@inheritDoc}
 *
 * Fills the missing values of the first attribute that has any: trains the
 * wrapped classifier on the complete records, then predicts a value for each
 * record where that attribute is missing.
 */
@Override
Instances fillGaps0(final Instances ds) throws Exception {
    // Work on a copy without constant attributes (they carry no signal).
    final Instances newds = WekaDataProcessingUtil.buildDataSetWithoutConstantAttributes(ds);

    // Locate the attribute to fill; fail fast if nothing is missing.
    final int attrWithMissingIdx = WekaDataStatsUtil.getFirstAttributeWithMissingValue(newds);
    if (attrWithMissingIdx == -1)
        throw new IllegalStateException();

    // Training set = all fully-populated records, with the gapped
    // attribute as the class to predict.
    final Instances trainingSet = new Instances(newds, 0);
    for (int i = 0; i < newds.numInstances(); i++) {
        if (!newds.instance(i).hasMissingValue())
            trainingSet.add(newds.instance(i));
    }
    trainingSet.setClassIndex(attrWithMissingIdx);

    this.classifier.buildClassifier(trainingSet);

    // Predict and fill every record missing that attribute. The copy is
    // attached to newds so the classifier sees the right header.
    newds.setClassIndex(attrWithMissingIdx);
    for (int i = 0; i < newds.numInstances(); i++) {
        if (newds.instance(i).isMissing(attrWithMissingIdx)) {
            final Instance newrecord = new DenseInstance(newds.instance(i));
            newrecord.setDataset(newds);
            final double newval = this.classifier.classifyInstance(newrecord);
            newds.instance(i).setValue(attrWithMissingIdx, newval);
        }
    }

    // Keep a printable description of the trained model.
    this.model = this.classifier.toString();

    return newds;
}

From source file:mao.datamining.RemoveUselessColumnsByMissingValues.java

License:Open Source License

/**
 * Input an instance for filtering.
 *
 * @param instance the input instance
 * @return true if the filtered instance may now be
 * collected with output().
 */
public boolean input(Instance instance) {

    if (getInputFormat() == null) {
        throw new IllegalStateException("No input instance format defined");
    }
    if (m_NewBatch) {
        resetQueue();
        m_NewBatch = false;
    }
    if (m_removeFilter == null) {
        // First batch: buffer until batchFinished() decides which columns to drop.
        bufferInput(instance);
        return false;
    }
    // Filter already configured: pass the instance straight through it.
    m_removeFilter.input(instance);
    final Instance filtered = m_removeFilter.output();
    filtered.setDataset(getOutputFormat());
    copyValues(filtered, false, instance.dataset(), getOutputFormat());
    push(filtered);
    return true;
}

From source file:mao.datamining.RemoveUselessColumnsByMissingValues.java

License:Open Source License

/**
 * Signify that this batch of input to the filter is finished.
 *
 * On the first batch this determines which attributes to remove — those
 * whose missing-value percentage exceeds m_maxMissingPercentage, plus any
 * column names in the manual black-list — then configures the internal
 * Remove filter and pushes the filtered batch to the output queue.
 *
 * @return true if there are instances pending output
 * @throws Exception if no input format defined
 */
public boolean batchFinished() throws Exception {

    if (getInputFormat() == null) {
        throw new IllegalStateException("No input instance format defined");
    }
    if (m_removeFilter == null) {

        // Establish attributes to remove from first batch.
        Instances toFilter = getInputFormat();
        int[] attsToDelete = new int[toFilter.numAttributes()];
        int numToDelete = 0;
        for (int i = 0; i < toFilter.numAttributes(); i++) {
            if (i == toFilter.classIndex()) {
                continue; // never remove the class attribute
            }
            AttributeStats stats = toFilter.attributeStats(i);

            // Drop the attribute when its missing-value ratio is too high
            // OR when it is black-listed by name. A single combined test
            // fixes a bug where an attribute matching both criteria was
            // appended twice, feeding a duplicate index to the Remove
            // filter. The 100L widening avoids int overflow of
            // missingCount * 100 on very large datasets.
            boolean tooManyMissing = (stats.missingCount * 100L) / stats.totalCount > m_maxMissingPercentage;
            boolean blackListed = this.column2DeleteSet.contains(toFilter.attribute(i).name());
            if (tooManyMissing || blackListed) {
                attsToDelete[numToDelete++] = i;
            }
        }

        // Shrink the index array to the number actually collected.
        int[] finalAttsToDelete = new int[numToDelete];
        System.arraycopy(attsToDelete, 0, finalAttsToDelete, 0, numToDelete);

        // Configure the Remove filter and run the buffered batch through it.
        m_removeFilter = new Remove();
        m_removeFilter.setAttributeIndicesArray(finalAttsToDelete);
        m_removeFilter.setInvertSelection(false);
        m_removeFilter.setInputFormat(toFilter);

        for (int i = 0; i < toFilter.numInstances(); i++) {
            m_removeFilter.input(toFilter.instance(i));
        }
        m_removeFilter.batchFinished();

        Instance processed;
        Instances outputDataset = m_removeFilter.getOutputFormat();

        // Restore old relation name to hide attribute filter stamp.
        outputDataset.setRelationName(toFilter.relationName());

        setOutputFormat(outputDataset);
        while ((processed = m_removeFilter.output()) != null) {
            processed.setDataset(outputDataset);
            push(processed);
        }
    }
    flushInput();

    m_NewBatch = true;
    return (numPendingOutput() != 0);
}

From source file:marytts.tools.newlanguage.LTSTrainer.java

License:Open Source License

/**
 * Train the tree, using binary decision nodes.
 *
 * Builds one C4.5 decision tree per center grapheme (predicting the aligned
 * phone chain from a window of surrounding graphemes) and combines them
 * under a single root decision node.
 *
 * @param minLeafData
 *            the minimum number of instances that have to occur in at least two subsets induced by split
 * @return bigTree
 * @throws IOException
 *             IOException
 */
public CART trainTree(int minLeafData) throws IOException {

    // One datapoint list per grapheme in the alphabet.
    Map<String, List<String[]>> grapheme2align = new HashMap<String, List<String[]>>();
    for (String gr : this.graphemeSet) {
        grapheme2align.put(gr, new ArrayList<String[]>());
    }

    // All distinct (quoted) phone chains seen in the alignments.
    Set<String> phChains = new HashSet<String>();

    // for every alignment pair collect counts
    for (int i = 0; i < this.inSplit.size(); i++) {

        StringPair[] alignment = this.getAlignment(i);

        for (int inNr = 0; inNr < alignment.length; inNr++) {

            // quotation signs needed to represent empty string
            String outAlNr = "'" + alignment[inNr].getString2() + "'";

            // TODO: don't consider alignments to more than three characters
            // (5 = three characters plus the two quote signs)
            if (outAlNr.length() > 5)
                continue;

            phChains.add(outAlNr);

            // storing context and target: 2*context+1 grapheme slots
            // plus one slot for the target phone chain
            String[] datapoint = new String[2 * context + 2];

            for (int ct = 0; ct < 2 * context + 1; ct++) {
                int pos = inNr - context + ct;

                // positions outside the word are encoded as "null"
                if (pos >= 0 && pos < alignment.length) {
                    datapoint[ct] = alignment[pos].getString1();
                } else {
                    datapoint[ct] = "null";
                }

            }

            // set target
            datapoint[2 * context + 1] = outAlNr;

            // add datapoint, keyed by the center grapheme
            grapheme2align.get(alignment[inNr].getString1()).add(datapoint);
        }
    }

    // for conversion need feature definition file
    FeatureDefinition fd = this.graphemeFeatureDef(phChains);

    // the feature for the grapheme at the window center
    int centerGrapheme = fd.getFeatureIndex("att" + (context + 1));

    // one sub-tree per possible center grapheme
    List<CART> stl = new ArrayList<CART>(fd.getNumberOfValues(centerGrapheme));

    for (String gr : fd.getPossibleValues(centerGrapheme)) {
        System.out.println("      Training decision tree for: " + gr);
        logger.debug("      Training decision tree for: " + gr);

        ArrayList<Attribute> attributeDeclarations = new ArrayList<Attribute>();

        // attributes with values
        for (int att = 1; att <= context * 2 + 1; att++) {

            // ...collect possible values
            ArrayList<String> attVals = new ArrayList<String>();

            String featureName = "att" + att;

            for (String usableGrapheme : fd.getPossibleValues(fd.getFeatureIndex(featureName))) {
                attVals.add(usableGrapheme);
            }

            attributeDeclarations.add(new Attribute(featureName, attVals));
        }

        List<String[]> datapoints = grapheme2align.get(gr);

        // maybe training is faster with targets limited to grapheme
        Set<String> graphSpecPh = new HashSet<String>();
        for (String[] dp : datapoints) {
            graphSpecPh.add(dp[dp.length - 1]);
        }

        // targetattribute
        // ...collect possible values
        ArrayList<String> targetVals = new ArrayList<String>();
        for (String phc : graphSpecPh) {// todo: use either fd of phChains
            targetVals.add(phc);
        }
        attributeDeclarations.add(new Attribute(TrainedLTS.PREDICTED_STRING_FEATURENAME, targetVals));

        // now, create the dataset adding the datapoints
        Instances data = new Instances(gr, attributeDeclarations, 0);

        // datapoints: each instance must be attached to the dataset
        // header before nominal values can be set by string
        for (String[] point : datapoints) {

            Instance currInst = new DenseInstance(data.numAttributes());
            currInst.setDataset(data);

            for (int i = 0; i < point.length; i++) {

                currInst.setValue(i, point[i]);
            }

            data.add(currInst);
        }

        // Make the last attribute be the class
        data.setClassIndex(data.numAttributes() - 1);

        // build the tree without using the J48 wrapper class
        // standard parameters are:
        // binary split selection with minimum x instances at the leaves, tree is pruned, confidenced value, subtree raising,
        // cleanup, don't collapse
        // Here is used a modifed version of C45PruneableClassifierTree that allow using Unary Classes (see Issue #51)
        C45PruneableClassifierTree decisionTree;
        try {
            decisionTree = new C45PruneableClassifierTreeWithUnary(
                    new BinC45ModelSelection(minLeafData, data, true), true, 0.25f, true, true, false);
            decisionTree.buildClassifier(data);
        } catch (Exception e) {
            throw new RuntimeException("couldn't train decisiontree using weka: ", e);
        }

        // convert the weka tree into MARY's CART representation
        CART maryTree = TreeConverter.c45toStringCART(decisionTree, fd, data);

        stl.add(maryTree);
    }

    // root node dispatches on the center grapheme to the per-grapheme trees
    DecisionNode.ByteDecisionNode rootNode = new DecisionNode.ByteDecisionNode(centerGrapheme, stl.size(), fd);
    for (CART st : stl) {
        rootNode.addDaughter(st.getRootNode());
    }

    // record the training configuration alongside the tree
    Properties props = new Properties();
    props.setProperty("lowercase", String.valueOf(convertToLowercase));
    props.setProperty("stress", String.valueOf(considerStress));
    props.setProperty("context", String.valueOf(context));

    CART bigTree = new CART(rootNode, fd, props);

    return bigTree;
}

From source file:marytts.tools.voiceimport.PauseDurationTrainer.java

License:Open Source License

/**
 * Builds a Weka instance for the given feature vector, populating only the
 * features this trainer uses (plus one slot for the target attribute).
 */
private Instance createInstance(Instances data, FeatureDefinition fd, FeatureVector fv) {
    // One slot per attribute: the relevant features plus one target.
    final Instance instance = new DenseInstance(data.numAttributes());
    instance.setDataset(data);

    // Copy only the features listed in featureNames.
    for (final String name : this.featureNames) {
        final int featureIndex = fd.getFeatureIndex(name);
        final String value = fv.getFeatureAsString(featureIndex, fd);
        instance.setValue(data.attribute(name), value);
    }

    return instance;
}

From source file:matres.MatResUI.java

/**
 * Classifies the currently selected form values twice — once for risk level
 * (J48 + NaiveBayes) and once for the recommended action (J48) — using
 * pre-trained models loaded from the classpath, then updates the UI fields
 * and progress bars with the results.
 */
private void doClassification() {
    J48 m_treeResiko;
    J48 m_treeAksi;
    NaiveBayes m_nbResiko;
    NaiveBayes m_nbAksi;
    FastVector m_fvInstanceRisks;
    FastVector m_fvInstanceActions;

    // Serialized weka models shipped as classpath resources.
    InputStream isRiskTree = getClass().getResourceAsStream("data/ResikoTree.model");
    InputStream isRiskNB = getClass().getResourceAsStream("data/ResikoNB.model");
    InputStream isActionTree = getClass().getResourceAsStream("data/AksiTree.model");
    InputStream isActionNB = getClass().getResourceAsStream("data/AksiNB.model");

    m_treeResiko = new J48();
    m_treeAksi = new J48();
    m_nbResiko = new NaiveBayes();
    m_nbAksi = new NaiveBayes();
    try {
        m_treeResiko = (J48) weka.core.SerializationHelper.read(isRiskTree);
        m_nbResiko = (NaiveBayes) weka.core.SerializationHelper.read(isRiskNB);
        m_treeAksi = (J48) weka.core.SerializationHelper.read(isActionTree);
        m_nbAksi = (NaiveBayes) weka.core.SerializationHelper.read(isActionNB);
    } catch (Exception ex) {
        // NOTE(review): on load failure the untrained default models above
        // are used, so classification below will likely fail too.
        Logger.getLogger(MatResUI.class.getName()).log(Level.SEVERE, null, ex);
    }

    System.out.println("Setting up an Instance...");
    // Values for LIKELIHOOD OF OCCURRENCE
    FastVector fvLO = new FastVector(5);
    fvLO.addElement("> 10 in 1 year");
    fvLO.addElement("1 - 10 in 1 year");
    fvLO.addElement("1 in 1 year to 1 in 10 years");
    fvLO.addElement("1 in 10 years to 1 in 100 years");
    fvLO.addElement("1 in more than 100 years");
    // Values for SAFETY
    FastVector fvSafety = new FastVector(5);
    fvSafety.addElement("near miss");
    fvSafety.addElement("first aid injury, medical aid injury");
    fvSafety.addElement("lost time injury / temporary disability");
    fvSafety.addElement("permanent disability");
    fvSafety.addElement("fatality");
    // Values for EXTRA FUEL COST
    FastVector fvEFC = new FastVector(5);
    fvEFC.addElement("< 100 million rupiah");
    fvEFC.addElement("0,1 - 1 billion rupiah");
    fvEFC.addElement("1 - 10 billion rupiah");
    fvEFC.addElement("10 - 100  billion rupiah");
    fvEFC.addElement("> 100 billion rupiah");
    // Values for SYSTEM RELIABILITY
    FastVector fvSR = new FastVector(5);
    fvSR.addElement("< 100 MWh");
    fvSR.addElement("0,1 - 1 GWh");
    fvSR.addElement("1 - 10 GWh");
    fvSR.addElement("10 - 100 GWh");
    fvSR.addElement("> 100 GWh");
    // Values for EQUIPMENT COST
    FastVector fvEC = new FastVector(5);
    fvEC.addElement("< 50 million rupiah");
    fvEC.addElement("50 - 500 million rupiah");
    fvEC.addElement("0,5 - 5 billion rupiah");
    fvEC.addElement("5 -50 billion rupiah");
    fvEC.addElement("> 50 billion rupiah");
    // Values for CUSTOMER SATISFACTION SOCIAL FACTOR
    FastVector fvCSSF = new FastVector(5);
    fvCSSF.addElement("Complaint from the VIP customer");
    fvCSSF.addElement("Complaint from industrial customer");
    fvCSSF.addElement("Complaint from community");
    fvCSSF.addElement("Complaint from community that have potential riot");
    fvCSSF.addElement("High potential riot");
    // Values for RISK
    FastVector fvRisk = new FastVector(4);
    fvRisk.addElement("Low");
    fvRisk.addElement("Moderate");
    fvRisk.addElement("High");
    fvRisk.addElement("Extreme");
    // Values for ACTION
    FastVector fvAction = new FastVector(3);
    fvAction.addElement("Life Extension Program");
    fvAction.addElement("Repair/Refurbish");
    fvAction.addElement("Replace/Run to Fail + Investment");

    // Defining Attributes, including Class(es) Attributes
    Attribute attrLO = new Attribute("LO", fvLO);
    Attribute attrSafety = new Attribute("Safety", fvSafety);
    Attribute attrEFC = new Attribute("EFC", fvEFC);
    Attribute attrSR = new Attribute("SR", fvSR);
    Attribute attrEC = new Attribute("EC", fvEC);
    Attribute attrCSSF = new Attribute("CSSF", fvCSSF);
    Attribute attrRisk = new Attribute("Risk", fvRisk);
    Attribute attrAction = new Attribute("Action", fvAction);

    // Risk dataset: six inputs plus the Risk class attribute.
    m_fvInstanceRisks = new FastVector(7);
    m_fvInstanceRisks.addElement(attrLO);
    m_fvInstanceRisks.addElement(attrSafety);
    m_fvInstanceRisks.addElement(attrEFC);
    m_fvInstanceRisks.addElement(attrSR);
    m_fvInstanceRisks.addElement(attrEC);
    m_fvInstanceRisks.addElement(attrCSSF);
    m_fvInstanceRisks.addElement(attrRisk);

    // Action dataset: same six inputs plus the Action class attribute.
    m_fvInstanceActions = new FastVector(7);
    m_fvInstanceActions.addElement(attrLO);
    m_fvInstanceActions.addElement(attrSafety);
    m_fvInstanceActions.addElement(attrEFC);
    m_fvInstanceActions.addElement(attrSR);
    m_fvInstanceActions.addElement(attrEC);
    m_fvInstanceActions.addElement(attrCSSF);
    m_fvInstanceActions.addElement(attrAction);

    Instances dataRisk = new Instances("A-Risk-instance-to-classify", m_fvInstanceRisks, 0);
    Instances dataAction = new Instances("An-Action-instance-to-classify", m_fvInstanceActions, 0);
    // NOTE(review): both arrays are unused, and the second is sized from
    // dataRisk rather than dataAction — likely leftover code.
    double[] riskValues = new double[dataRisk.numAttributes()];
    double[] actionValues = new double[dataRisk.numAttributes()];

    // Current UI selections; "-- none --" maps to a missing value below.
    String strLO = (String) m_cmbLO.getSelectedItem();
    String strSafety = (String) m_cmbSafety.getSelectedItem();
    String strEFC = (String) m_cmbEFC.getSelectedItem();
    String strSR = (String) m_cmbSR.getSelectedItem();
    String strEC = (String) m_cmbEC.getSelectedItem();
    String strCSSF = (String) m_cmbCSSF.getSelectedItem();

    Instance instRisk = new DenseInstance(7);
    Instance instAction = new DenseInstance(7);

    if (strLO.equals("-- none --")) {
        instRisk.setMissing(0);
        instAction.setMissing(0);
    } else {
        instRisk.setValue((Attribute) m_fvInstanceRisks.elementAt(0), strLO);
        instAction.setValue((Attribute) m_fvInstanceActions.elementAt(0), strLO);
    }
    if (strSafety.equals("-- none --")) {
        instRisk.setMissing(1);
        instAction.setMissing(1);
    } else {
        instRisk.setValue((Attribute) m_fvInstanceRisks.elementAt(1), strSafety);
        instAction.setValue((Attribute) m_fvInstanceActions.elementAt(1), strSafety);
    }
    if (strEFC.equals("-- none --")) {
        instRisk.setMissing(2);
        instAction.setMissing(2);
    } else {
        instRisk.setValue((Attribute) m_fvInstanceRisks.elementAt(2), strEFC);
        instAction.setValue((Attribute) m_fvInstanceActions.elementAt(2), strEFC);
    }
    if (strSR.equals("-- none --")) {
        instRisk.setMissing(3);
        instAction.setMissing(3);
    } else {
        instRisk.setValue((Attribute) m_fvInstanceRisks.elementAt(3), strSR);
        instAction.setValue((Attribute) m_fvInstanceActions.elementAt(3), strSR);
    }
    if (strEC.equals("-- none --")) {
        instRisk.setMissing(4);
        instAction.setMissing(4);
    } else {
        instRisk.setValue((Attribute) m_fvInstanceRisks.elementAt(4), strEC);
        instAction.setValue((Attribute) m_fvInstanceActions.elementAt(4), strEC);
    }
    if (strCSSF.equals("-- none --")) {
        instRisk.setMissing(5);
        instAction.setMissing(5);
    } else {
        instAction.setValue((Attribute) m_fvInstanceActions.elementAt(5), strCSSF);
        instRisk.setValue((Attribute) m_fvInstanceRisks.elementAt(5), strCSSF);
    }
    // The class attribute (index 6) is what we want predicted.
    instRisk.setMissing(6);
    instAction.setMissing(6);

    // setDataset is called on the original instances (after add) so that
    // classifyInstance(instRisk/instAction) below sees the right header.
    dataRisk.add(instRisk);
    instRisk.setDataset(dataRisk);
    dataRisk.setClassIndex(dataRisk.numAttributes() - 1);

    dataAction.add(instAction);
    instAction.setDataset(dataAction);
    dataAction.setClassIndex(dataAction.numAttributes() - 1);

    System.out.println("Instance Resiko: " + dataRisk.instance(0));
    System.out.println("\tNum Attributes : " + dataRisk.numAttributes());
    System.out.println("\tNum instances  : " + dataRisk.numInstances());
    System.out.println("Instance Action: " + dataAction.instance(0));
    System.out.println("\tNum Attributes : " + dataAction.numAttributes());
    System.out.println("\tNum instances  : " + dataAction.numInstances());

    int classIndexRisk = 0;
    int classIndexAction = 0;
    String strClassRisk = null;
    String strClassAction = null;

    // Hard class prediction via the decision trees.
    try {
        classIndexRisk = (int) m_treeResiko.classifyInstance(instRisk);
        classIndexAction = (int) m_treeAksi.classifyInstance(instAction);
    } catch (Exception ex) {
        Logger.getLogger(MatResUI.class.getName()).log(Level.SEVERE, null, ex);
    }

    strClassRisk = (String) fvRisk.elementAt(classIndexRisk);
    strClassAction = (String) fvAction.elementAt(classIndexAction);
    System.out.println("[Risk  Class Index: " + classIndexRisk + " Class Label: " + strClassRisk + "]");
    System.out.println("[Action  Class Index: " + classIndexAction + " Class Label: " + strClassAction + "]");
    if (strClassRisk != null) {
        m_txtRisk.setText(strClassRisk);
    }

    // Probability distributions via NaiveBayes, shown as progress bars.
    double[] riskDist = null;
    double[] actionDist = null;
    try {
        riskDist = m_nbResiko.distributionForInstance(dataRisk.instance(0));
        actionDist = m_nbAksi.distributionForInstance(dataAction.instance(0));
        String strProb;
        // set up RISK progress bars
        m_jBarRiskLow.setValue((int) (100 * riskDist[0]));
        m_jBarRiskLow.setString(String.format("%6.3f%%", 100 * riskDist[0]));
        m_jBarRiskModerate.setValue((int) (100 * riskDist[1]));
        m_jBarRiskModerate.setString(String.format("%6.3f%%", 100 * riskDist[1]));
        m_jBarRiskHigh.setValue((int) (100 * riskDist[2]));
        m_jBarRiskHigh.setString(String.format("%6.3f%%", 100 * riskDist[2]));
        m_jBarRiskExtreme.setValue((int) (100 * riskDist[3]));
        m_jBarRiskExtreme.setString(String.format("%6.3f%%", 100 * riskDist[3]));
    } catch (Exception ex) {
        // NOTE(review): if this fails, riskDist stays null and the loop
        // below throws a NullPointerException.
        Logger.getLogger(MatResUI.class.getName()).log(Level.SEVERE, null, ex);
    }

    double predictedProb = 0.0;
    String predictedClass = "";

    // Loop over all the prediction labels in the distribution and keep
    // the most probable one.
    for (int predictionDistributionIndex = 0; predictionDistributionIndex < riskDist.length; predictionDistributionIndex++) {
        // Get this distribution index's class label.
        String predictionDistributionIndexAsClassLabel = dataRisk.classAttribute()
                .value(predictionDistributionIndex);
        int classIndex = dataRisk.classAttribute().indexOfValue(predictionDistributionIndexAsClassLabel);
        // Get the probability.
        double predictionProbability = riskDist[predictionDistributionIndex];

        if (predictionProbability > predictedProb) {
            predictedProb = predictionProbability;
            predictedClass = predictionDistributionIndexAsClassLabel;
        }

        System.out.printf("[%2d %10s : %6.3f]", classIndex, predictionDistributionIndexAsClassLabel,
                predictionProbability);
    }
    m_txtRiskNB.setText(predictedClass);
}

From source file:meka.classifiers.multilabel.cc.CNode.java

License:Open Source License

/**
 * Transform - turn [y1,y2,y3,x1,x2] into [y1,y2,x1,x2].
 *
 * Works on a copy of x: removes the label attributes that are not parents
 * of this node, fills in the predicted values for the parent labels, and
 * re-attaches the instance to this node's template dataset T.
 *
 * @return transformed Instance
 */
public Instance transform(Instance x, double ypred[]) throws Exception {
    x = (Instance) x.copy();
    int L = x.classIndex();
    // L_c = number of parent labels plus this node's own label.
    int L_c = (paY.length + 1);
    // Detach from the dataset so attributes can be deleted freely.
    x.setDataset(null);
    // Delete the (L - L_c) leading label attributes that are not kept.
    for (int j = 0; j < (L - L_c); j++) {
        x.deleteAttributeAt(0);
    }
    // Fill in the predicted values for the parent labels (map translates
    // original label indices to positions in the reduced instance).
    for (int pa : paY) {
        x.setValue(map[pa], ypred[pa]);
    }
    // Attach to this node's template dataset and mark the class missing
    // so it can be predicted.
    x.setDataset(T);
    x.setClassMissing();
    return x;
}