Example usage for weka.core Instances numAttributes

List of usage examples for weka.core Instances numAttributes

Introduction

On this page you can find usage examples for weka.core Instances numAttributes.

Prototype


public int numAttributes()

Document

Returns the number of attributes.
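
Before the longer examples below, here is a minimal, self-contained sketch of the call (the class name NumAttributesSketch and the attribute names are purely illustrative): it builds a small header in memory, reports the attribute count, and then uses numAttributes() to drive the iteration idiom that recurs throughout the examples.

import java.util.ArrayList;

import weka.core.Attribute;
import weka.core.Instances;

public class NumAttributesSketch {

    public static void main(String[] args) {
        // build a tiny header in memory; attribute names are illustrative only
        ArrayList<Attribute> atts = new ArrayList<>();
        atts.add(new Attribute("length"));
        atts.add(new Attribute("width"));
        atts.add(new Attribute("weight"));
        Instances data = new Instances("demo", atts, 0);

        // numAttributes() returns the total number of attributes (columns) in the dataset
        System.out.println("Number of attributes: " + data.numAttributes());

        // the iteration idiom used by most of the examples below
        for (int i = 0; i < data.numAttributes(); i++)
            System.out.println(i + ": " + data.attribute(i).name());
    }
}

Note that numAttributes() counts every attribute, including the class attribute if one has been set.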

Usage

From source file: adams.flow.transformer.WekaInstancesMerge.java

License: Open Source License

/**
 * Prefixes the attributes.
 *
 * @param index   the index of the dataset
 * @param inst   the data to process
 * @return      the processed data
 */
protected Instances prefixAttributes(Instances inst, int index) {
    Instances result;
    String prefix;
    ArrayList<Attribute> atts;
    int i;

    prefix = createPrefix(inst, index);

    // header
    atts = new ArrayList<>();
    for (i = 0; i < inst.numAttributes(); i++)
        atts.add(inst.attribute(i).copy(prefix + inst.attribute(i).name()));

    // data
    result = new Instances(inst.relationName(), atts, inst.numInstances());
    result.setClassIndex(inst.classIndex());
    for (i = 0; i < inst.numInstances(); i++)
        result.add((Instance) inst.instance(i).copy());

    return result;
}

From source file: adams.flow.transformer.WekaInstancesMerge.java

License: Open Source License

/**
 * Merges the datasets based on the collected IDs.
 *
 * @param orig   the original datasets
 * @param inst   the processed datasets to merge into one
 * @param ids      the IDs for identifying the rows
 * @return      the merged dataset
 */
protected Instances merge(Instances[] orig, Instances[] inst, HashSet ids) {
    Instances result;
    ArrayList<Attribute> atts;
    int i;
    int n;
    int m;
    int index;
    String relation;
    List sortedIDs;
    Attribute att;
    int[] indexStart;
    double value;
    double[] values;
    HashMap<Integer, Integer> hashmap;
    HashSet<Instance> hs;

    // create header
    if (isLoggingEnabled())
        getLogger().info("Creating merged header...");
    atts = new ArrayList<>();
    relation = "";
    indexStart = new int[inst.length];
    for (i = 0; i < inst.length; i++) {
        indexStart[i] = atts.size();
        for (n = 0; n < inst[i].numAttributes(); n++)
            atts.add((Attribute) inst[i].attribute(n).copy());
        // assemble relation name
        if (i > 0)
            relation += "_";
        relation += inst[i].relationName();
    }
    result = new Instances(relation, atts, ids.size());

    // fill with missing values
    if (isLoggingEnabled())
        getLogger().info("Filling with missing values...");
    for (i = 0; i < ids.size(); i++) {
        if (isStopped())
            return null;
        // progress
        if (isLoggingEnabled() && ((i + 1) % 1000 == 0))
            getLogger().info("" + (i + 1));
        result.add(new DenseInstance(result.numAttributes()));
    }

    // sort IDs
    if (isLoggingEnabled())
        getLogger().info("Sorting indices...");
    sortedIDs = new ArrayList(ids);
    Collections.sort(sortedIDs);

    // generate rows
    hashmap = new HashMap<>();
    for (i = 0; i < inst.length; i++) {
        if (isStopped())
            return null;
        if (isLoggingEnabled())
            getLogger().info("Adding file #" + (i + 1));
        att = orig[i].attribute(m_UniqueID);
        for (n = 0; n < inst[i].numInstances(); n++) {
            // progress
            if (isLoggingEnabled() && ((n + 1) % 1000 == 0))
                getLogger().info("" + (n + 1));

            // determine index of row
            if (m_AttType == Attribute.NUMERIC)
                index = Collections.binarySearch(sortedIDs, inst[i].instance(n).value(att));
            else
                index = Collections.binarySearch(sortedIDs, inst[i].instance(n).stringValue(att));
            if (index < 0)
                throw new IllegalStateException(
                        "Failed to determine index for row #" + (n + 1) + " of dataset #" + (i + 1) + "!");

            if (!hashmap.containsKey(index))
                hashmap.put(index, 0);
            hashmap.put(index, hashmap.get(index) + 1);

            // use internal representation for faster access
            values = result.instance(index).toDoubleArray();

            // add attribute values
            for (m = 0; m < inst[i].numAttributes(); m++) {
                // missing value?
                if (inst[i].instance(n).isMissing(m))
                    continue;

                switch (inst[i].attribute(m).type()) {
                case Attribute.NUMERIC:
                case Attribute.DATE:
                case Attribute.NOMINAL:
                    values[indexStart[i] + m] = inst[i].instance(n).value(m);
                    break;

                case Attribute.STRING:
                    value = result.attribute(indexStart[i] + m)
                            .addStringValue(inst[i].instance(n).stringValue(m));
                    values[indexStart[i] + m] = value;
                    break;

                case Attribute.RELATIONAL:
                    value = result.attribute(indexStart[i] + m)
                            .addRelation(inst[i].instance(n).relationalValue(m));
                    values[indexStart[i] + m] = value;
                    break;

                default:
                    throw new IllegalStateException("Unhandled attribute type: " + inst[i].attribute(m).type());
                }
            }

            // update row
            result.set(index, new DenseInstance(1.0, values));
        }
    }

    if (getRemove()) {
        hs = new HashSet<>();
        for (Integer x : hashmap.keySet()) {
            if (hashmap.get(x) != inst.length)
                hs.add(result.get(x));
        }
        result.removeAll(hs);
    }

    return result;
}

From source file: adams.flow.transformer.WekaInstancesStatistic.java

License: Open Source License

/**
 * Executes the flow item.
 *
 * @return      null if everything is fine, otherwise error message
 */
@Override
protected String doExecute() {
    String result;
    SpreadSheet sheet;
    Instances data;
    int i;
    int n;
    Index index;
    AbstractArrayStatistic stat;

    result = null;

    try {
        sheet = null;
        data = (Instances) m_InputToken.getPayload();
        stat = m_Statistic.shallowCopy(true);

        for (i = 0; i < m_Locations.length; i++) {
            switch (m_DataType) {
            case ROW_BY_INDEX:
                index = new Index(m_Locations[i].stringValue());
                index.setMax(data.numInstances());
                stat.add(StatUtils.toNumberArray(data.instance(index.getIntIndex()).toDoubleArray()));
                break;

            case COLUMN_BY_INDEX:
                index = new WekaAttributeIndex(m_Locations[i].stringValue());
                ((WekaAttributeIndex) index).setData(data);
                stat.add(StatUtils.toNumberArray(data.attributeToDoubleArray(index.getIntIndex())));
                break;

            case COLUMN_BY_REGEXP:
                for (n = 0; n < data.numAttributes(); n++) {
                    if (data.attribute(n).name().matches(m_Locations[i].stringValue())) {
                        stat.add(StatUtils.toNumberArray(data.attributeToDoubleArray(n)));
                        break;
                    }
                }
                break;

            default:
                throw new IllegalStateException("Unhandled data type: " + m_DataType);
            }
        }

        sheet = stat.calculate().toSpreadSheet();
    } catch (Exception e) {
        result = handleException("Error generating the statistic: ", e);
        sheet = null;
    }

    if (sheet != null)
        m_OutputToken = new Token(sheet);

    return result;
}

From source file: adams.flow.transformer.WekaMultiLabelSplitter.java

License: Open Source License

/**
 * Returns the generated token.
 *
 * @return      the generated token
 */
@Override
public Token output() {
    Token result;
    int index;
    Remove remove;
    Reorder reorder;
    StringBuilder indices;
    int i;
    int newIndex;
    Instances processed;

    result = null;

    index = m_AttributesToProcess.remove(0);
    remove = new Remove();
    indices = new StringBuilder();
    for (i = 0; i < m_ClassAttributes.size(); i++) {
        if (m_ClassAttributes.get(i) == index)
            continue;
        if (indices.length() > 0)
            indices.append(",");
        indices.append("" + (m_ClassAttributes.get(i) + 1));
    }
    remove.setAttributeIndices(indices.toString());

    try {
        remove.setInputFormat(m_Dataset);
        processed = weka.filters.Filter.useFilter(m_Dataset, remove);
        if (m_UpdateRelationName)
            processed.setRelationName(m_Dataset.attribute(index).name());
        result = new Token(processed);
    } catch (Exception e) {
        processed = null;
        handleException(
                "Failed to process dataset with following filter setup:\n" + OptionUtils.getCommandLine(remove),
                e);
    }

    if (m_MakeClassLast && (processed != null)) {
        newIndex = processed.attribute(m_Dataset.attribute(index).name()).index();
        indices = new StringBuilder();
        for (i = 0; i < processed.numAttributes(); i++) {
            if (i == newIndex)
                continue;
            if (indices.length() > 0)
                indices.append(",");
            indices.append("" + (i + 1));
        }
        if (indices.length() > 0)
            indices.append(",");
        indices.append("" + (newIndex + 1));
        reorder = new Reorder();
        try {
            reorder.setAttributeIndices(indices.toString());
            reorder.setInputFormat(processed);
            processed = weka.filters.Filter.useFilter(processed, reorder);
            if (m_UpdateRelationName)
                processed.setRelationName(m_Dataset.attribute(index).name());
            result = new Token(processed);
        } catch (Exception e) {
            handleException("Failed to process dataset with following filter setup:\n"
                    + OptionUtils.getCommandLine(reorder), e);
        }
    }

    return result;
}

From source file: adams.flow.transformer.WekaNewInstance.java

License: Open Source License

/**
 * Executes the flow item.
 *
 * @return      null if everything is fine, otherwise error message
 */
@Override
protected String doExecute() {
    String result;
    Instances data;
    Instance inst;
    Class cls;
    Constructor constr;

    result = null;

    data = (Instances) m_InputToken.getPayload();

    try {
        cls = m_InstanceClass.classValue();
        constr = cls.getConstructor(new Class[] { Integer.TYPE });
        inst = (Instance) constr.newInstance(new Object[] { data.numAttributes() });
        inst.setDataset(data);
        m_OutputToken = new Token(inst);
    } catch (Exception e) {
        result = handleException("Failed to create new instance: ", e);
    }

    return result;
}

From source file: adams.flow.transformer.WekaPredictionsToInstances.java

License: Open Source License

/**
 * Executes the flow item.
 *
 * @return      null if everything is fine, otherwise error message
 */
@Override
protected String doExecute() {
    String result;
    Evaluation eval;
    int i;
    int n;
    int indexErr;
    int indexProb;
    int indexDist;
    int indexWeight;
    boolean nominal;
    Instances header;
    ArrayList<Attribute> atts;
    ArrayList<String> values;
    ArrayList<Prediction> predictions;
    Prediction pred;
    double[] vals;
    Instances data;
    Instances testData;
    int[] indices;

    result = null;

    if (m_InputToken.getPayload() instanceof WekaEvaluationContainer) {
        eval = (Evaluation) ((WekaEvaluationContainer) m_InputToken.getPayload())
                .getValue(WekaEvaluationContainer.VALUE_EVALUATION);
        indices = (int[]) ((WekaEvaluationContainer) m_InputToken.getPayload())
                .getValue(WekaEvaluationContainer.VALUE_ORIGINALINDICES);
        testData = (Instances) ((WekaEvaluationContainer) m_InputToken.getPayload())
                .getValue(WekaEvaluationContainer.VALUE_TESTDATA);
    } else {
        eval = (Evaluation) m_InputToken.getPayload();
        indices = null;
        testData = null;
    }
    header = eval.getHeader();
    nominal = header.classAttribute().isNominal();
    predictions = eval.predictions();

    if (predictions != null) {
        // create header
        atts = new ArrayList<>();
        // actual
        if (nominal && m_AddLabelIndex) {
            values = new ArrayList<>();
            for (i = 0; i < header.classAttribute().numValues(); i++)
                values.add((i + 1) + ":" + header.classAttribute().value(i));
            atts.add(new Attribute(m_MeasuresPrefix + "Actual", values));
        } else {
            atts.add(header.classAttribute().copy(m_MeasuresPrefix + "Actual"));
        }
        // predicted
        if (nominal && m_AddLabelIndex) {
            values = new ArrayList<>();
            for (i = 0; i < header.classAttribute().numValues(); i++)
                values.add((i + 1) + ":" + header.classAttribute().value(i));
            atts.add(new Attribute(m_MeasuresPrefix + "Predicted", values));
        } else {
            atts.add(header.classAttribute().copy(m_MeasuresPrefix + "Predicted"));
        }
        // error
        indexErr = -1;
        if (m_ShowError) {
            indexErr = atts.size();
            if (nominal) {
                values = new ArrayList<>();
                values.add("n");
                values.add("y");
                atts.add(new Attribute(m_MeasuresPrefix + "Error", values));
            } else {
                atts.add(new Attribute(m_MeasuresPrefix + "Error"));
            }
        }
        // probability
        indexProb = -1;
        if (m_ShowProbability && nominal) {
            indexProb = atts.size();
            atts.add(new Attribute(m_MeasuresPrefix + "Probability"));
        }
        // distribution
        indexDist = -1;
        if (m_ShowDistribution && nominal) {
            indexDist = atts.size();
            for (n = 0; n < header.classAttribute().numValues(); n++)
                atts.add(new Attribute(
                        m_MeasuresPrefix + "Distribution (" + header.classAttribute().value(n) + ")"));
        }
        // weight
        indexWeight = -1;
        if (m_ShowWeight) {
            indexWeight = atts.size();
            atts.add(new Attribute(m_MeasuresPrefix + "Weight"));
        }

        data = new Instances("Predictions", atts, predictions.size());
        data.setClassIndex(1); // predicted

        // add data
        if ((indices != null) && m_UseOriginalIndices)
            predictions = CrossValidationHelper.alignPredictions(predictions, indices);
        for (i = 0; i < predictions.size(); i++) {
            pred = predictions.get(i);
            vals = new double[data.numAttributes()];
            // actual
            vals[0] = pred.actual();
            // predicted
            vals[1] = pred.predicted();
            // error
            if (m_ShowError) {
                if (nominal) {
                    vals[indexErr] = ((pred.actual() != pred.predicted()) ? 1.0 : 0.0);
                } else {
                    if (m_UseAbsoluteError)
                        vals[indexErr] = Math.abs(pred.actual() - pred.predicted());
                    else
                        vals[indexErr] = pred.actual() - pred.predicted();
                }
            }
            // probability
            if (m_ShowProbability && nominal) {
                vals[indexProb] = StatUtils.max(((NominalPrediction) pred).distribution());
            }
            // distribution
            if (m_ShowDistribution && nominal) {
                for (n = 0; n < header.classAttribute().numValues(); n++)
                    vals[indexDist + n] = ((NominalPrediction) pred).distribution()[n];
            }
            // weight
            if (m_ShowWeight) {
                vals[indexWeight] = pred.weight();
            }
            // add row
            data.add(new DenseInstance(1.0, vals));
        }

        // add test data?
        if ((testData != null) && !m_TestAttributes.isEmpty()) {
            testData = filterTestData(testData);
            if (testData != null)
                data = Instances.mergeInstances(data, testData);
        }

        // generate output token
        m_OutputToken = new Token(data);
    } else {
        getLogger().severe("No predictions available from Evaluation object!");
    }

    return result;
}

From source file: adams.flow.transformer.WekaRegexToRange.java

License: Open Source License

/**
 * Executes the flow item.
 *
 * @return      null if everything is fine, otherwise error message
 */
@Override
protected String doExecute() {
    String result;
    String range;
    Instances inst;

    result = null;
    range = "";

    if (m_InputToken.getPayload() instanceof Instances)
        inst = (Instances) m_InputToken.getPayload();
    else
        inst = ((Instance) m_InputToken.getPayload()).dataset();

    int firstInRange = Integer.MIN_VALUE;
    int lastInRange = Integer.MIN_VALUE;
    int last = Integer.MIN_VALUE;

    for (int i = 0; i < inst.numAttributes(); i++) {
        if (match(inst.attribute(i).name())) {
            if (i == last + 1) {
                lastInRange = i;
            } else {
                if (firstInRange != Integer.MIN_VALUE) {
                    if (!range.equals("")) {
                        range += ",";
                    }
                    if (firstInRange - lastInRange == 0) {
                        range += "" + (firstInRange + 1);
                    } else {
                        range += "" + (firstInRange + 1) + "-" + (lastInRange + 1);
                    }
                }

                firstInRange = i;
                lastInRange = i;
            }
            last = i;
        }
    }
    if (!range.equals("")) {
        range += ",";
    }
    if (firstInRange < 0) {
        range = "";
    } else if (lastInRange < 0 || lastInRange == firstInRange) {
        range += "" + (firstInRange + 1);
    } else {
        range += "" + (firstInRange + 1) + "-" + (lastInRange + 1);
    }

    m_OutputToken = new Token(range);

    return result;
}

From source file: adams.flow.transformer.WekaReorderAttributesToReference.java

License: Open Source License

/**
 * Executes the flow item.
 *
 * @return      null if everything is fine, otherwise error message
 */
@Override
protected String doExecute() {
    String result;
    Instances dataOld;
    Instance instOld;
    Instances dataNew;
    Instance instNew;
    Attribute att;
    int i;
    StringBuilder order;
    List<Add> adds;
    Add add;
    int index;
    StringBuilder labels;
    int n;
    List<Filter> filters;
    Reorder reorder;

    result = null;

    if (m_OnTheFly && (m_Reference == null)) {
        result = setUpReference();
        if (result != null)
            return result;
    }

    dataNew = null;
    instNew = null;

    // get input data
    if (m_InputToken.getPayload() instanceof Instance) {
        instOld = (Instance) m_InputToken.getPayload();
        dataOld = instOld.dataset();
    } else {
        instOld = null;
        dataOld = (Instances) m_InputToken.getPayload();
    }

    // do we need to initialize filter?
    if (m_InitializeOnce || (m_Reorder == null)) {
        // check incoming data
        if (!m_Lenient) {
            for (i = 0; i < m_Reference.numAttributes(); i++) {
                att = m_Reference.attribute(i);
                if (dataOld.attribute(att.name()) == null) {
                    if (result == null)
                        result = "Missing attribute(s) in incoming data: " + att.name();
                    else
                        result += ", " + att.name();
                }
            }
            if (result != null)
                getLogger().severe(result);
        }

        if (result == null) {
            try {
                // determine indices
                order = new StringBuilder();
                adds = new ArrayList<Add>();
                for (i = 0; i < m_Reference.numAttributes(); i++) {
                    att = m_Reference.attribute(i);
                    if (dataOld.attribute(att.name()) == null) {
                        index = dataOld.numAttributes() + adds.size();
                        add = new Add();
                        add.setAttributeIndex("last");
                        add.setAttributeName(att.name());
                        add.setAttributeType(new SelectedTag(att.type(), Add.TAGS_TYPE));
                        if (att.isNominal()) {
                            labels = new StringBuilder();
                            for (n = 0; n < att.numValues(); n++) {
                                if (labels.length() > 0)
                                    labels.append(",");
                                labels.append(att.value(n));
                            }
                            add.setNominalLabels(labels.toString());
                        }
                        adds.add(add);
                    } else {
                        index = dataOld.attribute(att.name()).index();
                    }
                    if (order.length() > 0)
                        order.append(",");
                    order.append((index + 1));
                }

                // build reorder filter
                reorder = new Reorder();
                reorder.setAttributeIndices(order.toString());

                // build multifilter
                filters = new ArrayList<Filter>();
                filters.addAll(adds);
                filters.add(reorder);
                m_Reorder = new MultiFilter();
                m_Reorder.setFilters(filters.toArray(new Filter[filters.size()]));

                // initialize filter
                m_Reorder.setInputFormat(dataOld);
            } catch (Exception e) {
                result = handleException("Failed to initialize reorder filter!", e);
            }
        }
    }

    // reorder data
    if (result == null) {
        try {
            if (instOld != null) {
                m_Reorder.input(instOld);
                m_Reorder.batchFinished();
                instNew = m_Reorder.output();
                if (m_KeepRelationName)
                    instNew.dataset().setRelationName(dataOld.relationName());
            } else {
                dataNew = Filter.useFilter(dataOld, m_Reorder);
                if (m_KeepRelationName)
                    dataNew.setRelationName(dataOld.relationName());
            }
        } catch (Exception e) {
            result = handleException("Failed to reorder data!", e);
            instNew = null;
            dataNew = null;
        }
    }

    if (instNew != null)
        m_OutputToken = new Token(instNew);
    else if (dataNew != null)
        m_OutputToken = new Token(dataNew);

    return result;
}

From source file: adams.gui.menu.CostCurve.java

License: Open Source License

/**
 * Launches the functionality of the menu item.
 */
@Override
public void launch() {
    File file;
    if (m_Parameters.length == 0) {
        // choose file
        int retVal = m_FileChooser.showOpenDialog(null);
        if (retVal != JFileChooser.APPROVE_OPTION)
            return;
        file = m_FileChooser.getSelectedFile();
    } else {
        file = new PlaceholderFile(m_Parameters[0]).getAbsoluteFile();
        m_FileChooser.setSelectedFile(file);
    }

    // create plot
    Instances result;
    try {
        result = m_FileChooser.getLoader().getDataSet();
    } catch (Exception e) {
        GUIHelper.showErrorMessage(getOwner(),
                "Error loading file '" + file + "':\n" + adams.core.Utils.throwableToString(e));
        return;
    }
    result.setClassIndex(result.numAttributes() - 1);
    ThresholdVisualizePanel vmc = new ThresholdVisualizePanel();
    PlotData2D plot = new PlotData2D(result);
    plot.setPlotName(result.relationName());
    plot.m_displayAllPoints = true;
    boolean[] connectPoints = new boolean[result.numInstances()];
    for (int cp = 1; cp < connectPoints.length; cp++)
        connectPoints[cp] = true;
    try {
        plot.setConnectPoints(connectPoints);
        vmc.addPlot(plot);
    } catch (Exception e) {
        GUIHelper.showErrorMessage(getOwner(), "Error adding plot:\n" + adams.core.Utils.throwableToString(e));
        return;
    }

    ChildFrame frame = createChildFrame(vmc, GUIHelper.getDefaultDialogDimension());
    frame.setTitle(frame.getTitle() + " - " + file);
}

From source file: adams.gui.menu.InstancesPlot.java

License: Open Source License

/**
 * Launches the functionality of the menu item.
 */
@Override
public void launch() {
    File file;
    AbstractFileLoader loader;
    if (m_Parameters.length == 0) {
        // choose file
        int retVal = m_FileChooser.showOpenDialog(getOwner());
        if (retVal != JFileChooser.APPROVE_OPTION)
            return;
        file = m_FileChooser.getSelectedFile();
        loader = m_FileChooser.getLoader();
    } else {
        file = new PlaceholderFile(m_Parameters[0]).getAbsoluteFile();
        loader = ConverterUtils.getLoaderForFile(file);
    }

    // build plot
    VisualizePanel panel = new VisualizePanel();
    getLogger().severe("Loading instances from " + file);
    try {
        loader.setFile(file);
        Instances i = loader.getDataSet();
        i.setClassIndex(i.numAttributes() - 1);
        PlotData2D pd1 = new PlotData2D(i);
        pd1.setPlotName("Master plot");
        panel.setMasterPlot(pd1);
    } catch (Exception e) {
        getLogger().log(Level.SEVERE, "Failed to load: " + file, e);
        GUIHelper.showErrorMessage(getOwner(),
                "Error loading file '" + file + "':\n" + Utils.throwableToString(e));
        return;
    }

    // create frame
    ChildFrame frame = createChildFrame(panel, GUIHelper.getDefaultDialogDimension());
    frame.setTitle(frame.getTitle() + " - " + file);
}