Example usage for weka.core Instances numAttributes

List of usage examples for weka.core Instances numAttributes

Introduction

In this page you can find the example usage for weka.core Instances numAttributes.

Prototype


public int numAttributes() 

Source Link

Document

Returns the number of attributes.

Usage

From source file:clasificador.RedNeuronal.java

/**
 * Evaluates the serialized MLP model ("TrainMLP.train") against the
 * librotest.arff test set and prints an evaluation summary plus the
 * confusion matrix to standard output.
 */
public void testing() {
    File arff = new File(
            System.getProperty("user.dir") + "\\src\\clasificador\\archivos\\librotest.arff");
    // try-with-resources guarantees the reader is closed; the original
    // leaked the FileReader on every code path
    try (FileReader testReader = new FileReader(arff)) {
        Instances testInstance = new Instances(testReader);
        // convention: the class attribute is the last attribute in the ARFF file
        testInstance.setClassIndex(testInstance.numAttributes() - 1);
        Evaluation evalTest = new Evaluation(testInstance);
        SerializedClassifier clasificador = new SerializedClassifier();
        clasificador.setModelFile(new File("TrainMLP.train"));
        // CLASIFICADOR ESTANDAR
        Classifier clasificadorEstandar = clasificador.getCurrentModel();
        evalTest.evaluateModel(clasificadorEstandar, testInstance);

        System.out.println(evalTest.toSummaryString("resultado:", false));
        System.out.println(evalTest.toMatrixString("*****************Matriz de confusion*******"));
    } catch (Exception ex) {
        // unified handling: the original printed IOExceptions to stderr but
        // logged everything else; route all failures through the logger
        Logger.getLogger(RedNeuronal.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:clasificador.RedNeuronal.java

/**
 * Loads libro1.arff, evaluates the serialized MLP model on it and prints
 * the predicted class value for every instance.
 */
public void prediccion() {
    File arff = new File(
            System.getProperty("user.dir") + "\\src\\clasificador\\archivos\\libro1.arff");
    // try-with-resources replaces the manual finally block; the original's
    // finally called testReader.close() even when the reader was never
    // opened (FileNotFoundException path), causing a NullPointerException
    try (FileReader testReader = new FileReader(arff)) {
        Instances testInstance = new Instances(testReader);
        // convention: the class attribute is the last attribute in the ARFF file
        testInstance.setClassIndex(testInstance.numAttributes() - 1);
        Evaluation evalTest = new Evaluation(testInstance);
        SerializedClassifier clasificador = new SerializedClassifier();
        clasificador.setModelFile(new File("TrainMLP.train"));
        // CLASIFICADOR ESTANDAR
        Classifier clasificadorEstandar = clasificador.getCurrentModel();
        // evaluate once; the original called evaluateModel twice on the
        // same data, doubling the work and accumulating duplicate statistics
        double[] valores = evalTest.evaluateModel(clasificadorEstandar, testInstance);

        for (int i = 0; i < valores.length; i++) {
            System.out.println("se predice:     " + valores[i] + "\n");
        }
    } catch (Exception ex) {
        Logger.getLogger(RedNeuronal.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:classification.classifiers.LDA.java

License:Open Source License

/**
 * Modification on Dr. Wolfgang Lenhard's code.
 * This was necessary because this classifier had to implement
 * "buildClassifier" and "classifyInstance" to be like a classifier of WEKA(R).
 *
 * @param data the training instances; the last attribute is used as the class
 * @throws Exception if the LDA model cannot be built
 */
public void buildClassifier(Instances data) throws Exception {
    int n = data.numInstances();
    int a = data.numAttributes();
    int k = data.numClasses();
    // g[j] = zero-based class index assigned to instance j
    int[] g = new int[n];

    // copy the raw attribute values into a plain matrix for the LDA routine
    double[][] d = new double[n][a];
    for (int i = 0; i < n; i++) {
        d[i] = data.instance(i).toDoubleArray();
    }

    // the class attribute is assumed to be the last one
    int classIndex = a - 1;
    valueClass = new double[k];

    data.setClassIndex(classIndex);

    for (int i = 0; i < k; i++) {
        // Reference label of class i
        String refClass = data.classAttribute().value(i);

        for (int j = 0; j < n; j++) {
            // Label of instance j
            String objectClass = data.instance(j).stringValue(classIndex);

            // Building two vectors of classes, one in int format and
            // another in double format.
            // BUG FIX: compare string content with equals(); the original used
            // "==", which tests reference identity and only matched when both
            // strings happened to be interned to the same object.
            if (objectClass.equals(refClass)) {
                // Object class as a double
                valueClass[i] = data.instance(j).value(data.attribute(classIndex));
                // Object class as an int
                g[j] = i;
            }
        }
    }

    this.BuildLDA(d, g, true);
}

From source file:classifier.CustomStringToWordVector.java

License:Open Source License

/**
 * Sets the format of the input instances.
 * /*  ww  w .java2  s.co m*/
 * @param instanceInfo
 *            an Instances object containing the input instance structure
 *            (any instances contained in the object are ignored - only the
 *            structure is required).
 * @return true if the outputFormat may be collected immediately
 * @throws Exception
 *             if the input format can't be set successfully
 */
public boolean setInputFormat(Instances instanceInfo) throws Exception {

    super.setInputFormat(instanceInfo);
    m_SelectedRange.setUpper(instanceInfo.numAttributes() - 1);
    m_AvgDocLength = -1;
    m_NumInstances = -1;
    return false;
}

From source file:classifier.CustomStringToWordVector.java

License:Open Source License

/**
 * determines the selected range./*  w w  w  . j  a va 2s. c  o  m*/
 */
private void determineSelectedRange() {

    Instances inputFormat = getInputFormat();

    // Calculate the default set of fields to convert
    if (m_SelectedRange == null) {
        StringBuffer fields = new StringBuffer();
        for (int j = 0; j < inputFormat.numAttributes(); j++) {
            if (inputFormat.attribute(j).type() == Attribute.STRING)
                fields.append((j + 1) + ",");
        }
        m_SelectedRange = new Range(fields.toString());
    }
    m_SelectedRange.setUpper(inputFormat.numAttributes() - 1);

    // Prevent the user from converting non-string fields
    StringBuffer fields = new StringBuffer();
    for (int j = 0; j < inputFormat.numAttributes(); j++) {
        if (m_SelectedRange.isInRange(j) && inputFormat.attribute(j).type() == Attribute.STRING)
            fields.append((j + 1) + ",");
    }
    m_SelectedRange.setRanges(fields.toString());
    m_SelectedRange.setUpper(inputFormat.numAttributes() - 1);

    // System.err.println("Selected Range: " +
    // getSelectedRange().getRanges());
}

From source file:classifier.SellerClassifier.java

/**
 * Loads a data set from the given source and ensures a class attribute is
 * set, defaulting to the last attribute when the source defines none.
 *
 * @param dataset path or URL understood by Weka's DataSource
 * @return the loaded instances with a class index assigned
 * @throws Exception if the data set cannot be read
 */
private Instances loadData(String dataset) throws Exception {
    DataSource source = new DataSource(dataset);
    Instances instances = source.getDataSet();

    boolean classUndefined = instances.classIndex() == -1;
    if (classUndefined) {
        // fall back to the usual convention: last attribute is the class
        instances.setClassIndex(instances.numAttributes() - 1);
    }
    return instances;
}

From source file:Classifier.supervised.LinearRegression.java

License:Open Source License

/**
 * Builds a regression model for the given data.
 *
 * @param data the training data to be used for generating the
 * linear regression function
 * @throws Exception if the classifier could not be built successfully
 */
public void buildClassifier(Instances data) throws Exception {
    m_ModelBuilt = false;

    if (!m_checksTurnedOff) {
        // can classifier handle the data?
        getCapabilities().testWithFail(data);

        // remove instances with missing class (work on a copy so the
        // caller's data set is left untouched)
        data = new Instances(data);
        data.deleteWithMissingClass();
    }

    // Preprocess instances
    if (!m_checksTurnedOff) {
        // binarize nominal attributes so the regression sees numeric inputs
        m_TransformFilter = new NominalToBinary();
        m_TransformFilter.setInputFormat(data);
        data = Filter.useFilter(data, m_TransformFilter);
        // replace missing attribute values with means/modes
        m_MissingFilter = new ReplaceMissingValues();
        m_MissingFilter.setInputFormat(data);
        data = Filter.useFilter(data, m_MissingFilter);
        data.deleteWithMissingClass();
    } else {
        // checks are off: the filters stay null and raw data is used as-is
        m_TransformFilter = null;
        m_MissingFilter = null;
    }

    m_ClassIndex = data.classIndex();
    m_TransformedData = data;

    // Turn all attributes on for a start (the class attribute is excluded)
    m_SelectedAttributes = new boolean[data.numAttributes()];
    for (int i = 0; i < data.numAttributes(); i++) {
        if (i != m_ClassIndex) {
            m_SelectedAttributes[i] = true;
        }
    }
    m_Coefficients = null;

    // Compute means and standard deviations
    m_Means = new double[data.numAttributes()];
    m_StdDevs = new double[data.numAttributes()];
    for (int j = 0; j < data.numAttributes(); j++) {
        if (j != data.classIndex()) {
            m_Means[j] = data.meanOrMode(j);
            m_StdDevs[j] = Math.sqrt(data.variance(j));
            // constant attributes carry no information; deselect them
            if (m_StdDevs[j] == 0) {
                m_SelectedAttributes[j] = false;
            }
        }
    }

    m_ClassStdDev = Math.sqrt(data.variance(m_TransformedData.classIndex()));
    m_ClassMean = data.meanOrMode(m_TransformedData.classIndex());

    // Perform the regression
    findBestModel();

    // Save memory
    if (m_Minimal) {
        m_TransformedData = null;
        m_Means = null;
        m_StdDevs = null;
    } else {
        // keep only the header (zero instances) for later output
        m_TransformedData = new Instances(data, 0);
    }

    m_ModelBuilt = true;
}

From source file:classifiers.ComplexClassifier.java

@Override
public void bootstrapvalidierungsmenge(Instances inst) {
    // Draws a bootstrap sample (with replacement) from inst to build the
    // model set (Modelmenge); the out-of-bag instances form the validation
    // set (validierungsmenge).
    if (inst.numAttributes() != 0) {
        // sample instance indices uniformly with replacement
        int[] hilf = new int[inst.numInstances()];
        for (int i = 0; i < inst.numInstances(); i++) {
            hilf[i] = (int) (Math.random() * inst.numInstances());
        }

        // deduplicate the sampled indices and copy those instances
        Modelsindexen = EliminiereDopelt(hilf);
        Modelmenge = new Instances(inst, Modelsindexen.length);
        for (int i = 0; i < Modelsindexen.length; i++) {
            Modelmenge.add(new Instance(inst.instance(Modelsindexen[i])));
        }

        validierungsindexen = new int[inst.numInstances() - Modelsindexen.length];
        validierungsmenge = new Instances(Modelmenge, validierungsindexen.length);

        // BUG FIX: j must advance only when an out-of-bag index is stored.
        // The original incremented j on every iteration, so validation slots
        // were skipped and trailing entries stayed 0 whenever an in-bag
        // instance was encountered.
        for (int i = 0, j = 0; i < inst.numInstances() && j < validierungsindexen.length; i++) {
            if (!HasSet(Modelsindexen, i)) {
                validierungsindexen[j] = i;
                validierungsmenge.add(inst.instance(validierungsindexen[j]));
                j++;
            }
        }
    }
}

From source file:classifiers.ComplexClassifier.java

@Override
public double[][] test(Instances testinst) {
    // Returns { { misclassification rate in percent, elapsed milliseconds } };
    // both entries stay 0 when the data set has no attributes.
    double[][] ausgabe = new double[1][2];
    long anfangszeit = System.currentTimeMillis();

    if (testinst.numAttributes() == 0) {
        return ausgabe;
    }

    // the last attribute is treated as the class attribute
    testinst.setClass(testinst.attribute(testinst.numAttributes() - 1));

    // count the instances the classifier gets wrong
    double count = 0;
    for (int i = 0; i < testinst.numInstances(); i++) {
        if (!Classify(testinst.instance(i))) {
            count++;
        }
    }

    long endzeit = System.currentTimeMillis();
    ausgabe[0][0] = (count / testinst.numInstances()) * 100;
    ausgabe[0][1] = endzeit - anfangszeit;
    return ausgabe;
}

From source file:classifiers.ComplexClassifierZufall.java

@Override
@SuppressWarnings("empty-statement")
public double[][] test(Instances testinst) {
    // Returns { { misclassification rate in percent, elapsed milliseconds } };
    // both entries stay 0 when the data set has no attributes.
    double[][] ausgabe = new double[1][2];
    long anfangszeit = System.currentTimeMillis();

    if (testinst.numAttributes() == 0) {
        return ausgabe;
    }

    // the last attribute is treated as the class attribute
    testinst.setClass(testinst.attribute(testinst.numAttributes() - 1));

    // count the instances the classifier gets wrong
    double count = 0;
    for (int i = 0; i < testinst.numInstances(); i++) {
        if (!Classify(testinst.instance(i))) {
            count++;
        }
    }

    long endzeit = System.currentTimeMillis();
    ausgabe[0][0] = (count / testinst.numInstances()) * 100;
    ausgabe[0][1] = endzeit - anfangszeit;
    return ausgabe;
}