Example usage for weka.core Instances numAttributes

List of usage examples for weka.core Instances numAttributes

Introduction

This page lists example usages of weka.core.Instances.numAttributes().

Prototype


public int numAttributes()

Document

Returns the number of attributes.
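
A minimal usage sketch (the ARFF path below is only a placeholder): numAttributes() is typically used to set the class index to the last attribute and to loop over a dataset's attributes.

import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class NumAttributesExample {
    public static void main(String[] args) throws Exception {
        // placeholder path; point this at any ARFF file
        Instances data = DataSource.read("data/iris.arff");
        // common convention: treat the last attribute as the class
        data.setClassIndex(data.numAttributes() - 1);
        // print the name of every attribute, class included
        for (int i = 0; i < data.numAttributes(); i++) {
            System.out.println(data.attribute(i).name());
        }
    }
}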

Usage

From source file:ANN_Single.SinglelayerPerceptron.java

public static void main(String[] args) throws Exception {
    ConverterUtils.DataSource source = new ConverterUtils.DataSource(
            ("D:\\Program Files\\Weka-3-8\\data\\diabetes.arff"));
    Instances train = source.getDataSet();
    Normalize nm = new Normalize();
    nm.setInputFormat(train);
    train = Filter.useFilter(train, nm);
    train.setClassIndex(train.numAttributes() - 1);
    System.out.println();
    //                System.out.println(i + " "+0.8);
    SinglelayerPerceptron slp = new SinglelayerPerceptron(train, 0.1, 5000);
    slp.buildClassifier(train);
    Evaluation eval = new Evaluation(train);
    //                eval.crossValidateModel(slp, train, 10, new Random(1));
    eval.evaluateModel(slp, train);
    System.out.println(eval.toSummaryString());
    System.out.print(eval.toMatrixString());
}

From source file:ANN_single2.MultilayerPerceptron.java

public MultilayerPerceptron(Instances i, int numHide, double rate, double thres) {
    learningRate = rate;
    threshold = thres;
    numHiden = numHide;
    //initialize the hidden layer nodes
    listHidden = new ArrayList<>();
    for (int idx = 0; idx < numHiden; idx++) {
        listHidden.add(new Node(i.numAttributes())); //1 for the bias
    }

    //initialize the output layer nodes
    listOutput = new ArrayList<>();
    for (int idx = 0; idx < i.numClasses(); idx++) {
        listOutput.add(new Node(listHidden.size()));
    }
}

From source file:ANN_single2.MultilayerPerceptron.java

public static void main(String[] args) throws Exception {
    ConverterUtils.DataSource source = new ConverterUtils.DataSource(
            ("D:\\Program Files\\Weka-3-8\\data\\Team.arff"));
    Instances train = source.getDataSet();
    Normalize nm = new Normalize();
    nm.setInputFormat(train);
    train = Filter.useFilter(train, nm);
    train.setClassIndex(train.numAttributes() - 1);
    MultilayerPerceptron slp = new MultilayerPerceptron(train, 13, 0.1, 0.5);
    //        slp.buildClassifier(train);
    Evaluation eval = new Evaluation(train);
    eval.crossValidateModel(slp, train, 10, new Random(1));
    //        eval.evaluateModel(slp, train);
    System.out.println(eval.toSummaryString());
    System.out.println(eval.toMatrixString());
}

From source file:ANN_single2.SinglelayerPerceptron.java

@Override
public void buildClassifier(Instances i) {
    listOutput = new ArrayList<>();
    for (int idx = 0; idx < i.numClasses(); idx++) {
        listOutput.add(new Node(i.numAttributes()));
    }

    //convert the class values to numeric (take each value's index)
    listDoubleinstance = new double[i.numInstances()];
    for (int numIns = 0; numIns < i.numInstances(); numIns++) {
        listDoubleinstance[numIns] = i.instance(numIns).toDoubleArray()[i.classIndex()];
    }

    double error = 0;
    for (int iter = 0; iter < itteration; iter++) {
        double errorThres = 0;
        for (int idxInstance = 0; idxInstance < i.numInstances(); idxInstance++) {

            //build the input list
            ArrayList<Double> listInput = new ArrayList<>();
            listInput.add(1.0); //bias term
            for (int idx = 0; idx < i.numAttributes() - 1; idx++) {
                listInput.add(i.get(idxInstance).value(idx));
            }

            //compute each output node: sigmoid of the weighted sum
            for (int idxOut = 0; idxOut < listOutput.size(); idxOut++) {
                output(listInput, idxOut);
            }

            //compute the error
            calculateError(idxInstance);
            //update the weights
            updateBobot(listInput);

        }
        for (int idxOut = 0; idxOut < listOutput.size(); idxOut++) {
            errorThres += Math.pow(listOutput.get(idxOut).getError(), 2) / 2;
        }
        if (errorThres <= threshold)
            break;
        //            System.out.println(errorThres);
    }
    //        fold++;
    //        for (int idx =0; idx < i.numInstances(); idx++) {
    //            for (int idxOut=0; idxOut < listOutput.size(); idxOut++) {
    //                error += Math.pow(listOutput.get(idxOut).getError(), 2)/2;
    //            }
    //        }
    //        System.out.println("Fold " + fold);
    //        System.out.println("error " + error);
}

From source file:ANN_single2.SinglelayerPerceptron.java

public static void main(String[] args) throws Exception {
    ConverterUtils.DataSource source = new ConverterUtils.DataSource(
            ("D:\\Program Files\\Weka-3-8\\data\\Team.arff"));
    Instances train = source.getDataSet();
    Normalize nm = new Normalize();
    nm.setInputFormat(train);
    train = Filter.useFilter(train, nm);
    train.setClassIndex(train.numAttributes() - 1);
    for (int i = 100; i < 3000; i += 100) {
        for (double j = 0.01; j < 1; j += 0.01) {
            System.out.println(i + " " + j);
            SinglelayerPerceptron slp = new SinglelayerPerceptron(i, j, 0.00);
            slp.buildClassifier(train);
            Evaluation eval = new Evaluation(train);
            //                eval.crossValidateModel(slp, train,10, new Random(1));
            eval.evaluateModel(slp, train);
            System.out.println(eval.toSummaryString());
            System.out.println(eval.toMatrixString());
        }
    }
}

From source file:ap.mavenproject1.HelloWeka.java

public static void main(String args[]) {
    Instances data = null;
    ArffLoader loader = new ArffLoader();
    try {

        loader.setFile(new File("C:\\Users\\USER\\Desktop\\data.arff"));

        data = loader.getDataSet();
        data.setClassIndex(data.numAttributes() - 1);

    } catch (IOException ex) {
        Logger.getLogger(HelloWeka.class.getName()).log(Level.SEVERE, null, ex);
    }

    Apriori apriori = new Apriori();
    try {
        NumericToNominal numericToNominal = new NumericToNominal();
        numericToNominal.setInputFormat(data);

        Instances nominalData = Filter.useFilter(data, numericToNominal);
        apriori.buildAssociations(nominalData);
        FastVector[] allTheRules;
        allTheRules = apriori.getAllTheRules();
        for (int i = 0; i < allTheRules.length; i++) {
            System.out.println(allTheRules[i]);
        }
        //             BufferedWriter writer = new BufferedWriter(new FileWriter("./output.arff"));
        //                writer.write(nominalData.toString());
        //                writer.flush();
        //                writer.close();

    } catch (Exception ex) {
        Logger.getLogger(HelloWeka.class.getName()).log(Level.SEVERE, null, ex);
    }

}

From source file:app.RunApp.java

License:Open Source License

/**
 * Generates TableModel for attributes
 * 
 * @param jtable Table
 * @param dataset Multi-label dataset
 * @return Generated TableModel
 */
private TableModel attributesTableModel(JTable jtable, MultiLabelInstances dataset) {
    DefaultTableModel tableModel = new DefaultTableModel() {
        @Override
        public boolean isCellEditable(int row, int column) {
            //This causes all cells to be not editable
            return false;
        }
    };

    tableModel.addColumn("Attribute");

    Object[] row = new Object[1];

    Instances instances = dataset.getDataSet();

    int numLabels = dataset.getNumLabels();

    int numAttributes = instances.numAttributes() - numLabels;

    Attribute att;
    for (int i = 0; i < numAttributes; i++) {
        att = instances.attribute(i);
        if (att.isNumeric()) {
            row[0] = att.name();
            tableModel.addRow(row);
        }
    }

    jtable.setModel(tableModel);

    return jtable.getModel();
}

From source file:arffcreator.arffFrame.java

private void createActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_createActionPerformed
    // TODO add your handling code here:

    FastVector atts;
    FastVector attsRel;
    FastVector attVals;
    FastVector attValsRel;
    Instances data;
    Instances dataRel;
    double[] vals;
    double[] valsRel;
    int i;

    // 1. set up attributes
    atts = new FastVector();
    // - numeric
    atts.addElement(new Attribute("att1"));
    // - nominal
    attVals = new FastVector();
    for (i = 0; i < 5; i++)
        attVals.addElement("val" + (i + 1));
    atts.addElement(new Attribute("att2", attVals));
    // - string
    atts.addElement(new Attribute("att3", (FastVector) null));
    // - date
    atts.addElement(new Attribute("att4", "yyyy-MM-dd"));
    // - relational
    attsRel = new FastVector();
    // -- numeric
    attsRel.addElement(new Attribute("att5.1"));
    // -- nominal
    attValsRel = new FastVector();
    for (i = 0; i < 5; i++)
        attValsRel.addElement("val5." + (i + 1));
    attsRel.addElement(new Attribute("att5.2", attValsRel));
    dataRel = new Instances("att5", attsRel, 0);
    atts.addElement(new Attribute("att5", dataRel, 0));

    // 2. create Instances object
    data = new Instances("MyRelation", atts, 0);

    // 3. fill with data
    // first instance
    vals = new double[data.numAttributes()];
    // - numeric
    vals[0] = Math.PI;
    // - nominal
    vals[1] = attVals.indexOf("val3");
    // - string
    vals[2] = data.attribute(2).addStringValue("This is a string!");
    try {
        // - date
        vals[3] = data.attribute(3).parseDate("2015-07-30");
    } catch (ParseException ex) {
        Logger.getLogger(arffFrame.class.getName()).log(Level.SEVERE, null, ex);
    }
    // - relational
    dataRel = new Instances(data.attribute(4).relation(), 0);
    // -- first instance
    valsRel = new double[2];
    valsRel[0] = Math.PI + 1;
    valsRel[1] = attValsRel.indexOf("val5.3");
    dataRel.add(new Instance(1.0, valsRel));
    // -- second instance
    valsRel = new double[2];
    valsRel[0] = Math.PI + 2;
    valsRel[1] = attValsRel.indexOf("val5.2");
    dataRel.add(new Instance(1.0, valsRel));
    vals[4] = data.attribute(4).addRelation(dataRel);
    // add
    data.add(new Instance(1.0, vals));

    // second instance
    vals = new double[data.numAttributes()]; // important: needs NEW array!
    // - numeric
    vals[0] = Math.E;
    // - nominal
    vals[1] = attVals.indexOf("val1");
    // - string
    vals[2] = data.attribute(2).addStringValue("And another one!");
    try {
        // - date
        vals[3] = data.attribute(3).parseDate("2015-07-30");
    } catch (ParseException ex) {
        Logger.getLogger(arffFrame.class.getName()).log(Level.SEVERE, null, ex);
    }
    // - relational
    dataRel = new Instances(data.attribute(4).relation(), 0);
    // -- first instance
    valsRel = new double[2];
    valsRel[0] = Math.E + 1;
    valsRel[1] = attValsRel.indexOf("val5.4");
    dataRel.add(new Instance(1.0, valsRel));
    // -- second instance
    valsRel = new double[2];
    valsRel[0] = Math.E + 2;
    valsRel[1] = attValsRel.indexOf("val5.1");
    dataRel.add(new Instance(1.0, valsRel));
    vals[4] = data.attribute(4).addRelation(dataRel);
    // add
    data.add(new Instance(1.0, vals));

    // 4. output data

    textArea.append(data.toString());

    dataset = data.toString();

}

From source file:asap.CrossValidation.java

/**
 *
 * @param dataInput
 * @param classIndex
 * @param removeIndices
 * @param cls
 * @param seed
 * @param folds
 * @param modelOutputFile
 * @return
 * @throws Exception
 */
public static String performCrossValidation(String dataInput, String classIndex, String removeIndices,
        AbstractClassifier cls, int seed, int folds, String modelOutputFile) throws Exception {

    PerformanceCounters.startTimer("cross-validation ST");

    PerformanceCounters.startTimer("cross-validation init ST");

    // loads data and set class index
    Instances data = DataSource.read(dataInput);
    String clsIndex = classIndex;

    switch (clsIndex) {
    case "first":
        data.setClassIndex(0);
        break;
    case "last":
        data.setClassIndex(data.numAttributes() - 1);
        break;
    default:
        try {
            data.setClassIndex(Integer.parseInt(clsIndex) - 1);
        } catch (NumberFormatException e) {
            data.setClassIndex(data.attribute(clsIndex).index());
        }
        break;
    }

    Remove removeFilter = new Remove();
    removeFilter.setAttributeIndices(removeIndices);
    removeFilter.setInputFormat(data);
    data = Filter.useFilter(data, removeFilter);

    // randomize data
    Random rand = new Random(seed);
    Instances randData = new Instances(data);
    randData.randomize(rand);
    if (randData.classAttribute().isNominal()) {
        randData.stratify(folds);
    }

    // perform cross-validation and add predictions
    Evaluation eval = new Evaluation(randData);
    Instances trainSets[] = new Instances[folds];
    Instances testSets[] = new Instances[folds];
    Classifier foldCls[] = new Classifier[folds];

    for (int n = 0; n < folds; n++) {
        trainSets[n] = randData.trainCV(folds, n);
        testSets[n] = randData.testCV(folds, n);
        foldCls[n] = AbstractClassifier.makeCopy(cls);
    }

    PerformanceCounters.stopTimer("cross-validation init ST");
    PerformanceCounters.startTimer("cross-validation folds+train ST");
    //parallelize!!:--------------------------------------------------------------
    for (int n = 0; n < folds; n++) {
        Instances train = trainSets[n];
        Instances test = testSets[n];

        // the above code is used by the StratifiedRemoveFolds filter, the
        // code below by the Explorer/Experimenter:
        // Instances train = randData.trainCV(folds, n, rand);
        // build and evaluate classifier
        Classifier clsCopy = foldCls[n];
        clsCopy.buildClassifier(train);
        eval.evaluateModel(clsCopy, test);
    }

    cls.buildClassifier(data);
    //until here!-----------------------------------------------------------------

    PerformanceCounters.stopTimer("cross-validation folds+train ST");
    PerformanceCounters.startTimer("cross-validation post ST");
    // output evaluation
    String out = "\n" + "=== Setup ===\n" + "Classifier: " + cls.getClass().getName() + " "
            + Utils.joinOptions(cls.getOptions()) + "\n" + "Dataset: " + data.relationName() + "\n" + "Folds: "
            + folds + "\n" + "Seed: " + seed + "\n" + "\n"
            + eval.toSummaryString("=== " + folds + "-fold Cross-validation ===", false) + "\n";

    if (!modelOutputFile.isEmpty()) {
        SerializationHelper.write(modelOutputFile, cls);
    }

    PerformanceCounters.stopTimer("cross-validation post ST");
    PerformanceCounters.stopTimer("cross-validation ST");

    return out;
}

From source file:asap.CrossValidation.java

/**
 *
 * @param dataInput
 * @param classIndex
 * @param removeIndices
 * @param cls
 * @param seed
 * @param folds
 * @param modelOutputFile
 * @return
 * @throws Exception
 */
public static String performCrossValidationMT(String dataInput, String classIndex, String removeIndices,
        AbstractClassifier cls, int seed, int folds, String modelOutputFile) throws Exception {

    PerformanceCounters.startTimer("cross-validation MT");

    PerformanceCounters.startTimer("cross-validation init MT");

    // loads data and set class index
    Instances data = DataSource.read(dataInput);
    String clsIndex = classIndex;

    switch (clsIndex) {
    case "first":
        data.setClassIndex(0);
        break;
    case "last":
        data.setClassIndex(data.numAttributes() - 1);
        break;
    default:
        try {
            data.setClassIndex(Integer.parseInt(clsIndex) - 1);
        } catch (NumberFormatException e) {
            data.setClassIndex(data.attribute(clsIndex).index());
        }
        break;
    }

    Remove removeFilter = new Remove();
    removeFilter.setAttributeIndices(removeIndices);
    removeFilter.setInputFormat(data);
    data = Filter.useFilter(data, removeFilter);

    // randomize data
    Random rand = new Random(seed);
    Instances randData = new Instances(data);
    randData.randomize(rand);
    if (randData.classAttribute().isNominal()) {
        randData.stratify(folds);
    }

    // perform cross-validation and add predictions
    Evaluation eval = new Evaluation(randData);
    List<Thread> foldThreads = (List<Thread>) Collections.synchronizedList(new LinkedList<Thread>());

    List<FoldSet> foldSets = (List<FoldSet>) Collections.synchronizedList(new LinkedList<FoldSet>());

    for (int n = 0; n < folds; n++) {
        foldSets.add(new FoldSet(randData.trainCV(folds, n), randData.testCV(folds, n),
                AbstractClassifier.makeCopy(cls)));

        if (n < Config.getNumThreads() - 1) {
            Thread foldThread = new Thread(new CrossValidationFoldThread(n, foldSets, eval));
            foldThreads.add(foldThread);
        }
    }

    PerformanceCounters.stopTimer("cross-validation init MT");
    PerformanceCounters.startTimer("cross-validation folds+train MT");
    //parallelize!!:--------------------------------------------------------------
    if (Config.getNumThreads() > 1) {
        for (Thread foldThread : foldThreads) {
            foldThread.start();
        }
    } else {
        //use the current thread to run the cross-validation instead of using the Thread instance created here:
        new CrossValidationFoldThread(0, foldSets, eval).run();
    }

    cls.buildClassifier(data);

    for (Thread foldThread : foldThreads) {
        foldThread.join();
    }

    //until here!-----------------------------------------------------------------
    PerformanceCounters.stopTimer("cross-validation folds+train MT");
    PerformanceCounters.startTimer("cross-validation post MT");
    // evaluation for output:
    String out = "\n" + "=== Setup ===\n" + "Classifier: " + cls.getClass().getName() + " "
            + Utils.joinOptions(cls.getOptions()) + "\n" + "Dataset: " + data.relationName() + "\n" + "Folds: "
            + folds + "\n" + "Seed: " + seed + "\n" + "\n"
            + eval.toSummaryString("=== " + folds + "-fold Cross-validation ===", false) + "\n";

    if (!modelOutputFile.isEmpty()) {
        SerializationHelper.write(modelOutputFile, cls);
    }

    PerformanceCounters.stopTimer("cross-validation post MT");
    PerformanceCounters.stopTimer("cross-validation MT");
    return out;
}