Example usage for weka.classifiers.trees HoeffdingTree HoeffdingTree

Introduction

This page collects example usages of the weka.classifiers.trees.HoeffdingTree class, specifically its HoeffdingTree() constructor.

Prototype

public HoeffdingTree()

Usage
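
Before the source-file examples below, here is a minimal, self-contained sketch of the constructor in use: it builds a HoeffdingTree on an ARFF file and then refines it instance by instance. The file path is a placeholder, and the last attribute is assumed to be the class.

import weka.classifiers.trees.HoeffdingTree;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class HoeffdingTreeQuickStart {
    public static void main(String[] args) throws Exception {
        // Placeholder path; the last attribute is assumed to be the class.
        Instances data = DataSource.read("/path/to/train.arff");
        data.setClassIndex(data.numAttributes() - 1);

        // The no-argument constructor shown in the prototype above.
        HoeffdingTree tree = new HoeffdingTree();
        tree.buildClassifier(data);

        // HoeffdingTree implements UpdateableClassifier, so it can keep
        // learning from further instances one at a time.
        for (Instance inst : data) {
            tree.updateClassifier(inst);
        }

        System.out.println(tree);
    }
}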

From source file:fcul.viegas.ml.learners.NetworkStreamLearningClassifierMapFunction.java

public void open(Configuration parameters) throws Exception {

    try {
        BufferedReader reader = new BufferedReader(new FileReader(NetworkPacketsDefinitions.arffPath));
        ArffLoader.ArffReader arff = new ArffLoader.ArffReader(reader);
        weka.core.Instances dataTrain = arff.getData();
        dataTrain.setClassIndex(dataTrain.numAttributes() - 1);

        Random rand = new Random();
        rand.setSeed(this.hashCode() ^ 83484856847L);
        ArrayList<Integer> listaFeaturesChoosen = new ArrayList<>();
        for (int j = 1; j < dataTrain.numAttributes(); j++) {
            listaFeaturesChoosen.add(j);
        }

        // randomly remove features until only the requested fraction remains
        int numberOfFeaturesToBeRemoved = (int) (listaFeaturesChoosen.size()
                * (1.0f - percentOfSubsetFeatures));
        for (int j = 0; j < numberOfFeaturesToBeRemoved; j++) {
            int numberOfFeature = rand.nextInt(listaFeaturesChoosen.size());
            listaFeaturesChoosen.remove(numberOfFeature);
        }
        System.out.println("LEARNING: featureSubset: " + NetworkStreamLearningClassifierMapFunction
                .printFeatures(listaFeaturesChoosen, dataTrain.numAttributes() - 1));
        mappingFeatures = "featureSubset: " + NetworkStreamLearningClassifierMapFunction
                .printFeatures(listaFeaturesChoosen, dataTrain.numAttributes() - 1);
        FastVector attributes = new FastVector();
        int indexLista = 0;
        for (int i = 0; i < NetworkPacketsDefinitions.numberOfFeatures; i++) {
            if (indexLista < listaFeaturesChoosen.size() && listaFeaturesChoosen.get(indexLista) == (i + 1)) {
                indexLista++;
                attributes.addElement(new Attribute("att" + (i + 1)));
            }
        }
        FastVector classLabels = new FastVector();
        for (int i = 0; i < NetworkPacketsDefinitions.numberOfClasses; i++) {
            classLabels.addElement("class" + (i + 1));
        }
        attributes.addElement(new Attribute("class", classLabels));

        this.streamLearningInstanceHeader = new InstancesHeader(new Instances("FlinkPhd", attributes, 0));

        this.streamLearningInstanceHeader.setClassIndex(attributes.size() - 1); // the class attribute is the last one

        listaFeaturesChoosen.add(dataTrain.numAttributes());

        String[] options = new String[2];
        options[0] = "-R";

        String optRemove = "";
        for (int j = 0; j < listaFeaturesChoosen.size() - 1; j++) {
            optRemove = optRemove + listaFeaturesChoosen.get(j) + ",";
        }
        optRemove = optRemove + listaFeaturesChoosen.get(listaFeaturesChoosen.size() - 1);
        options[1] = optRemove;

        Remove remove = new Remove();
        remove.setOptions(options);
        remove.setInvertSelection(true);
        remove.setInputFormat(dataTrain);

        weka.core.Instances newdataFeat = Filter.useFilter(dataTrain, remove);

        HoeffdingTree tree = new HoeffdingTree();

        weka.classifiers.misc.InputMappedClassifier classifier = new weka.classifiers.misc.InputMappedClassifier();
        classifier.setModelHeader(newdataFeat);
        classifier.setClassifier(tree);
        classifier.buildClassifier(newdataFeat);
        this.classifier = classifier;
        this.coreInstances = dataTrain;
        this.coreInstances.clear();

    } catch (Exception ex) {
        ex.printStackTrace();
    }
}
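
Because HoeffdingTree implements Weka's UpdateableClassifier interface, the tree wrapped above could also continue to learn after buildClassifier. A minimal follow-up sketch, reusing the tree and newdataFeat variables from the method above; the streaming loop itself is an assumption and not part of the original class.

// Assumes HoeffdingTree tree and weka.core.Instances newdataFeat from the method above.
// The tree can be refined one instance at a time after the initial build.
for (weka.core.Instance inst : newdataFeat) {
    tree.updateClassifier(inst);
}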

From source file:fcul.viegas.ml.learners.NetworkStreamLearningClassifierUpdateMapFunction.java

public void open(Configuration parameters) throws Exception {

    try {
        BufferedReader reader = new BufferedReader(new FileReader(NetworkPacketsDefinitions.arffPath));
        ArffLoader.ArffReader arff = new ArffLoader.ArffReader(reader);
        weka.core.Instances dataTrain = arff.getData();
        dataTrain.setClassIndex(dataTrain.numAttributes() - 1);

        Random rand = new Random();
        rand.setSeed(this.hashCode() ^ 83484856847L);
        ArrayList<Integer> listaFeaturesChoosen = new ArrayList<>();
        for (int j = 1; j < dataTrain.numAttributes(); j++) {
            listaFeaturesChoosen.add(j);
        }

        // randomly remove features until only the requested fraction remains
        int numberOfFeaturesToBeRemoved = (int) (listaFeaturesChoosen.size()
                * (1.0f - percentOfSubsetFeatures));
        for (int j = 0; j < numberOfFeaturesToBeRemoved; j++) {
            int numberOfFeature = rand.nextInt(listaFeaturesChoosen.size());
            listaFeaturesChoosen.remove(numberOfFeature);
        }
        System.out.println("UPDATES featureSubset: " + NetworkStreamLearningClassifierMapFunction
                .printFeatures(listaFeaturesChoosen, dataTrain.numAttributes() - 1));
        mappingFeatures = "featureSubset: " + NetworkStreamLearningClassifierMapFunction
                .printFeatures(listaFeaturesChoosen, dataTrain.numAttributes() - 1);
        FastVector attributes = new FastVector();
        int indexLista = 0;
        for (int i = 0; i < NetworkPacketsDefinitions.numberOfFeatures; i++) {
            if (indexLista < listaFeaturesChoosen.size() && listaFeaturesChoosen.get(indexLista) == (i + 1)) {
                indexLista++;
                attributes.addElement(new Attribute("att" + (i + 1)));
            }
        }
        FastVector classLabels = new FastVector();
        for (int i = 0; i < NetworkPacketsDefinitions.numberOfClasses; i++) {
            classLabels.addElement("class" + (i + 1));
        }
        attributes.addElement(new Attribute("class", classLabels));

        this.streamLearningInstanceHeader = new InstancesHeader(new Instances("FlinkPhd", attributes, 0));

        this.streamLearningInstanceHeader.setClassIndex(attributes.size() - 1); // the class attribute is the last one

        listaFeaturesChoosen.add(dataTrain.numAttributes());

        String[] options = new String[2];
        options[0] = "-R";

        String optRemove = "";
        for (int j = 0; j < listaFeaturesChoosen.size() - 1; j++) {
            optRemove = optRemove + listaFeaturesChoosen.get(j) + ",";
        }
        optRemove = optRemove + listaFeaturesChoosen.get(listaFeaturesChoosen.size() - 1);
        options[1] = optRemove;

        Remove remove = new Remove();
        remove.setOptions(options);
        remove.setInvertSelection(true);
        remove.setInputFormat(dataTrain);

        weka.core.Instances newdataFeat = Filter.useFilter(dataTrain, remove);

        HoeffdingTree tree = new HoeffdingTree();

        weka.classifiers.misc.InputMappedClassifier classifier = new weka.classifiers.misc.InputMappedClassifier();
        classifier.setModelHeader(newdataFeat);
        classifier.setClassifier(tree);
        classifier.buildClassifier(newdataFeat);
        this.classifier = classifier;
        this.coreInstances = dataTrain;

    } catch (Exception ex) {
        ex.printStackTrace();
    }
}

From source file:meka.classifiers.multilabel.incremental.BRUpdateable.java

License:Open Source License

public BRUpdateable() {
    // default classifier for GUI
    this.m_Classifier = new HoeffdingTree();
}

From source file:meka.classifiers.multilabel.incremental.CCUpdateable.java

License:Open Source License

public CCUpdateable() {
    // default classifier for GUI
    this.m_Classifier = new HoeffdingTree();
}

From source file:meka.classifiers.multilabel.incremental.PSUpdateable.java

License:Open Source License

public PSUpdateable() {
    // default classifier for GUI
    this.m_Classifier = new HoeffdingTree();
}

From source file:meka.classifiers.multilabel.incremental.RTUpdateable.java

License:Open Source License

public RTUpdateable() {
    // default classifier for GUI
    this.m_Classifier = new HoeffdingTree();
}
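
All four MEKA constructors above pick HoeffdingTree as the default base model because these meta classifiers are incremental and need an updateable base learner. A minimal sketch of setting that default explicitly; it assumes the setClassifier(...) setter these classes inherit from Weka's SingleClassifierEnhancer.

import meka.classifiers.multilabel.incremental.BRUpdateable;
import weka.classifiers.trees.HoeffdingTree;

public class BRUpdateableExample {
    public static void main(String[] args) throws Exception {
        BRUpdateable br = new BRUpdateable();
        // Same choice the constructor makes, set explicitly; any other
        // updateable Weka classifier could be swapped in here.
        br.setClassifier(new HoeffdingTree());
        System.out.println(br.getClassifier().getClass().getName());
    }
}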