Example usage for weka.classifiers.functions Logistic Logistic

List of usage examples for weka.classifiers.functions Logistic Logistic

Introduction

On this page you can find example usage for the weka.classifiers.functions.Logistic constructor Logistic().

Prototype

public Logistic() 

Document

Constructor that sets the default number of decimal places to 4.
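
Before the longer examples below, a minimal sketch of the constructor in isolation may help. The dataset path iris.arff is a placeholder, and the class is assumed to be the last attribute:

import weka.classifiers.functions.Logistic;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class LogisticExample {
    public static void main(String[] args) throws Exception {
        Instances data = DataSource.read("iris.arff"); // placeholder path
        data.setClassIndex(data.numAttributes() - 1);

        Logistic logistic = new Logistic(); // defaults: ridge 1.0E-8, iterate until convergence
        logistic.buildClassifier(data);
        System.out.println(logistic); // coefficients printed to 4 decimal places by default
    }
}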

Usage

From source file:machinelearningcw.MachineLearningCw.java

public static void main(String[] args) throws Exception {

    Instances[] data = getAllFiles();
    //writes the data to excel
    writer = new FileWriter(
            "\\\\ueahome4\\stusci5\\ypf12pxu\\data\\Documents\\Machine Learning\\adamt94-machinelearning-da75565f2abe\\adamt94-machinelearning-da75565f2abe\\data.csv");
    writer.append("DataName");
    writer.append(",");//next column
    writer.append("Offline");
    writer.append(",");
    writer.append("Online");
    writer.append(",");
    writer.append("Offlinestd");
    writer.append(",");
    writer.append("Onlinestd");
    writer.append(",");
    writer.append("CrossValidation");
    writer.append(",");
    writer.append("Ensemble");
    writer.append(",");
    writer.append("WEKA1");
    writer.append(",");
    writer.append("WEKA2");
    writer.append("\n");//new row
    for (int i = 0; i < data.length; i++) {

        System.out.println("===============" + fileNames.get(i) + "=============");
        writer.append(fileNames.get(i));
        writer.append(",");
        data[i].setClassIndex(data[i].numAttributes() - 1);
        //1. Is one learning algorithm better than the other?
        //   compareAlgorithms(data[i]);

        /*2. Does standardising the data produce a 
         more accurate classifier? 
         You can test this on both learning algorithms.*/
        //  standardiseData(data[i]);

        /*3. Does choosing the learning algorithm through 
         cross validation produce a more accurate classifier?*/
        //   crossValidation(data[i]);

        // 4. Does using an ensemble produce a more accurate classifier?
        //     ensemble(data[i]);

        /*5. Weka contains several related classifiers in the 
         package weka.classifiers.functions. 
         Compare two of your classifiers (including the ensemble) 
         to at least two of the following*/

        /*=======================================
              Weka Classifiers
        =========================================*/

        //  Logistic l = new Logistic();
        //  SimpleLogistic sl = new SimpleLogistic();
        //  MultilayerPerceptron mp = new MultilayerPerceptron();
        //  VotedPerceptron vp = new VotedPerceptron();
        //
        //  int numFolds = 10;
        //  EvaluationUtils eval = new EvaluationUtils();
        //  ArrayList<Prediction> preds
        //          = eval.getCVPredictions(l, data[i], numFolds);
        //  int correct = 0;
        //  int total = 0;
        //  for (Prediction pred : preds) {
        //      if (pred.predicted() == pred.actual()) {
        //          correct++;
        //      }
        //      total++;
        //  }
        //  double acc = ((double) correct / total);
        //
        //  System.out.println("Logistic Accuracy: " + acc);
        //  writer.append(acc + ",");
        int j = data[i].numClasses();
        writer.append(j + ","); // writes the number of classes for this dataset
        writer.append("\n");

    }

    /*=======================================================
     TIMING EXPERIMENT
     =========================================================
     */
    //create all the classifiers
    perceptronClassifier online = new perceptronClassifier();
    EnhancedLinearPerceptron offline = new EnhancedLinearPerceptron();
    EnhancedLinearPerceptron onlinestd = new EnhancedLinearPerceptron();
    onlinestd.setStandardiseAttributes = true;
    EnhancedLinearPerceptron offlinestd = new EnhancedLinearPerceptron();
    offlinestd.setStandardiseAttributes = true;
    EnhancedLinearPerceptron crossvalidate = new EnhancedLinearPerceptron();
    crossvalidate.setStandardiseAttributes = true;
    RandomLinearPerceptron random = new RandomLinearPerceptron();
    Logistic l = new Logistic();
    SimpleLogistic sl = new SimpleLogistic();
    MultilayerPerceptron mp = new MultilayerPerceptron();
    VotedPerceptron vp = new VotedPerceptron();
    //    timingExperiment(online, data);
    //  timingExperiment(offline, data);
    //timingExperiment(onlinestd, data);
    //timingExperiment(offlinestd, data);
    //timingExperiment(crossvalidate, data);
    timingExperiment(random, data);
    //timingExperiment(l, data);
    //timingExperiment(sl, data);
    //  timingExperiment(mp, data);
    // timingExperiment(vp, data);
    writer.flush();
    writer.close();

}
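
The commented-out evaluation in the loop above can be lifted into a standalone cross-validation accuracy check. A sketch under the assumption that the data comes from a placeholder ARFF file with the class as the last attribute:

import java.util.ArrayList;

import weka.classifiers.evaluation.EvaluationUtils;
import weka.classifiers.evaluation.Prediction;
import weka.classifiers.functions.Logistic;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class LogisticCrossValidation {
    public static void main(String[] args) throws Exception {
        Instances data = DataSource.read("dataset.arff"); // placeholder path
        data.setClassIndex(data.numAttributes() - 1);

        // 10-fold cross-validation predictions, as in the commented-out block.
        EvaluationUtils eval = new EvaluationUtils();
        ArrayList<Prediction> preds = eval.getCVPredictions(new Logistic(), data, 10);

        int correct = 0;
        for (Prediction pred : preds) {
            if (pred.predicted() == pred.actual()) {
                correct++;
            }
        }
        System.out.println("Logistic accuracy: " + (double) correct / preds.size());
    }
}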

From source file:machinelearningcw.MachineLearningCw.java

public static void wekaClassifiers() {
    // Instantiates the weka.classifiers.functions alternatives compared in the
    // coursework; the objects are not used further in this stub.
    Logistic l = new Logistic();
    SimpleLogistic sl = new SimpleLogistic();
    MultilayerPerceptron mp = new MultilayerPerceptron();
    VotedPerceptron vp = new VotedPerceptron();
}

From source file:meddle.TrainModelByDomainOS.java

License:Open Source License

/**
 * Given the classifierName, return a classifier.
 *
 * @param classifierName
 *            e.g. J48, Bagging etc.
 */
public static Classifier getClassifier(String classifierName) {
    Classifier classifier = null;
    if (classifierName.equals("J48")) {
        J48 j48 = new J48();
        j48.setUnpruned(true);
        classifier = j48;
    } else if (classifierName.equals("AdaBoostM1")) {
        AdaBoostM1 adm = new AdaBoostM1();
        adm.setNumIterations(10);
        J48 j48 = new J48();
        adm.setClassifier(j48);
        classifier = adm;
    } else if (classifierName.equals("Bagging")) {
        Bagging bagging = new Bagging();
        bagging.setNumIterations(10);
        J48 j48 = new J48();
        bagging.setClassifier(j48);
        classifier = bagging;
    } else if (classifierName.equals("Stacking")) {
        Stacking stacking = new Stacking();
        stacking.setMetaClassifier(new Logistic());
        Classifier cc[] = new Classifier[2];
        cc[0] = new J48();
        cc[1] = new IBk();
        stacking.setClassifiers(cc);
        classifier = stacking;
    } else if (classifierName.equals("AdditiveRegression")) {
        AdditiveRegression ar = new AdditiveRegression();
        ar.setClassifier(new J48());
        classifier = ar;
    } else if (classifierName.equals("LogitBoost")) {
        LogitBoost lb = new LogitBoost();
        lb.setClassifier(new J48());
        classifier = lb;
    }
    return classifier; // null if the name was not recognised
}
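
A brief usage sketch for this factory; the training file and the static call site are assumptions:

import meddle.TrainModelByDomainOS;
import weka.classifiers.Classifier;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class GetClassifierDemo {
    public static void main(String[] args) throws Exception {
        Instances train = DataSource.read("train.arff"); // placeholder path
        train.setClassIndex(train.numAttributes() - 1);

        // "Stacking" wires a Logistic meta-classifier over J48 and IBk base learners.
        Classifier stacking = TrainModelByDomainOS.getClassifier("Stacking");
        stacking.buildClassifier(train);
    }
}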

From source file:mulan.classifier.lazy.IBLR_ML.java

License:Open Source License

@Override
protected void buildInternal(MultiLabelInstances mltrain) throws Exception {
    super.buildInternal(mltrain);

    classifier = new Classifier[numLabels];

    /*
     * Create the new training data with label info as features.
     */
    Instances[] trainingDataForLabel = new Instances[numLabels];
    ArrayList<Attribute> attributes = new ArrayList<Attribute>();
    if (addFeatures == true) {// create an ArrayList with numAttributes size
        for (int i = 1; i <= train.numAttributes(); i++) {
            attributes.add(new Attribute("Attr." + i));
        }
    } else {// create an ArrayList with numLabels size
        for (int i = 1; i <= numLabels; i++) {
            attributes.add(new Attribute("Attr." + i));
        }
    }
    ArrayList<String> classlabel = new ArrayList<String>();
    classlabel.add("0");
    classlabel.add("1");
    attributes.add(new Attribute("Class", classlabel));
    for (int i = 0; i < trainingDataForLabel.length; i++) {
        trainingDataForLabel[i] = new Instances("DataForLabel" + (i + 1), attributes, train.numInstances());
        trainingDataForLabel[i].setClassIndex(trainingDataForLabel[i].numAttributes() - 1);
    }

    for (int i = 0; i < train.numInstances(); i++) {

        Instances knn = new Instances(lnn.kNearestNeighbours(train.instance(i), numOfNeighbors));
        /*
         * Get the label confidence vector as the additional features.
         */
        double[] confidences = new double[numLabels];
        for (int j = 0; j < numLabels; j++) {
            // compute sum of counts for each label in KNN
            double count_for_label_j = 0;
            for (int k = 0; k < numOfNeighbors; k++) {
                double value = Double.parseDouble(
                        train.attribute(labelIndices[j]).value((int) knn.instance(k).value(labelIndices[j])));
                if (Utils.eq(value, 1.0)) {
                    count_for_label_j++;
                }
            }
            confidences[j] = count_for_label_j / numOfNeighbors;
        }

        double[] attvalue = new double[numLabels + 1];

        if (addFeatures == true) {
            attvalue = new double[train.numAttributes() + 1];

            // Copy the original features
            for (int m = 0; m < featureIndices.length; m++) {
                attvalue[m] = train.instance(i).value(featureIndices[m]);
            }
            // Copy the label confidences as additional features
            for (int m = 0; m < confidences.length; m++) {
                attvalue[train.numAttributes() - numLabels + m] = confidences[m];
            }
        } else {
            // Copy the label confidences as features
            for (int m = 0; m < confidences.length; m++) {
                attvalue[m] = confidences[m];
            }
        }

        // Add the class labels and finish the new training data
        for (int j = 0; j < numLabels; j++) {
            attvalue[attvalue.length - 1] = Double.parseDouble(
                    train.attribute(labelIndices[j]).value((int) train.instance(i).value(labelIndices[j])));
            Instance newInst = DataUtils.createInstance(train.instance(i), 1, attvalue);
            newInst.setDataset(trainingDataForLabel[j]);
            if (attvalue[attvalue.length - 1] > 0.5) {
                newInst.setClassValue("1");
            } else {
                newInst.setClassValue("0");
            }
            trainingDataForLabel[j].add(newInst);
        }

    }

    // for every label create a corresponding classifier.
    for (int i = 0; i < numLabels; i++) {
        classifier[i] = new Logistic();
        classifier[i].buildClassifier(trainingDataForLabel[i]);
    }

}
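
A short usage sketch for this learner; the dataset paths are placeholders and the default constructor (with mulan's default neighbourhood size) is an assumption:

import mulan.classifier.lazy.IBLR_ML;
import mulan.data.MultiLabelInstances;
import mulan.evaluation.Evaluation;
import mulan.evaluation.Evaluator;

public class IBLRExample {
    public static void main(String[] args) throws Exception {
        // mulan datasets pair an ARFF file with an XML label header (placeholder paths).
        MultiLabelInstances train = new MultiLabelInstances("train.arff", "labels.xml");
        MultiLabelInstances test = new MultiLabelInstances("test.arff", "labels.xml");

        IBLR_ML learner = new IBLR_ML(); // trains one Logistic per label on k-NN confidence features
        learner.build(train);

        Evaluation results = new Evaluator().evaluate(learner, test);
        System.out.println(results);
    }
}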

From source file:mulan.examples.TrainTestExperiment.java

License:Open Source License

public static void main(String[] args) {
    String[] methodsToCompare = { "HOMER", "BR", "CLR", "MLkNN", "MC-Copy", "IncludeLabels", "MC-Ignore",
            "RAkEL", "LP", "MLStacking" };

    try {
        String path = Utils.getOption("path", args); // e.g. -path dataset/
        String filestem = Utils.getOption("filestem", args); // e.g. -filestem emotions
        String percentage = Utils.getOption("percentage", args); // e.g. -percentage 50 (for 50%)
        System.out.println("Loading the dataset");
        MultiLabelInstances mlDataSet = new MultiLabelInstances(path + filestem + ".arff",
                path + filestem + ".xml");

        //split the data set into train and test
        Instances dataSet = mlDataSet.getDataSet();
        //dataSet.randomize(new Random(1));
        RemovePercentage rmvp = new RemovePercentage();
        rmvp.setInvertSelection(true);
        rmvp.setPercentage(Double.parseDouble(percentage));
        rmvp.setInputFormat(dataSet);
        Instances trainDataSet = Filter.useFilter(dataSet, rmvp);

        rmvp = new RemovePercentage();
        rmvp.setPercentage(Double.parseDouble(percentage));
        rmvp.setInputFormat(dataSet);
        Instances testDataSet = Filter.useFilter(dataSet, rmvp);

        MultiLabelInstances train = new MultiLabelInstances(trainDataSet, path + filestem + ".xml");
        MultiLabelInstances test = new MultiLabelInstances(testDataSet, path + filestem + ".xml");

        Evaluator eval = new Evaluator();
        Evaluation results;

        for (int i = 0; i < methodsToCompare.length; i++) {

            if (methodsToCompare[i].equals("BR")) {
                System.out.println(methodsToCompare[i]);
                Classifier brClassifier = new NaiveBayes();
                BinaryRelevance br = new BinaryRelevance(brClassifier);
                br.setDebug(true);
                br.build(train);
                results = eval.evaluate(br, test);
                System.out.println(results);
            }

            if (methodsToCompare[i].equals("LP")) {
                System.out.println(methodsToCompare[i]);
                Classifier lpBaseClassifier = new J48();
                LabelPowerset lp = new LabelPowerset(lpBaseClassifier);
                lp.setDebug(true);
                lp.build(train);
                results = eval.evaluate(lp, test);
                System.out.println(results);
            }

            if (methodsToCompare[i].equals("CLR")) {
                System.out.println(methodsToCompare[i]);
                Classifier clrClassifier = new J48();
                CalibratedLabelRanking clr = new CalibratedLabelRanking(clrClassifier);
                clr.setDebug(true);
                clr.build(train);
                results = eval.evaluate(clr, test);
                System.out.println(results);
            }

            if (methodsToCompare[i].equals("RAkEL")) {
                System.out.println(methodsToCompare[i]);
                MultiLabelLearner lp = new LabelPowerset(new J48());
                RAkEL rakel = new RAkEL(lp);
                rakel.setDebug(true);
                rakel.build(train);
                results = eval.evaluate(rakel, test);
                System.out.println(results);
            }

            if (methodsToCompare[i].equals("MC-Copy")) {
                System.out.println(methodsToCompare[i]);
                Classifier mclClassifier = new J48();
                MultiClassTransformation mcTrans = new Copy();
                MultiClassLearner mcl = new MultiClassLearner(mclClassifier, mcTrans);
                mcl.setDebug(true);
                mcl.build(train);
                results = eval.evaluate(mcl, test);
                System.out.println(results);
            }

            if (methodsToCompare[i].equals("MC-Ignore")) {
                System.out.println(methodsToCompare[i]);
                Classifier mclClassifier = new J48();
                MultiClassTransformation mcTrans = new Ignore();
                MultiClassLearner mcl = new MultiClassLearner(mclClassifier, mcTrans);
                mcl.build(train);
                results = eval.evaluate(mcl, test);
                System.out.println(results);
            }

            if (methodsToCompare[i].equals("IncludeLabels")) {
                System.out.println(methodsToCompare[i]);
                Classifier ilClassifier = new J48();
                IncludeLabelsClassifier il = new IncludeLabelsClassifier(ilClassifier);
                il.setDebug(true);
                il.build(train);
                results = eval.evaluate(il, test);
                System.out.println(results);
            }

            if (methodsToCompare[i].equals("MLkNN")) {
                System.out.println(methodsToCompare[i]);
                int numOfNeighbors = 10;
                double smooth = 1.0;
                MLkNN mlknn = new MLkNN(numOfNeighbors, smooth);
                mlknn.setDebug(true);
                mlknn.build(train);
                results = eval.evaluate(mlknn, test);
                System.out.println(results);
            }

            if (methodsToCompare[i].equals("HMC")) {
                System.out.println(methodsToCompare[i]);
                Classifier baseClassifier = new J48();
                LabelPowerset lp = new LabelPowerset(baseClassifier);
                RAkEL rakel = new RAkEL(lp);
                HMC hmc = new HMC(rakel);
                hmc.build(train);
                results = eval.evaluate(hmc, test);
                System.out.println(results);
            }

            if (methodsToCompare[i].equals("HOMER")) {
                System.out.println(methodsToCompare[i]);
                Classifier baseClassifier = new SMO();
                CalibratedLabelRanking learner = new CalibratedLabelRanking(baseClassifier);
                learner.setDebug(true);
                HOMER homer = new HOMER(learner, 3, HierarchyBuilder.Method.Random);
                homer.setDebug(true);
                homer.build(train);
                results = eval.evaluate(homer, test);
                System.out.println(results);
            }
            if (methodsToCompare[i].equals("MLStacking")) {
                System.out.println(methodsToCompare[i]);
                int numOfNeighbors = 10;
                Classifier baseClassifier = new IBk(numOfNeighbors);
                Classifier metaClassifier = new Logistic();
                MultiLabelStacking mls = new MultiLabelStacking(baseClassifier, metaClassifier);
                mls.setMetaPercentage(1.0);
                mls.setDebug(true);
                mls.build(train);
                results = eval.evaluate(mls, test);
                System.out.println(results);
            }

        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:org.dkpro.similarity.algorithms.ml.ClassifierSimilarityMeasure.java

License:Open Source License

public static Classifier getClassifier(WekaClassifier classifier) throws IllegalArgumentException {
    try {
        switch (classifier) {
        case NAIVE_BAYES:
            return new NaiveBayes();
        case J48:
            J48 j48 = new J48();
            j48.setOptions(new String[] { "-C", "0.25", "-M", "2" });
            return j48;
        case SMO:
            SMO smo = new SMO();
            smo.setOptions(Utils.splitOptions(
                    "-C 1.0 -L 0.001 -P 1.0E-12 -N 0 -V -1 -W 1 -K \"weka.classifiers.functions.supportVector.PolyKernel -C 250007 -E 1.0\""));
            return smo;
        case LOGISTIC:
            Logistic logistic = new Logistic();
            logistic.setOptions(Utils.splitOptions("-R 1.0E-8 -M -1"));
            return logistic;
        default:
            throw new IllegalArgumentException("Classifier " + classifier + " not found!");
        }
    } catch (Exception e) {
        throw new IllegalArgumentException(e);
    }

}
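
The LOGISTIC case configures the model through an option string. The same settings (which are also Logistic's defaults) can be written with the standard setters; a small sketch:

import weka.classifiers.functions.Logistic;

public class LogisticOptionsDemo {
    public static void main(String[] args) {
        // Equivalent to setOptions(Utils.splitOptions("-R 1.0E-8 -M -1")).
        Logistic logistic = new Logistic();
        logistic.setRidge(1.0E-8); // -R: ridge penalty on the log-likelihood
        logistic.setMaxIts(-1);    // -M: -1 means iterate until convergence
        System.out.println(java.util.Arrays.toString(logistic.getOptions()));
    }
}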