Example usage for weka.classifiers.functions SMO SMO

Introduction

On this page you can find example usage for the weka.classifiers.functions.SMO constructor, SMO().

Prototype

public SMO()
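
For orientation before the examples below, here is a minimal sketch of creating and training an SMO classifier with its default options. The class name SmoQuickStart and the file name train.arff are placeholders, not taken from the examples on this page.

import weka.classifiers.functions.SMO;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class SmoQuickStart {
    public static void main(String[] args) throws Exception {
        // Load a dataset (train.arff is a placeholder path).
        Instances data = DataSource.read("train.arff");
        // By convention the class attribute is the last one.
        data.setClassIndex(data.numAttributes() - 1);

        // Build an SMO support vector classifier with default options
        // (polynomial kernel, C = 1.0).
        SMO smo = new SMO();
        smo.buildClassifier(data);

        // Classify the first instance as a quick smoke test.
        double label = smo.classifyInstance(data.instance(0));
        System.out.println("Predicted class index: " + label);
    }
}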

Usage

From source file:cs.man.ac.uk.predict.Predictor.java

License:Open Source License

public static void makePredictionsEnsembleStream(String trainPath, String testPath, String resultPath) {
    System.out.println("Training set: " + trainPath);
    System.out.println("Test set: " + testPath);

    /**
     * The ensemble classifiers. This is a heterogeneous ensemble.
     */
    J48 learner1 = new J48();
    SMO learner2 = new SMO();
    NaiveBayes learner3 = new NaiveBayes();
    MultilayerPerceptron learner5 = new MultilayerPerceptron();

    System.out.println("Training Ensemble.");
    long startTime = System.nanoTime();
    try {
        BufferedReader reader = new BufferedReader(new FileReader(trainPath));
        Instances data = new Instances(reader);
        data.setClassIndex(data.numAttributes() - 1);
        System.out.println("Training data length: " + data.numInstances());

        learner1.buildClassifier(data);
        learner2.buildClassifier(data);
        learner3.buildClassifier(data);
        learner5.buildClassifier(data);

        long endTime = System.nanoTime();
        long nanoseconds = endTime - startTime;
        double seconds = (double) nanoseconds / 1000000000.0;
        System.out.println("Training Ensemble completed in " + nanoseconds + " (ns) or " + seconds + " (s).");
    } catch (IOException e) {
        System.out.println("Could not train Ensemble classifier IOException on training data file.");
    } catch (Exception e) {
        System.out.println("Could not train Ensemble classifier Exception building model.");
    }

    try {
        // A different ARFF loader is used here (compared to above) because
        // the ARFF file may be extremely large, in which case the whole file
        // cannot be read into memory at once. Instead it is read incrementally.
        ArffLoader loader = new ArffLoader();
        loader.setFile(new File(testPath));

        Instances data = loader.getStructure();
        data.setClassIndex(data.numAttributes() - 1);

        System.out.println("Ensemble Classifier is ready.");
        System.out.println("Testing on all instances avaialable.");

        startTime = System.nanoTime();

        int instanceNumber = 0;

        // label instances
        Instance current;

        while ((current = loader.getNextInstance(data)) != null) {
            instanceNumber += 1;

            double classification1 = learner1.classifyInstance(current);
            double classification2 = learner2.classifyInstance(current);
            double classification3 = learner3.classifyInstance(current);
            double classification5 = learner5.classifyInstance(current);

            // All classifiers must agree. This is a very primitive ensemble strategy!
            if (classification1 == 1 && classification2 == 1 && classification3 == 1 && classification5 == 1) {
                Writer.append(resultPath, instanceNumber + "\n");
            }
        }

        System.out.println("Test set instances: " + instanceNumber);

        long endTime = System.nanoTime();
        long duration = endTime - startTime;
        double seconds = (double) duration / 1000000000.0;

        System.out.println("Testing Ensemble completed in " + duration + " (ns) or " + seconds + " (s).");
    } catch (Exception e) {
        System.out.println("Could not test Ensemble classifier due to an error.");
    }
}
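
The unanimous-agreement rule above is hand-rolled. As an alternative (not what Predictor.java does), Weka's Vote meta-classifier can combine the same four learners; the sketch below uses majority voting, with VoteSketch as a placeholder class name and data assumed to be a loaded Instances object with its class index set.

import weka.classifiers.Classifier;
import weka.classifiers.bayes.NaiveBayes;
import weka.classifiers.functions.MultilayerPerceptron;
import weka.classifiers.functions.SMO;
import weka.classifiers.meta.Vote;
import weka.classifiers.trees.J48;
import weka.core.Instances;
import weka.core.SelectedTag;

public class VoteSketch {
    /** Builds a majority-voting ensemble over the same four base learners. */
    public static Classifier buildVotingEnsemble(Instances data) throws Exception {
        Vote vote = new Vote();
        vote.setClassifiers(new Classifier[] { new J48(), new SMO(), new NaiveBayes(), new MultilayerPerceptron() });
        // Majority voting rather than requiring unanimous agreement.
        vote.setCombinationRule(new SelectedTag(Vote.MAJORITY_VOTING_RULE, Vote.TAGS_RULES));
        vote.buildClassifier(data);
        return vote;
    }
}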

From source file:de.tudarmstadt.ukp.dkpro.spelling.experiments.hoo2012.featureextraction.AllFeaturesExtractor.java

License:Apache License

private Classifier getClassifier() throws Exception {
    Classifier cl = null;
    // Build and evaluate classifier
    // The options given correspond to the default settings in the WEKA GUI
    if (classifier.equals("smo")) {
        SMO smo = new SMO();
        smo.setOptions(Utils.splitOptions(
                "-C 1.0 -L 0.001 -P 1.0E-12 -N 0 -V -1 -W 1 -K \"weka.classifiers.functions.supportVector.PolyKernel -C 250007 -E 1.0\""));
        cl = smo;
    } else if (classifier.equals("j48")) {
        J48 j48 = new J48();
        j48.setOptions(new String[] { "-C", "0.25", "-M", "2" });
        cl = j48;
    } else if (classifier.equals("naivebayes")) {
        cl = new NaiveBayes();
    } else if (classifier.equals("randomforest")) {
        RandomForest rf = new RandomForest();
        rf.setOptions(Utils.splitOptions("-I 10 -K 0 -S 1"));
        cl = rf;
    }
    return cl;
}
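
A classifier returned by getClassifier() would typically be handed to Weka's Evaluation class. Below is a minimal sketch of 10-fold cross-validation on the SMO branch, assuming data is an already loaded Instances object with its class index set; the class name SmoCrossValidationSketch is a placeholder.

import java.util.Random;
import weka.classifiers.Evaluation;
import weka.classifiers.functions.SMO;
import weka.core.Instances;
import weka.core.Utils;

public class SmoCrossValidationSketch {
    /** Cross-validates an SMO configured with the WEKA GUI default options. */
    public static void evaluate(Instances data) throws Exception {
        SMO smo = new SMO();
        smo.setOptions(Utils.splitOptions(
                "-C 1.0 -L 0.001 -P 1.0E-12 -N 0 -V -1 -W 1 -K \"weka.classifiers.functions.supportVector.PolyKernel -C 250007 -E 1.0\""));
        Evaluation eval = new Evaluation(data);
        eval.crossValidateModel(smo, data, 10, new Random(1));
        System.out.println(eval.toSummaryString());
    }
}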

From source file:development.CrossValidateShapelets.java

public static ArrayList<Classifier> setSingleClassifiers(ArrayList<String> names) {
    ArrayList<Classifier> sc = new ArrayList<>();
    kNN n = new kNN(50);
    n.setCrossValidate(true);
    sc.add(n);
    names.add("kNN");
    sc.add(new J48());
    names.add("C45");
    sc.add(new NaiveBayes());
    names.add("NB");
    BayesNet bn = new BayesNet();
    sc.add(bn);
    names.add("BayesNet");
    RandomForest rf = new RandomForest();
    rf.setNumTrees(200);
    sc.add(rf);
    names.add("RandForest");
    RotationForest rot = new RotationForest();
    rot.setNumIterations(30);
    sc.add(rot);
    names.add("RotForest");
    SMO svmL = new SMO();
    PolyKernel kernel = new PolyKernel();
    kernel.setExponent(1);
    svmL.setKernel(kernel);
    sc.add(svmL);
    names.add("SVML");
    kernel = new PolyKernel();
    kernel.setExponent(2);
    SMO svmQ = new SMO();
    svmQ.setKernel(kernel);
    sc.add(svmQ);
    names.add("SVMQ");
    return sc;
}

From source file:DiversifyQuery.DivTopK.java

public static void table4_5() throws Exception {

    // Initialise classifiers required for this experiment
    classifiers = new Classifier[8];
    classifiers[0] = new ShapeletTreeClassifier("infoTree.txt");
    classifiers[1] = new J48();
    classifiers[2] = new IB1();
    classifiers[3] = new NaiveBayes();
    classifiers[4] = new BayesNet();
    classifiers[5] = new RandomForest();
    classifiers[6] = new RotationForest();
    classifiers[7] = new SMO();

    // Set up names for the classifiers - only used for output
    classifierNames = new String[8];
    classifierNames[0] = "ShapeletTree";
    classifierNames[1] = "C4.5";
    classifierNames[2] = "1NN";
    classifierNames[3] = "Naive Bayes";
    classifierNames[4] = "Bayesian Network";
    classifierNames[5] = "Random Forest";
    classifierNames[6] = "Rotation Forest";
    classifierNames[7] = "SVM (linear)";

    //        if ((classifierToProcessIndex < 1 || classifierToProcessIndex > classifiers.length) && classifierToProcessIndex != -1) {
    //            throw new IOException("Invalid classifier identifier.");
    //        } else {
    //            if (classifierToProcessIndex != -1) {
    //                classifierToProcessIndex--;
    //            }
    //            }

    // Compute classifier accuracies for each classifier
    double accuracies[] = new double[classifiers.length];

    for (int i = 1; i < classifiers.length; i++) {

        //if (i == classifierToProcessIndex || classifierToProcessIndex == -1) {
        accuracies[i] = classifierAccuracy(i, true, true);

    }

    // Write experiment output to file 
    writeFileContent(accuracies);
}

From source file:es.upm.dit.gsi.barmas.launcher.WekaClassifiersValidator.java

License:Open Source License

/**
 * @return a list of all WEKA classifiers
 */
public List<Classifier> getNewClassifiers() {
    Classifier classifier;
    List<Classifier> classifiers = new ArrayList<Classifier>();

    // NBTree
    classifier = new NBTree();
    classifiers.add(classifier);

    // PART
    classifier = new PART();
    classifiers.add(classifier);

    // J48
    classifier = new J48();
    ((J48) classifier).setUnpruned(true);
    classifiers.add(classifier);

    // // J48Graft
    // classifier = new J48graft();
    // ((J48graft) classifier).setUnpruned(true);
    // classifiers.add(classifier);

    // // OneR
    // classifier = new OneR();
    // classifiers.add(classifier);

    // LADTree
    classifier = new LADTree();
    classifiers.add(classifier);

    // // REPTree
    // classifier = new REPTree();
    // classifiers.add(classifier);

    // // SimpleLogistic
    // classifier = new SimpleLogistic();
    // classifiers.add(classifier);

    // // Logistic
    // classifier = new Logistic();
    // classifiers.add(classifier);

    // // MultiLayerPerceptron
    // classifier = new MultilayerPerceptron();
    // classifiers.add(classifier);

    // // DecisionStump
    // classifier = new DecisionStump();
    // classifiers.add(classifier);

    // // LMT
    // classifier = new LMT();
    // classifiers.add(classifier);

    // // SimpleCart
    // classifier = new SimpleCart();
    // classifiers.add(classifier);

    // // BFTree
    // classifier = new BFTree();
    // classifiers.add(classifier);

    // // RBFNetwork
    // classifier = new RBFNetwork();
    // classifiers.add(classifier);

    // // DTNB
    // classifier = new DTNB();
    // classifiers.add(classifier);

    // // Jrip
    // classifier = new JRip();
    // classifiers.add(classifier);

    // // Conjunction Rule
    // classifier = new ConjunctiveRule();
    // classifiers.add(classifier);

    // // ZeroR
    // classifier = new ZeroR();
    // classifiers.add(classifier);

    // SMO
    classifier = new SMO();
    classifiers.add(classifier);

    // // OneR
    // classifier = new OneR();
    // classifiers.add(classifier);

    // // RandomForest
    // classifier = new RandomForest();
    // classifiers.add(classifier);

    return classifiers;

}

From source file:etc.aloe.cscw2013.TrainingImpl.java

License:Open Source License

@Override
public WekaModel train(ExampleSet examples) {
    System.out.println("SMO Options: " + SMO_OPTIONS);
    SMO smo = new SMO();
    try {
        smo.setOptions(Utils.splitOptions(SMO_OPTIONS));
    } catch (Exception ex) {
        System.err.println("Unable to configure SMO.");
        System.err.println("\t" + ex.getMessage());
        return null;
    }

    //Build logistic models if desired
    smo.setBuildLogisticModels(isBuildLogisticModel());

    Classifier classifier = smo;

    if (useCostTraining) {
        CostSensitiveClassifier cost = new CostSensitiveClassifier();
        cost.setClassifier(smo);
        CostMatrix matrix = new CostMatrix(2);
        matrix.setElement(0, 0, 0);
        matrix.setElement(0, 1, falsePositiveCost);
        matrix.setElement(1, 0, falseNegativeCost);
        matrix.setElement(1, 1, 0);
        cost.setCostMatrix(matrix);

        classifier = cost;

        System.out.print("Wrapping SMO in CostSensitiveClassifier " + matrix.toMatlab());

        if (useReweighting) {
            cost.setMinimizeExpectedCost(false);
            System.out.println(" using re-weighting.");
        } else {
            cost.setMinimizeExpectedCost(true);
            System.out.println(" using min-cost criterion.");
        }
    }

    try {
        System.out.print("Training SMO on " + examples.size() + " examples... ");
        classifier.buildClassifier(examples.getInstances());
        System.out.println("done.");

        WekaModel model = new WekaModel(classifier);
        return model;
    } catch (Exception ex) {
        System.err.println("Unable to train SMO.");
        System.err.println("\t" + ex.getMessage());
        return null;
    }
}

From source file:etc.aloe.oilspill2010.TrainingImpl.java

@Override
public WekaModel train(ExampleSet examples) {
    //These settings aren't terrible
    SMO smo = new SMO();
    RBFKernel rbf = new RBFKernel();
    rbf.setGamma(0.5);
    smo.setKernel(rbf);
    smo.setC(1.5);

    //These also work pretty ok
    Logistic log = new Logistic();
    log.setRidge(100);

    Classifier classifier = log;

    try {
        System.out.print("Training on " + examples.size() + " examples... ");
        classifier.buildClassifier(examples.getInstances());
        System.out.println("done.");

        WekaModel model = new WekaModel(classifier);
        return model;
    } catch (Exception ex) {
        System.err.println("Unable to train classifier.");
        System.err.println("\t" + ex.getMessage());
        return null;
    }
}

From source file:farm_ads.MyClassifier.java

public Classifier classifierSMO(Instances instances) throws Exception {
    SMO classifier = new SMO();
    classifier.setOptions(weka.core.Utils.splitOptions(
            "-C 1.0 -L 0.0010 -P 1.0E-12 -N 0 -V -1 -W 1 -K \"weka.classifiers.functions.supportVector.PolyKernel -C 250007 -E 1.0\""));
    classifier.buildClassifier(instances);
    return classifier;
}

From source file:focusedCrawler.target.ClassifyBuilder.java

License:Open Source License

public String buildClassifier(String trainFile, String outputModel) throws Exception {
    double max = Double.MIN_NORMAL;
    double cValue = 0;
    for (double c = 1; c > 0.1; c = c - 0.2) {
        SMO classifier = new SMO();
        String[] argum = new String[] { "-t", trainFile, "-C", "" + c, "-v", "-d", outputModel + c };
        String output = Evaluation.evaluateModel(classifier, argum);
        int index = output.indexOf("Correctly Classified Instances");
        if (index >= 0) {
            int end = output.indexOf("%", index);
            String line = (output.substring(index, end)).trim();
            line = line.substring(line.lastIndexOf(" "));
            double accuracy = Double.parseDouble(line.trim());
            if (accuracy > max) {
                max = accuracy;
                cValue = c;
            }
        }
    }
    System.out.println("C:" + cValue);
    return outputModel + cValue;
}
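
The method above scrapes accuracy from Evaluation's text output. For comparison, here is a minimal sketch of the same C sweep done programmatically via Evaluation.pctCorrect() and 10-fold cross-validation; the class name SmoCTuningSketch is a placeholder and train is assumed to be a loaded Instances object with its class index set.

import java.util.Random;
import weka.classifiers.Evaluation;
import weka.classifiers.functions.SMO;
import weka.core.Instances;

public class SmoCTuningSketch {
    /** Returns the C value with the highest cross-validated accuracy. */
    public static double tuneC(Instances train) throws Exception {
        double bestAccuracy = Double.NEGATIVE_INFINITY;
        double bestC = 0;
        for (double c = 1.0; c > 0.1; c -= 0.2) {
            SMO classifier = new SMO();
            classifier.setC(c);
            Evaluation eval = new Evaluation(train);
            eval.crossValidateModel(classifier, train, 10, new Random(1));
            if (eval.pctCorrect() > bestAccuracy) {
                bestAccuracy = eval.pctCorrect();
                bestC = c;
            }
        }
        System.out.println("C:" + bestC);
        return bestC;
    }
}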

From source file:focusedCrawler.target.ClassifyBuilder.java

License:Open Source License

public void testClassifier(String testFile, String outputModel) throws Exception {
    SMO classifier = new SMO();
    String[] argum = new String[] { "-T", testFile, "-l", outputModel, "-i" };
    String output = Evaluation.evaluateModel(classifier, argum);
    int index = output.indexOf("F-Measure");
    if (index >= 0) {
        index = output.indexOf("\n", index);
        int end = output.indexOf("\n", index + 1);
        String line = (output.substring(index, end)).trim();
        System.out.println(line);
        StringTokenizer tokenizer = new StringTokenizer(line, " ");
        int count = 0;
        while (tokenizer.hasMoreTokens()) {
            String word = tokenizer.nextToken();
            if (count == 2) {
                System.out.println("PRECISION:" + word);
            }
            if (count == 3) {
                System.out.println("RECALL:" + word);
            }
            if (count == 4) {
                System.out.println("F-MEASURE:" + word);
            }
            count++;
        }
    }
}