Example usage for weka.classifiers.functions.SMO: the SMO() constructor

List of usage examples for the SMO() constructor in weka.classifiers.functions

Introduction

On this page you can find example usages of the SMO() constructor from weka.classifiers.functions. SMO implements John Platt's sequential minimal optimization algorithm for training a support vector classifier.

Prototype

public SMO()
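
A minimal sketch of the constructor in use (the ARFF path is a placeholder). By default SMO trains with a linear PolyKernel, complexity constant C = 1.0, and normalized training data.

import weka.classifiers.functions.SMO;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class SMOQuickStart {
    public static void main(String[] args) throws Exception {
        // Load a dataset (placeholder path) and use the last attribute as the class.
        Instances data = DataSource.read("data.arff");
        data.setClassIndex(data.numAttributes() - 1);

        // Default SMO: linear PolyKernel, C = 1.0, input normalization.
        SMO svm = new SMO();
        svm.buildClassifier(data);

        // Classify the first instance as a quick smoke test.
        System.out.println(svm.classifyInstance(data.firstInstance()));
    }
}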

Usage

From source file: KFST.featureSelection.embedded.SVMBasedMethods.SVMBasedMethods.java

License: Open Source License

/**
 * Generates binary classifiers (SVMs) from the input data, restricted to the
 * selected feature subset, and returns the weights of the features.
 * A one-versus-one strategy is used to construct the classifiers in
 * multiclass classification.
 *
 * @param selectedFeature an array of indices of the selected feature subset
 *
 * @return an array of the weights of features
 */
protected double[][][] buildSVM_OneAgainstOne(int[] selectedFeature) {
    String nameDataCSV = TEMP_PATH + "dataCSV.csv";
    String nameDataARFF = TEMP_PATH + "dataARFF.arff";
    double[][][] weights = new double[numClass][numClass][selectedFeature.length];

    FileFunc.createCSVFile(trainSet, selectedFeature, nameDataCSV, nameFeatures, classLabel);
    FileFunc.convertCSVtoARFF(nameDataCSV, nameDataARFF, TEMP_PATH, selectedFeature.length, numFeatures,
            nameFeatures, numClass, classLabel);

    try {
        BufferedReader readerTrain = new BufferedReader(new FileReader(nameDataARFF));
        Instances dataTrain = new Instances(readerTrain);
        readerTrain.close();
        dataTrain.setClassIndex(dataTrain.numAttributes() - 1);

        SMO svm = new SMO();
        svm.setC(parameterC);
        svm.setKernel(WekaSVMKernel.parse(kernelType));
        svm.buildClassifier(dataTrain);

        for (int i = 0; i < numClass; i++) {
            for (int j = i + 1; j < numClass; j++) {
                double[] weightsSparse = svm.sparseWeights()[i][j];
                int[] indicesSparse = svm.sparseIndices()[i][j];
                for (int k = 0; k < weightsSparse.length; k++) {
                    weights[i][j][indicesSparse[k]] = weightsSparse[k];
                }
            }
        }
    } catch (Exception ex) {
        Logger.getLogger(SVMBasedMethods.class.getName()).log(Level.SEVERE, null, ex);
    }

    return weights;
}
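
Note that SMO keeps an explicit weight vector only for a linear kernel; with a nonlinear kernel such as RBFKernel, the entries returned by sparseWeights() are null. A defensive sketch of the extraction loop above:

// Sketch: skip class pairs for which SMO has no primal weight vector (nonlinear kernel).
double[][][] sparseWeights = svm.sparseWeights();
int[][][] sparseIndices = svm.sparseIndices();
for (int i = 0; i < numClass; i++) {
    for (int j = i + 1; j < numClass; j++) {
        if (sparseWeights[i][j] == null) {
            continue; // no explicit hyperplane for this pair
        }
        for (int k = 0; k < sparseWeights[i][j].length; k++) {
            weights[i][j][sparseIndices[i][j][k]] = sparseWeights[i][j][k];
        }
    }
}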

From source file: KFST.featureSelection.embedded.SVMBasedMethods.SVMBasedMethods.java

License: Open Source License

/**
 * Generates binary classifiers (SVMs) from the input data, restricted to the
 * selected feature subset, and returns the weights of the features.
 * A one-versus-all strategy is used to construct the classifiers in
 * multiclass classification.
 *
 * @param selectedFeature an array of indices of the selected feature subset
 *
 * @return an array of the weights of features
 */
protected double[][] buildSVM_OneAgainstRest(int[] selectedFeature) {
    double[][] weights = new double[numClass][selectedFeature.length];
    String[] tempClassLabel = new String[] { "c1", "c2" };

    for (int indexClass = 0; indexClass < numClass; indexClass++) {
        double[][] copyTrainSet = ArraysFunc.copyDoubleArray2D(trainSet);
        String nameDataCSV = TEMP_PATH + "dataCSV" + indexClass + ".csv";
        String nameDataARFF = TEMP_PATH + "dataARFF" + indexClass + ".arff";

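        // Relabel the class column: instances of the current class become "c1" (0), all others "c2" (1).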
        for (double[] dataRow : copyTrainSet) {
            if (dataRow[numFeatures] == classLabelInTrainSet[indexClass]) {
                dataRow[numFeatures] = 0;
            } else {
                dataRow[numFeatures] = 1;
            }
        }

        FileFunc.createCSVFile(copyTrainSet, selectedFeature, nameDataCSV, nameFeatures, tempClassLabel);
        FileFunc.convertCSVtoARFF(nameDataCSV, nameDataARFF, TEMP_PATH, selectedFeature.length, numFeatures,
                nameFeatures, tempClassLabel.length, tempClassLabel);

        try {
            BufferedReader readerTrain = new BufferedReader(new FileReader(nameDataARFF));
            Instances dataTrain = new Instances(readerTrain);
            readerTrain.close();
            dataTrain.setClassIndex(dataTrain.numAttributes() - 1);

            SMO svm = new SMO();
            svm.setC(parameterC);
            svm.setKernel(WekaSVMKernel.parse(kernelType));
            svm.buildClassifier(dataTrain);

            double[] weightsSparse = svm.sparseWeights()[0][1];
            int[] indicesSparse = svm.sparseIndices()[0][1];
            for (int k = 0; k < weightsSparse.length; k++) {
                weights[indexClass][indicesSparse[k]] = weightsSparse[k];
            }
        } catch (Exception ex) {
            Logger.getLogger(SVMBasedMethods.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    return weights;
}
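
Each row of the returned matrix is the hyperplane of one class against the rest, so a common follow-up in feature selection is to rank the selected features by absolute weight. A sketch for a single, illustrative class index:

// Sketch: order the selected features of one class by |weight|, largest first.
final int classIdx = 0; // hypothetical class to inspect
final double[] w = weights[classIdx];
Integer[] order = new Integer[w.length];
for (int k = 0; k < order.length; k++) {
    order[k] = k;
}
java.util.Arrays.sort(order, (a, b) -> Double.compare(Math.abs(w[b]), Math.abs(w[a])));
System.out.println("Strongest feature (index into selectedFeature): " + order[0]);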

From source file: machinelearningproject.MachineLearningProject.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) throws Exception {
    // TODO code application logic here
    DataSource source = new DataSource("D:\\spambase.arff");
    //        DataSource source = new DataSource("D:\\weather-nominal.arff");
    Instances instances = source.getDataSet();
    int numAttr = instances.numAttributes();
    instances.setClassIndex(instances.numAttributes() - 1);

    int runs = 5;
    int seed = 15;
    for (int i = 0; i < runs; i++) {
        //randomize data
        seed = seed + 1; // the seed for randomizing the data
        Random rand = new Random(seed); // create seeded number generator
        Instances randData = new Instances(instances); // create copy of original data
        randData.randomize(rand); // shuffle with the seeded Random so each run is reproducible

        Evaluation evalDTree = new Evaluation(randData);
        Evaluation evalRF = new Evaluation(randData);
        Evaluation evalSVM = new Evaluation(randData);

        int folds = 10;
        for (int n = 0; n < folds; n++) {
            Instances train = randData.trainCV(folds, n, rand);
            Instances test = randData.testCV(folds, n);
            //instantiate classifiers
            DecisionTree dtree = new DecisionTree();
            RandomForest rf = new RandomForest(100);
            SMO svm = new SMO();
            RBFKernel rbfKernel = new RBFKernel();
            double gamma = 0.70;
            rbfKernel.setGamma(gamma);
            svm.setKernel(rbfKernel); // attach the RBF kernel; otherwise SMO keeps its default linear kernel

            dtree.buildClassifier(train);
            rf.buildClassifier(train);
            svm.buildClassifier(train);

            evalDTree.evaluateModel(dtree, test);
            evalRF.evaluateModel(rf, test);
            evalSVM.evaluateModel(svm, test);
        }
        System.out.println("=== Decision Tree Evaluation ===");
        System.out.println(evalDTree.toSummaryString());
        System.out.println(evalDTree.toClassDetailsString());
        System.out.println(evalDTree.toMatrixString());

        System.out.println("=== Random Forest Evaluation ===");
        System.out.println(evalRF.toSummaryString());
        System.out.println(evalRF.toClassDetailsString());
        System.out.println(evalRF.toMatrixString());

        System.out.println("=== SVM Evaluation ===");
        System.out.println(evalSVM.toSummaryString());
        System.out.println(evalSVM.toClassDetailsString());
        System.out.println(evalSVM.toMatrixString());
    }
}
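
Weka's Evaluation can also drive the cross-validation itself, which avoids the manual trainCV/testCV bookkeeping; a minimal sketch for the SVM, reusing instances and seed from above:

// Sketch: let Evaluation split, shuffle, and aggregate the folds internally.
SMO svm = new SMO();
RBFKernel rbfKernel = new RBFKernel();
rbfKernel.setGamma(0.70);
svm.setKernel(rbfKernel);

Evaluation evalSVM = new Evaluation(instances);
evalSVM.crossValidateModel(svm, instances, 10, new Random(seed));
System.out.println(evalSVM.toSummaryString());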

From source file: machinelearning_cw.MachineLearning_CW.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) throws Exception {
    // TODO code application logic here

    /* Initializing test datasets */
    ArrayList<Instances> trainData = new ArrayList<Instances>();
    ArrayList<Instances> testData = new ArrayList<Instances>();

    Instances train = WekaLoader.loadData("PitcherTrain.arff");
    Instances test = WekaLoader.loadData("PitcherTest.arff");
    trainData.add(train);
    testData.add(test);

    Instances bananaTrain = WekaLoader.loadData("banana-train.arff");
    Instances bananaTest = WekaLoader.loadData("banana-test.arff");
    trainData.add(bananaTrain);
    testData.add(bananaTest);

    Instances cloudTrain = WekaLoader.loadData("clouds-train.arff");
    Instances cloudTest = WekaLoader.loadData("clouds-test.arff");
    trainData.add(cloudTrain);
    testData.add(cloudTest);

    Instances concentricTrain = WekaLoader.loadData("concentric-train.arff");
    Instances concentricTest = WekaLoader.loadData("concentric-test.arff");
    trainData.add(concentricTrain);
    testData.add(concentricTest);

    // 3 dimensional data set
    Instances habermanTrain = WekaLoader.loadData("haberman-train.arff");
    Instances habermanTest = WekaLoader.loadData("haberman-test.arff");
    trainData.add(habermanTrain);
    testData.add(habermanTest);

    // >3 dimensional data sets
    Instances thyroidTrain = WekaLoader.loadData("thyroid-train.arff");
    Instances thyroidTest = WekaLoader.loadData("thyroid-test.arff");
    trainData.add(thyroidTrain);
    testData.add(thyroidTest);

    Instances heartTrain = WekaLoader.loadData("heart-train.arff");
    Instances heartTest = WekaLoader.loadData("heart-test.arff");
    trainData.add(heartTrain);
    testData.add(heartTest);

    Instances liverTrain = WekaLoader.loadData("liver-train.arff");
    Instances liverTest = WekaLoader.loadData("liver-test.arff");
    trainData.add(liverTrain);
    testData.add(liverTest);

    Instances pendigitisTrain = WekaLoader.loadData("pendigitis-train.arff");
    Instances pendigitisTest = WekaLoader.loadData("pendigitis-test.arff");
    trainData.add(pendigitisTrain);
    testData.add(pendigitisTest);

    Instances phonemeTrain = WekaLoader.loadData("phoneme-train.arff");
    Instances phonemeTest = WekaLoader.loadData("phoneme-test.arff");
    trainData.add(phonemeTrain);
    testData.add(phonemeTest);

    Instances yeastTrain = WekaLoader.loadData("yeast-train.arff");
    Instances yeastTest = WekaLoader.loadData("yeast-test.arff");
    trainData.add(yeastTrain);
    testData.add(yeastTest);

    /* Test to see that BasicKNN provides the same results obtained from
     * the hand exercise.
     */
    System.out.println("Test to see that BasicKNN provides the same results obtained from the hand exercise:");
    System.out.println("(Ties are settled randomly)");
    BasicKNN basicKNN = new BasicKNN();
    basicKNN.buildClassifier(train);
    for (int i = 0; i < test.size(); i++) {
        Instance inst = test.get(i);
        System.out.println(i + 1 + ": " + basicKNN.classifyInstance(inst));
    }

    /* Initializing alternative classifiers */
    IBk wekaKNN = new IBk();
    NaiveBayes naiveBayes = new NaiveBayes();
    J48 decisionTree = new J48();
    SMO svm = new SMO();

    /* Tests for experiments 1,2 & 3 */
    KNN myKNN = new KNN();
    myKNN.setUseStandardisedAttributes(true);
    myKNN.setAutoDetermineK(false);
    myKNN.setUseWeightedVoting(true);
    myKNN.buildClassifier(train);
    //myKNN.setUseAcceleratedNNSearch(true);
    System.out.println("\nAccuracy Experiments:");
    MachineLearning_CW.performClassifierAccuracyTests(myKNN, trainData, testData, 1);

    /* Timing tests */
    System.out.println("\n\nTiming Experiments:");
    MachineLearning_CW.performClassifierTimingTests(wekaKNN, trainData, testData);
}
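
The alternative classifiers declared above, including the SMO instance, can be pushed through the same harness; a sketch, assuming the project-local performClassifierAccuracyTests accepts any Weka Classifier:

// Sketch: repeat the accuracy experiment with the SVM in place of KNN.
svm.buildClassifier(train);
MachineLearning_CW.performClassifierAccuracyTests(svm, trainData, testData, 1);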

From source file: meka.classifiers.multilabel.cc.CNode.java

License: Open Source License

/**
 * Main - run some tests.
 */
public static void main(String args[]) throws Exception {
    Instances D = new Instances(new FileReader(args[0]));
    Instance x = D.lastInstance();
    D.remove(D.numInstances() - 1);
    int L = Integer.parseInt(args[1]);
    D.setClassIndex(L);
    double y[] = new double[L];
    Random r = new Random();
    int s[] = new int[] { 1, 0, 2 };
    int PA_J[][] = new int[][] { {}, {}, { 0, 1 }, };

    //MLUtils.randomize(s,r);
    // MUST GO IN TREE ORDER !!
    for (int j : s) {
        int pa_j[] = PA_J[j];
        System.out.println("PARENTS = " + Arrays.toString(pa_j));
        //MLUtils.randomize(pa_j,r);
        System.out.println("**** TRAINING ***");
        CNode n = new CNode(j, null, pa_j);
        n.build(D, new SMO());
        /*
         */
        //Instances D_ = n.transform(D);
        //n.T = D_;
        System.out.println("============== D_" + j + " / class = " + n.T.classIndex() + " =");
        System.out.println("" + n.T);
        System.out.println("**** TESTING ****");
        /*
        Instance x_ = MLUtils.setTemplate(x,(Instance)D_.firstInstance().copy(),D_);
        for(int pa : pa_j) {
           //System.out.println(""+map[pa]);
           x_.setValue(n.map[pa],y[pa]);
        }
        //x_.setDataset(T);
        x_.setClassMissing();
         */
        //n.T = D_;
        Instance x_ = n.transform(x, y);
        System.out.println("" + x_);
        y[j] = 1;
    }
}

From source file: meka.core.StatUtils.java

License: Open Source License

/**
 * Main - do some tests.
 */
public static void main(String args[]) throws Exception {
    Instances D = Evaluation.loadDataset(args);
    MLUtils.prepareData(D);
    int L = D.classIndex();

    double CD[][] = null;

    if (args[2].equals("L")) {
        String I = "I";
        if (args.length > 3)
            I = args[3];
        CD = StatUtils.LEAD(D, new SMO(), new Random(), I);
    } else {
        CD = StatUtils.margDepMatrix(D, args[2]);
    }
    System.out.println(MatrixUtils.toString(CD, "M" + args[2]));
}

From source file: mulan.examples.CrossValidationExperiment.java

License: Open Source License

/**
 * Executes this example
 *
 * @param args command-line arguments -arff and -xml
 */
public static void main(String[] args) {

    try {

        String path = "";
        String arffFilename = "SinaEmotion20140117.arff";
        String xmlFilename = "Emotion.xml";
        int numLabels = 8;
        int modelnum = 8;
        String filepath = ""; // e.g. "E:/result/"
        String filename = "BR.txt";

        System.out.println(
                "...Loading the dataset 20140331... this is EPS B 5 2 0.5 LabeledData, new SMO() Sina");
        MultiLabelInstances dataset = new MultiLabelInstances(path + arffFilename, path + xmlFilename);
        long start = System.nanoTime();
        EnsembleOfPrunedSets learner1 = new EnsembleOfPrunedSets(63, 16, 0.5, 5, PrunedSets.Strategy.B, 2,
                new SMO()); //EPS m=16 strategy B p=5 b=2 
        // RAkEL learner1 = new RAkEL(new LabelPowerset(new SMO()),16,3);    //RAkEL m=16
        // EnsembleOfClassifierChains learner1= new EnsembleOfClassifierChains(new SMO(),10,true,true);  //ECC m=10
        //  MLkNN learner1 = new MLkNN(10,1.0);   //MLKNN
        // BinaryRelevance learner1= new BinaryRelevance(new SMO());   //BR
        //  HOMER learner1 = new HOMER(new LabelPowerset(new SMO()),4,Method.BalancedClustering);
        //  IBLR_ML learner1 = new IBLR_ML(10);
        //  BPMLL learner1 = new BPMLL();
        Evaluator eval = new Evaluator();
        MultipleEvaluation results;

        int numFolds = 10;
        results = eval.crossValidate(learner1, dataset, numFolds);
        long t1 = System.nanoTime();
        System.out.println("add_cost:" + ((double) (t1 - start) / (1000 * 1000 * 1000)) + "s");
        System.out.println(results);
        BufferedWriter writer = new BufferedWriter(new FileWriter(new File(filepath + filename)));
        writer.write(results.toString());
        writer.close();
        System.out.println("=====");
        System.gc();
    } catch (InvalidDataFormatException ex) {
        Logger.getLogger(CrossValidationExperiment.class.getName()).log(Level.SEVERE, null, ex);
    } catch (Exception ex) {
        Logger.getLogger(CrossValidationExperiment.class.getName()).log(Level.SEVERE, null, ex);
    }

}
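
The commented-out lines above are drop-in alternatives for learner1. As one example, the BinaryRelevance variant from those comments would look like this sketch, reusing eval, dataset, and numFolds from the method above:

// Sketch: the BR alternative noted in the comments, with SMO as the binary base learner.
BinaryRelevance learner2 = new BinaryRelevance(new SMO());
MultipleEvaluation brResults = eval.crossValidate(learner2, dataset, numFolds);
System.out.println(brResults);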

From source file: mulan.examples.TrainTestExperiment.java

License: Open Source License

public static void main(String[] args) {
    String[] methodsToCompare = { "HOMER", "BR", "CLR", "MLkNN", "MC-Copy", "IncludeLabels", "MC-Ignore",
            "RAkEL", "LP", "MLStacking" };

    try {
        String path = Utils.getOption("path", args); // e.g. -path dataset/
        String filestem = Utils.getOption("filestem", args); // e.g. -filestem emotions
        String percentage = Utils.getOption("percentage", args); // e.g. -percentage 50 (for 50%)
        System.out.println("Loading the dataset");
        MultiLabelInstances mlDataSet = new MultiLabelInstances(path + filestem + ".arff",
                path + filestem + ".xml");

        //split the data set into train and test
        Instances dataSet = mlDataSet.getDataSet();
        //dataSet.randomize(new Random(1));
        RemovePercentage rmvp = new RemovePercentage();
        rmvp.setInvertSelection(true);
        rmvp.setPercentage(Double.parseDouble(percentage));
        rmvp.setInputFormat(dataSet);
        Instances trainDataSet = Filter.useFilter(dataSet, rmvp);

        rmvp = new RemovePercentage();
        rmvp.setPercentage(Double.parseDouble(percentage));
        rmvp.setInputFormat(dataSet);
        Instances testDataSet = Filter.useFilter(dataSet, rmvp);

        MultiLabelInstances train = new MultiLabelInstances(trainDataSet, path + filestem + ".xml");
        MultiLabelInstances test = new MultiLabelInstances(testDataSet, path + filestem + ".xml");

        Evaluator eval = new Evaluator();
        Evaluation results;

        for (int i = 0; i < methodsToCompare.length; i++) {

            if (methodsToCompare[i].equals("BR")) {
                System.out.println(methodsToCompare[i]);
                Classifier brClassifier = new NaiveBayes();
                BinaryRelevance br = new BinaryRelevance(brClassifier);
                br.setDebug(true);
                br.build(train);
                results = eval.evaluate(br, test);
                System.out.println(results);
            }

            if (methodsToCompare[i].equals("LP")) {
                System.out.println(methodsToCompare[i]);
                Classifier lpBaseClassifier = new J48();
                LabelPowerset lp = new LabelPowerset(lpBaseClassifier);
                lp.setDebug(true);
                lp.build(train);
                results = eval.evaluate(lp, test);
                System.out.println(results);
            }

            if (methodsToCompare[i].equals("CLR")) {
                System.out.println(methodsToCompare[i]);
                Classifier clrClassifier = new J48();
                CalibratedLabelRanking clr = new CalibratedLabelRanking(clrClassifier);
                clr.setDebug(true);
                clr.build(train);
                results = eval.evaluate(clr, test);
                System.out.println(results);
            }

            if (methodsToCompare[i].equals("RAkEL")) {
                System.out.println(methodsToCompare[i]);
                MultiLabelLearner lp = new LabelPowerset(new J48());
                RAkEL rakel = new RAkEL(lp);
                rakel.setDebug(true);
                rakel.build(train);
                results = eval.evaluate(rakel, test);
                System.out.println(results);
            }

            if (methodsToCompare[i].equals("MC-Copy")) {
                System.out.println(methodsToCompare[i]);
                Classifier mclClassifier = new J48();
                MultiClassTransformation mcTrans = new Copy();
                MultiClassLearner mcl = new MultiClassLearner(mclClassifier, mcTrans);
                mcl.setDebug(true);
                mcl.build(train);
                results = eval.evaluate(mcl, test);
                System.out.println(results);
            }

            if (methodsToCompare[i].equals("MC-Ignore")) {
                System.out.println(methodsToCompare[i]);
                Classifier mclClassifier = new J48();
                MultiClassTransformation mcTrans = new Ignore();
                MultiClassLearner mcl = new MultiClassLearner(mclClassifier, mcTrans);
                mcl.build(train);
                results = eval.evaluate(mcl, test);
                System.out.println(results);
            }

            if (methodsToCompare[i].equals("IncludeLabels")) {
                System.out.println(methodsToCompare[i]);
                Classifier ilClassifier = new J48();
                IncludeLabelsClassifier il = new IncludeLabelsClassifier(ilClassifier);
                il.setDebug(true);
                il.build(train);
                results = eval.evaluate(il, test);
                System.out.println(results);
            }

            if (methodsToCompare[i].equals("MLkNN")) {
                System.out.println(methodsToCompare[i]);
                int numOfNeighbors = 10;
                double smooth = 1.0;
                MLkNN mlknn = new MLkNN(numOfNeighbors, smooth);
                mlknn.setDebug(true);
                mlknn.build(train);
                results = eval.evaluate(mlknn, test);
                System.out.println(results);
            }

            if (methodsToCompare[i].equals("HMC")) {
                System.out.println(methodsToCompare[i]);
                Classifier baseClassifier = new J48();
                LabelPowerset lp = new LabelPowerset(baseClassifier);
                RAkEL rakel = new RAkEL(lp);
                HMC hmc = new HMC(rakel);
                hmc.build(train);
                results = eval.evaluate(hmc, test);
                System.out.println(results);
            }

            if (methodsToCompare[i].equals("HOMER")) {
                System.out.println(methodsToCompare[i]);
                Classifier baseClassifier = new SMO();
                CalibratedLabelRanking learner = new CalibratedLabelRanking(baseClassifier);
                learner.setDebug(true);
                HOMER homer = new HOMER(learner, 3, HierarchyBuilder.Method.Random);
                homer.setDebug(true);
                homer.build(train);
                results = eval.evaluate(homer, test);
                System.out.println(results);
            }
            if (methodsToCompare[i].equals("MLStacking")) {
                System.out.println(methodsToCompare[i]);
                int numOfNeighbors = 10;
                Classifier baseClassifier = new IBk(numOfNeighbors);
                Classifier metaClassifier = new Logistic();
                MultiLabelStacking mls = new MultiLabelStacking(baseClassifier, metaClassifier);
                mls.setMetaPercentage(1.0);
                mls.setDebug(true);
                mls.build(train);
                results = eval.evaluate(mls, test);
                System.out.println(results);
            }

        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}
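
The BR branch above uses NaiveBayes as its base learner, but any single-label Weka classifier fits; swapping in SMO, the subject of this page, is a one-line change, as in this sketch reusing train, test, and eval:

// Sketch: Binary Relevance with SMO instead of NaiveBayes as the base classifier.
Classifier brClassifier = new SMO();
BinaryRelevance br = new BinaryRelevance(brClassifier);
br.build(train);
Evaluation brResults = eval.evaluate(br, test);
System.out.println(brResults);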

From source file: mulan.experiments.ICDM08EnsembleOfPrunedSets.java

License: Open Source License

/**
 * Main class
 *
 * @param args command line arguments
 */
public static void main(String[] args) {

    try {

        String arffFilename = "SinaEmotion20140117.arff";
        String xmlFilename = "Emotion.xml";

        FileReader frData = new FileReader(arffFilename);
        Instances data = new Instances(frData);
        System.out.println("===Loading the data set 20140315===");
        MultiLabelInstances dataSet = new MultiLabelInstances(arffFilename, xmlFilename);

        // String path = Utils.getOption("path", args);
        //String filestem = Utils.getOption("filestem", args);

        // System.out.println("Loading the data set");
        // MultiLabelInstances dataSet = new MultiLabelInstances(path + filestem + ".arff", path + filestem + ".xml");

        Evaluator evaluator;

        Measure[] evaluationMeasures = new Measure[5];
        evaluationMeasures[0] = new ExampleBasedAccuracy(false);
        evaluationMeasures[1] = new HammingLoss();
        evaluationMeasures[2] = new SubsetAccuracy();
        evaluationMeasures[3] = new OneError();
        evaluationMeasures[4] = new AveragePrecision();

        HashMap<String, MultipleEvaluation> result = new HashMap<String, MultipleEvaluation>();
        for (Measure m : evaluationMeasures) {
            MultipleEvaluation me = new MultipleEvaluation();
            result.put(m.getName(), me);
        }

        Random random = new Random(1);

        for (int repetition = 0; repetition < 5; repetition++) {
            // perform 2-fold CV and add each to the current results
            dataSet.getDataSet().randomize(random);
            for (int fold = 0; fold < 2; fold++) {
                System.out.println("Experiment " + (repetition * 2 + fold + 1));
                Instances train = dataSet.getDataSet().trainCV(2, fold);
                MultiLabelInstances multiTrain = new MultiLabelInstances(train, dataSet.getLabelsMetaData());
                Instances test = dataSet.getDataSet().testCV(2, fold);
                MultiLabelInstances multiTest = new MultiLabelInstances(test, dataSet.getLabelsMetaData());

                HashMap<String, Integer> bestP = new HashMap<String, Integer>();
                HashMap<String, Integer> bestB = new HashMap<String, Integer>();
                HashMap<String, PrunedSets.Strategy> bestStrategy = new HashMap<String, PrunedSets.Strategy>();
                HashMap<String, Double> bestDiff = new HashMap<String, Double>();
                for (Measure m : evaluationMeasures) {
                    bestDiff.put(m.getName(), Double.MAX_VALUE);
                }

                System.out.println("Searching parameters");
                for (int p = 5; p > 1; p--) {
                    for (int b = 1; b < 4; b++) {
                        MultipleEvaluation innerResult = null;
                        LinkedList<Measure> measures;
                        PrunedSets ps;
                        double diff;

                        evaluator = new Evaluator();
                        ps = new PrunedSets(new SMO(), p, PrunedSets.Strategy.A, b);
                        measures = new LinkedList<Measure>();
                        for (Measure m : evaluationMeasures) {
                            measures.add(m.makeCopy());
                        }
                        System.out.print("p=" + p + " b=" + b + " strategy=A ");
                        innerResult = evaluator.crossValidate(ps, multiTrain, measures, 5);
                        for (Measure m : evaluationMeasures) {
                            System.out.print(m.getName() + ": " + innerResult.getMean(m.getName()) + " ");
                            diff = Math.abs(m.getIdealValue() - innerResult.getMean(m.getName()));
                            if (diff <= bestDiff.get(m.getName())) {
                                bestDiff.put(m.getName(), diff);
                                bestP.put(m.getName(), p);
                                bestB.put(m.getName(), b);
                                bestStrategy.put(m.getName(), PrunedSets.Strategy.A);
                            }
                        }
                        System.out.println();

                        evaluator = new Evaluator();
                        ps = new PrunedSets(new SMO(), p, PrunedSets.Strategy.B, b);
                        measures = new LinkedList<Measure>();
                        for (Measure m : evaluationMeasures) {
                            measures.add(m.makeCopy());
                        }
                        System.out.print("p=" + p + " b=" + b + " strategy=B ");
                        innerResult = evaluator.crossValidate(ps, multiTrain, measures, 5);
                        for (Measure m : evaluationMeasures) {
                            System.out.print(m.getName() + ": " + innerResult.getMean(m.getName()) + " ");
                            diff = Math.abs(m.getIdealValue() - innerResult.getMean(m.getName()));
                            if (diff <= bestDiff.get(m.getName())) {
                                bestDiff.put(m.getName(), diff);
                                bestP.put(m.getName(), p);
                                bestB.put(m.getName(), b);
                                bestStrategy.put(m.getName(), PrunedSets.Strategy.B);
                            }
                        }
                        System.out.println();
                    }
                }

                for (Measure m : evaluationMeasures) {
                    System.out.println(m.getName());
                    System.out.println("Best p: " + bestP.get(m.getName()));
                    System.out.println("Best strategy: " + bestStrategy.get(m.getName()));
                    System.out.println("Best b: " + bestB.get(m.getName()));
                    EnsembleOfPrunedSets eps = new EnsembleOfPrunedSets(63, 10, 0.5, bestP.get(m.getName()),
                            bestStrategy.get(m.getName()), bestB.get(m.getName()), new SMO());
                    OneThreshold ot = new OneThreshold(eps, (BipartitionMeasureBase) m.makeCopy(), 5);
                    ot.build(multiTrain);
                    System.out.println("Best threshold: " + ot.getThreshold());
                    evaluator = new Evaluator();
                    Evaluation e = evaluator.evaluate(ot, multiTest);
                    System.out.println(e.toCSV());
                    result.get(m.getName()).addEvaluation(e);
                }
            }
        }
        for (Measure m : evaluationMeasures) {
            System.out.println(m.getName());
            result.get(m.getName()).calculateStatistics();
            System.out.println(result.get(m.getName()));
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file: newsclassifier.NewsClassifier.java

public static void main(String[] args) {
    try {
        NewsClassifier nc = new NewsClassifier();
        //nc.readData();
        nc.LoadDB();
        nc.StrToWV("s.txt");
        //System.out.println(nc.data.toString());
        nc.StrtoNom();
        //nc.ClassAssigner();
        //System.out.println(nc.data.toString());
        //RandomForest cls = new RandomForest();
        SMO cls = new SMO();
        //NaiveBayesMultinomial cls = new NaiveBayesMultinomial();
        nc.CrossValidation(cls, 10);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
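
SMO can also be configured through Weka's option strings rather than individual setters, which suits code like the above where the classifier is swapped by editing one line; a minimal sketch (the option values are illustrative):

import weka.classifiers.functions.SMO;
import weka.core.Utils;

// Sketch: set the complexity constant and the kernel via an option string.
SMO cls = new SMO();
cls.setOptions(Utils.splitOptions(
        "-C 1.0 -K \"weka.classifiers.functions.supportVector.RBFKernel -G 0.7\""));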