Example usage for weka.classifiers.functions.SMO: the SMO() constructor

Introduction

On this page you can find example usages of the SMO() constructor of weka.classifiers.functions.SMO, Weka's sequential minimal optimization implementation of a support vector classifier.

Prototype

SMO()

Usage
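
Before the examples collected from open-source projects below, here is a minimal, self-contained sketch of constructing an SMO classifier with its default settings and cross-validating it. The dataset path, class-index choice, and class name are assumptions for illustration only.

import java.util.Random;

import weka.classifiers.Evaluation;
import weka.classifiers.functions.SMO;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class SmoConstructorExample {
    public static void main(String[] args) throws Exception {
        // Load an ARFF dataset; the path is a placeholder.
        Instances data = DataSource.read("data/iris.arff");
        // Assume the class attribute is the last one.
        data.setClassIndex(data.numAttributes() - 1);

        // Default SMO: polynomial kernel with exponent 1 and complexity C = 1.0.
        SMO smo = new SMO();

        // Ten-fold cross-validation and a summary of the results.
        Evaluation eval = new Evaluation(data);
        eval.crossValidateModel(smo, data, 10, new Random(1));
        System.out.println(eval.toSummaryString());
    }
}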

From source file:org.conqat.engine.commons.machine_learning.BaseWekaClassifier.java

License:Apache License

/**
 * Returns a new classifier based on the given algorithm.
 */
protected weka.classifiers.Classifier getClassifier(EClassificationAlgorithm algorithm) {
    switch (algorithm) {
    case DECISION_TREE_REP:
        return new REPTree();
    case SUPPORT_VECTOR_MACHINE_SMO:
        return new SMO();
    case COST_SENSITIVE_CLASSIFIER:
        return new CostSensitiveClassifier();
    case DECISION_TREE_J48:
        return new J48();
    default:
        throw new AssertionError("Cannot create a classifier without a specified algorithm.");
    }

}

From source file:org.dkpro.similarity.algorithms.ml.ClassifierSimilarityMeasure.java

License:Open Source License

public static Classifier getClassifier(WekaClassifier classifier) throws IllegalArgumentException {
    try {
        switch (classifier) {
        case NAIVE_BAYES:
            return new NaiveBayes();
        case J48:
            J48 j48 = new J48();
            j48.setOptions(new String[] { "-C", "0.25", "-M", "2" });
            return j48;
        case SMO:
            SMO smo = new SMO();
            smo.setOptions(Utils.splitOptions(
                    "-C 1.0 -L 0.001 -P 1.0E-12 -N 0 -V -1 -W 1 -K \"weka.classifiers.functions.supportVector.PolyKernel -C 250007 -E 1.0\""));
            return smo;
        case LOGISTIC:
            Logistic logistic = new Logistic();
            logistic.setOptions(Utils.splitOptions("-R 1.0E-8 -M -1"));
            return logistic;
        default:
            throw new IllegalArgumentException("Classifier " + classifier + " not found!");
        }
    } catch (Exception e) {
        throw new IllegalArgumentException(e);
    }

}

From source file:org.opentox.qsar.processors.trainers.classification.SVCTrainer.java

License:Open Source License

public QSARModel train(Instances data) throws QSARException {

    // GET A UUID AND DEFINE THE TEMPORARY FILE WHERE THE TRAINING DATA
    // ARE STORED IN ARFF FORMAT PRIOR TO TRAINING.
    final String rand = java.util.UUID.randomUUID().toString();
    final String temporaryFilePath = ServerFolders.temp + "/" + rand + ".arff";
    final File tempFile = new File(temporaryFilePath);

    // SAVE THE DATA IN THE TEMPORARY FILE
    try {
        ArffSaver dataSaver = new ArffSaver();
        dataSaver.setInstances(data);
        dataSaver.setDestination(new FileOutputStream(tempFile));
        dataSaver.writeBatch();
        if (!tempFile.exists()) {
            throw new IOException("Temporary File was not created");
        }
    } catch (final IOException ex) {
        // The content of the dataset cannot be written to the destination
        // file due to some communication issue.
        tempFile.delete();
        throw new RuntimeException(
                "Unexpected condition while trying to save the " + "dataset in a temporary ARFF file", ex);
    }

    // INITIALIZE THE CLASSIFIER
    SMO classifier = new SMO();
    classifier.setEpsilon(0.1);
    classifier.setToleranceParameter(tolerance);

    // CONSTRUCT A KERNEL ACCORDING TO THE POSTED PARAMETERS
    // SUPPORTED KERNELS ARE {rbf, linear, polynomial}
    Kernel svc_kernel = null;
    if (this.kernel.equalsIgnoreCase("rbf")) {
        RBFKernel rbf_kernel = new RBFKernel();
        rbf_kernel.setGamma(gamma);
        rbf_kernel.setCacheSize(cacheSize);
        svc_kernel = rbf_kernel;
    } else if (this.kernel.equalsIgnoreCase("polynomial")) {
        PolyKernel poly_kernel = new PolyKernel();
        poly_kernel.setExponent(degree);
        poly_kernel.setCacheSize(cacheSize);
        poly_kernel.setUseLowerOrder(true);
        svc_kernel = poly_kernel;
    } else if (this.kernel.equalsIgnoreCase("linear")) {
        PolyKernel linear_kernel = new PolyKernel();
        linear_kernel.setExponent(1.0);
        linear_kernel.setCacheSize(cacheSize);
        linear_kernel.setUseLowerOrder(true);
        svc_kernel = linear_kernel;
    }
    classifier.setKernel(svc_kernel);

    String modelFilePath = ServerFolders.models_weka + "/" + uuid.toString();
    String[] generalOptions = { "-c", Integer.toString(data.classIndex() + 1), "-t", temporaryFilePath,
            /// Save the model in the following directory
            "-d", modelFilePath };

    // AFTER ALL, BUILD THE CLASSIFICATION MODEL AND SAVE IT AS A SERIALIZED
    // WEKA FILE IN THE CORRESPONDING DIRECTORY.
    try {
        Evaluation.evaluateModel(classifier, generalOptions);
    } catch (final Exception ex) {
        tempFile.delete();
        throw new QSARException(Cause.XQReg350, "Unexpected condition while trying to train "
                + "a support vector classification model. Possible explanation : {" + ex.getMessage() + "}",
                ex);
    }

    ArrayList<Feature> independentFeatures = new ArrayList<Feature>();
    for (int i = 0; i < data.numAttributes(); i++) {
        Feature f = new Feature(data.attribute(i).name());
        if (data.classIndex() != i) {
            independentFeatures.add(f);
        }
    }

    Feature dependentFeature = new Feature(data.classAttribute().name());
    Feature predictedFeature = dependentFeature;

    QSARModel model = new QSARModel();
    model.setCode(uuid.toString());
    model.setAlgorithm(YaqpAlgorithms.SVC);
    model.setPredictionFeature(predictedFeature);
    model.setDependentFeature(dependentFeature);
    model.setIndependentFeatures(independentFeatures);
    model.setDataset(datasetUri);
    model.setParams(getParameters());
    model.setModelStatus(ModelStatus.UNDER_DEVELOPMENT);

    tempFile.delete();
    return model;
}

From source file:org.uclab.mm.icl.llc.config.RecognizerType.java

License:Apache License

/**
 * Returns the recognizer corresponding to this recognizer type.
 * @param userID user ID to set
 * @return instance of the corresponding recognizer
 */
public LLCRecognizer getRecognizer(long userID) {

    RecognizerType rec = this.values()[value];
    switch (rec) {
    case SER:
        String[] labels = { "Anger", "Happiness", "Sadness" };
        String path = FileUtil.getRootPath() + "/training/modeldataV2.7.txt";
        SMO svm = new SMO(); // Define Classifier with Weka
        try {
            svm.setOptions(weka.core.Utils.splitOptions(
                    "-C 1.0 -L 0.0010 -P 1.0E-12 -N 1 -V -1 -W 1 -K \"weka.classifiers.functions.supportVector.RBFKernel -C 250007 -G 0.01\""));
            svm.setFilterType(new SelectedTag(SMO.FILTER_STANDARDIZE, SMO.TAGS_FILTER));
        } catch (Exception e) {
            e.printStackTrace();
        }
        ExtClassification classifier = new ExtClassification(path, 78 * 2, labels, svm);
        AudioEmotionRecognizer aer = new AudioEmotionRecognizer(classifier, path, userID);

        return aer;
    case ER:
        return new AudioEmotionRecognizerV(userID);
    case IAR:
        return new InertialActivityRecognizer(userID);
    case VAR:
        return new VideoActivityRecognizer(userID);
    case LR:
        //get user loc coord / label with userID by restful service
        return new GPSLocationRecognizer(userID);
    case FR:
        return new FoodRecognizer(userID);
    }
    return null;
}

From source file:qa.experiment.ProcessFeatureVector.java

public void evaluate(Instances trainingData) throws Exception {
    Classifier c1 = new SMO();
    Evaluation eval = new Evaluation(trainingData);
    eval.crossValidateModel(c1, trainingData, 10, new Random(1));
    System.out.println("Estimated Accuracy: " + Double.toString(eval.pctCorrect()));
}

From source file:sentinets.TrainModel.java

License:Open Source License

public void runExps() {
    Classifier c1 = new SMO();
    Classifier c2 = new J48();
    Classifier c3 = new NaiveBayes();
    trainModel(c1, "SVM");
    trainModel(c2, "J48");
    trainModel(c3, "Naive Bayes");

}

From source file:statistics.BinaryStatisticsEvaluator.java

@Override
public double[][] getConfusionMatrix(Instances Training_Instances, Instances Testing_Instances,
        String classifier) {

    Classifier cModel = null;
    if ("NB".equals(classifier)) {
        cModel = (Classifier) new NaiveBayes();
        try {
            cModel.buildClassifier(Training_Instances);
        } catch (Exception ex) {
            Logger.getLogger(BinaryStatisticsEvaluator.class.getName()).log(Level.SEVERE, null, ex);
        }
    } else if ("DT".equals(classifier)) {
        cModel = (Classifier) new J48();
        try {
            cModel.buildClassifier(Training_Instances);
        } catch (Exception ex) {
            Logger.getLogger(BinaryStatisticsEvaluator.class.getName()).log(Level.SEVERE, null, ex);
        }
    } else if ("SVM".equals(classifier)) {
        cModel = (Classifier) new SMO();

        try {
            cModel.buildClassifier(Training_Instances);
        } catch (Exception ex) {
            Logger.getLogger(BinaryStatisticsEvaluator.class.getName()).log(Level.SEVERE, null, ex);
        }
    } else if ("KNN".equals(classifier)) {
        cModel = (Classifier) new IBk();
        try {
            cModel.buildClassifier(Training_Instances);
        } catch (Exception ex) {
            Logger.getLogger(BinaryStatisticsEvaluator.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
    //Test the model
    Evaluation eTest;
    try {
        eTest = new Evaluation(Training_Instances);
        eTest.evaluateModel(cModel, Testing_Instances);
        //Print the result
        String strSummary = eTest.toSummaryString();
        System.out.println(strSummary);
        String strSummary1 = eTest.toMatrixString();
        System.out.println(strSummary1);
        String strSummary2 = eTest.toClassDetailsString();
        System.out.println(strSummary2);

        //Get the confusion matrix
        double[][] cmMatrix = eTest.confusionMatrix();
        return cmMatrix;
    } catch (Exception ex) {
        Logger.getLogger(BinaryStatisticsEvaluator.class.getName()).log(Level.SEVERE, null, ex);
    }
    return null;
}

From source file:tutorials.tools.TutorialWekaClassifier.java

License:Open Source License

public static void main(String[] args) throws Exception {
    /* Load data */
    Dataset data = FileHandler.loadDataset(new File("devtools/data/iris.data"), 4, ",");
    /* Create Weka classifier */
    SMO smo = new SMO();
    /* Wrap Weka classifier in bridge */
    Classifier javamlsmo = new WekaClassifier(smo);
    /* Initialize cross-validation */
    CrossValidation cv = new CrossValidation(javamlsmo);
    /* Perform cross-validation */
    Map<Object, PerformanceMeasure> pm = cv.crossValidation(data);
    /* Output results */
    System.out.println(pm);
}

From source file:wedt.project.SvmClassifier.java

SvmClassifier() {
    cls = new SMO();
    try {
        ((SMO) cls).setOptions(weka.core.Utils.splitOptions("-M"));
        ((SMO) cls).setBuildLogisticModels(true);
    } catch (Exception e) {
        // Option parsing failed; the classifier keeps its default settings.
    }
}

From source file:wekimini.learning.SVMModelBuilder.java

public SVMModelBuilder() {
    classifier = new SMO();
    kernelType = KernelType.POLYNOMIAL;
    PolyKernel k = new PolyKernel();
    k.setExponent(polyExponent);
    ((SMO) classifier).setKernel(k);
}