Example usage for weka.classifiers.meta FilteredClassifier classifyInstance

List of usage examples for weka.classifiers.meta FilteredClassifier classifyInstance

Introduction

On this page you can find example usage for weka.classifiers.meta FilteredClassifier classifyInstance.

Prototype

public double classifyInstance(Instance instance) throws Exception;

Source Link

Document

Classifies the given test instance.
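
Before the full examples, here is a minimal sketch of the typical call pattern (not taken from the sources below): build a FilteredClassifier on training data, then pass a single Instance to classifyInstance and map the returned index back to a class label. The file name train.arff, the J48 base classifier, and the Remove filter are illustrative assumptions.

import weka.classifiers.meta.FilteredClassifier;
import weka.classifiers.trees.J48;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;
import weka.filters.unsupervised.attribute.Remove;

public class ClassifyInstanceSketch {
    public static void main(String[] args) throws Exception {
        // Load a dataset and use its last attribute as the class (train.arff is a placeholder).
        Instances data = new DataSource("train.arff").getDataSet();
        data.setClassIndex(data.numAttributes() - 1);

        // Wrap a J48 tree in a FilteredClassifier that removes the first attribute before training.
        Remove rm = new Remove();
        rm.setAttributeIndices("1");
        FilteredClassifier fc = new FilteredClassifier();
        fc.setFilter(rm);
        fc.setClassifier(new J48());
        fc.buildClassifier(data);

        // classifyInstance returns the index of the predicted class value.
        double pred = fc.classifyInstance(data.instance(0));
        System.out.println("Predicted: " + data.classAttribute().value((int) pred));
    }
}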

Usage

From source file:com.ivanrf.smsspam.SpamClassifier.java

License:Apache License

public static String classify(String model, String text, JTextArea log) {
    FilteredClassifier classifier = loadModel(model, log);

    //Create the instance
    ArrayList<String> fvNominalVal = new ArrayList<String>();
    fvNominalVal.add("ham");
    fvNominalVal.add("spam");

    Attribute attribute1 = new Attribute("spam_class", fvNominalVal);
    Attribute attribute2 = new Attribute("text", (List<String>) null);
    ArrayList<Attribute> fvWekaAttributes = new ArrayList<Attribute>();
    fvWekaAttributes.add(attribute1);
    fvWekaAttributes.add(attribute2);

    Instances instances = new Instances("Test relation", fvWekaAttributes, 1);
    instances.setClassIndex(0);

    DenseInstance instance = new DenseInstance(2);
    instance.setValue(attribute2, text);
    instances.add(instance);

    publishEstado("=== Instance created ===", log);
    publishEstado(instances.toString(), log);

    //Classify the instance
    try {
        publishEstado("=== Classifying instance ===", log);

        double pred = classifier.classifyInstance(instances.instance(0));

        publishEstado("=== Instance classified  ===", log);

        String classPredicted = instances.classAttribute().value((int) pred);
        publishEstado("Class predicted: " + classPredicted, log);

        return classPredicted;
    } catch (Exception e) {
        publishEstado("Error found when classifying the text", log);
        return null;
    }
}

From source file:mlpoc.MLPOC.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) {
    try {
        // TODO code application logic here
        BufferedReader br;
        br = new BufferedReader(
                new FileReader("D:/Extra/B.E Project/agrodeploy/webapp/Data/ClusterAutotrain12.arff"));
        Instances training_data = new Instances(br);
        br.close();
        training_data.setClassIndex(training_data.numAttributes() - 1);
        br = new BufferedReader(new FileReader("D:/Extra/B.E Project/agrodeploy/webapp/Data/TestFinal.arff"));
        Instances testing_data = new Instances(br);
        br.close();
        testing_data.setClassIndex(testing_data.numAttributes() - 1);
        String summary = training_data.toSummaryString();
        int number_samples = training_data.numInstances();
        int number_attributes_per_sample = training_data.numAttributes();
        System.out.println("Number of attributes in model = " + number_attributes_per_sample);
        System.out.println("Number of samples = " + number_samples);
        System.out.println("Summary: " + summary);
        System.out.println();

        J48 j48 = new J48();
        FilteredClassifier fc = new FilteredClassifier();
        fc.setClassifier(j48);
        fc.buildClassifier(training_data);
        System.out.println("Testing instances: " + testing_data.numInstances());
        for (int i = 0; i < testing_data.numInstances(); i++) {
            double pred = fc.classifyInstance(testing_data.instance(i));
            String s1 = testing_data.classAttribute().value((int) pred);
            System.out.println(testing_data.instance(i) + " Predicted value: " + s1);
        }
        Evaluation crossValidate = crossValidate(
                "D:/Extra/B.E Project/agrodeploy/webapp/Data/ClusterAutotrain12.arff");

        DataSource source = new DataSource(
                "D:/Extra/B.E Project/agrodeploy/webapp/Data/ClusterAutotrain12.arff");
        Instances data = source.getDataSet();
        System.out.println(data.numInstances());
        data.setClassIndex(data.numAttributes() - 1);

        // 1. meta-classifier
        useClassifier(data);

        // 2. filter
        useFilter(data);
    } catch (Exception ex) {
        Logger.getLogger(MLPOC.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:org.ml.classifier.TextDirectoryToArff.java

License:Open Source License

public static void main(String[] args) {

    //      if (args.length == 2) {
    TextDirectoryToArff tdta = new TextDirectoryToArff();
    try {
        //            Instances trainData = tdta.createDataset(TRAINING_FILES);
        //            LOGGER.debug(trainData.toString());

        Instances testData = tdta.createDataset(TESTING_FILES);
        //            LOGGER.debug(testData.toString());
        //            System.out.println(testData);

        //            System.exit(0);

        // apply the StringToWordVector in a batch mode
        // (see the source code of setOptions(String[]) method of the filter
        // if you want to know which command-line option corresponds to which
        // bean property)
        //            StringToWordVector strToWordFilter = new StringToWordVector();
        //            strToWordFilter.setInputFormat(trainData);
        //            strToWordFilter.setOutputWordCounts(true);
        //            strToWordFilter.setTFTransform(true);
        //            strToWordFilter.setIDFTransform(true);

        //            trainData = Filter.useFilter(trainData, strToWordFilter);
        //            testData = Filter.useFilter(testData, strToWordFilter);

        //transform to non-sparse format
        //            SparseToNonSparse spFilter = new SparseToNonSparse(); 
        //            spFilter.setInputFormat(trainData);
        //            trainData = Filter.useFilter(trainData, spFilter);
        //            testData = Filter.useFilter(testData, spFilter);

        //            Standardize standardizeFilter = new Standardize();
        //            standardizeFilter.setInputFormat(trainData);
        //            
        //            Instances newTrainData = Filter.useFilter(trainData, standardizeFilter);
        //            Instances newTestData = Filter.useFilter(testData, standardizeFilter);

        //            NaiveBayesMultinomial cl = null;

        //            // train classifier
        //            cl = new NaiveBayesMultinomial();
        //            // further options...
        //            cl.buildClassifier(trainData);

        //            FilteredClassifier fcl = new FilteredClassifier();
        //            fcl.setFilter(strToWordFilter);
        //            fcl.setClassifier(cl);
        //            
        //            fcl.buildClassifier(trainData);

        //            SerializationHelper.write(MODEL, fcl);

        // read the model from the file
        FilteredClassifier fcl = (FilteredClassifier) SerializationHelper.read(MODEL);

        //            System.out.println("Training finished!");
        //            System.exit(0);

        //            Evaluation eTest = new Evaluation(trainData);

        //            eTest.evaluateModel(cl, trainData);
        //            String strSummary = eTest.toSummaryString();
        //            LOGGER.debug(strSummary);

        //            eTest.evaluateModel(cl, testData);
        //            strSummary = eTest.toSummaryString();
        //            LOGGER.debug(strSummary);

        // Get the confusion matrix
        //            double[][] cmMatrix = eTest.confusionMatrix();
        //            LOGGER.debug(cmMatrix);

        int[] myLst = { 5, 7, 9, 100, 345, 1000, 1500, 7500 };

        for (int i = 0; i < myLst.length; i++) {
            int idx = myLst[i];
            System.out.println("Actual: " + testData.instance(idx).stringValue(testData.classIndex()));
            long start = System.currentTimeMillis();
            System.out.println(fcl.classifyInstance(testData.instance(idx)));
            long end = System.currentTimeMillis();
            System.out.println("\n Time: " + (end - start) + " ms");
        }

    } catch (Exception e) {
        LOGGER.error(e.getMessage());
        e.printStackTrace();
    }
    //      } else {
    //         System.out.println("Usage: java TextDirectoryToArff <directory name>");
    //      }
}

From source file:org.vimarsha.classifier.impl.FunctionWiseClassifier.java

License:Open Source License

/**
 * Classifies function-wise test instances associated with the labels named in the list passed as the argument.
 *
 * @param list - labels of instances contained in the test set that need to be classified.
 * @return LinkedHashMap containing the instance labels and the associated classification results.
 * @throws ClassificationFailedException
 */
@Override
public LinkedHashMap<String, String> classify(LinkedList<String> list) throws ClassificationFailedException {
    output = new LinkedHashMap<String, String>();
    J48 j48 = new J48();
    Remove rm = new Remove();
    rm.setAttributeIndices("1");
    FilteredClassifier fc = new FilteredClassifier();
    fc.setFilter(rm);
    fc.setClassifier(j48);
    try {
        fc.buildClassifier(trainSet);
        for (int i = 0; i < testSet.numInstances(); i++) {
            double pred = fc.classifyInstance(testSet.instance(i));
            if (list.isEmpty()) {
                output.put(String.valueOf(i + 1), testSet.classAttribute().value((int) pred));
            } else {
                output.put(list.get(i), testSet.classAttribute().value((int) pred));
            }
        }
    } catch (Exception ex) {
        throw new ClassificationFailedException();
    }
    return output;
}

From source file:org.vimarsha.classifier.impl.TimeslicedClassifier.java

License:Open Source License

/**
 * Classifies Timesliced test data instances.
 *
 * @return Resulting linked list with timesliced classification results.
 * @throws ClassificationFailedException
 */
@Override
public Object classify() throws ClassificationFailedException {
    output = new LinkedList<String>();
    J48 j48 = new J48();
    Remove rm = new Remove();
    rm.setAttributeIndices("1");
    FilteredClassifier fc = new FilteredClassifier();
    fc.setFilter(rm);
    fc.setClassifier(j48);
    try {
        fc.buildClassifier(trainSet);

        for (int i = 0; i < testSet.numInstances(); i++) {
            //System.out.println(testSet.instance(i));
            double pred = fc.classifyInstance(testSet.instance(i));
            output.add(testSet.classAttribute().value((int) pred));
        }
    } catch (Exception ex) {
        System.out.println(ex.toString());
        throw new ClassificationFailedException();
    }
    return output;
}

From source file:org.vimarsha.classifier.impl.WholeProgramClassifier.java

License:Open Source License

/**
 * Classifies whole-program test instances.
 *
 * @return String containing the classification result of the evaluated program's dataset.
 * @throws ClassificationFailedException
 */
@Override
public Object classify() throws ClassificationFailedException {
    J48 j48 = new J48();
    Remove rm = new Remove();
    String output = null;
    rm.setAttributeIndices("1");
    FilteredClassifier fc = new FilteredClassifier();
    fc.setFilter(rm);
    fc.setClassifier(j48);
    try {
        fc.buildClassifier(trainSet);
        this.treeModel = j48.toString();
        double pred = fc.classifyInstance(testSet.instance(0));
        output = testSet.classAttribute().value((int) pred);
        classificationResult = output;
    } catch (Exception ex) {
        throw new ClassificationFailedException();
    }
    return output;
}

From source file:recsys.EvaluationMachineLearning.java

public static void main(String args[]) throws Exception {
    int own_training = 0;
    //opening the testing file
    DataSource sourceTest;
    if (own_training == 1) {
        sourceTest = new DataSource("D://own_training//item//feature data//test_feature.arff");
    } else {
        sourceTest = new DataSource("E://test_featureFile.arff");
    }
    //DataSource sourceTest = new DataSource("D://own_training//test_featureFile.arff");
    //System.out.println("working");
    Instances test = sourceTest.getDataSet();

    PrintFile solutionFile;
    if (own_training == 1) {
        solutionFile = new PrintFile(null, new File("D://own_training//item//solution//solution.dat"));
    } else {
        solutionFile = new PrintFile(null, new File("E://solution.dat"));
    }
    //PrintFile solutionFile = new PrintFile(null, new File("D://own_training//solution.dat"));

    if (test.classIndex() == -1) {
        test.setClassIndex(test.numAttributes() - 1);
    }

    //System.out.println("hello");
    ObjectInputStream ois;
    if (own_training == 1) {
        ois = new ObjectInputStream(new FileInputStream("D://own_training//item//model//train.model"));
    } else {
        ois = new ObjectInputStream(new FileInputStream("E://naive_bayes.model"));
    }

    //System.out.println("hello");
    Remove rm = new Remove();
    rm.setAttributeIndices("1");
    // Note: setAttributeIndices replaces the previously set range, so only attribute 2 is selected here.
    rm.setAttributeIndices("2");
    //rm.setAttributeIndices("6");
    //rm.setAttributeIndices("5");
    //NaiveBayes cls = (NaiveBayes) ois.readObject();

    FilteredClassifier fc = (FilteredClassifier) ois.readObject();
    //fc.setFilter(rm);
    //fc.setClassifier(cls);
    ois.close();

    int totalSessionCount = 0;
    int buySessionCount = 0;

    Integer tempSessionId = (int) test.instance(0).value(0);
    int sessionItemCount = (int) test.instance(0).value(4);
    ArrayList<Integer> buy = new ArrayList<>();
    String result = String.valueOf(tempSessionId) + ";";
    int count = 0;
    for (int i = 0; i < test.numInstances(); i++) {
        //System.out.println(i);
        //System.out.print("ID: " + test.instance(i).value(0));
        //if a new session occurs
        //sessionItemCount++;
        if ((int) test.instance(i).value(0) != tempSessionId) {

            totalSessionCount++;
            if (buy.size() > 0) {
                if (sessionItemCount != 1) {
                    if (sessionItemCount >= 2 && sessionItemCount <= 3) {
                        if (buy.size() == 1) {
                            for (int j = 0; j < buy.size(); j++) {
                                result += buy.get(j) + ",";
                            }
                            solutionFile.writeFile(result.substring(0, result.length() - 1));
                            buySessionCount++;
                        }
                    } else if (sessionItemCount >= 4) {
                        if (buy.size() >= 2) {
                            for (int j = 0; j < buy.size(); j++) {
                                result += buy.get(j) + ",";
                            }
                            solutionFile.writeFile(result.substring(0, result.length() - 1));
                            buySessionCount++;
                        }
                    }
                }
            }
            tempSessionId = (int) test.instance(i).value(0);
            sessionItemCount = (int) test.instance(i).value(4);
            //System.out.println(tempSessionId + "," + sessionItemCount);
            result = String.valueOf(tempSessionId) + ";";
            buy.clear();
        }
        double pred = fc.classifyInstance(test.instance(i));
        if (test.classAttribute().value((int) pred).equals("buy")) {
            Integer item = (int) test.instance(i).value(1);
            buy.add(item);
        }
        //System.out.print(", actual: " + test.classAttribute().value((int) test.instance(i).classValue()));
        //System.out.println(", predicted: " + test.classAttribute().value((int) pred));
    }
    System.out.println(buySessionCount);
    System.out.println(totalSessionCount);
    if (buy.size() > 0) {
        solutionFile.writeFile(result.substring(0, result.length() - 1));
    }
    solutionFile.closeFile();
}

From source file:tests.BayesUpdateQuery.java

public static void main(String[] args) {
    try {
        MyFilteredLearner learner;
        learner = new MyFilteredLearner();
        learner.loadDataset("C:\\Users\\NG\\Dropbox\\Tutors\\Inverted_SFU_Review_Corpus.arff");
        // Evaluation must be done before training
        // More info in: http://weka.wikispaces.com/Use+WEKA+in+your+Java+code
        learner.evaluate();
        learner.learn();
        learner.saveModel("C:\\Users\\NG\\Dropbox\\Tutors\\YesNo.model");
        Object nba = learner.classifier;

        MyFilteredClassifier Myclassifier;

        Myclassifier = new MyFilteredClassifier();
        Myclassifier.load("C:\\Users\\NG\\Dropbox\\Tutors\\smstest.txt");
        Myclassifier.loadModel("C:\\Users\\NG\\Dropbox\\Tutors\\YesNo.model");

        Myclassifier.makeInstance();
        Myclassifier.classify();
        FilteredClassifier fc = (FilteredClassifier) nba;
        System.out.println("persistance test " + fc.classifyInstance(Myclassifier.instances.firstInstance()));
    } catch (Exception ex) {
        Logger.getLogger(BayesUpdateQuery.class.getName()).log(Level.SEVERE, null, ex);
    }
}