Example usage for weka.core Instances setClassIndex

List of usage examples for weka.core Instances setClassIndex

Introduction

On this page you can find example usage of weka.core Instances setClassIndex.

Prototype

public void setClassIndex(int classIndex) 

Document

Sets the class index of the dataset, i.e., the index of the attribute to be used as the class. Passing -1 unsets the class attribute.

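A minimal sketch of typical usage, assuming a hypothetical ARFF file named data.arff whose last attribute is the class:

import weka.core.Instances;
import weka.core.converters.ConverterUtils;

public class SetClassIndexExample {
    public static void main(String[] args) throws Exception {
        // load the dataset; "data.arff" is a placeholder file name
        ConverterUtils.DataSource source = new ConverterUtils.DataSource("data.arff");
        Instances data = source.getDataSet();

        // ARFF files do not store which attribute is the class, so set it explicitly;
        // here the last attribute is assumed to be the class
        if (data.classIndex() == -1) {
            data.setClassIndex(data.numAttributes() - 1);
        }
        System.out.println("Class attribute: " + data.classAttribute().name());
    }
}
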
Usage

From source file:Learning.WekaWrapper.java

public double[] evaluate(String fn) throws Exception {

    ConverterUtils.DataSource source = new ConverterUtils.DataSource(fn);

    Instances data = source.getDataSet();

    // setting class attribute if the data format does not provide this information
    // For example, the XRFF format saves the class attribute information as well
    if (data.classIndex() == -1) {
        data.setClassIndex(data.numAttributes() - 1);
    }

    NumericToNominal nmf = new NumericToNominal();
    nmf.setInputFormat(data);
    data = Filter.useFilter(data, nmf);

    tree = new J48(); // new instance of tree

    // each option flag and its value must be a separate array element
    String[] options = new String[] { "-C", "0.25", "-M", "2" };
    tree.setOptions(options);
    tree.buildClassifier(data); // build classifier

    // eval
    eval = new Evaluation(data);
    eval.crossValidateModel(tree, data, 5, new Random(1));

    // System.out.println("corr: " + eval.pctCorrect());
    // System.out.println("inco: " + eval.pctIncorrect());
    // System.out.println(eval.toSummaryString());
    // System.out.println(eval.toMatrixString());
    //  System.out.println(eval.toClassDetailsString());
    double[] results = new double[2];
    results[0] = eval.pctCorrect();
    results[1] = eval.pctIncorrect();
    return results;
}

From source file:lector.Analizador.java

public static void clasificador() {

    BufferedReader reader1;
    BufferedReader reader2;
    try {
        reader1 = new BufferedReader(new FileReader("/Users/danieltapia/Google Drive/EPN/MAESTRIA/MSW128 BI/"
                + "proyecto/compartida/DataSetAnalisisSentimientos.arff"));

        reader2 = new BufferedReader(new FileReader("/Users/danieltapia/Google Drive/EPN/MAESTRIA/MSW128 BI/"
                + "proyecto/compartida/DataSetAnalisisSentimientos_inc.arff"));
        Instances train = new Instances(reader1);
        train.setClassIndex(train.numAttributes() - 1);
        System.out.println(train.classIndex() + " " + train.numAttributes());

        Instances test = new Instances(reader2);
        test.setClassIndex(test.numAttributes() - 1);
        System.out.println(test.classIndex() + " " + test.numAttributes());

        NaiveBayes model = new NaiveBayes();
        model.buildClassifier(train);

        //classify
        Instances labeled = new Instances(test);

        for (int i = 0; i < test.numInstances(); i++) {
            double clsLabel = model.classifyInstance(test.instance(i));
            labeled.instance(i).setClassValue(clsLabel);
        }

        // https://youtu.be/JY_x5zKTfyo?list=PLJbE6j2EG1pZnBhOg3_Rb63WLCprtyJag
        Evaluation eval_train = new Evaluation(test);
        eval_train.evaluateModel(model, test);

        reader1.close();
        reader2.close();

        //System.out.println(eval_train.toSummaryString("\nResults\n======\n", false));
        String[] options = new String[4];
        options[0] = "-t"; //name of training file
        options[1] = "/Users/danieltapia/Google Drive/EPN/MAESTRIA/MSW128 BI/proyecto/"
                + "compartida/DataSetAnalisisSentimientos.arff";
        options[2] = "-T";
        options[3] = "/Users/danieltapia/Google Drive/EPN/MAESTRIA/MSW128 BI/proyecto/"
                + "compartida/DataSetAnalisisSentimientos_inc.arff";
        System.out.println(Evaluation.evaluateModel(model, options));

        try ( // print classification results to file
                BufferedWriter writer = new BufferedWriter(
                        new FileWriter("/Users/danieltapia/Google Drive/EPN/MAESTRIA/MSW128 BI/"
                                + "proyecto/compartida/DataSetAnalisisSentimientos_labeled.arff"))) {
            writer.write(labeled.toString());
        }

    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:LeerArchivo.Leer.java

public void leerArchivoArff() {
    try {
        // create KStar classifier
        Classifier cls = new KStar();
        // train
        Instances inst = new Instances(new BufferedReader(new FileReader("../datos.arff")));

        inst.setClassIndex(inst.numAttributes() - 1);
        cls.buildClassifier(inst);
        // serialize model
        ObjectOutputStream oos = new ObjectOutputStream(new FileOutputStream("./KStar.model"));
        oos.writeObject(cls);
        oos.flush();
        oos.close();
    } catch (IOException ex) {
        Logger.getLogger(Leer.class.getName()).log(Level.SEVERE, null, ex);
    } catch (Exception ex) {
        Logger.getLogger(Leer.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:LeerArchivo.Leer.java

public String leerModelo() {
    try {
        String[] valoresAtributos = { "0", "1" };
        Classifier clasificador = (Classifier) weka.core.SerializationHelper.read("./KStar.model");
        ConverterUtils.DataSource source = new ConverterUtils.DataSource("./test.arff");
        Instances data = source.getDataSet();
        data.setClassIndex(5);
        System.out.println(data.instance(0));
        return valoresAtributos[(int) clasificador.classifyInstance(data.instance(0))];
    } catch (Exception ex) {
        Logger.getLogger(Leer.class.getName()).log(Level.SEVERE, null, ex);
    }
    return null;
}

From source file:les.negocio.LerWeka.java

@Override
public String processar(EntidadeDominio entidade) {
    Arquivo arq = (Arquivo) entidade;
    String path = "/home/gustavo/Documents/weka/";
    String full_path = path + arq.getNomeDoArquivo();
    List<String> nm_att = new ArrayList<String>();
    int qt_att = 0;

    String s = null;
    BufferedReader reader = null;
    try {
        reader = new BufferedReader(new FileReader(full_path));
    } catch (FileNotFoundException e1) {
        // TODO Auto-generated catch block
        e1.printStackTrace();
    }
    ArffReader arff = null;
    try {
        arff = new ArffReader(reader);
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }

    Instances data = arff.getData();
    data.setClassIndex(data.numAttributes() - 1);
    int num_atributos = data.numAttributes() - 1;

    for (int i = 3; i < num_atributos; i++) {
        // the index starts at three because the preceding attributes are fixed by the system
        if (data.attribute(i).isNominal()) {
            qt_att++;
            nm_att.add(data.attribute(i).name().toString());

        }
    }

    arq.setAtributos_weka(nm_att);
    arq.setQt_perguntas(qt_att);

    return null;
}

From source file:liac.igmn.loader.DataLoader.java

License:Open Source License

/**
 * Loads a dataset from an ARFF file and binarizes the nominal attributes.
 * Assumes the class is the last attribute.
 *
 * @param filename path of the file
 * @return dataset
 * @throws DataLoaderException thrown when the file is not found
 * or when an IO error occurs
 */
public static Dataset loadARFF(String filename) throws DataLoaderException {
    Dataset dataset = new Dataset();
    try {
        ArffLoader loader = new ArffLoader();

        loader.setSource(new File(filename));
        Instances data = loader.getDataSet();
        Instances m_Intances = new Instances(data);

        data.setClassIndex(data.numAttributes() - 1);

        String[] classes = new String[data.numClasses()];
        for (int i = 0; i < data.numClasses(); i++)
            classes[i] = data.classAttribute().value(i);
        dataset.setClassesNames(classes);

        NominalToBinary filter = new NominalToBinary();
        filter.setInputFormat(m_Intances);
        filter.setOptions(new String[] { "-A" });
        m_Intances = Filter.useFilter(m_Intances, filter);

        int inputSize = m_Intances.numAttributes() - data.numClasses();

        dataset.setInputSize(inputSize);
        dataset.setNumClasses(data.numClasses());

        dataset.setWekaDataset(m_Intances);
    } catch (IOException e) {
        throw new DataLoaderException("Arquivo no encontrado", e.getCause());
    } catch (Exception e) {
        throw new DataLoaderException("Falha na converso do arquivo", e.getCause());
    }

    return dataset;
}

From source file:licensedetect.classifying.java

public static Instances alpInstance() {
    // Build Instance
    SetBuilder setbuild = new SetBuilder();
    setbuild.relation("test");
    JFileChooser fc = new JFileChooser();
    int returnVal = fc.showSaveDialog(fc);

    if (returnVal == JFileChooser.APPROVE_OPTION) {
        int pixels[] = APLPixels(fc.getSelectedFile().getAbsolutePath());
        for (int i = 1; i <= 1638; i++) {
            setbuild.addAttribute(Integer.toString(i), "numeric");
        }
        setbuild.addAttribute("class", "{ALP,NONALP}");
        setbuild.addData(pixels, "?");// will not work until fixed numbers
    }

    setbuild.write("test");
    ArffLoader loader = new ArffLoader();
    Instances trainData;
    try {
        loader.setFile(new File("test.arff"));
    } catch (IOException e) {
        System.out.println("Could not read file");
    }

    try {
        trainData = loader.getDataSet();
        trainData.setClassIndex(trainData.numAttributes() - 1);
        return trainData;
    } catch (IOException e1) {
        e1.printStackTrace();
    }
    // Set class to id to move

    return null;
}

From source file:licensedetect.classifying.java

public static Instances numInstance() {
    // Build Instance
    SetBuilder setbuild = new SetBuilder();
    setbuild.relation("testnum");
    JFileChooser fc = new JFileChooser();
    int returnVal = fc.showSaveDialog(fc);

    if (returnVal == JFileChooser.APPROVE_OPTION) {
        int pixels[] = NumberPixels(fc.getSelectedFile().getAbsolutePath());
        for (int i = 1; i <= 1440; i++) {
            setbuild.addAttribute(Integer.toString(i), "numeric");
        }
        setbuild.addAttribute("class", "{4,5,other}");
        setbuild.addData(pixels, "?");// will not work until fixed numbers
    }

    setbuild.write("testnum");
    ArffLoader loader = new ArffLoader();
    Instances trainData;
    try {
        loader.setFile(new File("testnum.arff"));
    } catch (IOException e) {
        System.out.println("Could not read file");
    }

    try {
        trainData = loader.getDataSet();
        trainData.setClassIndex(trainData.numAttributes() - 1);
        return trainData;
    } catch (IOException e1) {
        e1.printStackTrace();
    }
    // Set class to id to move

    return null;
}

From source file:lu.lippmann.cdb.common.gui.dataset.InstancesLoaderDialogFactory.java

License:Open Source License

private static Instances showDialog(final Component parent, final boolean setClass) throws Exception {
    final Preferences prefs = Preferences.userRoot().node("CadralDecisionBuild");
    final String path = prefs.get(REG_KEY, WekaDataAccessUtil.DEFAULT_SAMPLE_DIR);

    final JFileChooser fc = new JFileChooser();
    fc.setCurrentDirectory(new File(path));
    final int returnVal = fc.showOpenDialog(parent);
    if (returnVal == JFileChooser.APPROVE_OPTION) {
        final File file = fc.getSelectedFile();
        if (file != null) {
            prefs.put(REG_KEY, file.getPath());
            final Instances ds = WekaDataAccessUtil.loadInstancesFromARFFOrCSVFile(file);
            final Attribute defaultClassAttr = ds.classIndex() >= 0 ? ds.classAttribute() : ds.attribute(0);
            ds.setClassIndex(-1);
            ds.setRelationName(file.getPath());
            final List<String> attributesNames = new ArrayList<String>();
            final Enumeration<?> e = ds.enumerateAttributes();
            while (e.hasMoreElements()) {
                final Attribute attr = (Attribute) e.nextElement();
                attributesNames.add(attr.name());
            }

            if (setClass) {
                final String s = (String) JOptionPane.showInputDialog(parent,
                        "Select the class attribute for '" + file.getName() + "' (default:'"
                                + defaultClassAttr.name() + "'): ",
                        "Class selection", JOptionPane.QUESTION_MESSAGE, null, // icon
                        attributesNames.toArray(), attributesNames.get(attributesNames.size() - 1));
                if (s != null) {
                    ds.setClass(ds.attribute(s));
                } else {
                    //Otherwise no class defined and CACHE attributeClass => No class index defined after cancel + retry
                    ds.setClass(defaultClassAttr);
                    return null;
                }
            } else {
                ds.setClass(defaultClassAttr);
            }
            return ds;
        } else
            throw new Exception();
    } else
        return null;
}

From source file:lu.lippmann.cdb.datasetview.DatasetView.java

License:Open Source License

public DatasetView setDataSet(final Instances pdataSet) {
    if (pdataSet.classIndex() != -1 && !pdataSet.classAttribute().isNominal())
        pdataSet.setClassIndex(-1);

    if (this.initialDataSet == null) {
        this.initialDataSet = pdataSet;
        this.initialCompleteness = new CompletenessComputer(this.initialDataSet);
        this.dataCompletenessProgressBar.setMaximum(pdataSet.numInstances() * pdataSet.numAttributes());
        reinitDataCompleteness();
    }

    this.dataSet = pdataSet;

    if (!filtered)
        this.notFilteredDataSet = pdataSet;

    updateClassSelectionMenu();
    this.supervisedTransformPane.setVisible(pdataSet.classIndex() != -1);

    for (final TabView tv : tabViews) {
        tv.update(dataSet);
    }

    try {
        updateFiltersPane(dataSet);
    } catch (Exception e) {
        eventPublisher.publish(new ErrorOccuredEvent("Error when updating filters", e));
    }

    updateTooltipShowingDatasetDimensions();

    return this;
}