Example usage for weka.core Instances Instances

List of usage examples for weka.core Instances Instances

Introduction

On this page you can find example usage for the weka.core Instances constructor Instances(Instances).

Prototype

public Instances(Instances dataset) 

Source Link

Document

Constructor copying all instances and references to the header information from the given set of instances.

Usage

From source file:ChiSquare.Chi.java

/**
 * Computes a p-value for each retained attribute from the chi-square
 * statistics of the given data set.
 *
 * @param d the data set to analyse; a copy is made, the argument is not modified
 * @return an array with one p-value per retained attribute
 * @throws Exception if the chi-square computation fails
 */
public double[] PValues(Instances d) throws Exception {
    // Work on a copy so the caller's data keeps its first attribute.
    Instances copia = new Instances(d);
    copia.deleteAttributeAt(0);

    double[] chi = ChiValues(d);
    // NOTE(review): instance-count-based degrees of freedom is unusual for a
    // chi-square test (category counts are more common) — confirm intent.
    int df = copia.numInstances() - 1;
    double[] resultado = new double[copia.numAttributes() - 1];
    for (int idx = 0; idx < resultado.length; idx++) {
        resultado[idx] = pochisq(chi[idx], df);
    }
    return resultado;
}

From source file:clases.GestorFichero.java

/**
 * Loads instances from the ARFF file at the given path and assigns the class
 * attribute, returning {@code null} when the file cannot be opened or read.
 *
 * @param dirFichero path of the ARFF file to load
 * @return the loaded instances with their class set, or {@code null} on failure
 */
public Instances cargarInstancias(String dirFichero) {
    try {
        abrirFichero(dirFichero);
        if (ficheroARFF != null) {
            Instances instancias = new Instances(ficheroARFF);
            asignarClase(instancias);
            return instancias;
        }
        return null;
    } catch (IOException ex) {
        // Bug fix: getMessage() may return null, so the previous
        // getMessage().toString() could itself throw an NPE while reporting.
        System.out.println("Error al cargar las instancias del fichero." + String.valueOf(ex.getMessage()));
        return null;
    } finally {
        // Bug fix: the file was only closed on the success path; closing in a
        // finally block avoids a leak when reading fails.
        // NOTE(review): assumes cerrarFichero() tolerates a file that never
        // opened — confirm against its implementation.
        cerrarFichero();
    }
}

From source file:clasificador.ClasificadorADN.java

/**
 * Loads the prediction data set from the ARFF file at the given path, sets the
 * last attribute as the class, and keeps a copy in {@code predicteddata}.
 *
 * @param ruta path of the ARFF file to load
 * @throws Exception if the file cannot be read or parsed
 */
public void CargarData(String ruta) throws Exception {
    // Bug fix: the reader was previously never closed (resource leak);
    // try-with-resources guarantees it is closed even on parse errors.
    try (BufferedReader lector = new BufferedReader(new FileReader(ruta))) {
        datapredict = new Instances(lector);
        // The class attribute is assumed to be the last one in the file.
        datapredict.setClassIndex(datapredict.numAttributes() - 1);
        predicteddata = new Instances(datapredict);
    }
}

From source file:clasificador.RedNeuronal.java

/**
 * Trains a multilayer-perceptron network on the bundled training ARFF file,
 * saves the trained model to "TrainMLP.train", and prints an evaluation of the
 * model on its own training data.
 *
 * @param paramNN weka option string configuring the MultilayerPerceptron
 */
public void Entrenamiento(String paramNN) {
    // Bug fix: the reader was only closed on the success path; try-with-resources
    // closes it even when training or evaluation throws.
    try (FileReader trainReader = new FileReader(
            new File(System.getProperty("user.dir") + "\\src\\clasificador\\archivos\\libro.arff"))) {
        Instances trainInstance = new Instances(trainReader);
        // The class attribute is the last one in this controlled data set.
        trainInstance.setClassIndex(trainInstance.numAttributes() - 1);

        // Build the multilayer perceptron with the caller-supplied options.
        MultilayerPerceptron mlp = new MultilayerPerceptron();
        mlp.setOptions(Utils.splitOptions(paramNN));
        mlp.buildClassifier(trainInstance);

        // Persist the trained model so testing/prediccion can reload it later.
        Debug.saveToFile("TrainMLP.train", mlp);
        // Removed dead code: a SerializedClassifier was created and configured
        // here but never used; evaluation runs directly on the in-memory mlp.

        // Evaluate on the training data itself (training performance, not a
        // held-out estimate).
        Evaluation evaluarEntrenamiento = new Evaluation(trainInstance);
        evaluarEntrenamiento.evaluateModel(mlp, trainInstance);
        System.out.println(evaluarEntrenamiento.toSummaryString("resultado", false));
        System.out.println(evaluarEntrenamiento.toMatrixString("*****************Matriz de confusion*******"));
    } catch (FileNotFoundException ex) {
        Logger.getLogger(RedNeuronal.class.getName()).log(Level.SEVERE, null, ex);
    } catch (IOException ex) {
        Logger.getLogger(RedNeuronal.class.getName()).log(Level.SEVERE, null, ex);
    } catch (Exception ex) {
        Logger.getLogger(RedNeuronal.class.getName()).log(Level.SEVERE, null, ex);
    }

}

From source file:clasificador.RedNeuronal.java

/**
 * Evaluates the model previously saved to "TrainMLP.train" against the test
 * ARFF file and prints the evaluation summary and confusion matrix.
 */
public void testing() {
    // Bug fix: testReader was never closed (resource leak); try-with-resources
    // guarantees it is released on every path.
    try (FileReader testReader = new FileReader(
            new File(System.getProperty("user.dir") + "\\src\\clasificador\\archivos\\librotest.arff"))) {
        Instances testInstance = new Instances(testReader);
        // The class attribute is the last one in the file.
        testInstance.setClassIndex(testInstance.numAttributes() - 1);
        Evaluation evalTest = new Evaluation(testInstance);

        // Reload the classifier serialized by Entrenamiento().
        SerializedClassifier clasificador = new SerializedClassifier();
        clasificador.setModelFile(new File("TrainMLP.train"));
        Classifier clasificadorEstandar = clasificador.getCurrentModel();
        evalTest.evaluateModel(clasificadorEstandar, testInstance);

        System.out.println(evalTest.toSummaryString("resultado:", false));
        System.out.println(evalTest.toMatrixString("*****************Matriz de confusion*******"));
    } catch (IOException e) {
        e.printStackTrace();
    } catch (Exception ex) {
        Logger.getLogger(RedNeuronal.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:clasificador.RedNeuronal.java

/**
 * Runs the serialized classifier "TrainMLP.train" against the prediction ARFF
 * file and prints the predicted class value of each instance.
 */
public void prediccion() {
    // Bug fixes:
    //  - try-with-resources replaces a finally block that dereferenced
    //    testReader without a null check (NPE when the file was missing);
    //  - evaluateModel was called twice, doubling the evaluation work and the
    //    statistics accumulated in evalTest. A single call suffices.
    try (FileReader testReader = new FileReader(
            new File(System.getProperty("user.dir") + "\\src\\clasificador\\archivos\\libro1.arff"))) {
        Instances testInstance = new Instances(testReader);
        // The class attribute is the last one in the file.
        testInstance.setClassIndex(testInstance.numAttributes() - 1);
        Evaluation evalTest = new Evaluation(testInstance);

        // Reload the classifier serialized by Entrenamiento().
        SerializedClassifier clasificador = new SerializedClassifier();
        clasificador.setModelFile(new File("TrainMLP.train"));
        Classifier clasificadorEstandar = clasificador.getCurrentModel();

        double[] valores = evalTest.evaluateModel(clasificadorEstandar, testInstance);
        for (double valor : valores) {
            System.out.println("se predice:     " + valor + "\n");
        }
    } catch (FileNotFoundException ex) {
        Logger.getLogger(RedNeuronal.class.getName()).log(Level.SEVERE, null, ex);
    } catch (IOException ex) {
        Logger.getLogger(RedNeuronal.class.getName()).log(Level.SEVERE, null, ex);
    } catch (Exception ex) {
        Logger.getLogger(RedNeuronal.class.getName()).log(Level.SEVERE, null, ex);
    }

}

From source file:Classifier.supervised.LinearRegression.java

License:Open Source License

/**
 * Builds a regression model for the given data.
 *
 * @param data the training data to be used for generating the
 * linear regression function
 * @throws Exception if the classifier could not be built successfully
 */
public void buildClassifier(Instances data) throws Exception {
    // Invalidate any previous model until this build completes.
    m_ModelBuilt = false;

    if (!m_checksTurnedOff) {
        // can classifier handle the data?
        getCapabilities().testWithFail(data);

        // remove instances with missing class; copy first so the caller's
        // data set is left untouched
        data = new Instances(data);
        data.deleteWithMissingClass();
    }

    // Preprocess instances: binarize nominal attributes, then impute missing
    // values (filter order matters — imputation runs on the binarized data).
    if (!m_checksTurnedOff) {
        m_TransformFilter = new NominalToBinary();
        m_TransformFilter.setInputFormat(data);
        data = Filter.useFilter(data, m_TransformFilter);
        m_MissingFilter = new ReplaceMissingValues();
        m_MissingFilter.setInputFormat(data);
        data = Filter.useFilter(data, m_MissingFilter);
        data.deleteWithMissingClass();
    } else {
        // Checks off: caller guarantees clean data, so no filters are kept
        // for later use at prediction time.
        m_TransformFilter = null;
        m_MissingFilter = null;
    }

    m_ClassIndex = data.classIndex();
    m_TransformedData = data;

    // Turn all attributes on for a start (the class itself is never a predictor)
    m_SelectedAttributes = new boolean[data.numAttributes()];
    for (int i = 0; i < data.numAttributes(); i++) {
        if (i != m_ClassIndex) {
            m_SelectedAttributes[i] = true;
        }
    }
    m_Coefficients = null;

    // Compute means and standard deviations
    m_Means = new double[data.numAttributes()];
    m_StdDevs = new double[data.numAttributes()];
    for (int j = 0; j < data.numAttributes(); j++) {
        if (j != data.classIndex()) {
            m_Means[j] = data.meanOrMode(j);
            m_StdDevs[j] = Math.sqrt(data.variance(j));
            if (m_StdDevs[j] == 0) {
                // A constant attribute carries no information — drop it.
                m_SelectedAttributes[j] = false;
            }
        }
    }

    // Class statistics (m_TransformedData == data here, so the two index
    // expressions refer to the same class attribute).
    m_ClassStdDev = Math.sqrt(data.variance(m_TransformedData.classIndex()));
    m_ClassMean = data.meanOrMode(m_TransformedData.classIndex());

    // Perform the regression
    findBestModel();

    // Save memory
    if (m_Minimal) {
        // Minimal mode discards everything not needed for prediction.
        m_TransformedData = null;
        m_Means = null;
        m_StdDevs = null;
    } else {
        // Keep only the header (0 instances) of the transformed data.
        m_TransformedData = new Instances(data, 0);
    }

    m_ModelBuilt = true;
}

From source file:classifiers.ComplexClassifier.java

/**
 * Builds a classifier from the given Bayes-net structure and immediately
 * trains and scores it for the requested number of runs.
 *
 * @param struct          the Bayes-net structure supplying the instances and graph
 * @param anzahldurchlauf number of train/test runs to allocate result slots for
 */
public ComplexClassifier(BayesNetz struct, int anzahldurchlauf) {
    super(struct.getInst());

    try {
        // Private copy of the instances held by the superclass.
        Datenbank = new Instances(super.getinst());
        this.anzahldurchlauf = anzahldurchlauf;
        this.struct = struct;
        Model = new Graph(struct.getInst());
        list = new ArrayList<>();
        // NOTE(review): sized by numInstances(), i.e. one slot per instance —
        // confirm this is intended (not one per class/attribute).
        Classparam = new double[inst.numInstances()];
        // Result tables: [run][2] — the meaning of the two columns is defined
        // elsewhere in the hierarchy.
        trainergebnisse = new double[anzahldurchlauf][2];
        testergebnisse = new double[anzahldurchlauf][2];
        Modelergebnisse = new double[1][2];
        validierungsergebnisse = new double[1][2];

        erg = new double[5];
        // Train and score immediately as part of construction.
        BauClassifier();
        this.BewertunginProzent();
    } catch (Exception ex) {
        Logger.getLogger(ComplexClassifier.class.getName()).log(Level.SEVERE, null, ex);
    }
    // Runs even if construction above failed — presumably intentional; verify.
    berechneVernetzung();

}

From source file:classifiers.ComplexClassifierZufall.java

/**
 * Builds a classifier over a randomly connected graph: the connectivity degree
 * is drawn uniformly from 0..100 percent.
 *
 * @param inst   the training instances
 * @param anzahl number of train/test runs to allocate result slots for
 */
public ComplexClassifierZufall(Instances inst, int anzahl) {
    super(inst);
    // Private copy of the instances held by the superclass.
    Datenbank = new Instances(super.getinst());
    // Random connectivity degree in [0, 100].
    this.vernetzung = (int) (Math.random() * 101);
    Model = new GraphMitAngabeVernetzungsgrad(inst, vernetzung);
    Model.strukturiereGraph();

    list = new ArrayList<>();
    // NOTE(review): sized by numInstances(), i.e. one slot per instance —
    // confirm this is intended (not one per class/attribute).
    Classparam = new double[inst.numInstances()];
    this.anzahldurchlauf = anzahl;
    // Result tables: [run][2] — column semantics are defined elsewhere.
    trainergebnisse = new double[anzahldurchlauf][2];
    testergebnisse = new double[anzahldurchlauf][2];
    Modelergebnisse = new double[1][2];
    validierungsergebnisse = new double[1][2];
    struct = new BayesNetz(inst, Model);

}

From source file:Classifiers.MLkNN.java

License:Open Source License

/**
 * Predicts the label set and ranking confidences for one instance using the
 * MLkNN rule: for each label, compare the posterior of the label being present
 * vs. absent given the number of positive neighbours.
 *
 * @param instance the instance to classify
 * @return predictions and per-label confidences
 * @throws Exception if the nearest-neighbour search fails
 */
protected MultiLabelOutput makePredictionInternal(Instance instance) throws Exception {
    double[] confidences = new double[numLabels];
    boolean[] predictions = new boolean[numLabels];

    // Bug fix: a failed kNN search was previously logged and swallowed,
    // leaving knn == null and guaranteeing a NullPointerException at
    // knn.instance(k) below. The method declares "throws Exception", so let
    // the failure propagate to the caller instead.
    Instances knn = new Instances(lnn.kNearestNeighbours(instance, numOfNeighbors));

    // Hoisted out of the loop: one RNG for all tie-breaks (was re-created
    // per label).
    Random rnd = new Random();

    for (int i = 0; i < numLabels; i++) {
        // Count the neighbours that carry label i ("aces").
        int aces = 0;
        for (int k = 0; k < numOfNeighbors; k++) {
            double value = Double.parseDouble(
                    train.attribute(labelIndices[i]).value((int) knn.instance(k).value(labelIndices[i])));
            if (Utils.eq(value, 1.0)) {
                aces++;
            }
        }
        // Posterior-style scores for label present vs. absent.
        double Prob_in = PriorProbabilities[i] * CondProbabilities[i][aces];
        double Prob_out = PriorNProbabilities[i] * CondNProbabilities[i][aces];
        if (Prob_in > Prob_out) {
            predictions[i] = true;
        } else if (Prob_in < Prob_out) {
            predictions[i] = false;
        } else {
            // Exact tie: break it randomly.
            predictions[i] = rnd.nextInt(2) == 1;
        }
        // Ranking function: normalized score of the label being present.
        confidences[i] = Prob_in / (Prob_in + Prob_out);
    }
    MultiLabelOutput mlo = new MultiLabelOutput(predictions, confidences);
    return mlo;
}