Example usage for weka.core Instances Instances

List of usage examples for weka.core Instances Instances

Introduction

On this page you can find example usage for the weka.core Instances Instances(Instances) constructor.

Prototype

public Instances(Instances dataset) 

Source Link

Document

Constructor copying all instances and references to the header information from the given set of instances.
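
For reference, here is a minimal, self-contained sketch of how this copy constructor is typically used (illustrative only; the tiny data set built below is made up and is not part of the examples that follow):

import java.util.ArrayList;

import weka.core.Attribute;
import weka.core.DenseInstance;
import weka.core.Instances;

public class InstancesCopyExample {

    public static void main(String[] args) {
        // build a tiny data set with a single numeric attribute
        ArrayList<Attribute> atts = new ArrayList<Attribute>();
        atts.add(new Attribute("value"));
        Instances original = new Instances("example", atts, 0);
        original.add(new DenseInstance(1.0, new double[] { 42.0 }));

        // Instances(Instances dataset): copies all instances and keeps references
        // to the header information of the given data set
        Instances fullCopy = new Instances(original);

        // the related Instances(Instances dataset, int capacity) constructor, which
        // appears in many of the examples below, copies only the header information
        Instances headerOnly = new Instances(original, 0);

        System.out.println(fullCopy.numInstances()); // 1
        System.out.println(headerOnly.numInstances()); // 0
    }
}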

Usage

From source file:affective.core.ArffLexiconEvaluator.java

License:Open Source License

/**
 * Processes all the dictionary files.
 * @throws IOException  an IOException will be raised if an invalid file is supplied
 */
public void processDict() throws IOException {
    BufferedReader reader = new BufferedReader(new FileReader(this.m_lexiconFile));
    Instances lexInstances = new Instances(reader);

    // set upper value for word index
    lexiconWordIndex.setUpper(lexInstances.numAttributes() - 1);

    List<Attribute> numericAttributes = new ArrayList<Attribute>();
    List<Attribute> nominalAttributes = new ArrayList<Attribute>();

    // checks all numeric and nominal attributes and discards the word attribute
    for (int i = 0; i < lexInstances.numAttributes(); i++) {

        if (i != this.lexiconWordIndex.getIndex()) {
            if (lexInstances.attribute(i).isNumeric()) {
                numericAttributes.add(lexInstances.attribute(i));
                // adds the attribute name to the message-level features to be calculated
                this.featureNames.add(this.lexiconName + "-" + lexInstances.attribute(i).name());
            }

            else if (lexInstances.attribute(i).isNominal()) {
                nominalAttributes.add(lexInstances.attribute(i));
                // adds the attribute name together with the nominal value to the message-level features to be calculated
                int numValues = lexInstances.attribute(i).numValues();
                for (int j = 0; j < numValues; j++)
                    this.featureNames.add(this.lexiconName + "-" + lexInstances.attribute(i).name() + "-"
                            + lexInstances.attribute(i).value(j));

            }

        }

    }

    // Maps all words with their affective scores discarding missing values
    for (Instance inst : lexInstances) {
        if (inst.attribute(this.lexiconWordIndex.getIndex()).isString()) {
            String word = inst.stringValue(this.lexiconWordIndex.getIndex());
            // stems the word
            word = this.m_stemmer.stem(word);

            // map numeric scores
            if (!numericAttributes.isEmpty()) {
                Map<String, Double> wordVals = new HashMap<String, Double>();
                for (Attribute na : numericAttributes) {
                    if (!weka.core.Utils.isMissingValue(inst.value(na)))
                        wordVals.put(na.name(), inst.value(na));
                }
                this.numDict.put(word, wordVals);
            }

            // map nominal associations
            if (!nominalAttributes.isEmpty()) {
                Map<String, String> wordCounts = new HashMap<String, String>();
                for (Attribute no : nominalAttributes) {
                    if (!weka.core.Utils.isMissingValue(inst.value(no))) {
                        wordCounts.put(no.name(), no.value((int) inst.value(no)));
                    }

                    this.nomDict.put(word, wordCounts);

                }

            }

        }

    }

}

From source file:affective.core.ArffLexiconWordLabeller.java

License:Open Source License

/**
 * Processes all the dictionary files.
 * @throws IOException  an IOException will be raised if an invalid file is supplied
 */
public void processDict() throws IOException {
    BufferedReader reader = new BufferedReader(new FileReader(this.m_lexiconFile));
    Instances lexInstances = new Instances(reader);

    // set upper value for word index
    lexiconWordIndex.setUpper(lexInstances.numAttributes() - 1);

    // checks all numeric and nominal attributes and discards the word attribute
    for (int i = 0; i < lexInstances.numAttributes(); i++) {

        if (i != this.lexiconWordIndex.getIndex()) {
            if (lexInstances.attribute(i).isNumeric() || lexInstances.attribute(i).isNominal()) {
                this.attributes.add(lexInstances.attribute(i));
            }

        }

    }

    // Maps all words with their affective scores discarding missing values
    for (Instance inst : lexInstances) {
        if (inst.attribute(this.lexiconWordIndex.getIndex()).isString()) {
            String word = inst.stringValue(this.lexiconWordIndex.getIndex());
            // stems the word
            word = this.m_stemmer.stem(word);

            // map numeric scores
            if (!attributes.isEmpty()) {
                Map<Attribute, Double> wordVals = new HashMap<Attribute, Double>();
                for (Attribute na : attributes) {
                    wordVals.put(na, inst.value(na));
                }
                this.attValMap.put(word, wordVals);
            }

        }

    }

}

From source file:agnes.AgnesMain.java

public static Instances loadData(String filePath) {
    BufferedReader reader;
    Instances data = null;
    try {
        reader = new BufferedReader(new FileReader(filePath));
        data = new Instances(reader);
        reader.close();
        data.setClassIndex(data.numAttributes() - 1);
    } catch (Exception e) {
        // load errors are silently ignored; null is returned to the caller
    }
    return data;
}

From source file:algoritmogeneticocluster.Cromossomo.java

private void classifica() {
    //SMO classifier = new SMO();
    //HyperPipes classifier = new HyperPipes();
    IBk classifier = new IBk(5);
    BufferedReader datafile = readDataFile(inId + ".arff");

    Instances data;
    Evaluation eval;
    try {
        data = new Instances(datafile);
        data.setClassIndex(data.numAttributes() - 1);
        eval = new Evaluation(data);
        Random rand = new Random(1); // using seed = 1
        int folds = 10;
        eval.crossValidateModel(classifier, data, folds, rand);
        //this.fitness = eval.pctCorrect();
        //fitness = new BigDecimal(fitness).setScale(2, RoundingMode.HALF_UP).doubleValue(); // round to two decimal places
        pctAcerto = eval.pctCorrect();
        pctAcerto = new BigDecimal(pctAcerto).setScale(2, RoundingMode.HALF_UP).doubleValue();
        microAverage = getMicroAverage(eval, data);
        microAverage = new BigDecimal(microAverage).setScale(2, RoundingMode.HALF_UP).doubleValue();
        macroAverage = getMacroAverage(eval, data);
        macroAverage = new BigDecimal(macroAverage).setScale(2, RoundingMode.HALF_UP).doubleValue();

    } catch (Exception ex) {
        System.out.println("Erro ao tentar fazer a classificacao");
        Logger.getLogger(WekaSimulation.class.getName()).log(Level.SEVERE, null, ex);
    }

    switch (metodoFitness) {
    case 1:
        fitness = pctAcerto;
        break;
    case 2:
        fitness = microAverage;
        break;
    case 3:
        fitness = macroAverage;
        break;
    default:
        break;
    }

}

From source file:algoritmogeneticocluster.NewClass.java

public static void main(String[] args) throws Exception {
    BufferedReader datafile = readDataFile("tabela10.arff");

    Instances data = new Instances(datafile);
    data.setClassIndex(data.numAttributes() - 1);

    // Do 10-split cross validation
    Instances[][] split = crossValidationSplit(data, 10);

    // Separate split into training and testing arrays
    Instances[] trainingSplits = split[0];
    Instances[] testingSplits = split[1];

    // Use a set of classifiers
    Classifier[] models = { new SMO(), new J48(), // a decision tree
            new PART(), new DecisionTable(), //decision table majority classifier
            new DecisionStump() //one-level decision tree

    };

    // Run for each model
    for (int j = 0; j < models.length; j++) {

        // Collect every group of predictions for current model in a FastVector
        FastVector predictions = new FastVector();

        // For each training-testing split pair, train and test the classifier
        for (int i = 0; i < trainingSplits.length; i++) {
            Evaluation validation = classify(models[j], trainingSplits[i], testingSplits[i]);

            predictions.appendElements(validation.predictions());

            // Uncomment to see the summary for each training-testing pair.
            //System.out.println(models[j].toString());
        }

        // Calculate overall accuracy of current classifier on all splits
        double accuracy = calculateAccuracy(predictions);

        // Print current classifier's name and accuracy in a complicated,
        // but nice-looking way.
        System.out.println("Accuracy of " + models[j].getClass().getSimpleName() + ": "
                + String.format("%.2f%%", accuracy) + "\n---------------------------------");
    }

}

From source file:algoritmogeneticocluster.WekaSimulation.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) {
    SMO classifier = new SMO();
    HyperPipes hy = new HyperPipes();
    //        classifier.buildClassifier(trainset);

    BufferedReader datafile = readDataFile("tabela10.arff");

    Instances data;
    Evaluation eval;
    try {
        data = new Instances(datafile);
        data.setClassIndex(data.numAttributes() - 1);
        eval = new Evaluation(data);
        Random rand = new Random(1); // using seed = 1
        int folds = 10;
        eval.crossValidateModel(classifier, data, folds, rand);
        System.out.println(eval.toString());
        System.out.println(eval.numInstances());
        System.out.println(eval.correct());
        System.out.println(eval.incorrect());
        System.out.println(eval.pctCorrect());
        System.out.println(eval.pctIncorrect());

    } catch (Exception ex) {
        Logger.getLogger(WekaSimulation.class.getName()).log(Level.SEVERE, null, ex);
    }

}

From source file:ALR.wdAnJE.java

License:Open Source License

@Override
public void buildClassifier(Instances instances) throws Exception {

    Instances m_DiscreteInstances = null;

    // can classifier handle the data?
    getCapabilities().testWithFail(instances);

    // Discretize instances if required
    if (m_Discretization) {
        m_Disc = new weka.filters.supervised.attribute.Discretize();
        m_Disc.setUseBinNumbers(true);
        m_Disc.setInputFormat(instances);
        System.out.println("Applying Discretization Filter - dodo");
        m_DiscreteInstances = weka.filters.Filter.useFilter(instances, m_Disc);
        System.out.println("Done");

        m_Instances = new Instances(m_DiscreteInstances);
        m_DiscreteInstances = new Instances(m_DiscreteInstances, 0);
    } else {
        m_Instances = new Instances(instances);
        instances = new Instances(instances, 0);
    }

    // Remove instances with missing class
    m_Instances.deleteWithMissingClass();

    // All done, gather statistics
    nInstances = m_Instances.numInstances();
    nAttributes = m_Instances.numAttributes() - 1;
    nc = m_Instances.numClasses();

    probs = new double[nc];

    paramsPerAtt = new int[nAttributes];
    for (int u = 0; u < nAttributes; u++) {
        paramsPerAtt[u] = m_Instances.attribute(u).numValues();
    }

    /*
     * Initialize structure array based on m_S
     */
    if (m_S.equalsIgnoreCase("A1JE")) {
        numTuples = 1; // NB
    } else if (m_S.equalsIgnoreCase("A2JE")) {
        numTuples = 2;
    } else if (m_S.equalsIgnoreCase("A3JE")) {
        numTuples = 3;
    } else if (m_S.equalsIgnoreCase("A4JE")) {
        numTuples = 4;
    } else if (m_S.equalsIgnoreCase("A5JE")) {
        numTuples = 5;
    } else {
        System.out.println("m_S value should be from set {A1JE, A2JE, A3JE, A4JE, A5JE}");
    }

    /* 
     * ----------------------------------------------------------------------------------------
     * Start Parameter Learning Process
     * ----------------------------------------------------------------------------------------
     */

    int scheme = 1;

    /*
     * ---------------------------------------------------------------------------------------------
     * Initialize data structure
     * ---------------------------------------------------------------------------------------------
     */

    if (m_P.equalsIgnoreCase("MAP") || m_P.equalsIgnoreCase("MAP2")) {
        /*
         * MAP - Maximum Likelihood Estimates of the Parameters characterizing P(x_i|y)
         * MAP2 - MLE of parameters characterizing P(y|x_i)
         */
        scheme = plTechniques.MAP;

    } else if (m_P.equalsIgnoreCase("dCCBN")) {

        scheme = plTechniques.dCCBN;

    } else if (m_P.equalsIgnoreCase("dCCBNf")) {

        scheme = plTechniques.dCCBNf;

    } else if (m_P.equalsIgnoreCase("wCCBN")) {

        scheme = plTechniques.wCCBN;

    } else if (m_P.equalsIgnoreCase("wCCBNf")) {

        scheme = plTechniques.wCCBNf;

    } else if (m_P.equalsIgnoreCase("wCCBN2")) {

        scheme = plTechniques.wCCBN2;

    } else if (m_P.equalsIgnoreCase("eCCBN")) {
        //TODO                  
    } else {
        //System.out.println("m_P value should be from set {MAP, dCCBN, wCCBN, dCCBNf, wCCBNf, eCCBN, MAP2, wCCBN2}");
        System.out.println(
                "m_P value should be from set {MAP, dCCBN (LR), wCCBN (ALR), dCCBNf (LR Feelders), wCCBNf (ALR Feelders), eCCBN (ELR), MAP2 (discriminative MAP), wCCBN2 (ALR discriminative MAP)}");
    }

    logDComputer = LogDistributionComputerAnJE.getDistributionComputer(numTuples, scheme);

    if (m_I.equalsIgnoreCase("Flat")) {
        dParameters_ = new wdAnJEParametersFlat(nAttributes, nc, nInstances, paramsPerAtt, scheme, numTuples,
                m_X);
    } else if (m_I.equalsIgnoreCase("Indexed")) {
        dParameters_ = new wdAnJEParametersIndexed(nAttributes, nc, nInstances, paramsPerAtt, scheme, numTuples,
                m_X);
    } else if (m_I.equalsIgnoreCase("IndexedBig")) {
        dParameters_ = new wdAnJEParametersIndexedBig(nAttributes, nc, nInstances, paramsPerAtt, scheme,
                numTuples, m_X);
    } else if (m_I.equalsIgnoreCase("BitMap")) {
        dParameters_ = new wdAnJEParametersBitmap(nAttributes, nc, nInstances, paramsPerAtt, scheme, numTuples,
                m_X);
    } else {
        System.out.println("m_I value should be from set {Flat, Indexed, IndexedBig, BitMap}");
    }

    /*
     * ---------------------------------------------------------------------------------------------
     * Create Data Structure by leveraging ONE or TWO pass through the data
     * (These routines are common to all parameter estimation methods)
     * ---------------------------------------------------------------------------------------------
     */
    if (m_MultiThreaded) {

        dParameters_.updateFirstPass_m(m_Instances);
        System.out.println("Finished first pass.");

        dParameters_.finishedFirstPass();

        if (dParameters_.needSecondPass()) {
            dParameters_.updateAfterFirstPass_m(m_Instances);
            System.out.println("Finished second pass.");
        }

    } else {

        for (int i = 0; i < nInstances; i++) {
            Instance instance = m_Instances.instance(i);
            dParameters_.updateFirstPass(instance);
        }
        System.out.println("Finished first pass.");

        dParameters_.finishedFirstPass();

        if (dParameters_.needSecondPass()) {
            for (int i = 0; i < nInstances; i++) {
                Instance instance = m_Instances.instance(i);
                dParameters_.updateAfterFirstPass(instance);
            }
            System.out.println("Finished second pass.");
        }
    }

    /*
     * Check if Feature Selection needs to be done.
     */
    if (dParameters_.needFeatureSelection()) {
        System.out.println("Feature Selection Flag is On.");
        System.out.println("Reallocating Counts, Probs and Gradient vectors based on FS Results");
        dParameters_.updateVectorsBasedOnFS();
    }

    /*
     * Routine specific operations.
     */

    System.out.println("All data structures are initialized. Starting to estimate parameters.");

    if (m_P.equalsIgnoreCase("MAP") || m_P.equalsIgnoreCase("MAP2")) {

        /* 
         * ------------------------------------------------------------------------------
         * MAP - Maximum Likelihood Estimates of the Parameters characterizing P(x_i|y)
         * MAP2 - MLE of parameters characterizing P(y|x_i)
         * ------------------------------------------------------------------------------
         */

        if (m_P.equalsIgnoreCase("MAP2"))
            dParameters_.convertToProbs_Cond();
        else
            dParameters_.convertToProbs();

    } else if (m_P.equalsIgnoreCase("dCCBN")) {

        /*
         * ------------------------------------------------------------------------------
         * Classic high-order Logistic Regression
         * ------------------------------------------------------------------------------          
         */

        dParameters_.convertToProbs();

        dParameters_.initializeParameters_D(m_WeightingInitialization, isFeelders);

        if (m_MultiThreaded) {
            if (m_E.equalsIgnoreCase("CLL")) {
                function_to_optimize = new ParallelObjectiveFunctionCLL_d(this);
            } else if (m_E.equalsIgnoreCase("MSE")) {
                function_to_optimize = new ParallelObjectiveFunctionMSE_d(this);
            }
        } else {
            if (m_E.equalsIgnoreCase("CLL")) {
                function_to_optimize = new ObjectiveFunctionCLL_d(this);
            } else if (m_E.equalsIgnoreCase("MSE")) {
                function_to_optimize = new ObjectiveFunctionMSE_d(this);
            }
        }

    } else if (m_P.equalsIgnoreCase("dCCBNf")) {

        /*
         * ------------------------------------------------------------------------------
         * Classic high-order Logistic Regression (Feelders implementation)
         * ------------------------------------------------------------------------------
         */

        dParameters_.convertToProbs();

        dParameters_.initializeParameters_D(m_WeightingInitialization, isFeelders);

        if (m_MultiThreaded) {
            if (m_E.equalsIgnoreCase("CLL")) {
                function_to_optimize = new ParallelObjectiveFunctionCLL_df(this);
            } else if (m_E.equalsIgnoreCase("MSE")) {
                // TODO
            }
        } else {
            if (m_E.equalsIgnoreCase("CLL")) {
                function_to_optimize = new ObjectiveFunctionCLL_df(this);
            } else if (m_E.equalsIgnoreCase("MSE")) {
                // TODO
            }
        }

    } else if (m_P.equalsIgnoreCase("wCCBN")) {

        /*
         * ------------------------------------------------------------------------------
         * ALR
         * ------------------------------------------------------------------------------
         */

        dParameters_.convertToProbs();

        //double scale = 1e5;
        //dParameters_.multiplyProbsWithAnJEWeight(scale);

        if (isM_PreScaling()) {
            //dParameters_.multiplyProbsWithAnJEWeight();
            dParameters_.multiplyProbsWithAnJEWeightOpt();
        }

        dParameters_.initializeParameters_W(m_WeightingInitialization, isFeelders);

        if (m_MultiThreaded) {
            if (m_E.equalsIgnoreCase("CLL")) {
                function_to_optimize = new ParallelObjectiveFunctionCLL_w(this);
            } else if (m_E.equalsIgnoreCase("MSE")) {
                function_to_optimize = new ParallelObjectiveFunctionMSE_w(this);
            }
        } else {
            if (m_E.equalsIgnoreCase("CLL")) {
                function_to_optimize = new ObjectiveFunctionCLL_w(this);
            } else if (m_E.equalsIgnoreCase("MSE")) {
                function_to_optimize = new ObjectiveFunctionMSE_w(this);
            }
        }

    } else if (m_P.equalsIgnoreCase("wCCBNf")) {

        /*
         * ------------------------------------------------------------------------------
         * ALR (Feelders implementation)
         * ------------------------------------------------------------------------------
         */

        dParameters_.convertToProbs_F();
        //dParameters_.convertToProbs();

        isFeelders = true;
        dParameters_.initializeParameters_W(m_WeightingInitialization, isFeelders);

        if (m_MultiThreaded) {
            if (m_E.equalsIgnoreCase("CLL")) {
                function_to_optimize = new ParallelObjectiveFunctionCLL_wf(this);
            } else if (m_E.equalsIgnoreCase("MSE")) {
                // TODO
            }
        } else {
            if (m_E.equalsIgnoreCase("CLL")) {
                function_to_optimize = new ObjectiveFunctionCLL_wf(this);
            } else if (m_E.equalsIgnoreCase("MSE")) {
                // TODO
            }
        }

    } else if (m_P.equalsIgnoreCase("wCCBN2")) {

        /*
         * ------------------------------------------------------------------------------
         * DBL (discriminative optimization - Bin Liu's idea)
         * ------------------------------------------------------------------------------
         */

        dParameters_.convertToProbs_Cond();

        dParameters_.initializeParameters_W(m_WeightingInitialization, isFeelders);

        if (m_E.equalsIgnoreCase("CLL")) {
            function_to_optimize = new ObjectiveFunctionCLL_w2(this);
        }

    } else if (m_P.equalsIgnoreCase("eCCBN")) {
        //TODO            
        /* 
         * Implement ELR here
         */
    } else {
        System.out.println(
                "m_P value should be from set {MAP, dCCBN (LR), wCCBN (ALR), dCCBNf (LR Feelders), wCCBNf (ALR Feelders), eCCBN (ELR), MAP2 (discriminative MAP), wCCBN2 (ALR discriminative MAP)}");
    }

    /*
     * Train the classifier on initialized data structure.
     */

    if (m_P.equalsIgnoreCase("MAP") || m_P.equalsIgnoreCase("MAP2")) {

        // Do nothing
        System.out.print("NLL (MAP) = " + dParameters_.getNLL(m_Instances, logDComputer) + "\n");

    } else if (m_MaxIterations != 0) {

        if (m_O.equalsIgnoreCase("QN")) {

            Minimizer alg = new Minimizer();
            StopConditions sc = alg.getStopConditions();
            //sc.setFunctionReductionFactor(1e1);
            //sc.setFunctionReductionFactorInactive();
            sc.setMaxGradientNorm(maxGradientNorm);
            sc.setMaxIterations(m_MaxIterations);

            Result result;

            // Call the lbfgs optimizer
            if (isM_MVerb()) {
                System.out.println();
                System.out.print("fx_QN = [");

                System.out.print(dParameters_.getNLL(m_Instances, logDComputer) + ", ");

                alg.setIterationFinishedListener((p, nll, g) -> {
                    System.out.print(nll + ", ");
                    return true;
                });
                result = alg.run(function_to_optimize, dParameters_.getParameters());
                System.out.println("];");
                System.out.println(result);

                System.out.println("NoIter = " + result.iterationsInfo.iterations);
                System.out.println();

            } else {
                result = alg.run(function_to_optimize, dParameters_.getParameters());
                System.out.println("NoIter = " + result.iterationsInfo.iterations);
            }

            function_to_optimize.finish();

        } else if (m_O.equalsIgnoreCase("GD")) {

            MinimizerGD alg = new MinimizerGD();
            alg.setMaxIterations(m_MaxIterations);

            Result result;

            if (isM_ReConditioning()) {

                System.out.println("---->");
                System.out.print("fx_GD = [");

                for (int i = 0; i < 200; i++) {
                    alg.setMaxIterations(1);
                    result = alg.run(function_to_optimize, dParameters_.getParameters());
                }

                System.out.println("];");

            } else {

                if (isM_MVerb()) {
                    System.out.println("---->");
                    System.out.print("fx_GD = [");

                    result = alg.run(function_to_optimize, dParameters_.getParameters());
                    System.out.println("];");

                    System.out.println("NoIter = " + result.iterationsInfo.iterations);
                    System.out.println();

                } else {
                    result = alg.run(function_to_optimize, dParameters_.getParameters());
                    System.out.println("NoIter = " + result.iterationsInfo.iterations);
                }

                function_to_optimize.finish();
            }

        } else if (m_O.equalsIgnoreCase("CG")) {

            MinimizerCG alg = new MinimizerCG();
            alg.setMaxIterations(m_MaxIterations);

            Result result;

            if (isM_MVerb()) {
                System.out.println("---->");
                System.out.print("fx_CG = [");

                System.out.print(dParameters_.getNLL(m_Instances, logDComputer) + ", ");

                result = alg.run(function_to_optimize, dParameters_.getParameters());
                System.out.println("];");

                System.out.println("NoIter = " + result.iterationsInfo.iterations);
                System.out.println();

            } else {
                result = alg.run(function_to_optimize, dParameters_.getParameters());
                System.out.println("NoIter = " + result.iterationsInfo.iterations);
            }

            function_to_optimize.finish();

        }

    }

    // free up some space
    m_Instances = new Instances(m_Instances, 0);
}

From source file:AnDE.wdAnDE.java

License:Open Source License

@Override
public void buildClassifier(Instances instances) throws Exception {

    Instances m_DiscreteInstances = null;

    // can classifier handle the data?
    getCapabilities().testWithFail(instances);

    // Discretize instances if required
    if (m_Discretization) {
        m_Disc = new weka.filters.supervised.attribute.Discretize();
        m_Disc.setUseBinNumbers(true);
        m_Disc.setInputFormat(instances);
        System.out.println("Applying Discretization Filter");
        m_DiscreteInstances = weka.filters.Filter.useFilter(instances, m_Disc);
        System.out.println("Done");

        m_Instances = new Instances(m_DiscreteInstances);
        m_DiscreteInstances = new Instances(m_DiscreteInstances, 0);
    } else {
        m_Instances = new Instances(instances);
        instances = new Instances(instances, 0);
    }

    // remove instances with missing class
    m_Instances.deleteWithMissingClass();
    nInstances = m_Instances.numInstances();
    nAttributes = m_Instances.numAttributes() - 1;
    nc = m_Instances.numClasses();

    probs = new double[nc];

    paramsPerAtt = new int[nAttributes];
    for (int u = 0; u < nAttributes; u++) {
        paramsPerAtt[u] = m_Instances.attribute(u).numValues();
    }

    /*
     * Initialize structure array based on m_S
     */
    if (m_S.equalsIgnoreCase("A0DE")) {
        // A0DE
        numTuples = 0;
    } else if (m_S.equalsIgnoreCase("A1DE")) {
        // A1DE         
        numTuples = 1;
    } else if (m_S.equalsIgnoreCase("A2DE")) {
        // A2DE         
        numTuples = 2;
    }

    /* 
     * ----------------------------------------------------------------------------------------
     * Start Parameter Learning Process
     * ----------------------------------------------------------------------------------------
     */

    int scheme = 1;

    /*
     * ---------------------------------------------------------------------------------------------
     * Initialize data structure
     * ---------------------------------------------------------------------------------------------
     */

    if (m_P.equalsIgnoreCase("MAP")) {
        /*
         * MAP - Maximum Likelihood Estimates of the Parameters characterizing P(x_i|y)
         */
        scheme = plTechniques.MAP;

    } else {
        System.out.println("m_P value should be from set {MAP}");
    }

    logDComputer = LogDistributionComputerAnDE.getDistributionComputer(numTuples, scheme);

    if (m_I.equalsIgnoreCase("Flat")) {
        dParameters_ = new wdAnDEParametersFlat(nAttributes, nc, nInstances, paramsPerAtt, scheme, numTuples,
                m_MVerb);
    } else if (m_I.equalsIgnoreCase("Indexed")) {
    } else if (m_I.equalsIgnoreCase("IndexedBig")) {
        dParameters_ = new wdAnDEParametersIndexedBig(nAttributes, nc, nInstances, paramsPerAtt, scheme,
                numTuples);
    } else if (m_I.equalsIgnoreCase("BitMap")) {
    } else {
        System.out.println("m_I value should be from set {Flat, Indexed, IndexedBig, BitMap}");
    }

    /*
     * ---------------------------------------------------------------------------------------------
     * Create Data Structure by leveraging ONE or TWO pass through the data
     * (These routines are common to all parameter estimation methods)
     * ---------------------------------------------------------------------------------------------
     */
    if (m_MultiThreaded) {

        dParameters_.updateFirstPass_m(m_Instances);

        if (m_MVerb)
            System.out.println("Finished first pass.");

        dParameters_.finishedFirstPass();

        if (dParameters_.needSecondPass()) {
            dParameters_.update_MAP_m(m_Instances);

            if (m_MVerb)
                System.out.println("Finished second pass.");
        }

    } else {

        for (int i = 0; i < nInstances; i++) {
            Instance instance = m_Instances.instance(i);
            dParameters_.updateFirstPass(instance);
        }
        if (m_MVerb)
            System.out.println("Finished first pass.");

        dParameters_.finishedFirstPass();

        if (dParameters_.needSecondPass()) {
            for (int i = 0; i < nInstances; i++) {
                Instance instance = m_Instances.instance(i);
                dParameters_.update_MAP(instance);
            }
            if (m_MVerb)
                System.out.println("Finished second pass.");
        }
    }

    /*
     * Routine specific operations.
     */

    if (m_MVerb)
        System.out.println("All data structures are initialized. Starting to estimate parameters.");

    // free up some space
    m_Instances = new Instances(m_Instances, 0);
}

From source file:ann.ANNOptions.java

public void initWeightsSLP(Instances data) throws Exception {
    ntb.setInputFormat(data);
    data = new Instances(Filter.useFilter(data, ntb));

    //normalize filter
    normalize.setInputFormat(data);
    data = new Instances(Filter.useFilter(data, normalize));

    int nAttr = data.numAttributes();
    Scanner sc = new Scanner(System.in);
    int nOutput;
    if (data.numClasses() <= 2 && topologyOpt == 1) {
        nOutput = 1;
    } else {
        nOutput = data.numClasses();
    }

    for (int j = 0; j < nOutput; j++) {
        Neuron temp = new Neuron();
        if (weightOpt == 1) { // Random
            for (int i = 0; i < nAttr; i++) {
                Random random = new Random();
                temp.weights.add(random.nextDouble());
                //                    temp.weights.add(0.0);
            }
        } else { // Given
            System.out.println("Output-" + j);
            for (int i = 0; i < nAttr - 1; i++) {
                System.out.print("Weight-" + (i + 1) + ": ");
                temp.weights.add(sc.nextDouble());
            }
            System.out.print("Bias weight: ");
            temp.weights.add(sc.nextDouble());
        }

        output.add(temp);
    }
}

From source file:ann.ANNOptions.java

public void initWeightsMLP(Instances data) throws Exception {
    ntb.setInputFormat(data);
    data = new Instances(Filter.useFilter(data, ntb));

    //normalize filter
    normalize.setInputFormat(data);
    data = new Instances(Filter.useFilter(data, normalize));

    int nAttr = data.numAttributes();
    Scanner sc = new Scanner(System.in);

    int nOutput = data.numClasses();

    for (int i = 0; i < hiddenLayer; i++) {
        if (weightOpt == 2) {
            System.out.println("Layer-" + (i + 1));
        }
        List<Neuron> neuronLayer = new ArrayList<Neuron>();
        for (int j = 0; j < layerNeuron.get(i) + 1; j++) {
            if (weightOpt == 2) {
                System.out.println("Neuron-" + (j + 1));
            }
            Neuron neuron = new Neuron();
            if (i == 0) { // weight from input layer
                for (int k = 0; k < nAttr; k++) {
                    if (weightOpt == 1) { // random 
                        Random random = new Random();
                        neuron.weights.add(random.nextDouble());
                        //                            neuron.weights.add(0.0);
                    } else { // given
                        if (k < nAttr - 1) {
                            if (weightOpt == 2) {
                                System.out.print("Weight input-" + (k + 1) + ": ");
                            }
                        } else {
                            if (weightOpt == 2) {
                                System.out.print("Weight bias: ");
                            }
                        }
                        neuron.weights.add(sc.nextDouble());
                    }
                }
                neuronLayer.add(neuron);
            } else if (j < layerNeuron.get(i)) { // weight from hidden layer
                for (int k = 0; k < layerNeuron.get(i - 1) + 1; k++) { // layer neuron + 1, 1 for bias
                    if (weightOpt == 1) { // random 
                        Random random = new Random();
                        neuron.weights.add(random.nextDouble());
                        //                            neuron.weights.add(0.0);
                    } else { // given
                        if (k < layerNeuron.get(i - 1)) {
                            if (weightOpt == 2) {
                                System.out.print("Weight neuron-" + (k + 1) + ": ");
                            }
                        } else {
                            if (weightOpt == 2) {
                                System.out.print("Weight bias: ");
                            }
                        }
                        neuron.weights.add(sc.nextDouble());
                    }
                }
                neuronLayer.add(neuron);
            }

        }
        if (i != 0) {
            Neuron bias = new Neuron();
            neuronLayer.add(bias);
        }
        layer.add(neuronLayer);
    }

    //last hidden layer to output
    List<Neuron> neuronLayer = new ArrayList<Neuron>();
    for (int i = 0; i < nOutput; i++) {
        Neuron neuron = new Neuron();
        for (int j = 0; j < layerNeuron.get(layerNeuron.size() - 1) + 1; j++) {
            if (weightOpt == 1) { // random 
                Random random = new Random();
                //                    neuron.weights.add(random.nextDouble());
                neuron.weights.add(0.0);
            } else { // given
                if (j < layerNeuron.get(layerNeuron.size() - 1)) {
                    System.out.print("Weight neuron-" + (j + 1) + ": ");
                } else {
                    System.out.print("Bias: ");
                }
                neuron.weights.add(sc.nextDouble());
            }
        }
        neuronLayer.add(neuron);
    }
    layer.add(neuronLayer);
}