cs.man.ac.uk.stats.ComputeANOVAStats.java Source code

Java tutorial

Introduction

Here is the source code for cs.man.ac.uk.stats.ComputeANOVAStats.java

Source

/**
 *
 * This file is part of STFUD.
 *
 * STFUD is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * STFUD is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with STFUD.  If not, see <http://www.gnu.org/licenses/>.
 *
 * File name:    ComputeANOVAStats.java
 * Package: cs.man.ac.uk.stats
 * Created:   Nov 4, 2013
 * Author:   Rob Lyon
 * 
 * Contact:   rob@scienceguyrob.com or robert.lyon@cs.man.ac.uk
 * Web:      <http://www.scienceguyrob.com> or <http://www.cs.manchester.ac.uk> 
 *          or <http://www.jb.man.ac.uk>
 */
package cs.man.ac.uk.stats;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Vector;

import org.apache.commons.math3.distribution.FDistribution;

import cs.man.ac.uk.common.Common;
import cs.man.ac.uk.io.Writer;

/**
 * The class calculates statistics for the result files output by STFUD. In particular
 * this class can be used to perform a ANOVA analysis of the results, and a further Tukey
 * test.
 *
 * @author Rob Lyon
 *
 * @version 1.0, 11/04/13
 */
public class ComputeANOVAStats {
    //*****************************************
    //*****************************************
    //              Variables
    //*****************************************
    //*****************************************

    /**
     * ANOVA objects stores the means/sums/summed squares of data in the data files to be analyzed.
     * This tree map simply stores these objects. They key for this map is the unique name of a test,
     * and the value is a vector of ANOVA objects containing the results for the test. Each ANOVA
     * object contains the performance results of a particular algorithm. i.e.,
     * 
     *  <KEY>      <VALUE>
     *  <"Test 1">   < <Algorithm 1 results>,...,<Algorithm n results> >
     *  ...
     *  <"Test n">   < <Algorithm 1 results>,...,<Algorithm n results> >
     */
    private static LinkedHashMap<String, Vector<ANOVA>> anovaObjects = new LinkedHashMap<String, Vector<ANOVA>>();

    /*
     * The header of the data set being analyzed (this class analyzes CSV files, so this is the CSV header).
     */
    private static String header;

    /*
     * Significance level for ANOVA and Tukey tests.
     */
    private static double alpha = 0.01;

    //*****************************************
    //*****************************************
    //              Methods
    //*****************************************
    //*****************************************

    /**
     * Executes the statistical analysis.
     * @param args unused command line arguments.
     */
    /**
     * Executes the statistical analysis.
     *
     * Each supplied directory is analyzed independently via {@link #runANOVA(String)}.
     *
     * @param args optional list of directories to analyze; when empty, the
     *             original hard-coded ICPR result directories are used.
     */
    public static void main(String[] args) {
        // Directories containing files to compute statistics for. Command line
        // arguments take precedence; otherwise fall back to the built-in paths.
        String[] directories;

        if (args != null && args.length > 0) {
            directories = args;
        } else {
            directories = new String[] { "/Users/Rob/Dropbox/ICPR_RESULTS/MAGIC",
                    "/Users/Rob/Dropbox/ICPR_RESULTS/MINIBOONE", "/Users/Rob/Dropbox/ICPR_RESULTS/PULSAR",
                    "/Users/Rob/Dropbox/ICPR_RESULTS/SKIN_SEGMENTATION" };
        }

        for (String directory : directories)
            runANOVA(directory);
    }

    /**
     * Runs an ANOVA analysis on the specified directory.
     * @param directory the directory containing the files to run the analysis on.
     */
    /**
     * Runs an ANOVA analysis on the specified directory.
     *
     * Every raw result CSV in the directory is processed (previously generated
     * ".Statistics.csv" and "ANOVA" outputs are skipped), then an overall ANOVA
     * summary is written to "&lt;directory&gt;/ANOVA.csv".
     *
     * @param directory the directory containing the files to run the analysis on.
     */
    private static void runANOVA(String directory) {
        // Reset state accumulated from any previously analyzed directory.
        // (Clearing alone suffices; the original also reallocated the map,
        // which discarded the freshly cleared instance for no benefit.)
        anovaObjects.clear();
        header = "";

        // Get the files in the directory.
        String[] files = Common.getFilePaths(directory);

        // Process each raw result file, skipping our own generated outputs.
        for (String file : files) {
            if (Common.fileExist(file) && file.endsWith(".csv") && !file.endsWith(".Statistics.csv")
                    && !file.contains("ANOVA")) {
                System.out.println("Processing file: " + file);
                process(file, 6);
            }
        }

        ANOVAAnalysis(directory + "/ANOVA.csv");
    }

    /**
     * Processes a file containing test results. Outputs a file containing the averages of the data read in.
     * @param path the file to write the averages to.
     * @param spacer the number of lines after which a newline should be inserted in the output file (for formatting only).
     */
    /**
     * Processes a file containing test results. Outputs a file containing the averages of the data read in.
     *
     * FILE FORMAT: the first line contains column labels, followed by one or more
     * "test blocks" of data rows. Blocks are separated by lines containing no data
     * (lines beginning with a comma). For example:
     *
     *  TEST_&lt;+&gt;_&lt;-&gt;_&lt;balance&gt;_&lt;labelling&gt;,...,&lt;Label n&gt;
     *  TEST_10_20_1.0_0.5,1.0,...,100
     *  ...
     *  TEST_10_20_1.0_0.5,5.0,...,500
     *  ,,
     *  ,,
     *  TEST_50_10_0.5_0.1,1.0,...,100
     *  ...
     *  TEST_50_10_0.5_0.1,5.0,...,500
     *
     * For each block an ANOVA object is built and registered in {@link #anovaObjects},
     * and the block's per-column averages are appended to "&lt;path minus .csv&gt;.Statistics.csv".
     *
     * @param path the file to read results from; averages are written to a sibling ".Statistics.csv" file.
     * @param spacer the number of summary lines after which a separator line is inserted
     *               in the output file (for formatting only).
     */
    private static void process(String path, int spacer) {
        // Path to write out summary statistics to.
        String outputPath = path.replace(".csv", ".Statistics.csv");
        Common.fileDelete(outputPath);// Simply clean up any previous files.

        File file = new File(path);

        if (!file.exists()) {
            System.out.println("Could not read data file: " + path);
            return;
        }

        BufferedReader in = null;

        try {
            // Open stream to file.
            in = new BufferedReader(new FileReader(file));

            // Read in the header line separately.
            String line = in.readLine();

            if (line == null) {
                // BUG FIX: an empty file previously wrote the literal text "null"
                // as the output header and left the header field null.
                System.out.println("Empty data file: " + path);
                return;
            }

            Writer.append(outputPath, line + "\n");

            // Obtain column headers (used later by the ANOVA analysis).
            header = line;

            // Rows of data collected for the current test block.
            ArrayList<ArrayList<String>> content = new ArrayList<ArrayList<String>>();
            boolean haveMeta = false; // True once the current block's meta data has been parsed.
            boolean printed = false; // True once the current block's summary has been written.

            // Variables describing the test in the current block, parsed from the
            // first column: TEST_<+>_<->_<balance>_<labelling>.
            String positives = "";
            String negatives = "";
            String balance = "";
            String labelling = "";

            int rows = 0; // Rows of data in the current block.
            int columns = 0; // Columns per row in the current block.
            int writeCount = 0; // Summary lines written since the last spacer, for file formatting.

            while ((line = in.readLine()) != null) {
                if (line.startsWith(",")) // Empty separator line, no data.
                {
                    // A separator means a complete block has just been read, so write
                    // its summary statistics (unless this was already done).
                    if (!printed) {
                        writeCount = summarizeBlock(outputPath, path, spacer, content, rows, columns,
                                positives, negatives, balance, labelling, writeCount, line);
                        printed = true;
                    }

                    // Reset the per-block state.
                    haveMeta = false;
                    positives = "";
                    negatives = "";
                    balance = "";
                    labelling = "";
                    rows = 0;
                    columns = 0;
                    content.clear();
                } else // Collect more data.
                {
                    String[] components = line.split(",");
                    rows++;
                    columns = components.length;

                    // Each line of the file becomes a row of its (unquoted) fields.
                    ArrayList<String> row = new ArrayList<String>(components.length);
                    for (String c : components)
                        row.add(c.replace("\"", ""));

                    content.add(row);

                    if (!haveMeta) {
                        // TEST_<+>_<->_<balance>_<labelling>
                        //   ^   ^   ^     ^            ^
                        //   |   |   |     |            |
                        //   0   1   2     3            4
                        String[] metaComponents = components[0].split("_");
                        positives = metaComponents[1];
                        negatives = metaComponents[2];
                        balance = metaComponents[3];
                        labelling = metaComponents[4];
                        printed = false;
                        // BUG FIX: this flag was never set, so the (identical) meta
                        // data was needlessly re-parsed on every row of a block.
                        haveMeta = true;
                    }
                }
            }

            // Summarize the final block if the file did not end with a separator.
            if (!printed)
                summarizeBlock(outputPath, path, spacer, content, rows, columns, positives, negatives,
                        balance, labelling, writeCount, "");
        } catch (Exception e) {
            System.out.println("Exception reading data file: " + path);
            e.printStackTrace();
        } finally {
            if (in != null) {
                try {
                    in.close();
                } catch (IOException e) {
                    // Nothing useful can be done if the close itself fails.
                }
            }
        }
    }

    /**
     * Builds an ANOVA object for a completed test block, registers it in
     * {@link #anovaObjects} (keyed on the block's test name so comparable results
     * from different files are grouped together), and appends the block's
     * per-column averages to the statistics file.
     *
     * @param outputPath the statistics file to append the averages to.
     * @param path the source file the block was read from.
     * @param spacer the number of summary lines after which a separator line is written.
     * @param content the rows of data collected for the block.
     * @param rows the number of rows in {@code content}.
     * @param columns the number of columns per row.
     * @param positives the &lt;+&gt; component of the block's test name.
     * @param negatives the &lt;-&gt; component of the block's test name.
     * @param balance the &lt;balance&gt; component of the block's test name.
     * @param labelling the &lt;labelling&gt; component of the block's test name.
     * @param writeCount summary lines written since the last spacer line.
     * @param separatorLine the text written as a spacer line: the file's own
     *        separator line when mid-file, or the empty string at end of file
     *        (BUG FIX: the end-of-file path previously wrote the literal "null"
     *        because the loop variable was null after the read loop exited).
     * @return the updated write count (reset to zero when a spacer was written).
     */
    private static int summarizeBlock(String outputPath, String path, int spacer,
            ArrayList<ArrayList<String>> content, int rows, int columns, String positives,
            String negatives, String balance, String labelling, int writeCount, String separatorLine) {
        String key = "TEST_" + positives + "_" + negatives + "_" + balance + "_" + labelling;

        // Obtain the summary statistics for the block.
        StatsDataSet dataSet = new StatsDataSet(content, rows, columns);

        ANOVA anova = new ANOVA(dataSet.getRows(), dataSet.getColumns(), path);
        for (int i = 0; i < dataSet.getRows(); i++)
            anova.addRow(dataSet.getRowAsArray(i), i);

        anova.centreData();

        // Group comparable ANOVA objects (same test, different source file) together.
        if (anovaObjects.containsKey(key))
            anovaObjects.get(key).add(anova);
        else {
            Vector<ANOVA> value = new Vector<ANOVA>();
            value.add(anova);
            anovaObjects.put(key, value);
        }

        // Write out: Balance_<balance>_Labelling_<labelling>,<Average 1>,...,<Average n>
        String[] averages = dataSet.avgColumns_CreateNewRow();

        // Concatenate the averages into a single string for writing out.
        StringBuilder avgString = new StringBuilder(averages[1]);
        for (int i = 2; i < averages.length; i++)
            avgString.append(",").append(averages[i]);

        Writer.append(outputPath,
                "Balance_" + balance + "_Labelling_" + labelling + "," + avgString + "\n");

        writeCount++;

        // Add a separator line to improve formatting in the output file.
        if (writeCount == spacer) {
            Writer.append(outputPath, separatorLine + "\n");
            writeCount = 0;
        }

        return writeCount;
    }

    /**
     * Performs an ANOVA analysis on the data read in.
     * 
     * @param outputPath the path to output details of the ANOVA analysis to.
     */
    private static void ANOVAAnalysis(String outputPath) {
        /**
         * OUTPUT FILE PREPARATION
         */

        // Clear up output path.
        Common.fileDelete(outputPath);

        String tukeyOutputPath = outputPath.replace(".csv", "_HSD.csv");
        Common.fileDelete(tukeyOutputPath);
        Writer.append(tukeyOutputPath,
                "Result 1,Result 2,Test,Metric,MSwg,DFwg,n,alpha,HSD,H0 (1=rejected & 0=accepted),Outcome\n");

        // Write header information to output path
        String[] headers = header.split(",");
        Writer.append(outputPath, headers[0] + ",");

        for (int i = 1; i < headers.length; i++)
            Writer.append(outputPath,
                    headers[i] + ",F-ratio,P-value,Fb,Fw,H0 (1=rejected & 0=accepted), alpha=" + alpha + ",");

        Writer.append(outputPath, "\n");

        /**
         * PERFROM ANOVA
         */

        for (Map.Entry<String, Vector<ANOVA>> entry : anovaObjects.entrySet()) {
            String key = entry.getKey();
            Vector<ANOVA> vector = entry.getValue();

            /**
             * OK, its crucial to understand what is going on here. We have a number of files
             * containing results of algorithm tests. Each File will contain the results of
             * a number of different tests using different parameters. Note that each files contains
             * results for a specific algorithm only.
             * 
             * Now if we want to perform ANOVA analysis on results from multiple algorithms, we
             * must analyze the results in these files together rather than in isolation. So we have
             * the following situation: n files, containing results of m tests, occurring in the same order in each
             * file. These are directly comparable results, for instance:
             * 
             *    FILE 1               FILE 2               FILE N
             *  Test 1               Test 1               Test 1   -|
             *  Test 1               Test 1               Test 1   |
             *  Test 1               Test 1               Test 1   |---> Test Block 1 (i.e. multiple runs of same test)
             *  Test 1               Test 1               Test 1   |
             *  Test 1               Test 1               Test 1   -|
             *  
             *  Test 2               Test 2               Test 2
             *  Test 2               Test 2               Test 2
             *  Test 2               Test 2               Test 2
             *  Test 2               Test 2               Test 2
             *  Test 2               Test 2               Test 2
             *  
             *  Test n               Test n               Test n   -|
             *  Test n               Test n               Test n    |
             *  Test n               Test n               Test n    |---> Test Block n
             *  Test n               Test n               Test n    |
             *  Test n               Test n               Test n   -|
             *  
             *  ** Note each test result is made up of a number of recorded metrics. For instance Test Block 1 in file 1
             *  would look something like (with four metrics recorded during testing lets say TP,TN,FP,FN):
             *  
             *  120   ,   6   ,   5   ,   3   -|---> Run 1 --->
             *  118   ,   7   ,   6   ,   4    |            |
             *  122   ,   8   ,   7   ,   5    |            |---> Test Block 1.
             *  130   ,   12   ,   5   ,   13    |            |
             *  100   ,   2   ,   5   ,   7   -|---> Run 5 --->
             *  
             *  The results of each test are actually described in terms of k variables (typically k=16). These variables
             *  include the true positives (TP), false positives (FP), accuracy, f-score etc. Thus to compare the results
             *  we need to do ANOVA analysis using data occurring in each of the three files. However we can only compare
             *  like with like. So in the example above we can only perform ANOVA on comparable test blocks. In
             *  which case ANOVA would be performed on Test 1 data in files 1, 2 and 3, then Test 2 data and so on.
             *  At no point would Test 1 data in any of the files be compared with Test 2 data for example.
             *  
             *  The code below does this. Using arrays we perform ANOVA on each metric
             *  in the test blocks. Clearly this makes the code below somewhat complicated to understand!
             *  I'm genuinely sorry for that, the main reason is because I may have to perform this type of 
             *  analysis many thousands of times. But I'll try to explain how it works.
             *  
             *  For each Test block, in each file, an ANOVA object is generated in code above ( in the process() method).
             *  Each ANOVA object essentially contains a matrix of the data collected in a test block. These ANOVA objects
             *  have methods that enable them to calculate the mean and sum of the values in their matrix. For instance,
             *  Test 1 involves ten runs of the same test. For each test, lets say we collect 4 pieces of data, the number of 
             *  true positives, true negatives, false positives and false negatives. An ANOVA object for Test 1 for File 1
             *  will contain a matrix of this information, and calculate the means/sums of these four variables storing them in:
             *  
             *  private double sums[];
             *   private double means[];
             *
             *  So then,
             *  
             *  sums[0] contains the sum of true positives.
             *  sums[1] contains the sum of true negatives.
             *  sums[2] contains the sum of false positives.
             *  sums[3] contains the sum of false negatives.  
             *  
             *  And likewise for the means.
             *  
             *  When the process() method terminates we have a number of ANOVA objects stored in a TreeMap structure,
             *  which groups comparable ANOVA objects by storing them in the same vector. 
             *  
             *  Here then we begin iterating through this tree map, and calculate the F-ratio for comparable ANOVA objects.
             *  This way we can calculate all the ANOVA results automatically, for every variable we have.
             */

            /*
             * ANOVA WORKED EXAMPLE (credit Wikipedia!).
             * 
             * Consider an experiment to study the effect of three different levels of a factor
             * on a response (e.g. three levels of a fertilizer on plant growth). If we had 6 observations
             * for each level, we could write the outcome of the experiment in a table like this, 
             * where a1, a2, and a3 are the three levels of the factor being studied. 
             * 
             * a1   a2   a3
             * 6   8   13
             * 8   12   9
             * 4   9   11
             * 5   11   8
             * 3   6   7
             * 4   8   12
             * 
             * The null hypothesis, denoted H0, for the overall F-test for this experiment would be that
             * all three levels of the factor produce the same response, on average. To calculate the F-ratio:
             * 
             * Step 1: Calculate the mean within each group:
             * 
             *    Y1 = ( 6 + 8 + 4 + 5 + 3 + 4 ) / 6       = 5
             *  Y2 = ( 8 + 12 + 9 + 11 + 6 + 8 ) / 6   = 9
             *  Y3 = ( 13 + 9 + 11 + 8 + 7 + 12 ) / 6   = 10
             * 
             * Step 2: Calculate the overall mean, Y:
             * 
             *    Y = (Y1 + Y2 + Y3) / 3 = 8.
             * 
             * Step 3: Calculate the "between-group" sum of squares:
             * 
             *  "between-group" sum of squares = n(Y1-Y)^2 + n(Y2-Y)^2 + n(Y3-Y)^2
             *                          = 6(5-8)^2 + 6(9-8)^2 + 6(9-8)^2
             *                          = 84
             * 
             * Step 4: The between-group degrees of freedom is one less than the number of groups.
             * 
             *   between-group degrees of freedom = a - 1
             *                             = 3-1
             *                             = 2
             *   
             * Step 5: The between-group mean square value is
             * 
             *  between-group mean square value = "between-group" sum of squares / between-group degrees of freedom
             *                          = 84/2
             *                          = 42
             *  
             * Step 6: Calculate the "within-group" sum of squares. Begin by centering the data in each group
             * 
             *       a1         a2         a3
             * 6 - 5 = 1   8 - 9 = -1   13 - 10 = 3
             * 8 - 5 = 3   12 - 9 = 3   9 - 10 = -1
             * 4 - 5 = -1   9 - 9 = 0   11 - 10 = 1
             * 5 - 5 = 0   11 - 9 = 2   8 - 10 = -2
             * 3 - 5 = -2   6 - 9 = -3   7 - 10 = -3
             * 4 - 5 = -1   8 - 9 = -1   12 - 10 = 2
             * 
             *    within-group sum of squares = 1^2 + 3^2 + (-1)^2 + 0^2 + (-2)^2 + (-1)^2 +
             *                           (-1)^2 + 3^2 + 0^2 + 2^2 + (-3)^2 + (-1)^2 +
             *                           3^2 + (-1)^2 + 1^2 + (-2)^2 + (-3)^2 + 2^2
             * 
             *                         = 1 + 9 + 1 + 0 + 4 + 1 + 1 + 9 + 0 + 4 + 9 + 1 + 9 + 1 + 1 + 4 + 9 + 4
             *                         = 68
             * 
             * Step 7: The within-group degrees of freedom is 
             *  
             *  within-group degrees of freedom = a(n-1)
             *                          = 3(6-1)
             *                          = 15
             * 
             * Step 8: Thus the within-group mean square value is,
             * 
             *  within-group mean square value = within-group sum of squares / within-group degrees of freedom
             *                          = 68 / 15
             *                          = 4.5
             * Step 9: The F-ratio is
             * 
             *  F-ratio = between-group mean square value /  within-group mean square value
             *        = 42/4.5
             *        = 9.3
             *  
             *  The critical value is the number that the test statistic must exceed to reject the test.
             *  In this case, Fcrit(2,15) = 3.68 at alpha = 0.05. Since F = 9.3 > 3.68, the results are
             *  significant at the 5% significance level. One would reject the null hypothesis, concluding
             *  that there is strong evidence that the expected values in the three groups differ. 
             *  The p-value for this test is 0.002.
             */

            /**
             * ANOVA Variables:
             * 
             * a         =   Number of distinct test groups (corresponds to number of input files).
             * 
             * n         =   Number of data items per test group (corresponds to data items in a test block).
             * 
             * overallMeans   =   An array which stores the means for each metric recorded in a test block.
             * 
             * sumSquaresBetweenGroup   =   the "between-group" sum of squares.
             * 
             * freedomBetweenGroup      =   The between-group degrees of freedom is one less than the number of groups.
             * 
             * meanSquareBetweenGroup   =   Stores the between-group mean square values.
             * 
             * sumSquaresWithinGroup   =   The within-group sum of squares is the sum of squares.
             * 
             * freedomWithinGroup      =   The within-group degrees of freedom is.
             * 
             * meanSquareWithinGroup   =   Stores the within-group mean square values.
             * 
             * F_Ratios               =   The F-ratio's.
             */

            int a = vector.size();// Number of groups.
            int n = vector.elementAt(0).getRows();// Number of data values per group.

            // Number of recorded metrics per test (number of variables).
            int metrics = vector.elementAt(0).getColumns();

            double[] overallMeans = new double[metrics];
            double[] sumSquaresBetweenGroup = new double[metrics];
            double[] meanSquareBetweenGroup = new double[metrics];
            double[] sumSquaresWithinGroup = new double[metrics];
            double[] meanSquareWithinGroup = new double[metrics];
            double[] F_Ratios = new double[metrics];

            //STEP 1. Calculate the overall means.
            for (int i = 0; i < vector.size(); i++)
                for (int j = 0; j < vector.elementAt(0).getColumns(); j++)
                    overallMeans[j] += vector.elementAt(i).getMean(j);

            //STEP 2. Divide the overall means by the number of groups.
            for (int j = 0; j < overallMeans.length; j++)
                overallMeans[j] = overallMeans[j] / (double) vector.size();

            //STEP 3.  Calculate the "between-group" sum of squares:
            for (int i = 0; i < vector.size(); i++)
                for (int j = 0; j < vector.elementAt(0).getColumns(); j++)
                    sumSquaresBetweenGroup[j] += (double) n
                            * (Math.pow((vector.elementAt(i).getMean(j) - overallMeans[j]), 2));

            //STEP 4: The between-group degrees of freedom
            double freedomBetweenGroup = a - 1;

            //STEP 5. between-group mean square value
            for (int i = 0; i < meanSquareBetweenGroup.length; i++)
                meanSquareBetweenGroup[i] = sumSquaresBetweenGroup[i] / freedomBetweenGroup;

            //STEP 6. Sum of centered squares (partly already calculated by ANOVA objects.
            for (int i = 0; i < vector.size(); i++)
                for (int j = 0; j < vector.elementAt(0).getColumns(); j++)
                    sumSquaresWithinGroup[j] += vector.elementAt(i).getSumCentredSquares(j);

            //STEP 7.
            double freedomWithinGroup = (double) a * (n - 1);

            //STEP 8. The within-group mean square value is...
            for (int i = 0; i < meanSquareWithinGroup.length; i++)
                meanSquareWithinGroup[i] = sumSquaresWithinGroup[i] / freedomWithinGroup;

            // STEP 9. The final F-ratios are...
            for (int i = 0; i < F_Ratios.length; i++)
                F_Ratios[i] = meanSquareBetweenGroup[i] / meanSquareWithinGroup[i];

            Writer.append(outputPath, key + ",");

            for (int i = 0; i < F_Ratios.length; i++) {
                // The p-value is the probability of obtaining a test statistic,
                // at least as extreme as the one that was actually observed, 
                // assuming that the null hypothesis is true.
                FDistribution fdist = new FDistribution(freedomBetweenGroup, freedomWithinGroup);

                double pValue = (1.0 - fdist.cumulativeProbability(F_Ratios[i]));

                // headers[i]+",F-ratio,P-value,Fb,Fw,H0 (1=rejected & 0=accepted), alpha="+alpha+","
                if (pValue < alpha)
                    Writer.append(outputPath, "," + F_Ratios[i] + "," + pValue + "," + freedomBetweenGroup + ","
                            + freedomWithinGroup + "," + "1,,");
                else
                    Writer.append(outputPath, "," + F_Ratios[i] + "," + pValue + "," + freedomBetweenGroup + ","
                            + freedomWithinGroup + "," + "0,,");
            }

            Writer.append(outputPath, "\n");

            /**
             * TUKEY TEST
             * 
             * Now we have established the ANOVA results, that is we know the significance of the variance
             * between the individual test results. But knowing that there is a significant difference is not
             * enough. We need to know which test results were better and which were worse in order to determine
             * which algorithm performed better. To do this we need to perform the Tukey test. It performs a pair
             * wise comparison of the results so that they can be ranked.
             * 
             * The Studentized range statistic can then be calculated for any particular pair as:
             * 
             *    Q = ( ML  MS ) / sqrt( meanSquareWithinGroup / values per sample)
             * 
             *  and ML is the largest mean for a group, and MS is the smallest mean for a group.
             */

            // PAIRWISE COMPARISON
            for (int i = 0; i < vector.size(); i++) {
                for (int j = i + 1; j < vector.size(); j++) {
                    // Here the comparison is performed. Remember we must do the Tukey test
                    // on each metric. So we will calculate the HSD (Honestly Significant Difference)
                    // multiple times.

                    // For each metric
                    for (int k = 0; k < vector.elementAt(i).getColumns(); k++) {
                        double mean_one = vector.elementAt(i).getMean(k);
                        double mean_two = vector.elementAt(j).getMean(k);
                        double meanSquaredWithinGroup = meanSquareWithinGroup[k];
                        double valuesPerSample = vector.elementAt(i).getRows();// All objects have same number of rows here.

                        double Q = 0;

                        // This is a string used to summarize the outcome of the test.
                        String outcome = vector.elementAt(i).getFileName() + " - "
                                + vector.elementAt(j).getFileName() + " +";

                        if (Double.compare(mean_one, mean_two) < 0) // mean_one < mean_two
                        {
                            Q = (mean_two - mean_one) / Math.sqrt(meanSquaredWithinGroup / valuesPerSample);
                            outcome = outcome.replace("-", " < ");
                        } else if (Double.compare(mean_one, mean_two) > 0) // mean_one > mean_two
                        {
                            Q = (mean_one - mean_two) / Math.sqrt(meanSquaredWithinGroup / valuesPerSample);
                            outcome = outcome.replace("-", " > ");
                        }

                        String H0Result = ""; // 1=rejected & 0=accepted
                        double QDist = getQDist(freedomWithinGroup, a, alpha);

                        if (Double.compare(Q, QDist) < 0) {
                            H0Result = "0";
                            outcome = outcome.replace("+", "H0 Accepted");
                        } else if (Double.compare(Q, QDist) > 0) {
                            H0Result = "1";
                            outcome = outcome.replace("+", "H0 Rejected");
                        } else {
                            H0Result = "-1";
                            outcome = outcome.replace("+", "H0 Accepted");
                        }

                        Writer.append(tukeyOutputPath,
                                vector.elementAt(i).getFileName() + "," + vector.elementAt(j).getFileName() + ","
                                        + key + "," + headers[k + 1] + "," + meanSquaredWithinGroup + ","
                                        + freedomWithinGroup + "," + valuesPerSample + "," + alpha + "," + Q + ","
                                        + H0Result + "," + outcome + "\n");
                    }

                    Writer.append(tukeyOutputPath, ",,,,\n");
                }

                Writer.append(tukeyOutputPath, ",,,,\n");
            }

            //System.out.println("\n\n");
        }
    }

    /**
     * Looks up the critical value of the Studentized range (Q) distribution for a
     * specified number of degrees of freedom, number of groups and alpha significance
     * level. Only alpha = 0.01 and alpha = 0.05 are supported, as only those two
     * tables are held in memory.
     *
     * @param degreesOfFreedom the degrees of freedom (used as a 1-based row index into the table).
     * @param groups the number of groups (used as a 1-based column index into the table).
     *               NOTE(review): column 0 of the tables appears to hold the k = 2
     *               critical values (e.g. row df=120, col 0 = 2.801, matching published
     *               q(0.05, 2, 120) tables), so "groups - 1" may be off by one relative
     *               to published Q tables - confirm against the caller's definition of "groups".
     * @param a the alpha significance level (0.01 or 0.05).
     * @return the critical Studentized Q distribution value, or 0 if the alpha level is
     *         unsupported or the requested indices fall outside the tabulated range.
     */
    private static double getQDist(double degreesOfFreedom, double groups, double a) {
        // Select the table matching the requested significance level.
        final double[][] table;

        if (Double.compare(a, 0.01) == 0)
            table = QDist_alpha_0_01;
        else if (Double.compare(a, 0.05) == 0)
            table = QDist_alpha_0_05;
        else
            return 0; // Unsupported alpha level - mirrors the original fall-through behaviour.

        int row = (int) degreesOfFreedom - 1;
        int col = (int) groups - 1;

        // Guard against out-of-range lookups (e.g. degrees of freedom beyond the
        // tabulated rows, or too many groups), which previously escaped as an
        // ArrayIndexOutOfBoundsException. Out-of-range now returns 0, consistent
        // with the unsupported-alpha case above.
        if (row < 0 || row >= table.length || col < 0 || col >= table[row].length)
            return 0;

        return table[row][col];
    }

    /**
     * DISTRIBUTION VALUES. 
     */

    /**
     * Critical values of the Studentized range (Q) distribution for alpha = 0.05.
     *
     * Rows are indexed by degrees of freedom (row i holds df = i + 1, for df = 1..120);
     * each row holds 19 critical values. NOTE(review): the columns appear to cover
     * k = 2..20 groups (column 0 matches published q(0.05, k=2, df) values) - confirm
     * against {@link #getQDist}, which indexes columns with "groups - 1".
     *
     * The table is read only (never reassigned), so it is declared final.
     */
    private static final double[][] QDist_alpha_0_05 = {
            { 18.066, 27.066, 32.925, 37.149, 40.481, 43.203, 45.501, 47.482, 49.22, 50.78, 52.161, 53.346, 54.469, 55.53, 56.486, 57.349, 58.172, 58.941, 59.663 },
            { 6.101, 8.344, 9.813, 10.891, 11.744, 12.444, 13.039, 13.552, 14.003, 14.407, 14.761, 15.086, 15.386, 15.662, 15.921, 16.157, 16.379, 16.588, 16.784 },
            { 4.508, 5.914, 6.828, 7.504, 8.039, 8.48, 8.855, 9.18, 9.465, 9.721, 9.948, 10.156, 10.347, 10.524, 10.689, 10.841, 10.985, 11.119, 11.245 },
            { 3.932, 5.044, 5.761, 6.29, 6.709, 7.055, 7.349, 7.604, 7.829, 8.031, 8.212, 8.376, 8.527, 8.666, 8.796, 8.918, 9.031, 9.137, 9.238 },
            { 3.639, 4.605, 5.221, 5.676, 6.035, 6.332, 6.585, 6.804, 6.997, 7.171, 7.325, 7.467, 7.598, 7.718, 7.83, 7.935, 8.033, 8.125, 8.211 },
            { 3.464, 4.342, 4.898, 5.307, 5.63, 5.897, 6.124, 6.321, 6.495, 6.651, 6.791, 6.918, 7.036, 7.145, 7.245, 7.34, 7.428, 7.511, 7.589 },
            { 3.347, 4.167, 4.683, 5.062, 5.361, 5.607, 5.817, 5.999, 6.16, 6.304, 6.433, 6.551, 6.66, 6.76, 6.853, 6.941, 7.022, 7.099, 7.171 },
            { 3.264, 4.043, 4.531, 4.888, 5.169, 5.4, 5.598, 5.769, 5.92, 6.055, 6.177, 6.288, 6.39, 6.484, 6.572, 6.654, 6.731, 6.803, 6.871 },
            { 3.202, 3.951, 4.416, 4.757, 5.025, 5.246, 5.433, 5.596, 5.74, 5.868, 5.985, 6.09, 6.187, 6.277, 6.36, 6.439, 6.512, 6.58, 6.645 },
            { 3.153, 3.879, 4.328, 4.656, 4.913, 5.126, 5.305, 5.462, 5.6, 5.723, 5.835, 5.936, 6.029, 6.115, 6.195, 6.27, 6.34, 6.406, 6.468 },
            { 3.115, 3.822, 4.258, 4.575, 4.824, 5.03, 5.203, 5.354, 5.487, 5.607, 5.714, 5.812, 5.902, 5.986, 6.063, 6.135, 6.203, 6.266, 6.327 },
            { 3.083, 3.775, 4.2, 4.509, 4.752, 4.951, 5.12, 5.266, 5.396, 5.511, 5.616, 5.711, 5.798, 5.879, 5.954, 6.024, 6.09, 6.152, 6.21 },
            { 3.057, 3.736, 4.152, 4.454, 4.691, 4.885, 5.05, 5.193, 5.319, 5.432, 5.534, 5.627, 5.711, 5.79, 5.863, 5.932, 5.996, 6.056, 6.113 },
            { 3.035, 3.703, 4.112, 4.408, 4.64, 4.83, 4.992, 5.131, 5.254, 5.365, 5.464, 5.555, 5.638, 5.715, 5.786, 5.853, 5.916, 5.974, 6.03 },
            { 3.016, 3.675, 4.077, 4.368, 4.596, 4.783, 4.941, 5.078, 5.199, 5.307, 5.404, 5.493, 5.575, 5.65, 5.72, 5.786, 5.847, 5.905, 5.959 },
            { 3, 3.651, 4.047, 4.334, 4.558, 4.742, 4.897, 5.032, 5.151, 5.257, 5.353, 5.44, 5.52, 5.594, 5.663, 5.727, 5.787, 5.844, 5.897 },
            { 2.985, 3.63, 4.021, 4.304, 4.525, 4.706, 4.859, 4.992, 5.109, 5.213, 5.307, 5.393, 5.472, 5.545, 5.612, 5.676, 5.735, 5.791, 5.843 },
            { 2.973, 3.611, 3.998, 4.277, 4.495, 4.674, 4.825, 4.956, 5.071, 5.174, 5.267, 5.352, 5.43, 5.502, 5.568, 5.63, 5.689, 5.744, 5.796 },
            { 2.962, 3.594, 3.978, 4.254, 4.47, 4.646, 4.795, 4.925, 5.038, 5.14, 5.232, 5.315, 5.392, 5.463, 5.529, 5.59, 5.648, 5.702, 5.753 },
            { 2.952, 3.579, 3.959, 4.233, 4.446, 4.621, 4.769, 4.896, 5.009, 5.109, 5.2, 5.282, 5.358, 5.428, 5.493, 5.554, 5.611, 5.664, 5.715 },
            { 2.943, 3.566, 3.943, 4.214, 4.425, 4.599, 4.745, 4.871, 4.982, 5.081, 5.171, 5.253, 5.328, 5.397, 5.461, 5.521, 5.577, 5.63, 5.68 },
            { 2.935, 3.554, 3.928, 4.197, 4.407, 4.578, 4.723, 4.848, 4.958, 5.056, 5.145, 5.226, 5.3, 5.369, 5.432, 5.492, 5.548, 5.6, 5.649 },
            { 2.927, 3.543, 3.915, 4.182, 4.389, 4.559, 4.703, 4.827, 4.936, 5.033, 5.121, 5.201, 5.275, 5.343, 5.406, 5.465, 5.52, 5.572, 5.62 },
            { 2.92, 3.533, 3.902, 4.167, 4.374, 4.542, 4.685, 4.808, 4.916, 5.013, 5.1, 5.179, 5.252, 5.319, 5.382, 5.44, 5.495, 5.546, 5.594 },
            { 2.914, 3.524, 3.891, 4.154, 4.359, 4.527, 4.668, 4.791, 4.898, 4.993, 5.08, 5.159, 5.231, 5.298, 5.36, 5.418, 5.472, 5.523, 5.571 },
            { 2.909, 3.515, 3.881, 4.143, 4.346, 4.513, 4.653, 4.774, 4.881, 4.976, 5.062, 5.14, 5.212, 5.278, 5.339, 5.397, 5.451, 5.501, 5.549 },
            { 2.903, 3.508, 3.872, 4.132, 4.334, 4.499, 4.639, 4.76, 4.865, 4.96, 5.045, 5.123, 5.194, 5.26, 5.321, 5.378, 5.431, 5.481, 5.529 },
            { 2.898, 3.5, 3.863, 4.121, 4.323, 4.487, 4.626, 4.746, 4.851, 4.945, 5.029, 5.106, 5.177, 5.243, 5.303, 5.36, 5.413, 5.463, 5.51 },
            { 2.894, 3.494, 3.854, 4.112, 4.312, 4.476, 4.614, 4.733, 4.837, 4.931, 5.015, 5.092, 5.162, 5.227, 5.287, 5.343, 5.396, 5.446, 5.492 },
            { 2.89, 3.488, 3.847, 4.103, 4.302, 4.465, 4.602, 4.721, 4.825, 4.918, 5.001, 5.078, 5.147, 5.212, 5.272, 5.328, 5.38, 5.429, 5.476 },
            { 2.886, 3.482, 3.84, 4.095, 4.293, 4.455, 4.592, 4.71, 4.814, 4.906, 4.989, 5.065, 5.134, 5.198, 5.258, 5.314, 5.366, 5.415, 5.461 },
            { 2.882, 3.476, 3.833, 4.087, 4.285, 4.446, 4.582, 4.699, 4.803, 4.894, 4.977, 5.052, 5.122, 5.185, 5.245, 5.3, 5.352, 5.4, 5.446 },
            { 2.878, 3.471, 3.827, 4.08, 4.277, 4.437, 4.573, 4.69, 4.792, 4.884, 4.966, 5.041, 5.11, 5.173, 5.232, 5.287, 5.339, 5.387, 5.433 },
            { 2.875, 3.466, 3.821, 4.073, 4.269, 4.429, 4.564, 4.68, 4.783, 4.874, 4.956, 5.03, 5.099, 5.162, 5.221, 5.276, 5.327, 5.375, 5.42 },
            { 2.872, 3.462, 3.815, 4.067, 4.262, 4.422, 4.556, 4.672, 4.774, 4.864, 4.946, 5.02, 5.088, 5.151, 5.21, 5.264, 5.315, 5.363, 5.409 },
            { 2.869, 3.458, 3.81, 4.061, 4.256, 4.414, 4.548, 4.664, 4.765, 4.856, 4.937, 5.011, 5.079, 5.141, 5.199, 5.254, 5.305, 5.352, 5.397 },
            { 2.867, 3.454, 3.805, 4.055, 4.249, 4.408, 4.541, 4.656, 4.757, 4.847, 4.928, 5.002, 5.069, 5.132, 5.19, 5.244, 5.294, 5.342, 5.387 },
            { 2.864, 3.45, 3.801, 4.05, 4.243, 4.401, 4.534, 4.649, 4.749, 4.839, 4.92, 4.993, 5.06, 5.123, 5.18, 5.234, 5.285, 5.332, 5.377 },
            { 2.862, 3.446, 3.796, 4.045, 4.238, 4.395, 4.527, 4.642, 4.742, 4.832, 4.912, 4.985, 5.052, 5.114, 5.172, 5.225, 5.276, 5.323, 5.367 },
            { 2.859, 3.443, 3.792, 4.04, 4.232, 4.389, 4.521, 4.635, 4.735, 4.825, 4.905, 4.977, 5.044, 5.106, 5.163, 5.217, 5.267, 5.314, 5.358 },
            { 2.857, 3.44, 3.788, 4.036, 4.227, 4.384, 4.515, 4.629, 4.729, 4.818, 4.898, 4.97, 5.037, 5.098, 5.155, 5.209, 5.259, 5.306, 5.35 },
            { 2.855, 3.437, 3.784, 4.031, 4.223, 4.378, 4.51, 4.623, 4.723, 4.811, 4.891, 4.963, 5.03, 5.091, 5.148, 5.201, 5.251, 5.298, 5.342 },
            { 2.853, 3.434, 3.781, 4.027, 4.218, 4.373, 4.504, 4.618, 4.717, 4.805, 4.885, 4.957, 5.023, 5.084, 5.141, 5.194, 5.243, 5.29, 5.334 },
            { 2.851, 3.431, 3.777, 4.023, 4.214, 4.369, 4.499, 4.612, 4.711, 4.799, 4.879, 4.95, 5.016, 5.077, 5.134, 5.187, 5.236, 5.283, 5.326 },
            { 2.849, 3.429, 3.774, 4.019, 4.21, 4.364, 4.495, 4.607, 4.706, 4.794, 4.873, 4.944, 5.01, 5.071, 5.128, 5.18, 5.229, 5.276, 5.319 },
            { 2.848, 3.426, 3.771, 4.016, 4.205, 4.36, 4.49, 4.602, 4.7, 4.788, 4.867, 4.939, 5.004, 5.065, 5.121, 5.174, 5.223, 5.269, 5.313 },
            { 2.846, 3.424, 3.768, 4.012, 4.202, 4.356, 4.486, 4.597, 4.696, 4.783, 4.862, 4.933, 4.999, 5.059, 5.115, 5.168, 5.217, 5.263, 5.306 },
            { 2.844, 3.421, 3.765, 4.009, 4.198, 4.352, 4.481, 4.593, 4.691, 4.778, 4.857, 4.928, 4.993, 5.054, 5.11, 5.162, 5.211, 5.257, 5.3 },
            { 2.843, 3.419, 3.762, 4.006, 4.194, 4.348, 4.477, 4.589, 4.686, 4.774, 4.852, 4.923, 4.988, 5.048, 5.104, 5.156, 5.205, 5.251, 5.294 },
            { 2.842, 3.417, 3.759, 4.003, 4.191, 4.344, 4.473, 4.585, 4.682, 4.769, 4.847, 4.918, 4.983, 5.043, 5.099, 5.151, 5.2, 5.245, 5.288 },
            { 2.84, 3.415, 3.757, 4, 4.188, 4.341, 4.47, 4.581, 4.678, 4.765, 4.843, 4.914, 4.978, 5.038, 5.094, 5.146, 5.194, 5.24, 5.283 },
            { 2.839, 3.413, 3.754, 3.997, 4.185, 4.337, 4.466, 4.577, 4.674, 4.761, 4.839, 4.909, 4.974, 5.034, 5.089, 5.141, 5.189, 5.235, 5.278 },
            { 2.838, 3.411, 3.752, 3.995, 4.182, 4.334, 4.463, 4.573, 4.67, 4.757, 4.834, 4.905, 4.97, 5.029, 5.084, 5.136, 5.184, 5.23, 5.273 },
            { 2.836, 3.409, 3.75, 3.992, 4.179, 4.331, 4.459, 4.57, 4.666, 4.753, 4.83, 4.901, 4.965, 5.025, 5.08, 5.132, 5.18, 5.225, 5.268 },
            { 2.835, 3.407, 3.748, 3.99, 4.176, 4.328, 4.456, 4.566, 4.663, 4.749, 4.827, 4.897, 4.961, 5.021, 5.076, 5.127, 5.175, 5.22, 5.263 },
            { 2.834, 3.406, 3.746, 3.987, 4.174, 4.325, 4.453, 4.563, 4.659, 4.745, 4.823, 4.893, 4.958, 5.016, 5.071, 5.123, 5.171, 5.216, 5.258 },
            { 2.833, 3.404, 3.744, 3.985, 4.171, 4.323, 4.45, 4.56, 4.656, 4.742, 4.819, 4.889, 4.954, 5.013, 5.067, 5.119, 5.167, 5.212, 5.254 },
            { 2.832, 3.403, 3.742, 3.983, 4.168, 4.32, 4.447, 4.557, 4.653, 4.739, 4.816, 4.886, 4.95, 5.009, 5.063, 5.115, 5.163, 5.207, 5.25 },
            { 2.831, 3.401, 3.74, 3.981, 4.166, 4.317, 4.444, 4.554, 4.65, 4.735, 4.812, 4.882, 4.946, 5.005, 5.06, 5.111, 5.159, 5.203, 5.246 },
            { 2.83, 3.399, 3.738, 3.979, 4.164, 4.315, 4.442, 4.551, 4.647, 4.732, 4.809, 4.879, 4.943, 5.002, 5.056, 5.107, 5.155, 5.2, 5.242 },
            { 2.829, 3.398, 3.736, 3.976, 4.162, 4.312, 4.439, 4.548, 4.644, 4.729, 4.806, 4.876, 4.94, 4.998, 5.053, 5.104, 5.151, 5.196, 5.238 },
            { 2.828, 3.397, 3.735, 3.975, 4.16, 4.31, 4.437, 4.546, 4.641, 4.726, 4.803, 4.873, 4.937, 4.995, 5.049, 5.1, 5.148, 5.192, 5.234 },
            { 2.827, 3.395, 3.733, 3.973, 4.158, 4.308, 4.434, 4.543, 4.639, 4.724, 4.8, 4.87, 4.933, 4.992, 5.046, 5.097, 5.144, 5.189, 5.231 },
            { 2.826, 3.394, 3.731, 3.971, 4.156, 4.306, 4.432, 4.541, 4.636, 4.721, 4.797, 4.867, 4.93, 4.989, 5.043, 5.094, 5.141, 5.185, 5.227 },
            { 2.825, 3.393, 3.73, 3.969, 4.154, 4.303, 4.43, 4.538, 4.634, 4.718, 4.795, 4.864, 4.928, 4.986, 5.04, 5.09, 5.138, 5.182, 5.224 },
            { 2.825, 3.392, 3.728, 3.967, 4.152, 4.301, 4.427, 4.536, 4.631, 4.716, 4.792, 4.861, 4.925, 4.983, 5.037, 5.087, 5.135, 5.179, 5.221 },
            { 2.824, 3.39, 3.727, 3.966, 4.15, 4.299, 4.425, 4.534, 4.629, 4.713, 4.789, 4.859, 4.922, 4.98, 5.034, 5.084, 5.132, 5.176, 5.218 },
            { 2.823, 3.389, 3.726, 3.964, 4.148, 4.297, 4.423, 4.531, 4.626, 4.711, 4.787, 4.856, 4.919, 4.978, 5.031, 5.082, 5.129, 5.173, 5.215 },
            { 2.822, 3.388, 3.724, 3.962, 4.146, 4.296, 4.421, 4.529, 4.624, 4.709, 4.784, 4.853, 4.917, 4.975, 5.029, 5.079, 5.126, 5.17, 5.212 },
            { 2.822, 3.387, 3.723, 3.961, 4.145, 4.294, 4.419, 4.527, 4.622, 4.706, 4.782, 4.851, 4.914, 4.972, 5.026, 5.076, 5.123, 5.167, 5.209 },
            { 2.821, 3.386, 3.722, 3.959, 4.143, 4.292, 4.417, 4.525, 4.62, 4.704, 4.78, 4.849, 4.912, 4.97, 5.024, 5.074, 5.12, 5.164, 5.206 },
            { 2.82, 3.385, 3.72, 3.958, 4.141, 4.29, 4.415, 4.523, 4.618, 4.702, 4.778, 4.846, 4.909, 4.967, 5.021, 5.071, 5.118, 5.162, 5.203 },
            { 2.82, 3.384, 3.719, 3.957, 4.14, 4.289, 4.414, 4.521, 4.616, 4.7, 4.776, 4.844, 4.907, 4.965, 5.019, 5.069, 5.115, 5.159, 5.201 },
            { 2.819, 3.383, 3.718, 3.955, 4.138, 4.287, 4.412, 4.519, 4.614, 4.698, 4.773, 4.842, 4.905, 4.963, 5.016, 5.066, 5.113, 5.157, 5.198 },
            { 2.818, 3.382, 3.717, 3.954, 4.137, 4.285, 4.41, 4.518, 4.612, 4.696, 4.771, 4.84, 4.903, 4.961, 5.014, 5.064, 5.11, 5.154, 5.196 },
            { 2.818, 3.381, 3.716, 3.953, 4.135, 4.284, 4.409, 4.516, 4.61, 4.694, 4.769, 4.838, 4.901, 4.958, 5.012, 5.061, 5.108, 5.152, 5.193 },
            { 2.817, 3.38, 3.715, 3.952, 4.134, 4.282, 4.407, 4.514, 4.608, 4.692, 4.767, 4.836, 4.899, 4.956, 5.01, 5.059, 5.106, 5.15, 5.191 },
            { 2.817, 3.38, 3.714, 3.95, 4.133, 4.281, 4.405, 4.513, 4.607, 4.69, 4.766, 4.834, 4.897, 4.954, 5.008, 5.057, 5.104, 5.147, 5.188 },
            { 2.816, 3.379, 3.713, 3.949, 4.131, 4.279, 4.404, 4.511, 4.605, 4.689, 4.764, 4.832, 4.895, 4.952, 5.006, 5.055, 5.101, 5.145, 5.186 },
            { 2.815, 3.378, 3.712, 3.948, 4.13, 4.278, 4.402, 4.509, 4.603, 4.687, 4.762, 4.83, 4.893, 4.95, 5.003, 5.053, 5.099, 5.143, 5.184 },
            { 2.815, 3.377, 3.711, 3.947, 4.129, 4.277, 4.401, 4.508, 4.602, 4.685, 4.76, 4.828, 4.891, 4.948, 5.002, 5.051, 5.097, 5.141, 5.182 },
            { 2.814, 3.376, 3.71, 3.946, 4.128, 4.275, 4.399, 4.506, 4.6, 4.684, 4.758, 4.827, 4.889, 4.946, 5, 5.049, 5.095, 5.139, 5.18 },
            { 2.814, 3.376, 3.709, 3.945, 4.127, 4.274, 4.398, 4.505, 4.599, 4.682, 4.757, 4.825, 4.887, 4.945, 4.998, 5.047, 5.093, 5.137, 5.178 },
            { 2.813, 3.375, 3.708, 3.944, 4.125, 4.273, 4.397, 4.503, 4.597, 4.68, 4.755, 4.823, 4.885, 4.943, 4.996, 5.046, 5.091, 5.135, 5.176 },
            { 2.813, 3.374, 3.707, 3.943, 4.124, 4.272, 4.395, 4.502, 4.596, 4.679, 4.754, 4.822, 4.884, 4.941, 4.994, 5.044, 5.09, 5.133, 5.174 },
            { 2.812, 3.374, 3.706, 3.942, 4.123, 4.27, 4.394, 4.501, 4.594, 4.677, 4.752, 4.82, 4.882, 4.939, 4.992, 5.042, 5.088, 5.131, 5.172 },
            { 2.812, 3.373, 3.705, 3.941, 4.122, 4.269, 4.393, 4.499, 4.593, 4.676, 4.751, 4.818, 4.88, 4.938, 4.991, 5.04, 5.086, 5.129, 5.17 },
            { 2.811, 3.372, 3.704, 3.94, 4.121, 4.268, 4.392, 4.498, 4.592, 4.675, 4.749, 4.817, 4.879, 4.936, 4.989, 5.039, 5.084, 5.127, 5.168 },
            { 2.811, 3.372, 3.704, 3.939, 4.12, 4.267, 4.39, 4.497, 4.59, 4.673, 4.748, 4.815, 4.877, 4.934, 4.987, 5.037, 5.083, 5.126, 5.167 },
            { 2.811, 3.371, 3.703, 3.938, 4.119, 4.266, 4.389, 4.496, 4.589, 4.672, 4.746, 4.814, 4.876, 4.933, 4.986, 5.035, 5.081, 5.124, 5.165 },
            { 2.81, 3.37, 3.702, 3.937, 4.118, 4.265, 4.388, 4.494, 4.588, 4.67, 4.745, 4.813, 4.874, 4.931, 4.984, 5.034, 5.08, 5.122, 5.163 },
            { 2.81, 3.37, 3.701, 3.936, 4.117, 4.264, 4.387, 4.493, 4.586, 4.669, 4.744, 4.811, 4.873, 4.93, 4.983, 5.032, 5.078, 5.121, 5.161 },
            { 2.809, 3.369, 3.701, 3.935, 4.116, 4.263, 4.386, 4.492, 4.585, 4.668, 4.742, 4.81, 4.872, 4.928, 4.981, 5.031, 5.077, 5.119, 5.16 },
            { 2.809, 3.368, 3.7, 3.934, 4.115, 4.262, 4.385, 4.491, 4.584, 4.667, 4.741, 4.808, 4.87, 4.927, 4.98, 5.029, 5.075, 5.118, 5.158 },
            { 2.809, 3.368, 3.699, 3.934, 4.114, 4.261, 4.384, 4.49, 4.583, 4.665, 4.74, 4.807, 4.869, 4.926, 4.978, 5.028, 5.074, 5.116, 5.157 },
            { 2.808, 3.367, 3.698, 3.933, 4.114, 4.26, 4.383, 4.489, 4.582, 4.664, 4.738, 4.806, 4.867, 4.924, 4.977, 5.026, 5.072, 5.115, 5.155 },
            { 2.808, 3.367, 3.698, 3.932, 4.113, 4.259, 4.382, 4.488, 4.581, 4.663, 4.737, 4.805, 4.866, 4.923, 4.976, 5.025, 5.071, 5.113, 5.154 },
            { 2.807, 3.366, 3.697, 3.931, 4.112, 4.258, 4.381, 4.487, 4.579, 4.662, 4.736, 4.803, 4.865, 4.922, 4.974, 5.023, 5.069, 5.112, 5.152 },
            { 2.807, 3.366, 3.696, 3.931, 4.111, 4.257, 4.38, 4.486, 4.578, 4.661, 4.735, 4.802, 4.864, 4.92, 4.973, 5.022, 5.068, 5.111, 5.151 },
            { 2.807, 3.365, 3.696, 3.93, 4.11, 4.256, 4.379, 4.485, 4.577, 4.66, 4.734, 4.801, 4.862, 4.919, 4.972, 5.021, 5.067, 5.109, 5.149 },
            { 2.806, 3.365, 3.695, 3.929, 4.11, 4.255, 4.378, 4.484, 4.576, 4.659, 4.733, 4.8, 4.861, 4.918, 4.97, 5.02, 5.065, 5.108, 5.148 },
            { 2.806, 3.364, 3.695, 3.928, 4.109, 4.255, 4.377, 4.483, 4.575, 4.658, 4.732, 4.799, 4.86, 4.917, 4.969, 5.018, 5.064, 5.107, 5.147 },
            { 2.806, 3.364, 3.694, 3.928, 4.108, 4.254, 4.376, 4.482, 4.574, 4.657, 4.73, 4.798, 4.859, 4.916, 4.968, 5.017, 5.063, 5.106, 5.145 },
            { 2.805, 3.363, 3.693, 3.927, 4.107, 4.253, 4.375, 4.481, 4.573, 4.656, 4.729, 4.796, 4.858, 4.914, 4.967, 5.016, 5.061, 5.104, 5.144 },
            { 2.805, 3.363, 3.693, 3.926, 4.106, 4.252, 4.374, 4.48, 4.572, 4.655, 4.728, 4.795, 4.857, 4.913, 4.966, 5.015, 5.06, 5.103, 5.143 },
            { 2.805, 3.362, 3.692, 3.926, 4.106, 4.251, 4.374, 4.479, 4.571, 4.654, 4.727, 4.794, 4.856, 4.912, 4.964, 5.013, 5.059, 5.102, 5.142 },
            { 2.804, 3.362, 3.692, 3.925, 4.105, 4.251, 4.373, 4.478, 4.571, 4.653, 4.726, 4.793, 4.855, 4.911, 4.963, 5.012, 5.058, 5.101, 5.141 },
            { 2.804, 3.362, 3.691, 3.925, 4.104, 4.25, 4.372, 4.477, 4.57, 4.652, 4.725, 4.792, 4.854, 4.91, 4.962, 5.011, 5.057, 5.099, 5.14 },
            { 2.804, 3.361, 3.691, 3.924, 4.104, 4.249, 4.371, 4.476, 4.569, 4.651, 4.724, 4.791, 4.853, 4.909, 4.961, 5.01, 5.055, 5.098, 5.138 },
            { 2.804, 3.361, 3.69, 3.923, 4.103, 4.248, 4.37, 4.476, 4.568, 4.65, 4.724, 4.79, 4.852, 4.908, 4.96, 5.009, 5.054, 5.097, 5.137 },
            { 2.803, 3.36, 3.69, 3.923, 4.102, 4.248, 4.37, 4.475, 4.567, 4.649, 4.723, 4.789, 4.851, 4.907, 4.959, 5.008, 5.053, 5.096, 5.136 },
            { 2.803, 3.36, 3.689, 3.922, 4.102, 4.247, 4.369, 4.474, 4.566, 4.648, 4.722, 4.788, 4.85, 4.906, 4.958, 5.007, 5.052, 5.095, 5.135 },
            { 2.803, 3.359, 3.689, 3.922, 4.101, 4.246, 4.368, 4.473, 4.565, 4.647, 4.721, 4.788, 4.849, 4.905, 4.957, 5.006, 5.051, 5.094, 5.134 },
            { 2.802, 3.359, 3.688, 3.921, 4.1, 4.246, 4.367, 4.472, 4.565, 4.646, 4.72, 4.787, 4.848, 4.904, 4.956, 5.005, 5.05, 5.093, 5.133 },
            { 2.802, 3.359, 3.688, 3.92, 4.1, 4.245, 4.367, 4.472, 4.564, 4.646, 4.719, 4.786, 4.847, 4.903, 4.955, 5.004, 5.049, 5.092, 5.132 },
            { 2.802, 3.358, 3.687, 3.92, 4.099, 4.244, 4.366, 4.471, 4.563, 4.645, 4.718, 4.785, 4.846, 4.902, 4.954, 5.003, 5.048, 5.091, 5.131 },
            { 2.802, 3.358, 3.687, 3.919, 4.099, 4.244, 4.365, 4.47, 4.562, 4.644, 4.717, 4.784, 4.845, 4.901, 4.953, 5.002, 5.047, 5.09, 5.13 },
            { 2.801, 3.358, 3.686, 3.919, 4.098, 4.243, 4.365, 4.47, 4.562, 4.643, 4.717, 4.783, 4.844, 4.9, 4.952, 5.001, 5.046, 5.089, 5.129 },
            { 2.801, 3.357, 3.686, 3.918, 4.097, 4.242, 4.364, 4.469, 4.561, 4.642, 4.716, 4.782, 4.843, 4.899, 4.951, 5, 5.045, 5.088, 5.128 },
            { 2.801, 3.357, 3.685, 3.918, 4.097, 4.242, 4.363, 4.468, 4.56, 4.642, 4.715, 4.782, 4.842, 4.899, 4.95, 4.999, 5.044, 5.087, 5.127 } };

    /**
     * Studentized Q Distribution for alpha = 0.01
     */
    private static double[][] QDist_alpha_0_01 = {
            { 93.157, 138.306, 168.728, 189.173, 206.203, 219.531, 231.719, 241.881, 250.842, 258.985, 266.339,
                    271.083, 277.48, 283.748, 289.019, 293.348, 298.008, 302.417, 306.636 },
            { 14.25, 19.206, 22.522, 24.897, 26.813, 28.382, 29.75, 30.923, 31.929, 32.874, 33.644, 34.373, 35.059,
                    35.693, 36.321, 36.804, 37.316, 37.798, 38.253 },
            { 8.314, 10.664, 12.225, 13.362, 14.284, 15.032, 15.691, 16.254, 16.752, 17.197, 17.569, 17.926, 18.26,
                    18.569, 18.868, 19.124, 19.374, 19.61, 19.832 },
            { 6.541, 8.152, 9.211, 9.988, 10.613, 11.127, 11.573, 11.96, 12.301, 12.613, 12.871, 13.117, 13.35,
                    13.563, 13.766, 13.952, 14.125, 14.288, 14.442 },
            { 5.727, 7.002, 7.828, 8.442, 8.933, 9.339, 9.691, 9.997, 10.265, 10.511, 10.718, 10.916, 11.098,
                    11.267, 11.425, 11.577, 11.711, 11.841, 11.964 },
            { 5.268, 6.351, 7.05, 7.572, 7.988, 8.337, 8.63, 8.887, 9.115, 9.325, 9.5, 9.668, 9.824, 9.967, 10.101,
                    10.23, 10.346, 10.456, 10.561 },
            { 4.967, 5.934, 6.557, 7.018, 7.386, 7.692, 7.953, 8.18, 8.383, 8.567, 8.723, 8.872, 9.009, 9.137,
                    9.255, 9.369, 9.473, 9.571, 9.664 },
            { 4.761, 5.648, 6.219, 6.637, 6.97, 7.248, 7.485, 7.693, 7.876, 8.043, 8.185, 8.321, 8.446, 8.562,
                    8.669, 8.773, 8.868, 8.957, 9.042 },
            { 4.609, 5.439, 5.969, 6.358, 6.666, 6.924, 7.145, 7.336, 7.506, 7.66, 7.793, 7.918, 8.033, 8.14, 8.24,
                    8.337, 8.425, 8.508, 8.586 },
            { 4.495, 5.282, 5.78, 6.145, 6.435, 6.677, 6.884, 7.064, 7.223, 7.368, 7.493, 7.61, 7.719, 7.82, 7.914,
                    8.004, 8.086, 8.164, 8.237 },
            { 4.405, 5.157, 5.631, 5.979, 6.254, 6.484, 6.679, 6.85, 7.001, 7.138, 7.257, 7.369, 7.471, 7.567,
                    7.656, 7.741, 7.82, 7.893, 7.963 },
            { 4.333, 5.056, 5.511, 5.844, 6.108, 6.328, 6.515, 6.677, 6.822, 6.953, 7.066, 7.173, 7.271, 7.363,
                    7.448, 7.529, 7.604, 7.674, 7.74 },
            { 4.272, 4.973, 5.412, 5.733, 5.987, 6.199, 6.379, 6.535, 6.674, 6.8, 6.909, 7.012, 7.107, 7.194, 7.276,
                    7.354, 7.425, 7.493, 7.557 },
            { 4.221, 4.903, 5.33, 5.642, 5.886, 6.092, 6.265, 6.416, 6.551, 6.672, 6.778, 6.877, 6.968, 7.052,
                    7.131, 7.207, 7.276, 7.341, 7.403 },
            { 4.178, 4.844, 5.259, 5.563, 5.802, 6, 6.168, 6.316, 6.446, 6.563, 6.666, 6.762, 6.85, 6.933, 7.008,
                    7.081, 7.149, 7.212, 7.272 },
            { 4.141, 4.793, 5.199, 5.496, 5.728, 5.922, 6.085, 6.229, 6.355, 6.469, 6.571, 6.663, 6.749, 6.829,
                    6.903, 6.974, 7.039, 7.1, 7.159 },
            { 4.109, 4.749, 5.147, 5.437, 5.664, 5.853, 6.013, 6.153, 6.276, 6.388, 6.487, 6.577, 6.661, 6.739,
                    6.811, 6.88, 6.944, 7.004, 7.06 },
            { 4.081, 4.711, 5.101, 5.386, 5.609, 5.794, 5.95, 6.087, 6.207, 6.316, 6.413, 6.501, 6.583, 6.659, 6.73,
                    6.798, 6.86, 6.918, 6.974 },
            { 4.056, 4.677, 5.06, 5.341, 5.559, 5.741, 5.894, 6.028, 6.146, 6.253, 6.348, 6.434, 6.515, 6.589,
                    6.659, 6.725, 6.786, 6.843, 6.898 },
            { 4.034, 4.646, 5.024, 5.3, 5.515, 5.693, 5.844, 5.976, 6.092, 6.197, 6.29, 6.375, 6.454, 6.527, 6.595,
                    6.661, 6.72, 6.776, 6.83 },
            { 4.014, 4.619, 4.992, 5.264, 5.476, 5.651, 5.8, 5.929, 6.043, 6.146, 6.239, 6.323, 6.399, 6.471, 6.538,
                    6.602, 6.661, 6.716, 6.768 },
            { 3.995, 4.594, 4.963, 5.231, 5.44, 5.613, 5.759, 5.887, 5.999, 6.101, 6.192, 6.275, 6.35, 6.421, 6.487,
                    6.55, 6.608, 6.662, 6.714 },
            { 3.979, 4.572, 4.936, 5.202, 5.408, 5.579, 5.723, 5.848, 5.959, 6.06, 6.15, 6.231, 6.305, 6.375, 6.44,
                    6.502, 6.559, 6.613, 6.664 },
            { 3.964, 4.552, 4.912, 5.175, 5.379, 5.547, 5.69, 5.814, 5.923, 6.022, 6.111, 6.191, 6.265, 6.334,
                    6.398, 6.459, 6.515, 6.568, 6.618 },
            { 3.951, 4.533, 4.89, 5.151, 5.352, 5.519, 5.66, 5.782, 5.89, 5.988, 6.076, 6.155, 6.228, 6.296, 6.359,
                    6.419, 6.475, 6.527, 6.577 },
            { 3.938, 4.516, 4.87, 5.128, 5.327, 5.493, 5.632, 5.753, 5.86, 5.957, 6.043, 6.122, 6.193, 6.261, 6.323,
                    6.383, 6.438, 6.49, 6.539 },
            { 3.927, 4.5, 4.852, 5.108, 5.305, 5.469, 5.606, 5.726, 5.832, 5.928, 6.014, 6.091, 6.163, 6.229, 6.291,
                    6.35, 6.404, 6.455, 6.503 },
            { 3.916, 4.486, 4.835, 5.089, 5.284, 5.446, 5.583, 5.701, 5.806, 5.901, 5.986, 6.063, 6.134, 6.199,
                    6.26, 6.319, 6.373, 6.423, 6.471 },
            { 3.906, 4.473, 4.819, 5.071, 5.266, 5.425, 5.561, 5.678, 5.782, 5.876, 5.961, 6.036, 6.107, 6.172,
                    6.232, 6.29, 6.343, 6.393, 6.441 },
            { 3.897, 4.46, 4.804, 5.054, 5.248, 5.406, 5.54, 5.657, 5.76, 5.853, 5.937, 6.012, 6.082, 6.146, 6.206,
                    6.263, 6.316, 6.366, 6.413 },
            { 3.889, 4.449, 4.791, 5.039, 5.231, 5.388, 5.522, 5.637, 5.739, 5.832, 5.915, 5.99, 6.059, 6.122,
                    6.182, 6.239, 6.291, 6.34, 6.387 },
            { 3.881, 4.438, 4.778, 5.024, 5.215, 5.372, 5.504, 5.619, 5.72, 5.812, 5.894, 5.969, 6.037, 6.1, 6.159,
                    6.215, 6.267, 6.316, 6.362 },
            { 3.872, 4.428, 4.766, 5.011, 5.2, 5.356, 5.487, 5.601, 5.702, 5.793, 5.875, 5.95, 6.017, 6.079, 6.138,
                    6.194, 6.245, 6.294, 6.339 },
            { 3.865, 4.419, 4.755, 4.998, 5.187, 5.341, 5.472, 5.585, 5.685, 5.776, 5.857, 5.931, 5.998, 6.061,
                    6.118, 6.173, 6.224, 6.273, 6.318 },
            { 3.859, 4.41, 4.744, 4.987, 5.174, 5.327, 5.457, 5.57, 5.67, 5.759, 5.84, 5.913, 5.98, 6.042, 6.099,
                    6.154, 6.205, 6.253, 6.298 },
            { 3.853, 4.401, 4.734, 4.975, 5.162, 5.314, 5.444, 5.556, 5.655, 5.744, 5.824, 5.897, 5.963, 6.025,
                    6.082, 6.136, 6.187, 6.234, 6.279 },
            { 3.847, 4.393, 4.725, 4.965, 5.15, 5.302, 5.431, 5.542, 5.641, 5.729, 5.809, 5.881, 5.947, 6.009,
                    6.065, 6.119, 6.169, 6.217, 6.261 },
            { 3.841, 4.386, 4.716, 4.955, 5.14, 5.291, 5.419, 5.529, 5.627, 5.715, 5.795, 5.867, 5.932, 5.993,
                    6.049, 6.103, 6.153, 6.2, 6.244 },
            { 3.836, 4.379, 4.708, 4.946, 5.13, 5.28, 5.407, 5.517, 5.615, 5.702, 5.781, 5.853, 5.918, 5.979, 6.034,
                    6.088, 6.138, 6.184, 6.228 },
            { 3.831, 4.372, 4.7, 4.937, 5.12, 5.269, 5.396, 5.506, 5.603, 5.69, 5.768, 5.84, 5.905, 5.965, 6.02,
                    6.074, 6.123, 6.17, 6.213 },
            { 3.827, 4.366, 4.693, 4.929, 5.112, 5.26, 5.386, 5.495, 5.591, 5.678, 5.756, 5.827, 5.892, 5.952,
                    6.008, 6.06, 6.109, 6.155, 6.199 },
            { 3.822, 4.36, 4.686, 4.92, 5.103, 5.25, 5.376, 5.484, 5.581, 5.667, 5.744, 5.815, 5.88, 5.939, 5.995,
                    6.047, 6.096, 6.142, 6.185 },
            { 3.818, 4.355, 4.679, 4.913, 5.095, 5.241, 5.367, 5.475, 5.57, 5.656, 5.733, 5.804, 5.868, 5.927,
                    5.983, 6.035, 6.083, 6.129, 6.172 },
            { 3.814, 4.349, 4.673, 4.906, 5.086, 5.233, 5.358, 5.465, 5.56, 5.646, 5.723, 5.793, 5.857, 5.916,
                    5.971, 6.023, 6.071, 6.117, 6.16 },
            { 3.81, 4.344, 4.666, 4.899, 5.079, 5.225, 5.349, 5.456, 5.551, 5.636, 5.713, 5.783, 5.847, 5.905, 5.96,
                    6.012, 6.06, 6.105, 6.148 },
            { 3.807, 4.339, 4.66, 4.892, 5.072, 5.217, 5.341, 5.448, 5.542, 5.627, 5.703, 5.773, 5.837, 5.895, 5.95,
                    6.001, 6.049, 6.094, 6.137 },
            { 3.803, 4.334, 4.655, 4.886, 5.065, 5.21, 5.333, 5.439, 5.534, 5.618, 5.694, 5.764, 5.827, 5.885, 5.94,
                    5.991, 6.039, 6.083, 6.126 },
            { 3.8, 4.33, 4.649, 4.88, 5.058, 5.203, 5.326, 5.432, 5.526, 5.61, 5.685, 5.755, 5.818, 5.876, 5.93,
                    5.981, 6.029, 6.073, 6.115 },
            { 3.797, 4.325, 4.644, 4.874, 5.052, 5.196, 5.318, 5.424, 5.518, 5.602, 5.677, 5.746, 5.809, 5.867,
                    5.921, 5.972, 6.019, 6.064, 6.106 },
            { 3.793, 4.321, 4.639, 4.868, 5.046, 5.189, 5.312, 5.417, 5.51, 5.594, 5.669, 5.738, 5.801, 5.859,
                    5.912, 5.964, 6.01, 6.054, 6.096 },
            { 3.79, 4.317, 4.634, 4.863, 5.04, 5.183, 5.305, 5.41, 5.503, 5.586, 5.661, 5.73, 5.793, 5.851, 5.904,
                    5.955, 6.001, 6.045, 6.087 },
            { 3.788, 4.313, 4.63, 4.858, 5.034, 5.177, 5.299, 5.403, 5.496, 5.579, 5.654, 5.722, 5.785, 5.843,
                    5.896, 5.947, 5.993, 6.037, 6.078 },
            { 3.785, 4.309, 4.625, 4.853, 5.029, 5.171, 5.293, 5.397, 5.49, 5.573, 5.647, 5.715, 5.777, 5.835,
                    5.888, 5.939, 5.984, 6.029, 6.07 },
            { 3.782, 4.306, 4.621, 4.848, 5.024, 5.166, 5.287, 5.391, 5.483, 5.566, 5.64, 5.708, 5.77, 5.828, 5.88,
                    5.931, 5.977, 6.02, 6.062 },
            { 3.78, 4.302, 4.617, 4.843, 5.019, 5.161, 5.281, 5.385, 5.477, 5.56, 5.634, 5.701, 5.763, 5.821, 5.873,
                    5.924, 5.969, 6.013, 6.054 },
            { 3.777, 4.299, 4.613, 4.839, 5.014, 5.156, 5.276, 5.379, 5.471, 5.554, 5.627, 5.695, 5.757, 5.814,
                    5.866, 5.916, 5.962, 6.005, 6.047 },
            { 3.775, 4.296, 4.609, 4.835, 5.009, 5.151, 5.27, 5.374, 5.466, 5.548, 5.621, 5.689, 5.75, 5.807, 5.859,
                    5.91, 5.955, 5.998, 6.039 },
            { 3.773, 4.293, 4.606, 4.831, 5.005, 5.146, 5.265, 5.369, 5.46, 5.542, 5.615, 5.682, 5.744, 5.801,
                    5.853, 5.903, 5.948, 5.992, 6.032 },
            { 3.77, 4.29, 4.602, 4.827, 5.001, 5.142, 5.261, 5.364, 5.455, 5.537, 5.61, 5.677, 5.738, 5.795, 5.847,
                    5.897, 5.942, 5.985, 6.026 },
            { 3.768, 4.287, 4.599, 4.823, 4.996, 5.137, 5.256, 5.359, 5.45, 5.531, 5.604, 5.671, 5.732, 5.789,
                    5.841, 5.89, 5.936, 5.979, 6.019 },
            { 3.766, 4.284, 4.596, 4.819, 4.992, 5.133, 5.251, 5.354, 5.445, 5.526, 5.599, 5.665, 5.727, 5.783,
                    5.835, 5.884, 5.93, 5.972, 6.013 },
            { 3.764, 4.282, 4.592, 4.815, 4.989, 5.129, 5.247, 5.349, 5.441, 5.521, 5.594, 5.66, 5.722, 5.778, 5.83,
                    5.879, 5.924, 5.966, 6.007 },
            { 3.762, 4.279, 4.589, 4.812, 4.985, 5.125, 5.243, 5.345, 5.436, 5.517, 5.589, 5.655, 5.716, 5.772,
                    5.825, 5.873, 5.919, 5.961, 6.001 },
            { 3.761, 4.277, 4.587, 4.809, 4.981, 5.121, 5.239, 5.341, 5.432, 5.512, 5.584, 5.65, 5.711, 5.767,
                    5.819, 5.868, 5.913, 5.955, 5.995 },
            { 3.759, 4.274, 4.584, 4.805, 4.978, 5.117, 5.235, 5.337, 5.427, 5.507, 5.58, 5.645, 5.706, 5.762,
                    5.814, 5.862, 5.908, 5.95, 5.99 },
            { 3.757, 4.272, 4.581, 4.802, 4.974, 5.114, 5.231, 5.333, 5.423, 5.503, 5.575, 5.641, 5.702, 5.757,
                    5.809, 5.857, 5.903, 5.944, 5.985 },
            { 3.755, 4.27, 4.578, 4.799, 4.971, 5.11, 5.227, 5.329, 5.419, 5.499, 5.571, 5.637, 5.697, 5.753, 5.804,
                    5.853, 5.898, 5.939, 5.979 },
            { 3.754, 4.267, 4.576, 4.796, 4.968, 5.107, 5.224, 5.325, 5.415, 5.495, 5.566, 5.632, 5.693, 5.748, 5.8,
                    5.848, 5.893, 5.934, 5.974 },
            { 3.752, 4.265, 4.573, 4.793, 4.965, 5.103, 5.22, 5.322, 5.411, 5.491, 5.562, 5.628, 5.688, 5.744,
                    5.795, 5.843, 5.888, 5.93, 5.97 },
            { 3.751, 4.263, 4.571, 4.791, 4.962, 5.1, 5.217, 5.318, 5.408, 5.487, 5.558, 5.624, 5.684, 5.739, 5.791,
                    5.839, 5.884, 5.925, 5.965 },
            { 3.749, 4.261, 4.568, 4.788, 4.959, 5.097, 5.213, 5.315, 5.404, 5.483, 5.555, 5.62, 5.68, 5.735, 5.787,
                    5.835, 5.879, 5.921, 5.96 },
            { 3.748, 4.259, 4.566, 4.786, 4.956, 5.094, 5.21, 5.311, 5.4, 5.48, 5.551, 5.616, 5.676, 5.731, 5.783,
                    5.83, 5.875, 5.917, 5.956 },
            { 3.746, 4.257, 4.564, 4.783, 4.953, 5.091, 5.207, 5.308, 5.397, 5.476, 5.547, 5.612, 5.672, 5.728,
                    5.779, 5.826, 5.871, 5.913, 5.952 },
            { 3.745, 4.255, 4.561, 4.781, 4.951, 5.088, 5.204, 5.305, 5.394, 5.473, 5.544, 5.609, 5.669, 5.724,
                    5.775, 5.823, 5.867, 5.909, 5.947 },
            { 3.744, 4.254, 4.559, 4.778, 4.948, 5.086, 5.201, 5.302, 5.391, 5.469, 5.54, 5.605, 5.665, 5.72, 5.771,
                    5.819, 5.863, 5.905, 5.943 },
            { 3.742, 4.252, 4.557, 4.776, 4.946, 5.083, 5.198, 5.299, 5.387, 5.466, 5.537, 5.602, 5.662, 5.716,
                    5.767, 5.815, 5.859, 5.901, 5.939 },
            { 3.741, 4.25, 4.555, 4.774, 4.943, 5.081, 5.195, 5.296, 5.384, 5.463, 5.534, 5.599, 5.658, 5.713,
                    5.764, 5.812, 5.855, 5.897, 5.936 },
            { 3.74, 4.249, 4.553, 4.771, 4.941, 5.079, 5.193, 5.293, 5.381, 5.46, 5.531, 5.595, 5.655, 5.709, 5.76,
                    5.808, 5.852, 5.893, 5.932 },
            { 3.739, 4.247, 4.551, 4.769, 4.938, 5.076, 5.19, 5.29, 5.378, 5.457, 5.528, 5.592, 5.651, 5.706, 5.757,
                    5.805, 5.848, 5.89, 5.928 },
            { 3.738, 4.245, 4.55, 4.767, 4.936, 5.074, 5.187, 5.288, 5.376, 5.454, 5.525, 5.589, 5.648, 5.703,
                    5.753, 5.801, 5.845, 5.886, 5.925 },
            { 3.736, 4.244, 4.548, 4.765, 4.934, 5.071, 5.185, 5.285, 5.373, 5.452, 5.522, 5.586, 5.645, 5.7, 5.75,
                    5.798, 5.841, 5.883, 5.921 },
            { 3.735, 4.242, 4.546, 4.763, 4.932, 5.069, 5.183, 5.282, 5.37, 5.449, 5.519, 5.583, 5.642, 5.696,
                    5.747, 5.795, 5.838, 5.879, 5.918 },
            { 3.734, 4.241, 4.544, 4.761, 4.93, 5.067, 5.18, 5.28, 5.368, 5.446, 5.516, 5.58, 5.639, 5.693, 5.744,
                    5.792, 5.835, 5.876, 5.914 },
            { 3.733, 4.239, 4.543, 4.759, 4.927, 5.065, 5.178, 5.278, 5.365, 5.443, 5.513, 5.578, 5.636, 5.691,
                    5.741, 5.788, 5.832, 5.873, 5.912 },
            { 3.732, 4.238, 4.541, 4.757, 4.925, 5.062, 5.176, 5.275, 5.363, 5.441, 5.511, 5.575, 5.634, 5.688,
                    5.738, 5.785, 5.828, 5.87, 5.909 },
            { 3.731, 4.237, 4.539, 4.755, 4.924, 5.06, 5.174, 5.273, 5.361, 5.438, 5.508, 5.572, 5.631, 5.685,
                    5.735, 5.783, 5.825, 5.867, 5.906 },
            { 3.73, 4.235, 4.538, 4.754, 4.922, 5.058, 5.171, 5.271, 5.358, 5.436, 5.506, 5.57, 5.628, 5.682, 5.732,
                    5.78, 5.823, 5.864, 5.903 },
            { 3.729, 4.234, 4.536, 4.752, 4.92, 5.056, 5.169, 5.269, 5.356, 5.433, 5.503, 5.567, 5.626, 5.679,
                    5.729, 5.777, 5.82, 5.861, 5.9 },
            { 3.728, 4.233, 4.535, 4.75, 4.918, 5.054, 5.167, 5.266, 5.354, 5.431, 5.501, 5.565, 5.623, 5.677,
                    5.727, 5.774, 5.818, 5.858, 5.897 },
            { 3.727, 4.232, 4.533, 4.749, 4.916, 5.052, 5.165, 5.264, 5.351, 5.429, 5.499, 5.562, 5.62, 5.674,
                    5.724, 5.771, 5.815, 5.855, 5.894 },
            { 3.726, 4.23, 4.532, 4.747, 4.914, 5.051, 5.163, 5.262, 5.349, 5.427, 5.496, 5.56, 5.618, 5.672, 5.722,
                    5.769, 5.812, 5.853, 5.891 },
            { 3.726, 4.229, 4.53, 4.745, 4.913, 5.049, 5.161, 5.26, 5.347, 5.424, 5.494, 5.557, 5.616, 5.669, 5.719,
                    5.766, 5.81, 5.85, 5.888 },
            { 3.725, 4.228, 4.529, 4.744, 4.911, 5.047, 5.159, 5.258, 5.345, 5.422, 5.492, 5.555, 5.613, 5.667,
                    5.717, 5.764, 5.807, 5.847, 5.886 },
            { 3.724, 4.227, 4.528, 4.742, 4.909, 5.045, 5.158, 5.256, 5.343, 5.42, 5.49, 5.553, 5.611, 5.664, 5.714,
                    5.761, 5.805, 5.845, 5.883 },
            { 3.723, 4.226, 4.526, 4.741, 4.908, 5.043, 5.156, 5.254, 5.341, 5.418, 5.488, 5.551, 5.609, 5.662,
                    5.712, 5.759, 5.802, 5.842, 5.881 },
            { 3.722, 4.225, 4.525, 4.739, 4.906, 5.042, 5.154, 5.253, 5.339, 5.416, 5.486, 5.549, 5.607, 5.66, 5.71,
                    5.757, 5.8, 5.84, 5.878 },
            { 3.721, 4.224, 4.524, 4.738, 4.905, 5.04, 5.153, 5.251, 5.337, 5.414, 5.484, 5.547, 5.604, 5.658,
                    5.707, 5.754, 5.797, 5.837, 5.876 },
            { 3.721, 4.223, 4.523, 4.737, 4.903, 5.039, 5.151, 5.249, 5.335, 5.412, 5.482, 5.544, 5.602, 5.656,
                    5.705, 5.752, 5.795, 5.835, 5.873 },
            { 3.72, 4.222, 4.521, 4.735, 4.902, 5.037, 5.149, 5.247, 5.334, 5.41, 5.48, 5.542, 5.6, 5.654, 5.703,
                    5.75, 5.793, 5.833, 5.871 },
            { 3.719, 4.221, 4.52, 4.734, 4.9, 5.035, 5.148, 5.246, 5.332, 5.409, 5.478, 5.541, 5.598, 5.651, 5.701,
                    5.748, 5.791, 5.83, 5.868 },
            { 3.718, 4.22, 4.519, 4.733, 4.899, 5.034, 5.146, 5.244, 5.33, 5.407, 5.476, 5.539, 5.596, 5.649, 5.699,
                    5.745, 5.788, 5.828, 5.866 },
            { 3.718, 4.219, 4.518, 4.731, 4.897, 5.032, 5.145, 5.242, 5.328, 5.405, 5.474, 5.537, 5.594, 5.647,
                    5.697, 5.743, 5.786, 5.826, 5.864 },
            { 3.717, 4.218, 4.517, 4.73, 4.896, 5.031, 5.143, 5.241, 5.327, 5.403, 5.472, 5.535, 5.592, 5.645,
                    5.695, 5.741, 5.784, 5.824, 5.862 },
            { 3.716, 4.217, 4.516, 4.729, 4.895, 5.03, 5.141, 5.239, 5.325, 5.402, 5.47, 5.533, 5.591, 5.643, 5.693,
                    5.739, 5.782, 5.822, 5.86 },
            { 3.716, 4.216, 4.515, 4.728, 4.893, 5.028, 5.14, 5.237, 5.323, 5.4, 5.469, 5.531, 5.589, 5.642, 5.691,
                    5.737, 5.78, 5.821, 5.858 },
            { 3.715, 4.215, 4.513, 4.727, 4.892, 5.027, 5.139, 5.236, 5.322, 5.398, 5.467, 5.53, 5.587, 5.64, 5.689,
                    5.735, 5.778, 5.819, 5.856 },
            { 3.714, 4.214, 4.512, 4.725, 4.891, 5.026, 5.137, 5.234, 5.32, 5.397, 5.465, 5.528, 5.585, 5.638,
                    5.687, 5.734, 5.776, 5.817, 5.854 },
            { 3.713, 4.214, 4.511, 4.724, 4.89, 5.024, 5.136, 5.233, 5.319, 5.395, 5.464, 5.526, 5.583, 5.636,
                    5.685, 5.732, 5.775, 5.815, 5.852 },
            { 3.713, 4.213, 4.51, 4.723, 4.888, 5.023, 5.134, 5.232, 5.317, 5.393, 5.462, 5.524, 5.582, 5.634,
                    5.683, 5.73, 5.773, 5.813, 5.85 },
            { 3.712, 4.212, 4.51, 4.722, 4.887, 5.022, 5.133, 5.23, 5.316, 5.392, 5.461, 5.523, 5.58, 5.633, 5.682,
                    5.728, 5.771, 5.811, 5.848 },
            { 3.712, 4.211, 4.509, 4.721, 4.886, 5.02, 5.132, 5.229, 5.314, 5.39, 5.459, 5.521, 5.578, 5.631, 5.68,
                    5.726, 5.769, 5.809, 5.846 },
            { 3.711, 4.21, 4.508, 4.72, 4.885, 5.019, 5.131, 5.227, 5.313, 5.389, 5.458, 5.52, 5.577, 5.629, 5.678,
                    5.725, 5.767, 5.807, 5.845 },
            { 3.71, 4.21, 4.507, 4.719, 4.884, 5.018, 5.13, 5.226, 5.312, 5.388, 5.456, 5.518, 5.575, 5.628, 5.677,
                    5.723, 5.766, 5.806, 5.843 },
            { 3.71, 4.209, 4.506, 4.718, 4.883, 5.017, 5.128, 5.225, 5.31, 5.386, 5.455, 5.517, 5.574, 5.626, 5.675,
                    5.721, 5.764, 5.804, 5.841 },
            { 3.709, 4.208, 4.505, 4.717, 4.882, 5.016, 5.127, 5.224, 5.309, 5.385, 5.453, 5.515, 5.572, 5.625,
                    5.674, 5.72, 5.762, 5.802, 5.839 },
            { 3.709, 4.207, 4.504, 4.716, 4.881, 5.015, 5.126, 5.222, 5.308, 5.383, 5.452, 5.514, 5.571, 5.623,
                    5.672, 5.718, 5.761, 5.801, 5.838 },
            { 3.708, 4.207, 4.503, 4.715, 4.88, 5.013, 5.125, 5.221, 5.306, 5.382, 5.45, 5.512, 5.569, 5.622, 5.67,
                    5.717, 5.759, 5.799, 5.836 },
            { 3.708, 4.206, 4.502, 4.714, 4.879, 5.012, 5.124, 5.22, 5.306, 5.381, 5.449, 5.511, 5.568, 5.62, 5.669,
                    5.715, 5.758, 5.797, 5.834 },
            { 3.707, 4.205, 4.502, 4.713, 4.878, 5.011, 5.123, 5.219, 5.304, 5.38, 5.448, 5.509, 5.566, 5.619,
                    5.667, 5.714, 5.756, 5.796, 5.833 },
            { 3.707, 4.205, 4.501, 4.712, 4.877, 5.01, 5.121, 5.217, 5.303, 5.378, 5.446, 5.508, 5.565, 5.617,
                    5.666, 5.712, 5.755, 5.794, 5.831 } };
}