Example usage for java.util Locale setDefault

List of usage examples for java.util Locale setDefault

Introduction

On this page you can find an example usage of java.util.Locale.setDefault.

Prototype

public static synchronized void setDefault(Locale newLocale) 

Source Link

Document

Sets the default locale for this instance of the Java Virtual Machine.

Usage

From source file:course_generator.frmMain.java

/**
 * Creates the main application form {@code frmMain} — everything starts here.
 * <p>
 * Builds the data objects and table models, loads the saved configuration,
 * applies the configured locale and default font, constructs the Swing UI,
 * restores the window geometry and split-pane positions, configures the map
 * tile source and finally starts the 1-second housekeeping timer.
 *
 * @param args command-line arguments (not used inside this constructor body)
 */
public frmMain(String args[]) {
    // -- Initialize the core data objects and the table models backing the UI
    DataDir = Utils.GetHomeDir();
    Track = new TrackData();
    Resume = new ResumeData();
    Settings = new CgSettings();
    ModelTableMain = new TrackDataModel(Track, Settings);
    ModelTableResume = new ResumeModel(Resume, Settings);

    // -- Empty series collection for the elevation profile chart
    dataset = new XYSeriesCollection();
    chart = CreateChartProfil(dataset);

    // -- Load the persisted configuration (populates Settings)
    LoadConfig();

    // -- Apply the configured language. Only "FR" and "EN" are recognized;
    // any other value leaves the JVM default locale untouched.
    if (Settings.Language.equalsIgnoreCase("FR")) {
        Locale.setDefault(Locale.FRANCE);
    } else if (Settings.Language.equalsIgnoreCase("EN")) {
        Locale.setDefault(Locale.US);
    }

    // -- Set the default UI font.
    // NOTE(review): the font is hard-coded to Tahoma/plain/14; the previously
    // commented-out Settings.DefaultFont calls suggest a user-configurable
    // font was planned — confirm the intended behavior.
    setUIFont(new javax.swing.plaf.FontUIResource("Tahoma",
            Font.PLAIN,
            14));

    // -- Load the string resources for internationalization (uses the
    // default locale set above)
    bundle = java.util.ResourceBundle.getBundle("course_generator/Bundle");

    // -- Build the Swing components of the main form
    initComponents();

    // -- Set the icon of the application window
    setIconImage(createImageIcon("/course_generator/images/cg.png", "").getImage());

    // -- Restore the preferred width of the first 15 columns of the main table
    for (int i = 0; i < 15; i++) {
        TableMain.getColumnModel().getColumn(i).setPreferredWidth(Settings.TableMainColWidth[i]);
    }
    RefreshTableMain();

    // -- Restore the window size and center it on the screen.
    // NOTE(review): centering uses the primary screen size only — not tested
    // on multi-screen configurations.
    Rectangle r = getBounds();
    r.width = Settings.MainWindowWidth;
    r.height = Settings.MainWindowHeight;
    Dimension screensize = Toolkit.getDefaultToolkit().getScreenSize();
    r.x = (screensize.width - r.width) / 2;
    r.y = (screensize.height - r.height) / 2;
    setBounds(r);

    // -- Restore the split-pane divider positions (left panel width and
    // right panel horizontal split)
    SplitPaneMain.setDividerLocation(Settings.VertSplitPosition);
    SplitPaneMainRight.setDividerLocation(Settings.HorizSplitPosition);

    // -- Configure the tile source for the map
    MapViewer.setTileSource(new Thunderforest_Outdoors());
    //TODO Switch to OpenTopomap
    //MapViewer.setTileSource(new OpenTopoMap());

    // -- Pre-load the counter close to the timeout so the Internet
    // connection test fires shortly after startup
    cmptInternetConnexion = Settings.ConnectionTimeout - 1;

    // -- Start the 1-second housekeeping timer
    timer1s = new Timer(1000, new TimerActionListener());
    timer1s.start();

    // -- Refresh the most-recently-used menus and the status bar
    RefreshMruCGX();
    RefreshMruGPX();
    RefreshStatusbar(Track);
}

From source file:org.apache.fop.cli.CommandLineOptions.java

/**
 * Handles the '-l' command-line option by setting the JVM default locale
 * to the language code that follows it.
 *
 * @param args the full command-line argument array
 * @param i    the index of the '-l' option itself
 * @return the number of additional arguments consumed (always 1)
 * @throws FOPException if no language code follows the option
 */
private int parseLanguageOption(String[] args, int i) throws FOPException {
    final boolean valueMissing = (i + 1 == args.length) || isOption(args[i + 1]);
    if (valueMissing) {
        throw new FOPException("if you use '-l', you must specify a language");
    }
    Locale.setDefault(new Locale(args[i + 1], ""));
    return 1;
}

From source file:org.apache.hadoop.util.TestStringUtils.java

@Test
public void testLowerAndUpperStrings() {
    final Locale saved = Locale.getDefault();
    try {
        // Turkish locale: its dotted/dotless 'i' rules make default-locale
        // casing of "title"/"TITLE" differ from the English result.
        Locale.setDefault(new Locale("tr", "TR"));
        final String upper = "TITLE";
        final String lower = "title";
        // Sanity check that the TR locale really is in effect.
        assertNotEquals(lower, upper.toLowerCase());
        assertNotEquals(upper, lower.toUpperCase());
        // StringUtils casing must be locale-independent, so these hold
        // regardless of the default locale.
        assertEquals(lower, StringUtils.toLowerCase(upper));
        assertEquals(upper, StringUtils.toUpperCase(lower));
        assertTrue(StringUtils.equalsIgnoreCase(upper, lower));
    } finally {
        // Always restore the original default locale for other tests.
        Locale.setDefault(saved);
    }
}

From source file:be.fedict.eidviewer.gui.printing.IDPrintout.java

/**
 * Applies the locale chosen in the viewer preferences and (re)loads all
 * locale-dependent resources: the resource bundle, the long-format date
 * formatter and the coat-of-arms image referenced by the bundle.
 */
private void initI18N() {
    final Locale preferred = ViewerPrefs.getLocale();
    Locale.setDefault(preferred);
    bundle = ResourceBundle.getBundle("be/fedict/eidviewer/gui/resources/IDPrintout");
    dateFormat = DateFormat.getDateInstance(DateFormat.LONG, Locale.getDefault());
    coatOfArms = ImageUtilities.getImage(IDPrintout.class, ICONS + bundle.getString("coatOfArms"));
}

From source file:de.tudarmstadt.tk.statistics.report.ReportGenerator.java

/**
 * Creates a report of the statistical evaluation in the Latex-format
 * //from   ww w  . j a  va  2 s .  c  o  m
 * @param outputFolder
 *            the folder where the report will be written later to store
 *            related images etc. there
 * @param evalResults
 *            an object of type {@link EvaluationResults} comprising the
 *            results of the statistical evaluation
 * @return A String representing the report of the statistical evaluation in
 *         Latex-format
 */
public String createLatexReport(File outputFolder) {
    // Set locale to English globally to make reports independent of the
    // machine thei're created on, e.g. use "." as decimal points on any
    // machine
    Locale.setDefault(Locale.ENGLISH);
    StringBuilder report = new StringBuilder();
    Statistics stats = Statistics.getInstance(true);
    HashMap<String, String> methodsSummary = new HashMap<String, String>();
    HashMap<String, HashMap<String, List<String>>> testSummary = new HashMap<String, HashMap<String, List<String>>>();
    ArrayList<String[]> figures = new ArrayList<String[]>();
    testSummary.put("Parametric", new HashMap<String, List<String>>());
    testSummary.put("Non-Parametric", new HashMap<String, List<String>>());
    String outputFolderPath = "";
    if (outputFolder != null) {
        outputFolderPath = outputFolder.getAbsolutePath();
    }

    //
    // Header
    //
    // Packages
    report.append("\\documentclass[a4paper,12pt]{article}\n");
    report.append("\\usepackage[english]{babel}\n");
    report.append("\\usepackage[utf8]{inputenc}\n");
    report.append("\\usepackage{graphicx}\n");
    report.append("\\usepackage{titlesec}\n");
    report.append("\\usepackage{caption}\n");
    report.append("\\usepackage{subcaption}\n");
    report.append("\\usepackage{adjustbox}\n");
    report.append("\\usepackage{placeins}\n");
    report.append("\\usepackage{longtable}\n");
    report.append("\\usepackage{morefloats}\n");
    // Title definition
    report.append("\\titleformat*{\\section}{\\large\\bfseries}\n");
    report.append("\\titleformat*{\\subsection}{\\normalsize\\bfseries}\n");
    report.append("\\titleformat*{\\subsubsection}{\\vspace{-0.3cm}\\normalsize\\bfseries}\n");
    report.append("\\title{Statistical Evaluation Report}\n");
    report.append("\\date{\\vspace{-10ex}}\n");
    report.append("\\begin{document}\n");
    report.append("\\maketitle\n");

    //
    // Evaluation Overview
    //
    report.append("\\section{Evaluation Overview}");

    int nModels = evalResults.getSampleData().getModelMetadata().size();
    ArrayList<String> measures = evalResults.getMeasures();
    int nSamples = evalResults.getSampleData().getSamples().get(measures.get(0)).get(0).size();
    String ref = "tbl:models";

    // Separate training/testing datasets
    List<String> trainingDataList = new ArrayList<String>();
    List<String> testingDataList = new ArrayList<String>();
    List<Pair<String, String>> datasets = evalResults.getSampleData().getDatasetNames();
    Iterator<Pair<String, String>> itp = datasets.iterator();
    while (itp.hasNext()) {
        Pair<String, String> trainTest = itp.next();
        trainingDataList.add(trainTest.getKey());
        if (trainTest.getValue() != null) {
            testingDataList.add(trainTest.getValue());
        }
    }
    Set<String> trainingDataSet = new HashSet<String>(trainingDataList);
    Set<String> testingDataSet = new HashSet<String>(testingDataList);

    String pipelineDescription = null;
    String sampleOrigin = "per CV";

    ReportTypes pipelineType = this.evalResults.getSampleData().getPipelineType();
    switch (pipelineType) {
    // One-domain n-fold CV (ReportData=per Fold)
    case CV:
        pipelineDescription = String.format("%d-fold cross validation",
                evalResults.getSampleData().getnFolds());
        sampleOrigin = "per fold ";
        break;
    case MULTIPLE_CV:
        pipelineDescription = String.format("%dx%s repeated cross validation",
                evalResults.getSampleData().getnRepetitions(), evalResults.getSampleData().getnFolds());
        break;
    case CV_DATASET_LVL:
        pipelineDescription = String.format("%d-fold cross validation over %d datasets",
                evalResults.getSampleData().getnFolds(), trainingDataSet.size());
        break;
    case MULTIPLE_CV_DATASET_LVL:
        pipelineDescription = String.format("%dx%s repeated cross validation over %d datasets",
                evalResults.getSampleData().getnRepetitions(), evalResults.getSampleData().getnFolds(),
                trainingDataSet.size());
        sampleOrigin = "per dataset";
        break;
    case TRAIN_TEST_DATASET_LVL:
        // In the train/test scenario, the number of datasets only includes
        // distinct ones
        Set<String> allDataSets = new HashSet<String>(testingDataSet);
        allDataSets.addAll(trainingDataSet);
        pipelineDescription = String.format("Train/Test over %d datasets", allDataSets.size());
        sampleOrigin = "per dataset";
        break;
    default:
        pipelineDescription = "!unknown pipeline type!";
        sampleOrigin = "!unknown pipeline type!";
        break;
    }

    boolean isBaselineEvaluation = evalResults.isBaselineEvaluation();
    report.append(String.format("The system performed a %s for the %d models in Tbl \\ref{%s}. ",
            pipelineDescription, nModels, ref));
    if (isBaselineEvaluation) {
        report.append(String.format("The models were compared against the first baseline model. \n",
                pipelineDescription, nModels, ref));
    } else {
        report.append(String.format("The models were compared against each other. \n", pipelineDescription,
                nModels, ref));
    }

    String[][] values = new String[nModels][3];
    for (int r = 0; r < nModels; r++) {
        values[r][0] = String.format("M%d", r);
        // Remove package prefix for algorithms, e.g. shorten "trees.J48" to "J48".
        String[] algorithm = evalResults.getSampleData().getModelMetadata().get(r).getKey().split("\\.");
        values[r][1] = escapeLatexCharacters(algorithm[algorithm.length - 1]);
        values[r][2] = escapeLatexCharacters(evalResults.getSampleData().getModelMetadata().get(r).getValue());
    }

    String table = createLatexTable("Evaluated models with classifier algorithm and feature sets", ref,
            new String[] { "Index", "Algorithm", "Feature Set" }, "|l|l|p{11cm}|", values);
    report.append(table);

    // List test/training datasets. Consider the case when these sets are
    // different.
    if (testingDataSet.isEmpty()) {
        if (trainingDataSet.size() == 1) {
            report.append(
                    String.format("The models were evaluated on the dataset %s. ", trainingDataList.get(0)));
        } else {
            report.append(String.format("The models were evaluated on the datasets %s. ",
                    this.createEnumeration(trainingDataList)));
        }
    } else {
        if (trainingDataSet.size() == 1 && testingDataSet.size() == 1) {
            report.append(
                    String.format("The models were trained on the dataset %s and tested on the dataset %s. ",
                            trainingDataList.get(0), testingDataList.get(0)));
        } else if (trainingDataSet.size() > 1 && testingDataSet.size() == 1) {
            report.append(String.format(
                    "The models were trained on the datasets %s and tested on the dataset %s. ",
                    this.createEnumeration(new ArrayList<String>(trainingDataSet)), testingDataList.get(0)));
        } else if (trainingDataSet.size() == 1 && testingDataSet.size() > 1) {
            report.append(String.format(
                    "The models were trained on the dataset %s and tested on the datasets %s. ",
                    trainingDataList.get(0), this.createEnumeration(new ArrayList<String>(testingDataSet))));
        } else {
            report.append(
                    String.format("The models were trained on the datasets %s and tested on the datasets %s. ",
                            this.createEnumeration(new ArrayList<String>(trainingDataSet)),
                            this.createEnumeration(new ArrayList<String>(testingDataSet))));
        }
    }
    report.append(String.format("Their performance was assessed with the %s", createEnumeration(measures)));
    report.append(
            ". In the analysis, the models thus represent levels of the independent variable, while the performance measures are dependent variables.\n");

    //
    // Results (for each measure separately)
    //
    report.append("\\FloatBarrier\n"); // All previous floats must be placed
    // before this point
    report.append("\\section{Results}\n");
    report.append(String.format(
            "Throughout the report, p-values are annotated if they are significant. While {\\footnotesize *} indicates low significance ($p<\\alpha=%.2f$), the annotations {\\footnotesize **} and {\\footnotesize ***} represent medium ($p<\\alpha=%.2f$) and high significance ($p<\\alpha=%.2f$).",
            significance_low, significance_medium, significance_high));

    for (int i = 0; i < measures.size(); i++) {
        /*
         * Create table with samples for the current performance measure If
         * samples are drawn over multiple datasets, transpose table
         */
        String measure = measures.get(i);
        if (!evalResults.getSampleData().getSamples().containsKey(measure)) {
            continue;
        }
        ArrayList<ArrayList<Double>> measureSamples = evalResults.getSampleData().getSamples().get(measure);
        ArrayList<Double> averageMeasureSamples = evalResults.getSampleData().getSamplesAverage().get(measure);

        report.append("\\FloatBarrier\n");
        report.append(String.format("\\subsection{%s}\n", measure));
        ref = String.format("tbl:%s", measure.replaceAll("\\s", ""));
        report.append(String.format(
                "The %s samples drawn from the %s and the %d models are presented in Tbl. \\ref{%s}.\n",
                measure, pipelineDescription, nModels, ref));

        // Plot Box-Whisker-Diagram of samples for the current measure and add the figure to the appendix
        // Use the min/max sample value as indicators for the box-plots limits
        String filename = String.format("boxPlot%s", measure.replaceAll("\\s", ""));
        String path = String.format("%s%s%s", outputFolderPath, File.separator, filename);
        String pathR = this.fixSlashes(path);
        String figRef = String.format("fig:boxPlot%s", measure.replaceAll("\\s", ""));
        String caption = String.format("Box-Whisker-Plot of %s samples. Red dots indicate means.", measure);
        double[][] samples = new double[nModels][];
        double minSample = Double.MAX_VALUE;
        double maxSample = Double.MIN_VALUE;
        for (int k = 0; k < nModels; k++) {
            ArrayList<Double> s = measureSamples.get(k);
            samples[k] = new double[s.size()];
            for (int j = 0; j < s.size(); j++) {
                samples[k][j] = s.get(j);
                if (minSample > s.get(j)) {
                    minSample = s.get(j);
                }
                if (maxSample < s.get(j)) {
                    maxSample = s.get(j);
                }
            }
        }
        double sampleRange = maxSample - minSample;
        int lowerLimit = (int) Math.floor(minSample - sampleRange * 0.1);
        int upperLimit = (int) Math.ceil(maxSample + sampleRange * 0.1);
        boolean successful = stats.plotBoxWhisker(samples, lowerLimit, upperLimit, pathR, measure);
        if (successful) {
            figures.add(new String[] { figRef, caption, filename });
            report.append(
                    String.format("See Fig. \\ref{%s} for a Box-Whisker plot of these samples. ", figRef));
        }

        caption = String.format("Samples of the %s drawn from the %s and the %d models", measure,
                pipelineDescription, nModels);
        switch (pipelineType) {
        case CV:
        case MULTIPLE_CV:
            values = new String[nModels + 1][nSamples + 2];
            for (int r = 0; r <= nModels; r++) {
                // First line of table = Fold indices
                if (r == 0) {
                    values[r][0] = "";
                    values[r][nSamples + 1] = "";
                    for (int f = 1; f <= nSamples; f++) {
                        values[r][f] = Integer.toString(f);
                    }
                    // Next lines with model indices, samples per fold and
                    // average measure over all samples
                } else {
                    values[r][0] = String.format("M%d", (r - 1));
                    //values[r][nSamples + 1] = String.format("%.2f", averageMeasureSamples.get(r - 1) * 100);
                    values[r][nSamples + 1] = String.format("%.2f", averageMeasureSamples.get(r - 1));
                    ArrayList<Double> s = measureSamples.get(r - 1);
                    for (int j = 0; j < s.size(); j++) {
                        //values[r][j + 1] = String.format("%.2f", s.get(j) * 100);
                        values[r][j + 1] = String.format("%.2f", s.get(j));
                    }
                }
            }
            if (values.length > 58) {
                table = createLatexLongTable(caption, ref,
                        new String[] { "Classifier",
                                String.format("\\multicolumn{%d}{|c|}{%s %s}", nSamples, measure, sampleOrigin),
                                "Average" },
                        String.format("|%s", StringUtils.repeat("l|", nSamples + 2)), values);
            } else {
                table = createLatexTable(caption, ref,
                        new String[] { "Classifier",
                                String.format("\\multicolumn{%d}{|c|}{%s %s}", nSamples, measure, sampleOrigin),
                                "Average" },
                        String.format("|%s", StringUtils.repeat("l|", nSamples + 2)), values);
            }
            break;

        case CV_DATASET_LVL:
        case MULTIPLE_CV_DATASET_LVL:
        case TRAIN_TEST_DATASET_LVL:
            values = new String[nSamples + 2][nModels + 1];
            // double[][] valuesNumeric = new double[nSamples][nModels];
            for (int r = 0; r <= nSamples + 1; r++) {
                // First line of table = Model indices
                if (r == 0) {
                    values[r][0] = "";
                    for (int j = 0; j < nModels; j++) {
                        values[r][j + 1] = String.format("M%d", (j));
                    }
                    // Last line of table = average sums
                } else if (r == nSamples + 1) {
                    values[r][0] = "Average";
                    for (int j = 0; j < nModels; j++) {
                        //values[r][j + 1] = String.format("%.2f", averageMeasureSamples.get(j) * 100);
                        values[r][j + 1] = String.format("%.2f", averageMeasureSamples.get(j));
                    }
                    // Next lines with model indices, samples per fold and
                    // average measure over all samples
                } else {
                    // Only print both train- and test set if there is more
                    // than one training set
                    Pair<String, String> trainTest = evalResults.getSampleData().getDatasetNames().get(r - 1);
                    if (pipelineType == ReportTypes.TRAIN_TEST_DATASET_LVL) {
                        if (trainingDataSet.size() > 1) {
                            values[r][0] = String.format("%s-%s", trainTest.getKey(), trainTest.getValue());
                        } else {
                            values[r][0] = trainTest.getValue();
                        }
                    } else {
                        values[r][0] = trainTest.getKey();
                    }
                    for (int j = 0; j < nModels; j++) {
                        ArrayList<Double> s = measureSamples.get(j);
                        //values[r][j + 1] = String.format("%.2f", s.get(r - 1) * 100);
                        values[r][j + 1] = String.format("%.2f", s.get(r - 1));
                    }
                }
            }
            if (values.length > 58) {
                table = createLatexLongTable(caption, ref,
                        new String[] { "Dataset",
                                String.format("\\multicolumn{%d}{|c|}{%s %s}", nModels, measure,
                                        sampleOrigin) },
                        String.format("|%s", StringUtils.repeat("l|", nModels + 1)), values);
            } else {
                table = createLatexTable(caption, ref,
                        new String[] { "Dataset",
                                String.format("\\multicolumn{%d}{|c|}{%s %s}", nModels, measure,
                                        sampleOrigin) },
                        String.format("|%s", StringUtils.repeat("l|", nModels + 1)), values);
            }
            break;
        }
        report.append(table);

        //
        // Results - First parametric tests, then non-parametric (2
        // iterations)
        // Print results for alls non-parametric tests except McNemar.
        // McNemar is not based on the same performance measures but on a
        // contingency matrix, which is
        // printed in a separate section.
        for (String testType : new String[] { "Parametric", "Non-Parametric" }) {
            report.append(String.format("\\subsubsection{%s Testing}", testType));

            Pair<String, AbstractTestResult> result = null;
            if (testType.equals("Parametric")) {
                result = evalResults.getParametricTestResults().get(measure);
            } else {
                result = evalResults.getNonParametricTestResults().get(measure);
            }

            // Use pretty-print method descriptor if specified
            String method = result.getKey();
            if (StatsConfigConstants.PRETTY_PRINT_METHODS.containsKey(method)) {
                method = StatsConfigConstants.PRETTY_PRINT_METHODS.get(method);
            }
            methodsSummary.put(testType, method);

            TestResult r = (TestResult) result.getValue();
            report.append(
                    String.format("The system compared the %d models using the \\emph{%s}. ", nModels, method));

            if (r != null && !Double.isNaN(r.getpValue())) {

                // A priori test: assumptions
                boolean assumptionViolated = false;
                Iterator<String> it = r.getAssumptions().keySet().iterator();
                while (it.hasNext()) {
                    String assumption = it.next();

                    TestResult at = (TestResult) r.getAssumptions().get(assumption);
                    if (at == null) {
                        report.append(String.format("Testing for %s failed. ", assumption));
                        assumptionViolated = true;
                        continue;
                    }
                    if (Double.isNaN(at.getpValue())) {
                        report.append(
                                String.format("Testing for %s using %s failed. ", assumption, at.getMethod()));
                        assumptionViolated = true;
                        continue;
                    }
                    double ap = at.getpValue();

                    if (ap <= this.significance_low) {
                        assumptionViolated = true;
                    }

                    // Verbalize result according to p value
                    Pair<String, Double> verbalizedP = verbalizeP(ap, true);

                    String testResultRepresentation = getTestResultRepresentation(at, verbalizedP.getValue());
                    report.append(String.format("%s %s violation of %s (%s). ", at.getMethod(),
                            verbalizedP.getKey(), assumption, testResultRepresentation));

                }

                // Create QQ-Normal diagram to support the analysis of a
                // normality assumption
                if (result.getKey().equals("DependentT") && samples.length == 2) {
                    filename = String.format("qqNormPlot%s", measure.replaceAll("\\s", ""));
                    path = String.format("%s%s%s", outputFolderPath, File.separator, filename);
                    pathR = this.fixSlashes(path);
                    figRef = String.format("fig:qqNormPlot%s", measure.replaceAll("\\s", ""));
                    caption = String.format("QQ-Normal plot of pairwise differences between %s samples.",
                            measure);
                    double[] differences = new double[samples[0].length];
                    for (int j = 0; j < samples[0].length; j++) {
                        differences[j] = samples[0][j] - samples[1][j];
                    }
                    successful = stats.plotQQNorm(differences, "M0-M1", measure, pathR);
                    if (successful) {
                        figures.add(new String[] { figRef, caption, filename });
                        report.append(String.format("See Fig. \\ref{%s} for a QQ-Normal plot of the samples. ",
                                figRef));
                    }
                }

                if (assumptionViolated) {
                    report.append(
                            "Given that the assumptions are violated, the following test may be corrupted. ");
                }

                // A Priori test results
                // Verbalize result according to p value
                Pair<String, Double> verbalizedP = verbalizeP(r.getpValue(), false);
                String testResultRepresentation = getTestResultRepresentation(r, verbalizedP.getValue());
                report.append(String.format(
                        "The %s %s differences between the performances of the models (%s).\\\\ \n\n ", method,
                        verbalizedP.getKey(), testResultRepresentation));

                // Store result for summary
                if (testSummary.get(testType).containsKey(verbalizedP.getKey())) {
                    testSummary.get(testType).get(verbalizedP.getKey()).add(measure);
                } else {
                    ArrayList<String> list = new ArrayList<String>();
                    list.add(measure);
                    testSummary.get(testType).put(verbalizedP.getKey(), list);
                }

                // Post-hoc test for >2 models (pairwise comparisons)
                if (evalResults.getSampleData().getModelMetadata().size() > 2) {

                    Pair<String, AbstractTestResult> postHocResult = null;
                    if (testType.equals("Parametric")) {
                        postHocResult = evalResults.getParametricPostHocTestResults().get(measure);
                    } else {
                        postHocResult = evalResults.getNonParametricPostHocTestResults().get(measure);
                    }
                    method = postHocResult.getKey();
                    if (StatsConfigConstants.PRETTY_PRINT_METHODS.containsKey(method)) {
                        method = StatsConfigConstants.PRETTY_PRINT_METHODS.get(method);
                    }
                    methodsSummary.put(String.format("%sPostHoc", testType), method);

                    PairwiseTestResult rPostHoc = (PairwiseTestResult) postHocResult.getValue();
                    report.append(String.format("The system performed the \\emph{%s} post-hoc. ", method));

                    if (rPostHoc == null) {
                        report.append("The test failed. ");
                        continue;
                    }

                    // Assumptions
                    boolean assumptionsViolated = false;
                    it = rPostHoc.getAssumptions().keySet().iterator();
                    while (it.hasNext()) {
                        String assumption = it.next();
                        PairwiseTestResult at = (PairwiseTestResult) rPostHoc.getAssumptions().get(assumption);
                        if (at == null) {
                            report.append(String.format("Testing for %s failed. ", assumption));
                            assumptionsViolated = true;
                            continue;
                        }

                        // Create table with pairwise p-values for
                        // assumption testing
                        double[][] ap = at.getpValue();
                        Pair<String[], String[][]> tableData = getPValueStringArray(ap, isBaselineEvaluation); // first
                        // element
                        // is
                        // header,
                        // second
                        // are
                        // values
                        caption = String.format("P-values from the %s for %s", at.getMethod(), measure);
                        ref = String.format("tbl:%s%s", at.getMethod().replaceAll("\\s", ""),
                                measure.replaceAll("\\s", ""));
                        table = createLatexTable(caption, ref, tableData.getKey(),
                                String.format("|%s", StringUtils.repeat("l|", nModels)), tableData.getValue());

                        double max = getMax(ap);
                        double min = getMin(ap);
                        verbalizedP = verbalizeP(min, true);
                        if ((max > significance_low && min <= significance_low)
                                || (max > significance_medium && min <= significance_medium)
                                || (max > significance_high && min <= significance_high)) {
                            // partly significant to degree as specified by
                            // verbalized p-value
                            report.append(String.format(
                                    "%s partly %s violation of %s ($\\alpha=%.2f$, Tbl. \\ref{%s}).\n",
                                    at.getMethod(), verbalizedP.getKey(), assumption, verbalizedP.getValue(),
                                    ref));
                        } else {
                            report.append(String.format(
                                    "%s %s violation of %s ($\\alpha=%.2f$, Tbl. \\ref{%s}).\n", at.getMethod(),
                                    verbalizedP.getKey(), assumption, verbalizedP.getValue(), ref));
                        }
                        report.append(table);

                        if (min <= this.significance_low) {
                            assumptionsViolated = true;
                        }

                    }

                    if (assumptionViolated) {
                        report.append(
                                "Given that the assumptions are violated, the following test may be corrupted. ");
                    }

                    // Result
                    double[][] ap = rPostHoc.getpValue();
                    Pair<String[], String[][]> tableData = getPValueStringArray(ap, isBaselineEvaluation); // first
                    // element
                    // is
                    // header,
                    // second
                    // are
                    // values
                    caption = String.format("P-values from the %s for %s", method, measure);
                    ref = String.format("tbl:%s%s", method.replaceAll("\\s", ""),
                            measure.replaceAll("\\s", ""));
                    String formatting = null;
                    if (!isBaselineEvaluation) {
                        formatting = String.format("|%s", StringUtils.repeat("l|", nModels));
                    } else {
                        formatting = String.format("|l|l|");
                    }
                    String tablePNonAdjusted = createLatexTable(caption, ref, tableData.getKey(), formatting,
                            tableData.getValue());

                    // Already fetch pairwise adjustments here in order to
                    // determine choice of words
                    double max = getMax(ap);
                    double min = getMin(ap);
                    verbalizedP = verbalizeP(min, false);
                    ArrayList<StatsConfigConstants.CORRECTION_VALUES> adjustments = new ArrayList<StatsConfigConstants.CORRECTION_VALUES>(
                            rPostHoc.getpValueCorrections().keySet());
                    String adjustWord = "";
                    if (adjustments.size() > 0) {
                        adjustWord = " for non-adjusted p-values";
                    }
                    if ((max > significance_low && min <= significance_low)
                            || (max > significance_medium && min <= significance_medium)
                            || (max > significance_high && min <= significance_high)) {
                        // partly significant to degree as specified by
                        // verbalized p-value
                        report.append(String.format(
                                "The %s partly %s differences between the performances of the models%s ($\\alpha=%.2f$, Tbl. \\ref{%s}). ",
                                method, verbalizedP.getKey(), adjustWord, verbalizedP.getValue(), ref));
                    } else {
                        report.append(String.format(
                                "The %s %s differences between the performances of the models%s ($\\alpha=%.2f$, Tbl. \\ref{%s}). ",
                                method, verbalizedP.getKey(), adjustWord, verbalizedP.getValue(), ref));
                    }

                    // Determine ordering of models
                    HashMap<Integer, TreeSet<Integer>> postHocOrdering = null;
                    int[][] orderingEdgeList = null;
                    if (testType.equals("Parametric")) {
                        postHocOrdering = evalResults.getParameticPostHocOrdering().get(measure);
                        orderingEdgeList = evalResults.getParameticPostHocEdgelist().get(measure);
                    } else {
                        postHocOrdering = evalResults.getNonParameticPostHocOrdering().get(measure);
                        orderingEdgeList = evalResults.getNonParameticPostHocEdgelist().get(measure);
                    }
                    String ordering = getModelOrderingRepresentation(postHocOrdering);
                    report.append(ordering);

                    // Print graphs of ordering for the current measure and
                    // add the figure to the appendix
                    filename = String.format("graphOrdering%s%s", measure.replaceAll("\\s", ""), testType);
                    path = String.format("%s%s%s", outputFolderPath, File.separator, filename);
                    pathR = this.fixSlashes(path);
                    figRef = String.format("fig:graphOrdering%s%s", measure.replaceAll("\\s", ""), testType);
                    caption = String.format(
                            "Directed graph of significant differences for %s, as indicated by the %s post-hoc test.",
                            measure, testType.toLowerCase());
                    // int nodes[] = new int[nModels];
                    // for(int j=0; j<nModels;j++){nodes[j]=j;};
                    successful = stats.plotGraph(orderingEdgeList, nModels, pathR);
                    if (successful) {
                        figures.add(new String[] { figRef, caption, filename });
                        report.append(String.format("The ordering is visualized in Fig. \\ref{%s}. ", figRef));
                    }

                    // Pairwise adjustments
                    String tablePAdjusted = null;
                    if (adjustments.size() > 0) {
                        String[] subcaption = new String[adjustments.size()];
                        String[] header = null;
                        String[][][] overallValues = new String[adjustments.size()][][];
                        double[] minAdjustments = new double[adjustments.size()];
                        double[] maxAdjustments = new double[adjustments.size()];
                        for (int j = 0; j < adjustments.size(); j++) {
                            StatsConfigConstants.CORRECTION_VALUES adjustmentMethod = adjustments.get(j);
                            subcaption[j] = adjustmentMethod.name();
                            double[][] correctedP = rPostHoc.getpValueCorrections().get(adjustmentMethod);
                            if (StatsConfigConstants.PRETTY_PRINT_METHODS.containsKey(adjustmentMethod)) {
                                subcaption[j] = StatsConfigConstants.PRETTY_PRINT_METHODS.get(adjustmentMethod);
                            }
                            tableData = getPValueStringArray(correctedP, isBaselineEvaluation);
                            header = tableData.getKey();
                            overallValues[j] = tableData.getValue();
                            minAdjustments[j] = getMin(correctedP);
                            maxAdjustments[j] = getMax(correctedP);
                        }

                        caption = String.format("Adjusted p-values from the %s for %s", method, measure);
                        ref = String.format("tbl:%s%sAdjusted", method.replaceAll("\\s", ""),
                                measure.replaceAll("\\s", ""));
                        formatting = null;
                        if (!isBaselineEvaluation) {
                            formatting = String.format("|%s", StringUtils.repeat("l|", nModels));
                        } else {
                            formatting = String.format("|l|l|");
                        }
                        tablePAdjusted = createLatexSubTable(caption, subcaption, ref, header, formatting,
                                overallValues);

                        min = getMin(minAdjustments);
                        max = getMax(maxAdjustments);
                        verbalizedP = verbalizeP(min, false);

                        if ((max > significance_low && min <= significance_low)
                                || (max > significance_medium && min <= significance_medium)
                                || (max > significance_high && min <= significance_high)) {
                            // partly significant to degree as specified by
                            // verbalized p-value
                            report.append(String.format(
                                    "It partly %s differences for adjusted p-values ($\\alpha=%.2f$, Tbl. \\ref{%s}).\n\n ",
                                    verbalizedP.getKey(), verbalizedP.getValue(), ref));
                        } else {
                            report.append(String.format(
                                    "It %s differences for adjusted p-values ($\\alpha=%.2f$, Tbl. \\ref{%s}).\n\n ",
                                    verbalizedP.getKey(), verbalizedP.getValue(), ref));
                        }
                    }

                    report.append(tablePNonAdjusted);
                    if (tablePAdjusted != null) {
                        report.append(tablePAdjusted);
                    }

                }
            } else {
                report.append(String.format("The %s failed.", method));
            }
        }

    }

    //
    // Contingency table and McNemar results if this test was performed
    //
    if (evalResults.getNonParametricTest().equals("McNemar")) {
        String measure = "Contingency Table";
        String testType = "Non-Parametric";
        report.append("\\FloatBarrier\n");
        report.append("\\subsection{Contingency Table}\n");

        String caption = String
                .format("Contingency table with correctly and incorrectly classified folds for %s", measure);
        if (evalResults.getSampleData().getPipelineType() == ReportTypes.MULTIPLE_CV) {
            report.append(String.format(
                    "The contingency table drawn from the %s and the %d models is listed in Tbl. \\ref{%s}. The correctly and incorrectly classified instances per fold were averaged over all repetitions. \n",
                    pipelineDescription, nModels, ref));
            caption = String.format(
                    "Averaged contingency table with correctly and incorrectly classified folds for %s",
                    measure);
        } else {
            report.append(String.format(
                    "The contingency table drawn from the %s and the %d models is listed in Tbl. \\ref{%s}.\n",
                    pipelineDescription, nModels, ref));
        }

        int[][] contingencyMatrix = evalResults.getSampleData().getContingencyMatrix();
        ref = "tbl:ContingencyMatrix";
        values = new String[][] { { "Wrong", "", "" }, { "Correct", "", "" } };
        values[0][1] = String.valueOf(contingencyMatrix[0][0]);
        values[0][2] = String.valueOf(contingencyMatrix[0][1]);
        values[1][1] = String.valueOf(contingencyMatrix[1][0]);
        values[1][2] = String.valueOf(contingencyMatrix[1][1]);

        table = createLatexTable(caption, ref, new String[] { "M0/M1", "Wrong", "Correct" }, "|l|l|l|", values);
        report.append(table);

        // Test results
        report.append(String.format("\\subsubsection{%s Testing}", testType));
        report.append(
                String.format("The system compared the %d models using the \\emph{McNemar test}. ", nModels));
        Pair<String, AbstractTestResult> result = evalResults.getNonParametricTestResults().get(measure);

        // Use pretty-print method descriptor if specified
        String method = result.getKey();
        if (StatsConfigConstants.PRETTY_PRINT_METHODS.containsKey(method)) {
            method = StatsConfigConstants.PRETTY_PRINT_METHODS.get(method);
        }
        methodsSummary.put(testType, method);

        TestResult r = (TestResult) result.getValue();
        if (r != null && !Double.isNaN(r.getpValue())) {
            StringBuilder parameters = new StringBuilder();
            Iterator<String> it = r.getParameter().keySet().iterator();
            while (it.hasNext()) {
                String parameter = it.next();
                double value = r.getParameter().get(parameter);
                parameters.append(String.format("%s=%.3f, ", parameter, value));
            }

            // Verbalize result according to p value
            Pair<String, Double> verbalizedP = verbalizeP(r.getpValue(), false);
            report.append(String.format(
                    "The test %s differences between the performances of the models ($%sp=%.3f, \\alpha=%.2f$).\\\\ \n",
                    verbalizedP.getKey(), parameters.toString(), r.getpValue(), verbalizedP.getValue()));
            // Store result for summary
            if (testSummary.get(testType).containsKey(verbalizedP.getKey())) {
                testSummary.get(testType).get(verbalizedP.getKey()).add(measure);
            } else {
                ArrayList<String> list = new ArrayList<String>();
                list.add(measure);
                testSummary.get(testType).put(verbalizedP.getKey(), list);
            }

        } else {
            report.append("The test failed.\\\\ \n");
        }
    }

    //
    // Summary of results
    //
    report.append("\\FloatBarrier\n");
    report.append("\\section{Summary}\n");
    for (String testType : new String[] { "Parametric", "Non-Parametric" }) {
        String prefix = "";

        if (nModels == 2) {
            report.append(
                    String.format("The system performed %s testing of the %d models using a %s. The test ",
                            testType.toLowerCase(), nModels, methodsSummary.get(testType)));
            prefix = "It";
        } else {
            String postHocTesting = String.format("%sPostHoc", testType);
            report.append(String.format(
                    "The system performed %s testing of the %d models using a %s and a %s post-hoc. The tests ",
                    testType.toLowerCase(), nModels, methodsSummary.get(testType),
                    methodsSummary.get(postHocTesting)));
            prefix = "They";
        }

        // If all tests failed, there're no results to summarize.
        HashMap<String, List<String>> summary = testSummary.get(testType);
        if (summary.keySet().size() == 0) {
            report.append("failed. ");
            continue;
        }

        Iterator<String> it = summary.keySet().iterator();
        boolean usePrefix = false;
        while (it.hasNext()) {
            String pVerbalization = it.next();
            List<String> affectedMeasures = summary.get(pVerbalization);
            if (!usePrefix) {
                report.append(String.format("%s differences in performance for the %s. ", pVerbalization,
                        createEnumeration(affectedMeasures)));
            } else {
                report.append(String.format("%s %s differences in performance for the %s. ", prefix,
                        pVerbalization, createEnumeration(affectedMeasures)));
            }
            usePrefix = true;
        }
        report.append("\\\\ \n\n");

    }

    //
    // Appendix
    //
    // Add all figures
    report.append("\\FloatBarrier\n");
    report.append("\\section{Appendix}\n");
    for (int i = 0; i < figures.size(); i++) {
        ref = figures.get(i)[0];
        String caption = figures.get(i)[1];
        String filename = figures.get(i)[2];
        report.append("\\begin{figure}\n");
        report.append("\\centering\n");
        report.append(String.format("\\includegraphics[width=1\\linewidth]{%s}\n", filename));
        report.append(String.format("\\caption{%s}\n", caption));
        report.append(String.format("\\label{%s}\n", ref));
        report.append("\\end{figure}\n\n");
    }

    // Close document
    report.append("\\end{document}");
    return report.toString();

}

From source file:de.langerhans.wallet.WalletApplication.java

/**
 * Applies the user's configured locale to the JVM default and to the
 * application's resource configuration.
 *
 * The persisted config value is either "0" (meaning: follow the system
 * locale) or a locale code — a bare language ("en") or a language/country
 * pair in "ll_CC" form ("en_US"). NOTE(review): values of length 3 or 4
 * would still throw StringIndexOutOfBoundsException below, as in the
 * original; assumes the config only ever stores "0", "ll", or "ll_CC".
 */
public void updateLocale() {
    final String locale = config.getLocale();
    Locale loc;
    android.content.res.Configuration configuration = new android.content.res.Configuration();
    if (!locale.equals("0")) {
        if (locale.length() > 2) {
            // BUGFIX: the original used substring(0, 1) and substring(3, 4),
            // which took single characters ("e"/"U") out of "en_US" and built
            // an invalid Locale. Take the full two-letter language (chars 0-2)
            // and country (chars 3-5) codes instead.
            loc = new Locale(locale.substring(0, 2), locale.substring(3, 5));
        } else {
            loc = new Locale(locale);
        }
    } else {
        // "0" sentinel: fall back to the device's current system locale.
        loc = Resources.getSystem().getConfiguration().locale;
    }
    Locale.setDefault(loc);
    configuration.locale = loc;
    getBaseContext().getResources().updateConfiguration(configuration,
            getBaseContext().getResources().getDisplayMetrics());
}

From source file:com.github.spyhunter99.pdf.plugin.PdfMojo.java

/**
 * Generates the PDF output for every available locale.
 *
 * For each locale this copies resources, runs the configured Maven
 * reports, assembles the Velocity rendering context, and hands the
 * locale-specific site directory to the document renderer.
 *
 * @throws MojoExecutionException if document rendering fails
 * @throws IOException if any file operation fails
 * @since 1.1
 */
private void generatePdf() throws MojoExecutionException, IOException {
    Locale.setDefault(getDefaultLocale());

    for (final Locale locale : getAvailableLocales()) {
        final File localeWorkDir = getLocaleDirectory(workingDirectory, locale);
        final File localeSiteDir = getLocaleDirectory(getSiteDirectoryTmp(), locale);

        copyResources(locale);
        generateMavenReports(locale);

        // Velocity context consumed by the renderer templates.
        final DocumentRendererContext rendererContext = new DocumentRendererContext();
        rendererContext.put("project", project);
        rendererContext.put("settings", settings);
        rendererContext.put("PathTool", new PathTool());
        rendererContext.put("FileUtils", new FileUtils());
        rendererContext.put("StringUtils", new StringUtils());
        rendererContext.put("i18n", i18n);
        rendererContext.put("generateTOC", generateTOC);
        rendererContext.put("validate", validate);

        rendererContext.put("executiveSummaryName", executiveSummaryName);
        rendererContext.put("titleHeader", titleHeader);
        rendererContext.put("distributionStatement", distributionStatement);
        rendererContext.put("pdfHeader", pdfHeader);
        rendererContext.put("pdfFooter", pdfFooter);
        rendererContext.put("coverDate", pdfCoverDate);

        // Optional settings only enter the context when actually configured.
        if (foStylesOverride != null && foStylesOverride.exists()) {
            rendererContext.put("foStylesOverride", foStylesOverride);
        }
        if (tocMaxDepthToPrint != null && !tocMaxDepthToPrint.trim().isEmpty()) {
            rendererContext.put("tocMaxDepthToPrint", tocMaxDepthToPrint);
        }

        // Expose every project property directly to the Velocity templates.
        for (final Map.Entry<Object, Object> property : project.getProperties().entrySet()) {
            rendererContext.put((String) property.getKey(), property.getValue());
        }

        final DocumentModel documentModel = aggregate ? getDocumentModel(locale) : null;

        try {
            // TODO use interface see DOXIASITETOOLS-30
            ((AbstractDocumentRenderer) docRenderer).render(localeSiteDir, localeWorkDir, documentModel,
                    rendererContext);
        } catch (DocumentRendererException e) {
            throw new MojoExecutionException("Error during document generation: " + e.getMessage(), e);
        }
    }
}

From source file:com.sanbo.moveonapp.TabsActivity.java

/**
 * Re-applies the user-selected locale after a system configuration change
 * (e.g. rotation), so the activity keeps the chosen language instead of
 * reverting to the device default.
 */
@Override
public void onConfigurationChanged(android.content.res.Configuration newConfig) {
    super.onConfigurationChanged(newConfig);
    // No custom locale chosen — let the system configuration stand.
    if (myLocale == null) {
        return;
    }
    newConfig.locale = myLocale;
    Locale.setDefault(myLocale);
    getBaseContext().getResources().updateConfiguration(newConfig,
            getBaseContext().getResources().getDisplayMetrics());
}

From source file:io.mesosphere.mesos.frameworks.cassandra.scheduler.CassandraSchedulerTest.java

/**
 * Verifies {@code CassandraCluster.hasResources} against offers with varying
 * resource amounts and role reservations: an empty offer, an offer with
 * unreserved ("*") resources, offers reserved for a specific role, and an
 * offer with port ranges split between "*" and a role.
 */
@Test
public void testHasResources() {
    // Offer that carries no resources at all.
    Protos.Offer offer = Protos.Offer.newBuilder().setHostname("host1")
            .setId(Protos.OfferID.newBuilder().setValue("offer"))
            .setSlaveId(Protos.SlaveID.newBuilder().setValue("slave"))
            .setFrameworkId(Protos.FrameworkID.newBuilder().setValue("frw1")).build();

    // Requesting nothing from an empty offer must produce no errors.
    List<String> errs = CassandraCluster.hasResources(offer, resources(0, 0, 0),
            Collections.<String, Long>emptyMap(), "*");
    assertNotNull(errs);
    assertThat(errs).isEmpty();

    Locale.setDefault(Locale.ENGLISH); // required for correct float comparison!

    // Requesting resources from an empty offer must report all six shortages.
    errs = CassandraCluster.hasResources(offer, resources(1, 2, 3), portMap(1L, 2L, 3L), "ROLE");
    assertNotNull(errs);
    assertThat(errs).hasSize(6).contains(
            "Not enough cpu resources for role ROLE. Required 1.0 only 0.0 available",
            "Not enough mem resources for role ROLE. Required 2 only 0 available",
            "Not enough disk resources for role ROLE. Required 3 only 0 available",
            "Unavailable port 1(port1) for role ROLE. 0 other ports available",
            "Unavailable port 2(port2) for role ROLE. 0 other ports available",
            "Unavailable port 3(port3) for role ROLE. 0 other ports available");

    // Offer with sufficient unreserved ("*") resources.
    offer = Protos.Offer.newBuilder().setHostname("host1").setId(Protos.OfferID.newBuilder().setValue("offer"))
            .setSlaveId(Protos.SlaveID.newBuilder().setValue("slave"))
            .setFrameworkId(Protos.FrameworkID.newBuilder().setValue("frw1"))
            .addResources(Protos.Resource.newBuilder().setName("cpus").setRole("*")
                    .setType(Protos.Value.Type.SCALAR).setScalar(Protos.Value.Scalar.newBuilder().setValue(8d)))
            .addResources(
                    Protos.Resource.newBuilder().setName("mem").setRole("*").setType(Protos.Value.Type.SCALAR)
                            .setScalar(Protos.Value.Scalar.newBuilder().setValue(8192)))
            .addResources(
                    Protos.Resource.newBuilder().setName("disk").setRole("*").setType(Protos.Value.Type.SCALAR)
                            .setScalar(Protos.Value.Scalar.newBuilder().setValue(8192)))
            .addResources(
                    Protos.Resource.newBuilder().setName("ports").setRole("*").setType(Protos.Value.Type.RANGES)
                            .setRanges(Protos.Value.Ranges.newBuilder()
                                    .addRange(Protos.Value.Range.newBuilder().setBegin(7000).setEnd(10000))))
            .build();

    errs = CassandraCluster.hasResources(offer, resources(8, 8192, 8192), portMap(7000L, 7002L, 10000L), "*");
    assertNotNull(errs);
    assertThat(errs).isEmpty();

    // Offer where every resource is reserved for role "BAZ".
    offer = Protos.Offer.newBuilder().setHostname("host1").setId(Protos.OfferID.newBuilder().setValue("offer"))
            .setSlaveId(Protos.SlaveID.newBuilder().setValue("slave"))
            .setFrameworkId(Protos.FrameworkID.newBuilder().setValue("frw1"))
            .addResources(Protos.Resource.newBuilder().setName("cpus").setRole("BAZ")
                    .setType(Protos.Value.Type.SCALAR).setScalar(Protos.Value.Scalar.newBuilder().setValue(8d)))
            .addResources(
                    Protos.Resource.newBuilder().setName("mem").setRole("BAZ").setType(Protos.Value.Type.SCALAR)
                            .setScalar(Protos.Value.Scalar.newBuilder().setValue(8192)))
            .addResources(Protos.Resource.newBuilder().setName("disk").setRole("BAZ")
                    .setType(Protos.Value.Type.SCALAR)
                    .setScalar(Protos.Value.Scalar.newBuilder().setValue(8192)))
            .addResources(
                    Protos.Resource.newBuilder().setName("ports").setRole("BAZ")
                            .setType(Protos.Value.Type.RANGES)
                            .setRanges(Protos.Value.Ranges.newBuilder()
                                    .addRange(Protos.Value.Range.newBuilder().setBegin(7000).setEnd(10000))))
            .build();

    // The reserving role sees its own reservations.
    errs = CassandraCluster.hasResources(offer, resources(8, 8192, 8192), portMap(7000L, 7002L, 10000L), "BAZ");
    assertNotNull(errs);
    assertThat(errs).isEmpty();

    // A different role must not see resources reserved for "BAZ".
    errs = CassandraCluster.hasResources(offer, resources(8, 8192, 8192), portMap(7000L, 7002L, 10000L),
            "FOO_BAR");
    assertNotNull(errs);
    assertThat(errs).hasSize(6).contains(
            "Not enough cpu resources for role FOO_BAR. Required 8.0 only 0.0 available",
            "Not enough mem resources for role FOO_BAR. Required 8192 only 0 available",
            "Not enough disk resources for role FOO_BAR. Required 8192 only 0 available",
            "Unavailable port 7000(port1) for role FOO_BAR. 0 other ports available",
            "Unavailable port 7002(port2) for role FOO_BAR. 0 other ports available",
            "Unavailable port 10000(port3) for role FOO_BAR. 0 other ports available");

    // Ports split between "*" (5000-6000) and "BAZ" (7000-10000).
    offer = Protos.Offer.newBuilder().setHostname("host1").setId(Protos.OfferID.newBuilder().setValue("offer"))
            .setSlaveId(Protos.SlaveID.newBuilder().setValue("slave"))
            .setFrameworkId(Protos.FrameworkID.newBuilder().setValue("frw1"))
            .addResources(Protos.Resource.newBuilder().setName("cpus").setRole("*")
                    .setType(Protos.Value.Type.SCALAR).setScalar(Protos.Value.Scalar.newBuilder().setValue(8d)))
            .addResources(
                    Protos.Resource.newBuilder().setName("mem").setRole("*").setType(Protos.Value.Type.SCALAR)
                            .setScalar(Protos.Value.Scalar.newBuilder().setValue(8192)))
            .addResources(
                    Protos.Resource.newBuilder().setName("disk").setRole("*").setType(Protos.Value.Type.SCALAR)
                            .setScalar(Protos.Value.Scalar.newBuilder().setValue(8192)))
            .addResources(
                    Protos.Resource.newBuilder().setName("ports").setRole("*").setType(Protos.Value.Type.RANGES)
                            .setRanges(Protos.Value.Ranges.newBuilder()
                                    .addRange(Protos.Value.Range.newBuilder().setBegin(5000).setEnd(6000))))
            .addResources(
                    Protos.Resource.newBuilder().setName("ports").setRole("BAZ")
                            .setType(Protos.Value.Type.RANGES)
                            .setRanges(Protos.Value.Ranges.newBuilder()
                                    .addRange(Protos.Value.Range.newBuilder().setBegin(7000).setEnd(10000))))
            .build();

    errs = CassandraCluster.hasResources(offer, resources(8, 8192, 8192), portMap(7000L, 7002L, 10000L), "BAZ");
    assertNotNull(errs);
    assertThat(errs).isEmpty();

    // Mixed reservations across all resource types: "*" amounts are too small
    // on their own, "BAZ" reservations cover the request.
    offer = Protos.Offer.newBuilder().setHostname("host1").setId(Protos.OfferID.newBuilder().setValue("offer"))
            .setSlaveId(Protos.SlaveID.newBuilder().setValue("slave"))
            .setFrameworkId(Protos.FrameworkID.newBuilder().setValue("frw1"))
            .addResources(Protos.Resource.newBuilder().setName("cpus").setRole("*")
                    .setType(Protos.Value.Type.SCALAR).setScalar(Protos.Value.Scalar.newBuilder().setValue(1d)))
            .addResources(Protos.Resource.newBuilder().setName("cpus").setRole("BAZ")
                    .setType(Protos.Value.Type.SCALAR).setScalar(Protos.Value.Scalar.newBuilder().setValue(8d)))
            .addResources(
                    Protos.Resource.newBuilder().setName("mem").setRole("*").setType(Protos.Value.Type.SCALAR)
                            .setScalar(Protos.Value.Scalar.newBuilder().setValue(100)))
            .addResources(
                    Protos.Resource.newBuilder().setName("mem").setRole("BAZ").setType(Protos.Value.Type.SCALAR)
                            .setScalar(Protos.Value.Scalar.newBuilder().setValue(8192)))
            .addResources(Protos.Resource.newBuilder().setName("disk").setRole("*")
                    .setType(Protos.Value.Type.SCALAR).setScalar(Protos.Value.Scalar.newBuilder().setValue(10)))
            .addResources(Protos.Resource.newBuilder().setName("disk").setRole("BAZ")
                    .setType(Protos.Value.Type.SCALAR)
                    .setScalar(Protos.Value.Scalar.newBuilder().setValue(8192)))
            .addResources(
                    Protos.Resource.newBuilder().setName("ports").setRole("*").setType(Protos.Value.Type.RANGES)
                            .setRanges(Protos.Value.Ranges.newBuilder()
                                    .addRange(Protos.Value.Range.newBuilder().setBegin(5000).setEnd(6000))))
            .addResources(
                    Protos.Resource.newBuilder().setName("ports").setRole("BAZ")
                            .setType(Protos.Value.Type.RANGES)
                            .setRanges(Protos.Value.Ranges.newBuilder()
                                    .addRange(Protos.Value.Range.newBuilder().setBegin(7000).setEnd(10000))))
            .build();

    errs = CassandraCluster.hasResources(offer, resources(8, 8192, 8192), portMap(7000L, 7002L, 10000L), "BAZ");
    assertNotNull(errs);
    assertThat(errs).isEmpty();
}

/**
 * Builds the three-entry port-name to port-number map used by the test above.
 * Replaces the repeated double-brace {@code new HashMap<String, Long>() {{...}}}
 * initialization, which creates a needless anonymous subclass per call site.
 */
private static HashMap<String, Long> portMap(final long port1, final long port2, final long port3) {
    final HashMap<String, Long> ports = new HashMap<String, Long>();
    ports.put("port1", port1);
    ports.put("port2", port2);
    ports.put("port3", port3);
    return ports;
}

From source file:de.dmarcini.submatix.pclogger.gui.MainCommGUI.java

/**
 * Create the application: read the configuration, set up logging and
 * localization, prepare the database, initialize Bluetooth communication
 * and build the GUI.
 *
 * NOTE(review): several failure branches call System.exit(-1), so this
 * constructor may terminate the JVM instead of throwing.
 *
 * @throws ConfigReadWriteException if the configuration cannot be read or written
 * @throws IOException on I/O errors during initialization
 */
public MainCommGUI() throws IOException, ConfigReadWriteException {
    lg = SpxPcloggerProgramConfig.LOGGER;
    setDefaultLookAndFeelDecorated(isDefaultLookAndFeelDecorated());
    // Read the configuration from file; values already supplied via the
    // command line are treated as set and take precedence.
    new ReadConfig();
    makeLogger();
    //
    // Log some environment information.
    //
    lg.info("Operating System: <" + OperatingSystemDetector.getOsName() + ">");
    lg.info("Java VM Datamodel: " + OperatingSystemDetector.getDataModel() + " bits");
    // BUGFIX: this line logs the architecture, but its label was a
    // copy-paste of the "Java VM Datamodel" line above.
    lg.info("Java VM Architecture: <" + OperatingSystemDetector.getArch() + ">");
    //
    // Set up the basics: locale, resource bundle, database directory.
    //
    try {
        ResourceBundle.clearCache();
        if (SpxPcloggerProgramConfig.langCode != null) {
            // A language code was given on the command line; it wins over
            // the system default.
            lg.info("try make locale from cmd options <" + SpxPcloggerProgramConfig.langCode + ">...");
            programLocale = new Locale(SpxPcloggerProgramConfig.langCode);
        } else {
            lg.debug("try get locale from system...");
            programLocale = Locale.getDefault();
        }
        LangStrings.setLocale(programLocale);
        lg.debug(String.format("getLocale says: Display Language :<%s>, lang: <%s>",
                programLocale.getDisplayLanguage(), programLocale.getLanguage()));
        stringsBundle = ResourceBundle.getBundle("de.dmarcini.submatix.pclogger.lang.messages", programLocale);
        if (stringsBundle.getLocale().equals(programLocale)) {
            lg.debug("language accepted..");
        } else {
            // The bundle lookup fell back to another locale; force English
            // so GUI strings and JVM default stay consistent.
            lg.debug("language fallback default...");
            programLocale = Locale.ENGLISH;
            Locale.setDefault(programLocale);
            stringsBundle = ResourceBundle.getBundle("de.dmarcini.submatix.pclogger.lang.messages",
                    programLocale);
        }
        checkDatabaseDirectory();
    } catch (MissingResourceException ex) {
        // No bundle at all for the requested locale: last resort is the
        // explicit English bundle; if even that fails, give up.
        lg.error("ERROR get resources <" + ex.getMessage() + "> try standart Strings...");
        System.err.println("ERROR get resources <" + ex.getMessage() + "> try standart Strings...");
        try {
            lg.debug("try get  default english locale from system...");
            programLocale = Locale.ENGLISH;
            Locale.setDefault(programLocale);
            stringsBundle = ResourceBundle.getBundle("de.dmarcini.submatix.pclogger.lang.messages_en");
        } catch (Exception ex1) {
            lg.error("ERROR get resources <" + ex1.getMessage() + "> give up...");
            System.exit(-1);
        }
    } catch (NoDatabaseException ex) {
        showErrorDialog(ex.getLocalizedMessage());
        System.exit(-1);
    }
    //
    // Now the other important, time-consuming steps.
    //
    prepareDatabase();
    currentConfig.setLogger(lg);
    btComm = new BTCommunication(databaseUtil);
    btComm.addActionListener(this);
    try {
        initializeGUI();
    } catch (SQLException ex) {
        lg.error("SQL ERROR <" + ex.getMessage() + "> give up...");
        System.err.println("ERROR while create GUI: <" + ex.getLocalizedMessage() + ">");
        ex.printStackTrace();
        System.exit(-1);
    } catch (ClassNotFoundException ex) {
        lg.error("CLASS NOT FOUND EXCEPTION <" + ex.getMessage() + "> give up...");
        System.err.println("ERROR while create GUI: <" + ex.getLocalizedMessage() + ">");
        ex.printStackTrace();
        System.exit(-1);
    }
    // Register listeners (these also need the maps built by the GUI init).
    setGlobalChangeListener();
    //
    initLanuageMenu(programLocale);
    if (!SpxPcloggerProgramConfig.developDebug) {
        // Not in developer-debug mode: start with all panels disabled
        // until a device connection exists.
        configPanel.setAllConfigPanlelsEnabled(false);
        logListPanel.setAllLogPanelsEnabled(false);
        setElementsConnected(false);
    }
    if (setLanguageStrings() < 1) {
        lg.error("setLanguageStrings() faild. give up...");
        System.exit(-1);
    }
    connectionPanel.setVirtDevicesBoxEnabled(false);
    startVirtualPortFinder(null);
    waitForMessage = 0;
}