Example usage for java.util Collections nCopies

List of usage examples for java.util.Collections.nCopies

Introduction

This page collects usage examples for the java.util.Collections.nCopies method, drawn from open-source projects.

Prototype

public static <T> List<T> nCopies(int n, T o) 

Document

Returns an immutable list consisting of n copies of the specified object.
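
Before the project-level examples, here is a minimal, self-contained sketch of the core behavior; the class name NCopiesDemo is illustrative:

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

public class NCopiesDemo {
    public static void main(String[] args) {
        // Five references to the same String, in an immutable list.
        List<String> copies = Collections.nCopies(5, "x");
        System.out.println(copies); // [x, x, x, x, x]

        // The returned list rejects structural and positional changes:
        // copies.add("y");    // UnsupportedOperationException
        // copies.set(0, "y"); // UnsupportedOperationException

        // Copy into an ArrayList when a mutable, pre-filled list is needed.
        List<String> mutable = new ArrayList<>(copies);
        mutable.set(0, "y"); // fine
    }
}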

Usage

From source file:de.bund.bfr.knime.pmm.common.math.ParameterOptimizer.java

private void useCurrentResults(List<Double> startValues) {
    double cost = optimizerValues.getCost();

    parameterValues = new ArrayList<>(parameters.size());
    sse = cost * cost;
    rms = MathUtilities.getRMSE(sse, parameters.size(), targetValues.size());
    rSquare = MathUtilities.getRSquared(sse, targetValues);
    aic = MathUtilities.akaikeCriterion(parameters.size(), targetValues.size(), sse);

    for (int i = 0; i < parameters.size(); i++) {
        parameterValues.add(optimizerValues.getPoint().getEntry(i));
    }

    try {
        if (targetValues.size() <= parameters.size()) {
            throw new RuntimeException();
        }

        double[][] covMatrix = optimizerValues.getCovariances(1e-14).getData();
        double factor = sse / (targetValues.size() - parameters.size());

        parameterStandardErrors = new ArrayList<>(parameters.size());
        parameterTValues = new ArrayList<>(parameters.size());
        parameterPValues = new ArrayList<>(parameters.size());
        covariances = new ArrayList<>();

        for (int i = 0; i < parameters.size(); i++) {
            double error = Math.sqrt(factor * covMatrix[i][i]);

            parameterStandardErrors.add(error);

            double tValue = optimizerValues.getPoint().getEntry(i) / error;
            int degreesOfFreedom = targetValues.size() - parameters.size();

            parameterTValues.add(tValue);
            parameterPValues.add(MathUtilities.getPValue(tValue, degreesOfFreedom));
        }

        for (int i = 0; i < parameters.size(); i++) {
            List<Double> cov = new ArrayList<>();

            for (int j = 0; j < parameters.size(); j++) {
                cov.add(factor * covMatrix[i][j]);
            }

            covariances.add(cov);
        }
    } catch (Exception e) {
        parameterStandardErrors = Collections.nCopies(parameters.size(), null);
        parameterTValues = Collections.nCopies(parameters.size(), null);
        parameterPValues = Collections.nCopies(parameters.size(), null);
        covariances = new ArrayList<>();

        for (int i = 0; i < parameters.size(); i++) {
            List<Double> nullList = Collections.nCopies(parameters.size(), null);

            covariances.add(nullList);
        }
    }
}
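
The catch block above relies on Collections.nCopies to fill lists with null placeholders in O(1) space. Keep in mind that these lists are immutable; a minimal sketch of the difference, with illustrative variable names:

    List<Double> fixed = Collections.nCopies(3, (Double) null); // immutable placeholders
    // fixed.set(0, 1.0);                                       // UnsupportedOperationException
    List<Double> editable = new ArrayList<>(Collections.<Double>nCopies(3, null));
    editable.set(0, 1.0);                                       // fine: the ArrayList copy is mutable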

From source file:rapture.structured.StandardSqlGenerator.java

private String getPreparedStatementQuestionMarks(int rows, int num) {
    return StringUtils.join(Collections.nCopies(rows,
            String.format("(%s)", StringUtils.join(Collections.nCopies(num, "?"), ","))), ",");
}
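
With rows = 2 and num = 3 this produces the VALUES placeholder block (?,?,?),(?,?,?). If Commons Lang is not available, String.join from the JDK gives the same result; a sketch under that assumption:

    String row = "(" + String.join(",", Collections.nCopies(num, "?")) + ")";
    String all = String.join(",", Collections.nCopies(rows, row));
    // rows = 2, num = 3  ->  "(?,?,?),(?,?,?)"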

From source file:org.jamocha.dn.NetworkToDot.java

public NetworkToDot(final SideEffectFunctionToNetwork network, final String... rules) {
    super();
    Arrays.sort(rules);
    for (final TerminalNode terminalNode : network.getTerminalNodes()) {
        final String ruleName = terminalNode.getRule().getParent().getName();
        if (rules.length != 0 && Arrays.binarySearch(rules, ruleName) < 0) {
            continue;
        }
        this.terminalNodes.put(terminalNode, ruleName);
        final Node sourceNode = terminalNode.getEdge().getSourceNode();

        final Map<Template, String> template2Name = new HashMap<>();
        final Map<Template, Integer> template2Occurences = new HashMap<>();
        final List<Template> templates = Arrays.asList(sourceNode.getMemory().getTemplate());
        final List<FactAddress> addresses = new ArrayList<>(Collections.nCopies(templates.size(), null));
        final List<String> names = new ArrayList<>(Collections.nCopies(templates.size(), null));
        for (int i = 0; i < templates.size(); i++) {
            final Template template = templates.get(i);
            String name = template2Name.get(template);
            if (null == name) {
                int length = 1;
                while (null == name || template2Name.containsValue(name)) {
                    name = template.getName().substring(0, length++);
                }
                template2Name.put(template, name);
            }
            final Integer occurencesI = template2Occurences.get(template);
            int occurences = 0;
            if (null != occurencesI) {
                occurences = occurencesI;
            }
            occurences++;
            template2Occurences.put(template, occurences);
            name = name + StringUtils.repeat("'", occurences - 1);
            names.set(i, name);
        }

        final String sourceNodeName = getNodeName(sourceNode);
        sourceNode.accept(new GraphConstructingNodeVisitor(names, templates, addresses));
        generateEdge(sourceNodeName, ruleName, "(" + String.join(", ", names) + ")");
    }
}
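
The two wrapped lists above use a common idiom: new ArrayList<>(Collections.nCopies(n, null)) pre-fills a mutable list of a known size so that set(i, ...) can be called in any order. A minimal sketch of the idiom:

    List<String> slots = new ArrayList<>(Collections.nCopies(4, (String) null));
    slots.set(2, "c"); // works; new ArrayList<>(4) alone has size 0, so set(2, ...) would throw IndexOutOfBoundsException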

From source file:com.android.contacts.database.SimContactDaoImpl.java

private Cursor queryAccountsOfRawContacts(Set<Long> ids) {
    final StringBuilder selectionBuilder = new StringBuilder();

    final String[] args = new String[ids.size()];

    selectionBuilder.append(RawContacts._ID).append(" IN (")
            .append(Joiner.on(',').join(Collections.nCopies(args.length, '?'))).append(")");
    int i = 0;
    for (long id : ids) {
        args[i++] = String.valueOf(id);
    }
    return mResolver.query(RawContacts.CONTENT_URI, AccountQuery.PROJECTION, selectionBuilder.toString(), args,
            null);
}
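
Here nCopies builds the IN-clause placeholders; note that the Character '?' is joined via its toString(). A JDK-only sketch of the same placeholder construction, with illustrative names:

    Set<Long> ids = new LinkedHashSet<>(Arrays.asList(5L, 7L, 9L));
    String placeholders = String.join(",", Collections.nCopies(ids.size(), "?"));
    String selection = RawContacts._ID + " IN (" + placeholders + ")"; // e.g. "... IN (?,?,?)"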

From source file:org.springframework.data.hadoop.serialization.HdfsWriterTest.java

/**
 * Test core Resource [compressed] write logic.
 *
 * @param codec Used ONLY to get codec extension and its class name or alias in a type-safe manner.
 * @param useAlias If <code>true</code> uses <code>codec.getClass().getSimpleName()</code> as a codec alias.
 * Otherwise uses <code>codec.getClass().getName()</code> as a codec class name.
 */
private String testResourceWrite(SerializationFormatSupport<Resource> resourceFormat, int resourceCopies,
        CompressionCodec codec, boolean useAlias) throws Exception {

    if (codec != null) {
        // configure compression
        resourceFormat
                .setCompressionAlias(useAlias ? codec.getClass().getSimpleName() : codec.getClass().getName());
    }

    // calculates the destination for Resource source.
    String destination;
    {
        destination = hdfsOutputDir;

        // add file name
        destination += sourceResource.getFilename();
        // add files count
        destination += "_" + resourceCopies;
        // add serialization format name
        destination += "_" + resourceFormat.getClass().getSimpleName();
    }

    hdfsWrite(resourceFormat, Collections.nCopies(resourceCopies, sourceResource), destination);

    // expected destination on hdfs should have codec extension appended
    assertHdfsFileExists(destination + resourceFormat.getExtension());

    return destination;
}
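
Note that Collections.nCopies(resourceCopies, sourceResource) does not clone the resource: every slot holds the same reference, which is exactly what is wanted when one payload is written repeatedly. A two-line illustration:

    Object payload = new Object();
    List<Object> copies = Collections.nCopies(3, payload);
    System.out.println(copies.get(0) == copies.get(2)); // true: all slots alias one object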

From source file:annis.AnnisRunner.java

public void doBenchmark(String benchmarkCount) {

    int count = Integer.parseInt(benchmarkCount);
    out.println("---> executing " + benchmarks.size() + " queries " + count + " times");

    AnnisRunner.OS currentOS = AnnisRunner.OS.other;
    try {
        currentOS = AnnisRunner.OS.valueOf(System.getProperty("os.name").toLowerCase());
    } catch (IllegalArgumentException ex) {
    }

    List<AnnisRunner.Benchmark> session = new ArrayList<AnnisRunner.Benchmark>();

    // create sql + plan for each query and create count copies for each benchmark
    for (AnnisRunner.Benchmark benchmark : benchmarks) {
        if (clearCaches) {
            resetCaches(currentOS);
        }

        SqlGenerator<QueryData, ?> generator = getGeneratorForQueryFunction(benchmark.functionCall);
        benchmark.sql = getGeneratorForQueryFunction(benchmark.functionCall).toSql(benchmark.queryData);
        out.println("---> SQL query for: " + benchmark.functionCall);
        out.println(benchmark.sql);
        try {
            benchmark.plan = annisDao.explain(generator, benchmark.queryData, false);
            out.println("---> query plan for: " + benchmark.functionCall);
            out.println(benchmark.plan);
        } catch (RuntimeException e) { // nested DataAccessException would be better
            out.println("---> query plan failed for " + benchmark.functionCall);
        }
        benchmark.bestTimeInMilliseconds = Long.MAX_VALUE;
        benchmark.worstTimeInMilliseconds = Long.MIN_VALUE;
        out.println("---> running query sequentially " + SEQUENTIAL_RUNS + " times");
        String options = benchmarkOptions(benchmark.queryData);
        for (int i = 0; i < SEQUENTIAL_RUNS; ++i) {
            if (i > 0) {
                out.print(", ");
            }
            boolean error = false;
            long start = new Date().getTime();
            try {
                annisDao.executeQueryFunction(benchmark.queryData, generator);
            } catch (RuntimeException e) {
                error = true;
            }
            long end = new Date().getTime();
            long runtime = end - start;
            benchmark.values.add(runtime);
            benchmark.bestTimeInMilliseconds = Math.min(benchmark.bestTimeInMilliseconds, runtime);
            benchmark.worstTimeInMilliseconds = Math.max(benchmark.worstTimeInMilliseconds, runtime);
            ++benchmark.runs;
            if (error) {
                ++benchmark.errors;
            }

            out.print(runtime + " ms");

        }
        out.println();
        out.println(benchmark.bestTimeInMilliseconds + " ms best time for '" + benchmark.functionCall
                + ("".equals(options) ? "'" : "' with " + options));
        session.addAll(Collections.nCopies(count, benchmark));
    }

    // clear the cache again so that the last query in the list is treated
    // the same as the others
    if (clearCaches) {
        resetCaches(currentOS);
    }

    // shuffle the benchmark queries
    Collections.shuffle(session);
    out.println();
    out.println("---> running queries in random order");

    // execute the queries, record test times
    for (AnnisRunner.Benchmark benchmark : session) {
        if (benchmark.errors >= 3) {
            continue;
        }
        boolean error = false;
        SqlGenerator<QueryData, ?> generator = getGeneratorForQueryFunction(benchmark.functionCall);
        long start = new Date().getTime();
        try {
            annisDao.executeQueryFunction(benchmark.queryData, generator);
        } catch (RuntimeException e) {
            error = true;
        }
        long end = new Date().getTime();
        long runtime = end - start;
        benchmark.avgTimeInMilliseconds += runtime;
        benchmark.values.add(runtime);
        benchmark.bestTimeInMilliseconds = Math.min(benchmark.bestTimeInMilliseconds, runtime);
        benchmark.worstTimeInMilliseconds = Math.max(benchmark.worstTimeInMilliseconds, runtime);

        ++benchmark.runs;
        if (error) {
            ++benchmark.errors;
        }
        String options = benchmarkOptions(benchmark.queryData);
        out.println(runtime + " ms for '" + benchmark.functionCall
                + ("".equals(options) ? "'" : "' with " + options) + (error ? " ERROR" : ""));
    }

    // compute average runtime for each query
    out.println();
    out.println("---> benchmark complete");
    for (AnnisRunner.Benchmark benchmark : benchmarks) {
        benchmark.avgTimeInMilliseconds = Math
                .round((double) benchmark.avgTimeInMilliseconds / (double) benchmark.runs);
        String options = benchmarkOptions(benchmark.queryData);
        out.println(benchmark.getMedian() + " ms (median for " + benchmark.runs + " runs"
                + (benchmark.errors > 0 ? ", " + benchmark.errors + " errors)" : ")") + " for '"
                + benchmark.functionCall + ("".equals(options) ? "'" : "' with " + options));
    }

    // show worst runtime for each query
    out.println();
    out.println("---> worst times");
    for (AnnisRunner.Benchmark benchmark : benchmarks) {
        String options = benchmarkOptions(benchmark.queryData);
        out.println(benchmark.worstTimeInMilliseconds + " ms "
                + (benchmark.errors > 0 ? "(" + benchmark.errors + " errors)" : "") + " for '"
                + benchmark.functionCall + ("".equals(options) ? "'" : "' with " + options));
    }

    // show best runtime for each query
    out.println();
    out.println("---> best times");
    for (AnnisRunner.Benchmark benchmark : benchmarks) {
        String options = benchmarkOptions(benchmark.queryData);
        out.println(benchmark.bestTimeInMilliseconds + " ms "
                + (benchmark.errors > 0 ? "(" + benchmark.errors + " errors)" : "") + " for '"
                + benchmark.functionCall + ("".equals(options) ? "'" : "' with " + options));
    }
    out.println();

    // CSV output
    try {
        CSVWriter csv = new CSVWriter(
                new FileWriterWithEncoding(new File("annis_benchmark_result.csv"), "UTF-8"));

        String[] header = new String[] { "corpora", "query", "median", "diff-best", "diff-worst" };
        csv.writeNext(header);
        for (AnnisRunner.Benchmark benchmark : benchmarks) {
            long median = benchmark.getMedian();

            String[] line = new String[5];
            line[0] = StringUtils.join(benchmark.queryData.getCorpusList(), ",");
            line[1] = benchmark.functionCall;
            line[2] = "" + median;
            line[3] = "" + Math.abs(benchmark.bestTimeInMilliseconds - median);
            line[4] = "" + Math.abs(median - benchmark.worstTimeInMilliseconds);
            csv.writeNext(line);
        }

        csv.close();

    } catch (IOException ex) {
        log.error(null, ex);
    }

}
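
One detail worth noting: session.addAll(Collections.nCopies(count, benchmark)) adds count references to the same mutable Benchmark, so the runs, errors, and timing fields recorded during the shuffled pass all accumulate on one shared instance, which is what the final statistics rely on. A minimal sketch of these shared-reference semantics:

    List<StringBuilder> refs = new ArrayList<>(Collections.nCopies(3, new StringBuilder()));
    refs.get(0).append("x");
    System.out.println(refs.get(2)); // prints "x": all three slots alias one StringBuilder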

From source file:org.jsweet.input.typescriptdef.visitor.DuplicateMethodsCleaner.java

private Map<FullFunctionDeclaration, List<String>> calculateNames(Set<FullFunctionDeclaration> duplicates,
        Strategy strategy) {
    Map<FullFunctionDeclaration, List<String>> nameMatrix = new HashMap<FullFunctionDeclaration, List<String>>();
    List<FullFunctionDeclaration> l = new ArrayList<FullFunctionDeclaration>(duplicates);
    TypeDeclaration highestTypeDeclaration = getHighestSuperType(duplicates);

    for (int paramIndex = 0; paramIndex < l.get(0).function.getParameters().length; paramIndex++) {
        final int i = paramIndex;
        l.sort(new Comparator<FullFunctionDeclaration>() {
            @Override
            public int compare(FullFunctionDeclaration f1, FullFunctionDeclaration f2) {
                int diff = context.getShortTypeNameNoErasure(f1.function.getParameters()[i].getType()).length()
                        - context.getShortTypeNameNoErasure(f2.function.getParameters()[i].getType()).length();
                if (diff == 0) {
                    return context.getShortTypeNameNoErasure(f1.function.getParameters()[i].getType())
                            .compareTo(context
                                    .getShortTypeNameNoErasure(f2.function.getParameters()[i].getType()));
                } else {
                    return diff;
                }
            }
        });

        List<String> names;
        boolean functionalDisambiguation = isFunctionalTypeReference(
                l.get(0).function.getParameters()[i].getType());
        if (functionalDisambiguation) {
            if (!isFunctionalTypeReference(l.get(0).function.getParameters()[i].getType())
                    || context.getShortTypeNameNoErasure(l.get(0).function.getParameters()[i].getType())
                            .equals(context.getShortTypeNameNoErasure(
                                    l.get(l.size() - 1).function.getParameters()[i].getType()))) {
                // no erasure conflict comes from parameter i (by convention
                // we set an empty name)
                names = new ArrayList<String>(Collections.nCopies(l.size(), NO_OVERRIDE));
            } else {
                names = calculateNames(highestTypeDeclaration, strategy, functionalDisambiguation, l,
                        l.get(0).function, i);
            }
        } else {
            if (context.getShortTypeNameNoErasure(l.get(0).function.getParameters()[i].getType()).equals(context
                    .getShortTypeNameNoErasure(l.get(l.size() - 1).function.getParameters()[i].getType()))) {
                // no erasure conflict comes from parameter i (by convention
                // we set an empty name)
                names = new ArrayList<String>(Collections.nCopies(l.size(), NO_OVERRIDE));
            } else {
                names = calculateNames(highestTypeDeclaration, strategy, functionalDisambiguation, l,
                        l.get(0).function, i);
            }
        }
        for (int j = 0; j < l.size(); j++) {
            List<String> paramNames = nameMatrix.get(l.get(j));
            if (paramNames == null) {
                paramNames = new ArrayList<String>();
                nameMatrix.put(l.get(j), paramNames);
            }
            paramNames.add(names.get(j));
        }
    }
    return nameMatrix;
}
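
Here nCopies acts as a cheap bulk initializer: copying it into an ArrayList yields a mutable list already filled with the NO_OVERRIDE sentinel, of which only the conflicting slots are overwritten later. A sketch of the pattern with an illustrative sentinel:

    final String sentinel = ""; // hypothetical stand-in for NO_OVERRIDE
    List<String> names = new ArrayList<>(Collections.nCopies(4, sentinel));
    names.set(1, "byName"); // overwrite only the slots that need a real name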

From source file:org.broadinstitute.gatk.tools.walkers.genotyper.afcalc.AlleleFrequencyCalculatorUnitTest.java

@Test
public void testManySamplesWithLowConfidence() {
    // prior corresponding to 1000 observations of ref, 1 of a SNP
    // for this test, we want many pseudocounts in the prior because the new AF calculator learns the allele frequency
    // and we don't want the complication of the posterior being different from the prior
    final AlleleFrequencyCalculator afCalc = new AlleleFrequencyCalculator(1000, 1, 1, DEFAULT_PLOIDY);
    final List<Allele> alleles = Arrays.asList(A, B);

    // for FAIRLY_CONFIDENT_PL = 20, this genotype has about 100 times greater likelihood to be het than hom ref
    // with our prior giving 1000 times as much weight to ref, this implies a 1 in 5 chance of each sample having a copy of the alt allele
    // (that is, 100/1000 times the combinatorial factor of 2).  Thus the MLE for up to 2 samples should be zero
    // for five samples we should have one
    // for ten samples we will have more than twice as many as for five since the counts from the samples start to influence
    // the estimated allele frequency
    final Genotype AB = genotypeWithObviousCall(DIPLOID, BIALLELIC, new int[] { 0, 1, 1, 1 },
            FAIRLY_CONFIDENT_PL);

    final List<VariantContext> vcsWithDifferentNumbersOfSamples = IntStream.range(1, 11)
            .mapToObj(n -> makeVC(alleles, Collections.nCopies(n, AB))).collect(Collectors.toList());
    final int[] counts = vcsWithDifferentNumbersOfSamples.stream()
            .mapToInt(vc -> afCalc.getLog10PNonRef(vc).getAlleleCountAtMLE(B)).toArray();
    Assert.assertEquals(counts[0], 0); // one sample
    Assert.assertEquals(counts[1], 0); // two samples
    Assert.assertEquals(counts[4], 2); // five samples
    Assert.assertTrue(counts[8] >= 3); // ten samples
}
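
The stream above uses nCopies to fabricate ever-larger cohorts of the same genotype. The pattern generalizes to any "n identical inputs" test fixture; a minimal sketch, assuming the usual java.util and java.util.stream imports:

    List<List<String>> fixtures = IntStream.range(1, 4)
            .mapToObj(n -> Collections.nCopies(n, "sample"))
            .collect(Collectors.toList());
    // [[sample], [sample, sample], [sample, sample, sample]]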

From source file:ai.grakn.test.engine.controller.TasksControllerTest.java

private Response send(Map<String, String> configuration, Map<String, String> params, int times) {
    Json jsonParams = makeJsonTask(configuration, params);
    return send(Collections.nCopies(times, jsonParams));
}

From source file:org.broadinstitute.gatk.tools.walkers.genotyper.afcalc.AFCalcUnitTest.java

@DataProvider(name = "GLsWithNonInformative")
public Object[][] makeGLsWithNonInformative() {
    List<Object[]> tests = new ArrayList<Object[]>();

    final List<NonInformativeData> nonInformativeTests = new LinkedList<NonInformativeData>();
    nonInformativeTests.add(new NonInformativeData(Arrays.asList(AB1), NON_INFORMATIVE1, 1));
    nonInformativeTests.add(new NonInformativeData(Arrays.asList(AB2), NON_INFORMATIVE2, 2));
    nonInformativeTests.add(new NonInformativeData(Arrays.asList(AB2, BC2), NON_INFORMATIVE2, 2));

    for (final int nNonInformative : Arrays.asList(1, 10, 100)) {
        for (final NonInformativeData testData : nonInformativeTests) {
            final List<Genotype> samples = new ArrayList<Genotype>();
            samples.addAll(testData.called);
            samples.addAll(Collections.nCopies(nNonInformative, testData.nonInformative));

            final int nSamples = samples.size();
            List<AFCalc> calcs = AFCalcFactory.createAFCalcs(Arrays.asList(AFCalcFactory.Calculation.values()),
                    4, 2, 2);

            final double[] priors = MathUtils.normalizeFromLog10(new double[2 * nSamples + 1], true); // flat priors

            for (AFCalc model : calcs) {
                if (testData.nAltAlleles > 1 && model instanceof OriginalDiploidExactAFCalc)
                    continue;

                final GetGLsTest onlyInformative = new GetGLsTest(model, testData.nAltAlleles, testData.called,
                        priors, "flat");

                for (int rotation = 0; rotation < nSamples; rotation++) {
                    Collections.rotate(samples, 1);
                    final GetGLsTest withNonInformative = new GetGLsTest(model, testData.nAltAlleles, samples,
                            priors, "flat");
                    tests.add(new Object[] { onlyInformative, withNonInformative });
                }
            }
        }
    }

    return tests.toArray(new Object[][] {});
}