Example usage for java.util.Collections.nCopies

A list of usage examples for java.util.Collections.nCopies, collected from open-source projects.

Introduction

This page shows real-world example usages of java.util.Collections.nCopies.

Prototype

public static <T> List<T> nCopies(int n, T o) 

Document

Returns an immutable list consisting of n copies of the specified object.
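
Below is a minimal, self-contained sketch of that behavior (the class name NCopiesDemo is illustrative, not from any project on this page). All n elements reference the same object, so the list costs O(1) storage, and mutating calls throw UnsupportedOperationException.

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

public class NCopiesDemo {
    public static void main(String[] args) {
        // Five references to one and the same String.
        List<String> placeholders = Collections.nCopies(5, "?");
        System.out.println(String.join(",", placeholders)); // ?,?,?,?,?

        // The returned list is immutable: mutating calls throw.
        try {
            placeholders.set(0, "x");
        } catch (UnsupportedOperationException expected) {
            System.out.println("nCopies lists are read-only");
        }

        // Common idiom: seed a mutable, pre-filled list of a fixed size.
        List<String> mutable = new ArrayList<>(Collections.nCopies(3, "00"));
        mutable.set(1, "ff");
        System.out.println(mutable); // [00, ff, 00]
    }
}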

Usage

From source file:eionet.cr.dao.virtuoso.VirtuosoStagingDatabaseDAO.java

@Override
public void delete(List<String> dbNames) throws DAOException {

    if (dbNames == null || dbNames.isEmpty()) {
        return;
    }

    Connection conn = null;
    try {
        conn = getSQLConnection();
        conn.setAutoCommit(false);

        // First, ensure the given databases are present in staging databases table (to avoid deleting other databases).
        String questionMarks = StringUtils.join(Collections.nCopies(dbNames.size(), "?"), ',');
        String sql = COUNT_EXISTING_DBS_SQL.replace("?", questionMarks);
        int count = NumberUtils.toInt(SQLUtil.executeSingleReturnValueQuery(sql, dbNames, conn).toString());
        if (dbNames.size() > count) {
            throw new DAOException("At least one of the given databases is unknown");
        }

        // Second, delete the databases themselves.
        deleteDatabases(dbNames, conn);

        // Third, delete rows from the staging databases table.
        sql = DELETE_DB_SQL.replace("?", questionMarks);
        SQLUtil.executeUpdate(sql, dbNames, conn);

        conn.commit();
    } catch (SQLException e) {
        SQLUtil.rollback(conn);
        throw new DAOException(e.getMessage(), e);
    } finally {
        SQLUtil.close(conn);
    }
}
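
The example above uses nCopies to mint one "?" placeholder per bound value for a SQL IN clause. A stripped-down sketch of the same idiom with plain JDBC (the table and column names here are illustrative assumptions, not taken from the project):

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.Collections;
import java.util.List;

public final class InClauseSketch {
    // Deletes the rows whose name is in the given list.
    static int deleteByNames(Connection conn, List<String> names) throws SQLException {
        // One "?" per value, joined into "?,?,?" for the IN clause.
        String placeholders = String.join(",", Collections.nCopies(names.size(), "?"));
        String sql = "DELETE FROM staging_db WHERE db_name IN (" + placeholders + ")";
        try (PreparedStatement ps = conn.prepareStatement(sql)) {
            for (int i = 0; i < names.size(); i++) {
                ps.setString(i + 1, names.get(i)); // JDBC parameters are 1-based
            }
            return ps.executeUpdate();
        }
    }
}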

From source file:org.broadinstitute.gatk.tools.walkers.genotyper.afcalc.AlleleFrequencyCalculatorUnitTest.java

private static int[] PLsForObviousCall(final int ploidy, final int numAlleles, final int[] alleleCounts,
        final int PL) {
    final GenotypeLikelihoodCalculator glCalc = GL_CALCS.getInstance(ploidy, numAlleles);
    final int[] result = Collections.nCopies(glCalc.genotypeCount(), PL).stream().mapToInt(n -> n).toArray();
    result[glCalc.alleleCountsToIndex(alleleCounts)] = 0;
    return result;
}
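
The helper above fills an int[] by routing every element through a boxed List<Integer>. A boxing-free equivalent (a sketch, not the project's code) uses Arrays.fill:

import java.util.Arrays;
import java.util.Collections;

public class FillDemo {
    public static void main(String[] args) {
        final int count = 6, pl = 100;
        // What the snippet above does: box int -> Integer and back.
        int[] viaNCopies = Collections.nCopies(count, pl).stream().mapToInt(n -> n).toArray();
        // Same result without the boxing round-trip.
        int[] viaFill = new int[count];
        Arrays.fill(viaFill, pl);
        System.out.println(Arrays.equals(viaNCopies, viaFill)); // true
    }
}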

From source file:org.broadinstitute.gatk.tools.walkers.genotyper.afcalc.AlleleFrequencyCalculatorUnitTest.java

@Test
public void testManyRefSamplesDontKillGoodVariant() {
    final AlleleFrequencyCalculator afCalc = new AlleleFrequencyCalculator(1, 0.1, 0.1, DEFAULT_PLOIDY);
    final List<Allele> alleles = Arrays.asList(A, B);
    final Genotype AA = genotypeWithObviousCall(DIPLOID, BIALLELIC, new int[] { 0, 2 }, EXTREMELY_CONFIDENT_PL);
    final Genotype AB = genotypeWithObviousCall(DIPLOID, BIALLELIC, new int[] { 0, 1, 1, 1 },
            EXTREMELY_CONFIDENT_PL);
    for (final int numRef : new int[] { 1, 10, 100, 1000, 10000, 100000 }) {
        final List<Genotype> genotypeList = new ArrayList<>(Collections.nCopies(numRef, AA));
        genotypeList.add(AB);
        final VariantContext vc = makeVC(alleles, genotypeList);
        final double log10PRef = afCalc.getLog10PNonRef(vc).getLog10LikelihoodOfAFEq0();
        Assert.assertTrue(log10PRef < (-EXTREMELY_CONFIDENT_PL / 10) + Math.log10(numRef) + 1);
    }
}

From source file:org.broadinstitute.gatk.tools.walkers.genotyper.afcalc.AlleleFrequencyCalculatorUnitTest.java

private static Genotype makeGenotype(final int ploidy, int... pls) {
    return new GenotypeBuilder("sample" + sampleNameCounter++)
            .alleles(Collections.nCopies(ploidy, Allele.NO_CALL)).PL(pls).make();
}
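
nCopies(ploidy, Allele.NO_CALL) works here because every element of an nCopies list is the same reference, and the no-call allele is a shared immutable constant. A short sketch of why the same trick is a pitfall with mutable elements:

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

public class SharedReferenceDemo {
    public static void main(String[] args) {
        // All three slots hold the SAME StringBuilder instance.
        List<StringBuilder> rows = new ArrayList<>(Collections.nCopies(3, new StringBuilder()));
        rows.get(0).append("x");
        System.out.println(rows.get(2)); // also prints "x"
    }
}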

From source file:org.broadinstitute.gatk.tools.walkers.genotyper.afcalc.AFCalculationUnitTest.java

@DataProvider(name = "GLsWithNonInformative")
public Object[][] makeGLsWithNonInformative() {
    List<Object[]> tests = new ArrayList<Object[]>();

    final List<NonInformativeData> nonInformativeTests = new LinkedList<NonInformativeData>();
    nonInformativeTests.add(new NonInformativeData(Arrays.asList(AB1), NON_INFORMATIVE1, 1));
    nonInformativeTests.add(new NonInformativeData(Arrays.asList(AB2), NON_INFORMATIVE2, 2));
    nonInformativeTests.add(new NonInformativeData(Arrays.asList(AB2, BC2), NON_INFORMATIVE2, 2));

    for (final int nNonInformative : Arrays.asList(1, 10, 100)) {
        for (final NonInformativeData testData : nonInformativeTests) {
            final List<Genotype> samples = new ArrayList<Genotype>();
            samples.addAll(testData.called);
            samples.addAll(Collections.nCopies(nNonInformative, testData.nonInformative));

            final int nSamples = samples.size();
            List<AFCalculator> calcs = createAFCalculators(Arrays.asList(AFCalculatorImplementation.values()),
                    MAX_ALT_ALLELES, PLOIDY);

            final double[] priors = MathUtils.normalizeFromLog10(new double[2 * nSamples + 1], true); // flat priors

            for (AFCalculator model : calcs) {
                if (testData.nAltAlleles > 1 && model instanceof OriginalDiploidExactAFCalculator)
                    continue;

                final GetGLsTest onlyInformative = new GetGLsTest(model, testData.nAltAlleles, testData.called,
                        priors, "flat");

                for (int rotation = 0; rotation < nSamples; rotation++) {
                    Collections.rotate(samples, 1);
                    final GetGLsTest withNonInformative = new GetGLsTest(model, testData.nAltAlleles, samples,
                            priors, "flat");
                    tests.add(new Object[] { onlyInformative, withNonInformative });
                }
            }
        }
    }

    return tests.toArray(new Object[][] {});
}
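
The data provider pads each sample list with N copies of a non-informative genotype and then rotates it so the informative entries visit every position. A toy version of that pad-and-rotate pattern (illustrative strings stand in for the Genotype objects):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

public class PadAndRotateDemo {
    public static void main(String[] args) {
        List<String> samples = new ArrayList<>(Arrays.asList("called"));
        // addAll copies the references out of the nCopies view,
        // so the resulting list stays fully mutable.
        samples.addAll(Collections.nCopies(3, "non-informative"));
        for (int i = 0; i < samples.size(); i++) {
            Collections.rotate(samples, 1);
            System.out.println(samples);
        }
    }
}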

From source file:org.protempa.backend.dsb.relationaldb.RelationalDbDataSourceBackend.java

private String buildWriteKeysInsertStmt(int size) {
    StringBuilder stmtBuilder = new StringBuilder();
    stmtBuilder.append("INSERT INTO ");
    if (getKeyLoaderKeyIdSchema() != null) {
        stmtBuilder.append(getKeyLoaderKeyIdSchema());
        stmtBuilder.append('.');
    }
    stmtBuilder.append(getKeyLoaderKeyIdTable());
    stmtBuilder.append(" (");
    stmtBuilder.append(getKeyLoaderKeyIdColumn());
    stmtBuilder.append(", ");
    stmtBuilder.append(getKeyLoaderKeyIdJoinKey());
    stmtBuilder.append(") ");
    stmtBuilder.append(" SELECT ");
    stmtBuilder.append(getDefaultKeyIdColumn());
    stmtBuilder.append(", ");
    stmtBuilder.append(getDefaultKeyIdJoinKey());
    stmtBuilder.append(" FROM ");
    if (getSchemaName() != null) {
        stmtBuilder.append(getSchemaName());
        stmtBuilder.append('.');
    }
    stmtBuilder.append(getDefaultKeyIdTable());
    stmtBuilder.append(" WHERE ");
    stmtBuilder.append(getDefaultKeyIdColumn());
    stmtBuilder.append(" IN (");
    stmtBuilder.append(StringUtils.join(Collections.nCopies(size, "?"), ','));
    stmtBuilder.append(')');
    String stmt = stmtBuilder.toString();
    return stmt;
}

From source file:edu.vt.vbi.patric.proteinfamily.FIGfamData.java

@SuppressWarnings("unchecked")
public void getGroupStats(ResourceRequest request, PrintWriter writer) throws IOException {

    DataApiHandler dataApi = new DataApiHandler(request);

    JSONObject figfams = new JSONObject();
    Set<String> figfamIdList = new HashSet<>();
    List<String> genomeIdList = new LinkedList<>();
    // get family Type
    final String familyType = request.getParameter("familyType");
    final String familyId = familyType + "_id";

    // get genome list in order
    String genomeIds = request.getParameter("genomeIds");
    try {
        SolrQuery query = new SolrQuery("genome_id:(" + genomeIds.replaceAll(",", " OR ") + ")");
        query.addSort("genome_name", SolrQuery.ORDER.asc).addField("genome_id")
                .setRows(DataApiHandler.MAX_ROWS);

        LOGGER.trace("[{}] {}", SolrCore.GENOME.getSolrCoreName(), query);

        String apiResponse = dataApi.solrQuery(SolrCore.GENOME, query);
        Map resp = jsonReader.readValue(apiResponse);
        Map respBody = (Map) resp.get("response");

        List<Genome> genomes = dataApi.bindDocuments((List<Map>) respBody.get("docs"), Genome.class);

        for (final Genome genome : genomes) {
            genomeIdList.add(genome.getId());
        }

        // A full first page came back; fetch the next page of results.
        if (genomeIdList.size() == 25000) {
            query.setStart(25000);

            apiResponse = dataApi.solrQuery(SolrCore.GENOME, query);
            resp = jsonReader.readValue(apiResponse);
            respBody = (Map) resp.get("response");

            genomes = dataApi.bindDocuments((List<Map>) respBody.get("docs"), Genome.class);

            for (final Genome genome : genomes) {
                genomeIdList.add(genome.getId());
            }
        }
    } catch (IOException e) {
        LOGGER.error(e.getMessage(), e);
    }

    //      LOGGER.debug("genomeIdList: {}", genomeIdList);

    // getting genome counts per figfamID (figfam)
    // {stat:{field:{field:figfam_id,limit:-1,facet:{min:"min(aa_length)",max:"max(aa_length)",mean:"avg(aa_length)",ss:"sumsq(aa_length)",sum:"sum(aa_length)",dist:"percentile(aa_length,50,75,99,99.9)",field:{field:genome_id}}}}}

    try {
        long start = System.currentTimeMillis();
        SolrQuery query = new SolrQuery("annotation:PATRIC AND feature_type:CDS");
        //         query.addFilterQuery("end:[3200 TO 4300] OR end:[4400 TO 4490] OR end:[4990 TO 4999]");
        query.addFilterQuery(getSolrQuery(request));
        query.addFilterQuery("!" + familyId + ":\"\"");
        query.setRows(0).setFacet(true).set("facet.threads", 15);
        query.add("json.facet", "{stat:{type:field,field:genome_id,limit:-1,facet:{figfams:{type:field,field:"
                + familyId + ",limit:-1,sort:{index:asc}}}}}");

        LOGGER.trace("getGroupStats() 1/3: [{}] {}", SolrCore.FEATURE.getSolrCoreName(), query);
        String apiResponse = dataApi.solrQuery(SolrCore.FEATURE, query);

        long point = System.currentTimeMillis();
        LOGGER.debug("1st query: {} ms", (point - start));
        start = point;

        Map resp = jsonReader.readValue(apiResponse);
        Map facets = (Map) resp.get("facets");
        Map stat = (Map) facets.get("stat");

        final Map<String, String> figfamGenomeIdStr = new LinkedHashMap<>();
        final Map<String, Integer> figfamGenomeCount = new LinkedHashMap<>();

        final int genomeTotal = genomeIdList.size();
        final Map<String, Integer> genomePosMap = new LinkedHashMap<>();
        for (String genomeId : genomeIdList) {
            genomePosMap.put(genomeId, genomeIdList.indexOf(genomeId));
        }

        final Map<String, List> figfamGenomeIdCountMap = new ConcurrentHashMap<>();
        final Map<String, Set> figfamGenomeIdSet = new ConcurrentHashMap<>();

        List<Map> genomeBuckets = (List<Map>) stat.get("buckets");

        for (final Map bucket : genomeBuckets) {

            final String genomeId = (String) bucket.get("val");
            final List<Map> figfamBucket = (List<Map>) ((Map) bucket.get("figfams")).get("buckets");

            for (final Map figfam : figfamBucket) {
                final String figfamId = (String) figfam.get("val");
                final String genomeCount = String.format("%02x", (Integer) figfam.get("count"));

                if (figfamGenomeIdCountMap.containsKey(figfamId)) {
                    figfamGenomeIdCountMap.get(figfamId).set(genomePosMap.get(genomeId), genomeCount);
                } else {
                    final List<String> genomeIdCount = new LinkedList<>(Collections.nCopies(genomeTotal, "00"));
                    genomeIdCount.set(genomePosMap.get(genomeId), genomeCount);
                    figfamGenomeIdCountMap.put(figfamId, genomeIdCount);
                }

                if (figfamGenomeIdSet.containsKey(figfamId)) {
                    figfamGenomeIdSet.get(figfamId).add(genomeId);
                } else {
                    final Set<String> genomeIdSet = new HashSet<>();
                    genomeIdSet.add(genomeId);
                    figfamGenomeIdSet.put(figfamId, genomeIdSet);
                }
            }
        }

        for (String figfamId : figfamGenomeIdCountMap.keySet()) {
            final List genomeIdStr = figfamGenomeIdCountMap.get(figfamId);
            figfamGenomeIdStr.put(figfamId, StringUtils.join(genomeIdStr, ""));
            figfamGenomeCount.put(figfamId, figfamGenomeIdSet.get(figfamId).size());
        }

        point = System.currentTimeMillis();
        LOGGER.debug("1st query process : {} ms, figfamGenomeIdStr:{}, figfamGenomeCount:{}", (point - start),
                figfamGenomeIdStr.size(), figfamGenomeCount.size());

        long start2nd = System.currentTimeMillis();
        // 2nd query

        query.set("json.facet", "{stat:{type:field,field:" + familyId
                + ",limit:-1,facet:{min:\"min(aa_length)\",max:\"max(aa_length)\",mean:\"avg(aa_length)\",ss:\"sumsq(aa_length)\",sum:\"sum(aa_length)\"}}}");

        LOGGER.trace("getGroupStats() 2/3: [{}] {}", SolrCore.FEATURE.getSolrCoreName(), query);
        apiResponse = dataApi.solrQuery(SolrCore.FEATURE, query);

        point = System.currentTimeMillis();
        LOGGER.debug("2st query: {} ms", (point - start2nd));
        start2nd = point;

        resp = jsonReader.readValue(apiResponse);
        facets = (Map) resp.get("facets");
        stat = (Map) facets.get("stat");

        List<Map> buckets = (List<Map>) stat.get("buckets");

        for (Map bucket : buckets) {
            final String figfamId = (String) bucket.get("val");
            final int count = (Integer) bucket.get("count");

            // Solr json.facet stats can come back as Integer, Long, or Double
            // depending on the field type, so coerce each value through Number
            // (the original Integer/Double-only checks silently dropped Longs).
            final double min = bucket.get("min") instanceof Number ? ((Number) bucket.get("min")).doubleValue() : 0;
            final double max = bucket.get("max") instanceof Number ? ((Number) bucket.get("max")).doubleValue() : 0;
            final double mean = bucket.get("mean") instanceof Number ? ((Number) bucket.get("mean")).doubleValue() : 0;
            final double sumsq = bucket.get("ss") instanceof Number ? ((Number) bucket.get("ss")).doubleValue() : 0;
            final double sum = bucket.get("sum") instanceof Number ? ((Number) bucket.get("sum")).doubleValue() : 0;

            //            LOGGER.debug("bucket:{}, sumsq:{}, count: {}", bucket, sumsq, count);
            double std;
            if (count > 1) {
                // std = Math.sqrt(sumsq / (count - 1));
                final double realSq = sumsq - (sum * sum) / count;
                std = Math.sqrt(realSq / (count - 1));
            } else {
                std = 0;
            }
            final JSONObject aaLength = new JSONObject();
            aaLength.put("min", min);
            aaLength.put("max", max);
            aaLength.put("mean", mean);
            aaLength.put("stddev", std);

            figfamIdList.add(figfamId);

            final JSONObject figfam = new JSONObject();
            figfam.put("genomes", figfamGenomeIdStr.get(figfamId));
            figfam.put("genome_count", figfamGenomeCount.get(figfamId));
            figfam.put("feature_count", count);
            figfam.put("stats", aaLength);

            figfams.put(figfamId, figfam);
        }

        point = System.currentTimeMillis();
        LOGGER.debug("2st query process: {} ms", (point - start2nd));
    } catch (IOException e) {
        LOGGER.error(e.getMessage(), e);
    }

    // getting distinct figfam_product
    if (!figfamIdList.isEmpty()) {

        figfamIdList.remove("");

        try {
            SolrQuery query = new SolrQuery("family_id:(" + StringUtils.join(figfamIdList, " OR ") + ")");
            query.addFilterQuery("family_type:" + familyType);
            query.addField("family_id,family_product").setRows(figfamIdList.size());

            LOGGER.debug("getGroupStats() 3/3: [{}] {}", SolrCore.FIGFAM_DIC.getSolrCoreName(), query);

            String apiResponse = dataApi.solrQuery(SolrCore.FIGFAM_DIC, query);

            Map resp = jsonReader.readValue(apiResponse);
            Map respBody = (Map) resp.get("response");

            List<Map> sdl = (List<Map>) respBody.get("docs");

            for (final Map doc : sdl) {
                final JSONObject figfam = (JSONObject) figfams.get(doc.get("family_id"));
                figfam.put("description", doc.get("family_product"));
                figfams.put(doc.get("family_id").toString(), figfam);
            }

            int i = 1;
            while (sdl.size() == 25000) {
                query.setStart(25000 * i);

                apiResponse = dataApi.solrQuery(SolrCore.FIGFAM_DIC, query);
                resp = jsonReader.readValue(apiResponse);
                respBody = (Map) resp.get("response");

                sdl = (List<Map>) respBody.get("docs");

                for (final Map doc : sdl) {
                    final JSONObject figfam = (JSONObject) figfams.get(doc.get("family_id"));
                    figfam.put("description", doc.get("family_product"));
                    figfams.put(doc.get("family_id").toString(), figfam);
                }
                i++;
            }
        } catch (IOException e) {
            LOGGER.error(e.getMessage(), e);
            LOGGER.debug("::getGroupStats() 3/3, params: {}", request.getParameterMap().toString());
        }
        figfams.writeJSONString(writer);
    }
}

From source file:org.openconcerto.sql.model.SQLRow.java

public Set<SQLRow> getDistantRows(final Path path) {
    // We want all the fields of the last table and nothing else.
    final List<List<String>> fields = new ArrayList<List<String>>(
            Collections.nCopies(path.length() - 1, Collections.<String>emptyList()));
    // null means "all fields" for the final table; repeating the shared
    // immutable emptyList() instance for the intermediate tables is safe.
    fields.add(null);
    final Set<List<SQLRow>> s = this.getRowsOnPath(path, fields);
    final Set<SQLRow> res = new LinkedHashSet<SQLRow>(s.size());
    for (final List<SQLRow> l : s) {
        res.add(l.get(0));
    }
    return res;
}

From source file:gov.nih.nci.caarray.magetab.sdrf.SdrfDocument.java

private void addCharacteristic(List<String> row, Characteristic characteristic, boolean hasTerm,
        boolean hasUnit) {
    List<String> values = new LinkedList<String>();
    // Measurement characteristic: Add value, unit and the unit's term source.
    if (characteristic == null || characteristic.getTermOrDirectValue() == null) {
        values.add("");
        if (hasTerm) {
            values.add("");
        }
        if (hasUnit) {
            values.addAll(Collections.nCopies(2, ""));
        }
    } else {
        values.add(characteristic.getTermOrDirectValue());
        if (characteristic.getTerm() != null) {
            TermSource termSource = characteristic.getTerm().getTermSource();
            if (termSource == null || StringUtils.isBlank(termSource.getName())) {
                values.add("");
            } else {
                values.add(termSource.getName());
            }
        } else if (hasTerm) {
            values.add("");
        }
        OntologyTerm unit = characteristic.getUnit();
        if (unit != null) {
            values.add(unit.getValue());
            TermSource termSource = unit.getTermSource();
            if (termSource == null || StringUtils.isBlank(termSource.getName())) {
                values.add("");
            } else {
                values.add(termSource.getName());
            }
        } else if (hasUnit) {
            values.add("");
            values.add("");
        }
    }
    row.addAll(values);
}

From source file:eu.crisis_economics.abm.model.AbstractModel.java

/**
  * Selling price of goods manufactured by firms
  */
@RecorderSource(value = "firmGoodsSellingPrice", collectionLengthMember = "getNumberOfFirmsForRecorderSampling()")
public List<Double> getFirmSellingPrices() {
    final List<Double> salePrices = new ArrayList<Double>();
    for (final MacroFirm firm : population.getAgentsOfType(MacroFirm.class))
        salePrices.add(firm.getGoodsSellingPrice());
    final List<Double> result = new ArrayList<Double>(Collections.nCopies(getNumberOfFirms(), 0.));
    for (int i = 0; i < salePrices.size(); ++i)
        result.set(i, salePrices.get(i));
    return result;
}