Example usage for java.util Collections nCopies

List of usage examples for java.util Collections nCopies

Introduction

On this page you can find usage examples for java.util.Collections.nCopies.

Prototype

public static <T> List<T> nCopies(int n, T o) 

Document

Returns an immutable list consisting of n copies of the specified object.
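
Before the real-world examples, here is a minimal, self-contained sketch of that contract (the class name NCopiesDemo is purely illustrative): the returned list shares a single element reference, rejects modification, and is commonly copied into an ArrayList when a mutable pre-filled list is needed.

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

public class NCopiesDemo {
    public static void main(String[] args) {
        // Five references to the same string; only one element is actually stored
        List<String> placeholders = Collections.nCopies(5, "?");
        System.out.println(String.join(",", placeholders)); // ?,?,?,?,?

        // The returned list is immutable: any attempt to change it throws
        try {
            placeholders.set(0, "x");
        } catch (UnsupportedOperationException expected) {
            System.out.println("nCopies lists cannot be modified");
        }

        // Common idiom: copy into an ArrayList to get a mutable, pre-filled list
        List<Integer> counters = new ArrayList<>(Collections.nCopies(3, 0));
        counters.set(1, 42);
        System.out.println(counters); // [0, 42, 0]
    }
}

Both patterns, reading the immutable list directly and copying it into a mutable ArrayList, recur throughout the examples below.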

Usage

From source file: de.innovationgate.webgate.api.jdbc.custom.JDBCSource.java

private PreparedStatement getInsertStatement(String folder, Map values) throws SQLException {

    if (!_tables.containsKey(folder.toLowerCase())) {
        return null;
    }

    // Prepare statement
    StringBuffer query = new StringBuffer();
    query.append("INSERT INTO " + folder);

    List columnNames = new ArrayList(values.keySet());
    query.append(" (").append(WGUtils.serializeCollection(columnNames, ",")).append(")");

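    // nCopies yields one "?" placeholder per column; the immutable list is fine here since it is only serialized into the SQL string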
    List columnValues = Collections.nCopies(columnNames.size(), "?");
    query.append(" VALUES (").append(WGUtils.serializeCollection(columnValues, ",")).append(")");

    PreparedStatement stmt = getConnection().prepareStatement(query.toString(),
            Statement.RETURN_GENERATED_KEYS);

    // Insert parameter values
    for (int idx = 0; idx < columnNames.size(); idx++) {
        String column = (String) columnNames.get(idx);
        stmt.setObject(idx + 1, values.get(column));
    }

    return stmt;
}

From source file: de.bund.bfr.knime.pmm.common.chart.Plotable.java

public List<Map<String, Integer>> getAllChoices() {
    List<Map<String, Integer>> choices = new ArrayList<>();
    List<String> argList = new ArrayList<>(functionArguments.keySet());
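    // nCopies seeds the choice vector with one zero per argument; the copy into an ArrayList makes it mutable for the counter logic below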
    List<Integer> choice = new ArrayList<>(Collections.nCopies(argList.size(), 0));
    boolean done = false;

    while (!done) {
        Map<String, Integer> map = new LinkedHashMap<>();

        for (int i = 0; i < argList.size(); i++) {
            map.put(argList.get(i), choice.get(i));
        }

        choices.add(map);

        for (int i = 0;; i++) {
            if (i >= argList.size()) {
                done = true;
                break;
            }

            choice.set(i, choice.get(i) + 1);

            if (choice.get(i) >= functionArguments.get(argList.get(i)).size()) {
                choice.set(i, 0);
            } else {
                break;
            }
        }
    }

    return choices;
}

From source file: org.broadinstitute.gatk.tools.walkers.cancer.m2.SomaticGenotypingEngine.java

/**
 * Main entry point of class - given a particular set of haplotypes, samples and reference context, compute
 * genotype likelihoods and assemble into a list of variant contexts and genomic events ready for calling
 *
 * The list of samples we're working with is obtained from the readLikelihoods
 * @param readLikelihoods                       Map from reads->(haplotypes,likelihoods)
 * @param perSampleFilteredReadList              Map from sample to reads that were filtered after assembly and before calculating per-read likelihoods.
 * @param ref                                    Reference bytes at active region
 * @param refLoc                                 Corresponding active region genome location
 * @param activeRegionWindow                     Active window
 *
 * @return                                       A CalledHaplotypes object containing a list of VCs with genotyped events and called haplotypes
 *
 */
public CalledHaplotypes callMutations(final ReadLikelihoods<Haplotype> readLikelihoods,
        final Map<String, Integer> originalNormalReadQualities,
        final Map<String, List<GATKSAMRecord>> perSampleFilteredReadList, final byte[] ref,
        final GenomeLoc refLoc, final GenomeLoc activeRegionWindow, final RefMetaDataTracker tracker) {
    //TODO: in GATK4 use Utils.nonNull
    if (readLikelihoods == null || readLikelihoods.sampleCount() == 0)
        throw new IllegalArgumentException(
                "readLikelihoods input should be non-empty and non-null, got " + readLikelihoods);
    if (ref == null || ref.length == 0)
        throw new IllegalArgumentException("ref bytes input should be non-empty and non-null, got " + ref);
    if (refLoc == null || refLoc.size() != ref.length)
        throw new IllegalArgumentException(
                " refLoc must be non-null and length must match ref bytes, got " + refLoc);
    if (activeRegionWindow == null)
        throw new IllegalArgumentException("activeRegionWindow must be non-null, got " + activeRegionWindow);

    final List<Haplotype> haplotypes = readLikelihoods.alleles();

    // Somatic Tumor/Normal Sample Handling
    if (!readLikelihoods.samples().contains(tumorSampleName)) {
        throw new IllegalArgumentException(
                "readLikelihoods does not contain the tumor sample " + tumorSampleName);
    }
    final boolean hasNormal = matchedNormalSampleName != null;

    // update the haplotypes so we're ready to call, getting the ordered list of positions on the reference
    // that carry events among the haplotypes
    final TreeSet<Integer> startPosKeySet = decomposeHaplotypesIntoVariantContexts(haplotypes, readLikelihoods,
            ref, refLoc, NO_GIVEN_ALLELES);

    // Walk along each position in the key set and create each event to be outputted
    final Set<Haplotype> calledHaplotypes = new HashSet<>();
    final List<VariantContext> returnCalls = new ArrayList<>();

    for (final int loc : startPosKeySet) {
        if (loc < activeRegionWindow.getStart() || loc > activeRegionWindow.getStop()) {
            continue;
        }

        final List<VariantContext> eventsAtThisLoc = getVCsAtThisLocation(haplotypes, loc, NO_GIVEN_ALLELES);

        if (eventsAtThisLoc.isEmpty()) {
            continue;
        }

        // Create the event mapping object which maps the original haplotype events to the events present at just this locus
        final Map<Event, List<Haplotype>> eventMapper = createEventMapper(loc, eventsAtThisLoc, haplotypes);

        // TODO: priorityList is not sorted by priority, might as well just use eventsAtThisLoc.map(VariantContext::getSource)
        final List<String> priorityList = makePriorityList(eventsAtThisLoc);

        // merge variant contexts from multiple haplotypes into one variant context
        // TODO: we should use haplotypes if possible, but that may have to wait for GATK4
        VariantContext mergedVC = GATKVariantContextUtils.simpleMerge(eventsAtThisLoc, priorityList,
                GATKVariantContextUtils.FilteredRecordMergeType.KEEP_IF_ANY_UNFILTERED,
                GATKVariantContextUtils.GenotypeMergeType.PRIORITIZE, false, false, null, false, false);

        if (mergedVC == null) {
            continue;
        }

        // TODO: this variable needs a descriptive name
        final Map<VariantContext, Allele> mergeMap = new LinkedHashMap<>();

        mergeMap.put(null, mergedVC.getReference()); // the reference event (null) --> the reference allele
        for (int i = 0; i < eventsAtThisLoc.size(); i++) {
            // TODO: as noted below, this operation seems dangerous. Understand how things can go wrong.
            mergeMap.put(eventsAtThisLoc.get(i), mergedVC.getAlternateAllele(i)); // BUGBUG: This is assuming that the order of alleles is the same as the priority list given to simpleMerge function
        }

        /** TODO: the code in the for loop up to here needs refactoring. The goal, as far as I can tell, is to create two things: alleleMapper and mergedVC
         * alleleMapper maps alleles to haplotypes, and we need this to create readAlleleLikelihoods.
         * To make alleleMapper we make mergeMap (of type VC -> Allele) and eventMapper (of type Event -> List(Haplotypes), where Event is essentially Variant Context)
         * If we just want a map of Alleles to Haplotypes, we should be able to do so directly; no need for intermediate maps, which just complicates the code.
         **/

        final Map<Allele, List<Haplotype>> alleleMapper = createAlleleMapper(mergeMap, eventMapper);

        // converting ReadLikelihoods<Haplotype> to ReadLikeliHoods<Allele>
        ReadLikelihoods<Allele> readAlleleLikelihoods = readLikelihoods.marginalize(alleleMapper,
                genomeLocParser.createPaddedGenomeLoc(genomeLocParser.createGenomeLoc(mergedVC),
                        ALLELE_EXTENSION));

        //LDG: do we want to do this before or after pulling out overlapping reads?
        if (MTAC.isSampleContaminationPresent()) {
            readAlleleLikelihoods.contaminationDownsampling(MTAC.getSampleContamination());
        }

        // TODO: this is a good break point for a new method
        // TODO: replace PRALM with ReadLikelihoods
        final PerReadAlleleLikelihoodMap tumorPRALM = readAlleleLikelihoods
                .toPerReadAlleleLikelihoodMap(readAlleleLikelihoods.sampleIndex(tumorSampleName));
        filterPRALMForOverlappingReads(tumorPRALM, mergedVC.getReference(), loc, false);
        MuTect2.logReadInfo(DEBUG_READ_NAME, tumorPRALM.getLikelihoodReadMap().keySet(),
                "Present in Tumor PRALM after filtering for overlapping reads");
        // extend to multiple samples

        // compute tumor LOD for each alternate allele
        // TODO: somewhere we have to ensure that all the alleles in the variant context are in alleleFractions passed to getHetGenotypeLogLikelihoods. getHetGenotypeLogLikelihoods will not check that for you
        final PerAlleleCollection<Double> altAlleleFractions = estimateAlleleFraction(mergedVC, tumorPRALM,
                false);
        final PerAlleleCollection<Double> tumorHetGenotypeLLs = getHetGenotypeLogLikelihoods(mergedVC,
                tumorPRALM, originalNormalReadQualities, altAlleleFractions);

        final PerAlleleCollection<Double> tumorLods = PerAlleleCollection.createPerAltAlleleCollection();
        for (final Allele altAllele : mergedVC.getAlternateAlleles()) {
            tumorLods.set(altAllele, tumorHetGenotypeLLs.get(altAllele) - tumorHetGenotypeLLs.getRef());
        }

        // TODO: another good breakpoint e.g. compute normal LOD/set thresholds
        // TODO: anything related to normal should be encapsulated in Optional

        // A variant candidate whose normal LOD is below this threshold will be filtered as 'germline_risk'
        // This is a more stringent threshold than normalLodThresholdForVCF
        double normalLodFilterThreshold = -Double.MAX_VALUE;
        PerReadAlleleLikelihoodMap normalPRALM = null;
        final PerAlleleCollection<Double> normalLods = PerAlleleCollection.createPerAltAlleleCollection();

        // if normal bam is available, compute normal LOD
        // TODO: this if statement should be a standalone method for computing normal LOD
        // TODO: then we can do something like normalLodThreshold = hasNormal ? thisMethod() : Optional.empty()
        if (hasNormal) {
            normalPRALM = readAlleleLikelihoods
                    .toPerReadAlleleLikelihoodMap(readAlleleLikelihoods.sampleIndex(matchedNormalSampleName));
            filterPRALMForOverlappingReads(normalPRALM, mergedVC.getReference(), loc, true);
            MuTect2.logReadInfo(DEBUG_READ_NAME, normalPRALM.getLikelihoodReadMap().keySet(),
                    "Present after in Nomral PRALM filtering for overlapping reads");

            final GenomeLoc eventGenomeLoc = genomeLocParser.createGenomeLoc(activeRegionWindow.getContig(),
                    loc);
            final Collection<VariantContext> cosmicVC = tracker.getValues(MTAC.cosmicRod, eventGenomeLoc);
            final Collection<VariantContext> dbsnpVC = tracker.getValues(MTAC.dbsnp.dbsnp, eventGenomeLoc);
            final boolean germlineAtRisk = !dbsnpVC.isEmpty() && cosmicVC.isEmpty();

            normalLodFilterThreshold = germlineAtRisk ? MTAC.NORMAL_DBSNP_LOD_THRESHOLD
                    : MTAC.NORMAL_LOD_THRESHOLD;

            // compute normal LOD = LL(X|REF)/LL(X|ALT) where REF is the diploid HET with AF = 0.5
            // note normal LOD is REF over ALT, the reciprocal of the tumor LOD
            final PerAlleleCollection<Double> diploidHetAlleleFractions = PerAlleleCollection
                    .createPerRefAndAltAlleleCollection();
            for (final Allele allele : mergedVC.getAlternateAlleles()) {
                diploidHetAlleleFractions.setAlt(allele, 0.5);
            }

            final PerAlleleCollection<Double> normalGenotypeLLs = getHetGenotypeLogLikelihoods(mergedVC,
                    normalPRALM, originalNormalReadQualities, diploidHetAlleleFractions);

            for (final Allele altAllele : mergedVC.getAlternateAlleles()) {
                normalLods.setAlt(altAllele, normalGenotypeLLs.getRef() - normalGenotypeLLs.getAlt(altAllele));
            }
        }

        int numPassingAlts = 0;
        final Set<Allele> allelesThatPassThreshold = new HashSet<>();
        Allele alleleWithHighestTumorLOD = null;

        for (final Allele altAllele : mergedVC.getAlternateAlleles()) {
            final boolean passesTumorLodThreshold = tumorLods
                    .getAlt(altAllele) >= MTAC.INITIAL_TUMOR_LOD_THRESHOLD;
            final boolean passesNormalLodThreshold = hasNormal
                    ? normalLods.getAlt(altAllele) >= MTAC.INITIAL_NORMAL_LOD_THRESHOLD
                    : true;
            if (passesTumorLodThreshold && passesNormalLodThreshold) {
                numPassingAlts++;
                allelesThatPassThreshold.add(altAllele);
                if (alleleWithHighestTumorLOD == null
                        || tumorLods.getAlt(altAllele) > tumorLods.getAlt(alleleWithHighestTumorLOD)) {
                    alleleWithHighestTumorLOD = altAllele;
                }
            }
        }

        if (numPassingAlts == 0) {
            continue;
        }

        final VariantContextBuilder callVcb = new VariantContextBuilder(mergedVC);
        final int haplotypeCount = alleleMapper.get(alleleWithHighestTumorLOD).size();
        callVcb.attribute(GATKVCFConstants.HAPLOTYPE_COUNT_KEY, haplotypeCount);
        callVcb.attribute(GATKVCFConstants.TUMOR_LOD_KEY, tumorLods.getAlt(alleleWithHighestTumorLOD));

        if (hasNormal) {
            callVcb.attribute(GATKVCFConstants.NORMAL_LOD_KEY, normalLods.getAlt(alleleWithHighestTumorLOD));
            if (normalLods.getAlt(alleleWithHighestTumorLOD) < normalLodFilterThreshold) {
                callVcb.filter(GATKVCFConstants.GERMLINE_RISK_FILTER_NAME);
            }
        }

        // TODO: this should be a separate method
        // TODO: move code to MuTect2::calculateFilters()
        if (MTAC.ENABLE_STRAND_ARTIFACT_FILTER && numPassingAlts == 1) {
            final PerReadAlleleLikelihoodMap forwardPRALM = new PerReadAlleleLikelihoodMap();
            final PerReadAlleleLikelihoodMap reversePRALM = new PerReadAlleleLikelihoodMap();
            splitPRALMintoForwardAndReverseReads(tumorPRALM, forwardPRALM, reversePRALM);

            MuTect2.logReadInfo(DEBUG_READ_NAME, tumorPRALM.getLikelihoodReadMap().keySet(),
                    "Present in tumor PRALM after PRALM is split");
            MuTect2.logReadInfo(DEBUG_READ_NAME, forwardPRALM.getLikelihoodReadMap().keySet(),
                    "Present in forward PRALM after PRALM is split");
            MuTect2.logReadInfo(DEBUG_READ_NAME, reversePRALM.getLikelihoodReadMap().keySet(),
                    "Present in reverse PRALM after PRALM is split");

            // TODO: build a new type for probability, likelihood, and log_likelihood. e.g. f_fwd :: probability[], tumorGLs_fwd :: likelihood[]
            // TODO: don't want to call getHetGenotypeLogLikelihoods on more than one alternate allele. May need to overload it to take a scalar f_fwd.
            final PerAlleleCollection<Double> alleleFractionsForward = estimateAlleleFraction(mergedVC,
                    forwardPRALM, true);
            final PerAlleleCollection<Double> tumorGenotypeLLForward = getHetGenotypeLogLikelihoods(mergedVC,
                    forwardPRALM, originalNormalReadQualities, alleleFractionsForward);

            final PerAlleleCollection<Double> alleleFractionsReverse = estimateAlleleFraction(mergedVC,
                    reversePRALM, true);
            final PerAlleleCollection<Double> tumorGenotypeLLReverse = getHetGenotypeLogLikelihoods(mergedVC,
                    reversePRALM, originalNormalReadQualities, alleleFractionsReverse);

            final double tumorLod_fwd = tumorGenotypeLLForward.getAlt(alleleWithHighestTumorLOD)
                    - tumorGenotypeLLForward.getRef();
            final double tumorLod_rev = tumorGenotypeLLReverse.getAlt(alleleWithHighestTumorLOD)
                    - tumorGenotypeLLReverse.getRef();

            // Note that we use the observed combined (+ and -) allele fraction for power calculation in either direction
            final double tumorSBpower_fwd = strandArtifactPowerCalculator.cachedPowerCalculation(
                    forwardPRALM.getNumberOfStoredElements(),
                    altAlleleFractions.getAlt(alleleWithHighestTumorLOD));
            final double tumorSBpower_rev = strandArtifactPowerCalculator.cachedPowerCalculation(
                    reversePRALM.getNumberOfStoredElements(),
                    altAlleleFractions.getAlt(alleleWithHighestTumorLOD));

            callVcb.attribute(GATKVCFConstants.TLOD_FWD_KEY, tumorLod_fwd);
            callVcb.attribute(GATKVCFConstants.TLOD_REV_KEY, tumorLod_rev);
            callVcb.attribute(GATKVCFConstants.TUMOR_SB_POWER_FWD_KEY, tumorSBpower_fwd);
            callVcb.attribute(GATKVCFConstants.TUMOR_SB_POWER_REV_KEY, tumorSBpower_rev);

            if ((tumorSBpower_fwd > MTAC.STRAND_ARTIFACT_POWER_THRESHOLD
                    && tumorLod_fwd < MTAC.STRAND_ARTIFACT_LOD_THRESHOLD)
                    || (tumorSBpower_rev > MTAC.STRAND_ARTIFACT_POWER_THRESHOLD
                            && tumorLod_rev < MTAC.STRAND_ARTIFACT_LOD_THRESHOLD))
                callVcb.filter(GATKVCFConstants.STRAND_ARTIFACT_FILTER_NAME);
        }

        // TODO: this probably belongs in M2::calculateFilters()
        if (numPassingAlts > 1) {
            callVcb.filter(GATKVCFConstants.TRIALLELIC_SITE_FILTER_NAME);
        }

        // build genotypes TODO: this part needs review and refactor
        final List<Allele> tumorAlleles = Arrays.asList(mergedVC.getReference(), alleleWithHighestTumorLOD);
        // TODO: estimateAlleleFraction should not repeat counting allele depths
        final PerAlleleCollection<Integer> tumorAlleleDepths = getRefAltCount(mergedVC, tumorPRALM, false);
        final int tumorRefAlleleDepth = tumorAlleleDepths.getRef();
        final int tumorAltAlleleDepth = tumorAlleleDepths.getAlt(alleleWithHighestTumorLOD);
        final Genotype tumorGenotype = new GenotypeBuilder(tumorSampleName, tumorAlleles)
                .AD(new int[] { tumorRefAlleleDepth, tumorAltAlleleDepth })
                .attribute(GATKVCFConstants.ALLELE_FRACTION_KEY,
                        altAlleleFractions.getAlt(alleleWithHighestTumorLOD))
                .make();

        final List<Genotype> genotypes = new ArrayList<>();
        genotypes.add(tumorGenotype);

        // We assume that the genotype in the normal is 0/0
        // TODO: is normal always homozygous reference?
        final List<Allele> homRefAllelesforNormalGenotype = Collections.nCopies(2, mergedVC.getReference());

        // if we are calling with a normal, build the genotype for the sample to appear in vcf
        if (hasNormal) {
            final PerAlleleCollection<Integer> normalAlleleDepths = getRefAltCount(mergedVC, normalPRALM,
                    false);
            final int normalRefAlleleDepth = normalAlleleDepths.getRef();
            final int normalAltAlleleDepth = normalAlleleDepths.getAlt(alleleWithHighestTumorLOD);
            final double normalAlleleFraction = (double) normalAltAlleleDepth
                    / (normalRefAlleleDepth + normalAltAlleleDepth);

            final Genotype normalGenotype = new GenotypeBuilder(matchedNormalSampleName,
                    homRefAllelesforNormalGenotype).AD(new int[] { normalRefAlleleDepth, normalAltAlleleDepth })
                            .attribute(GATKVCFConstants.ALLELE_FRACTION_KEY, normalAlleleFraction).make();
            genotypes.add(normalGenotype);
        }

        final VariantContext call = new VariantContextBuilder(callVcb).alleles(tumorAlleles)
                .genotypes(genotypes).make();

        // how should we be making use of _perSampleFilteredReadList_?
        readAlleleLikelihoods = prepareReadAlleleLikelihoodsForAnnotation(readLikelihoods,
                perSampleFilteredReadList, genomeLocParser, false, alleleMapper, readAlleleLikelihoods, call);

        final ReferenceContext referenceContext = new ReferenceContext(genomeLocParser,
                genomeLocParser.createGenomeLoc(mergedVC.getChr(), mergedVC.getStart(), mergedVC.getEnd()),
                refLoc, ref);
        VariantContext annotatedCall = annotationEngine.annotateContextForActiveRegion(referenceContext,
                tracker, readAlleleLikelihoods, call, false);

        if (call.getAlleles().size() != mergedVC.getAlleles().size())
            annotatedCall = GATKVariantContextUtils.reverseTrimAlleles(annotatedCall);

        // maintain the set of all called haplotypes
        call.getAlleles().stream().map(alleleMapper::get).filter(Objects::nonNull)
                .forEach(calledHaplotypes::addAll);
        returnCalls.add(annotatedCall);
    }

    // TODO: understand effect of enabling this for somatic calling...
    final List<VariantContext> outputCalls = doPhysicalPhasing ? phaseCalls(returnCalls, calledHaplotypes)
            : returnCalls;
    return new CalledHaplotypes(outputCalls, calledHaplotypes);
}

From source file: org.broadinstitute.gatk.tools.walkers.genotyper.afcalc.AFCalcUnitTest.java

@DataProvider(name = "PNonRefBiallelicSystematic")
public Object[][] makePNonRefBiallelicSystematic() {
    List<Object[]> tests = new ArrayList<Object[]>();

    final List<Integer> bigNonRefPLs = Arrays.asList(0, 1, 2, 3, 4, 5, 10, 15, 20, 25, 50, 100, 1000);
    final List<List<Integer>> bigDiploidPLs = removeBadPLs(Utils.makePermutations(bigNonRefPLs, 3, true));

    for (AFCalcFactory.Calculation modelType : AFCalcFactory.Calculation.values()) {

        if (false) { // for testing only
            tests.add(new Object[] { modelType, toGenotypes(Arrays.asList(Arrays.asList(0, 100, 0))) });
        } else {
            if (modelType == AFCalcFactory.Calculation.EXACT_GENERAL_PLOIDY)
                continue; // TODO -- GENERAL_PLOIDY DOESN'T WORK

            // test all combinations of PLs for 1 sample
            for (final List<List<Integer>> PLsPerSample : Utils.makePermutations(bigDiploidPLs, 1, true)) {
                tests.add(new Object[] { modelType, toGenotypes(PLsPerSample) });
            }

            final List<List<Integer>> smallDiploidPLs = new LinkedList<List<Integer>>();
            for (final int nonRefPL : Arrays.asList(5, 10, 20, 30)) {
                for (int i = 0; i < 2; i++) {
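                    // start with all three PLs equal to the non-ref value, then zero out one genotype slot per iteration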
                    List<Integer> pls = new ArrayList<Integer>(Collections.nCopies(3, nonRefPL));
                    pls.set(i, 0);
                    smallDiploidPLs.add(pls);
                }
            }

            for (final List<List<Integer>> PLsPerSample : Utils.makePermutations(smallDiploidPLs, 5, false)) {
                tests.add(new Object[] { modelType, toGenotypes(PLsPerSample) });
            }
        }
    }

    return tests.toArray(new Object[][] {});
}

From source file: org.broadinstitute.gatk.tools.walkers.genotyper.afcalc.AFCalculationUnitTest.java

@DataProvider(name = "PNonRefBiallelicSystematic")
public Object[][] makePNonRefBiallelicSystematic() {
    List<Object[]> tests = new ArrayList<Object[]>();

    final List<Integer> bigNonRefPLs = Arrays.asList(0, 1, 2, 3, 4, 5, 10, 15, 20, 25, 50, 100, 1000);
    final List<List<Integer>> bigDiploidPLs = removeBadPLs(Utils.makePermutations(bigNonRefPLs, 3, true));

    for (AFCalculatorImplementation modelType : AFCalculatorImplementation.values()) {

        if (false) { // for testing only
            tests.add(new Object[] { modelType, toGenotypes(Arrays.asList(Arrays.asList(0, 100, 0))) });
        } else {
            if (modelType == AFCalculatorImplementation.EXACT_GENERAL_PLOIDY)
                continue; // TODO -- GENERAL_PLOIDY DOESN'T WORK

            // test all combinations of PLs for 1 sample
            for (final List<List<Integer>> PLsPerSample : Utils.makePermutations(bigDiploidPLs, 1, true)) {
                tests.add(new Object[] { modelType, toGenotypes(PLsPerSample) });
            }

            final List<List<Integer>> smallDiploidPLs = new LinkedList<List<Integer>>();
            for (final int nonRefPL : Arrays.asList(5, 10, 20, 30)) {
                for (int i = 0; i < 2; i++) {
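                    // start with all three PLs equal to the non-ref value, then zero out one genotype slot per iteration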
                    List<Integer> pls = new ArrayList<Integer>(Collections.nCopies(3, nonRefPL));
                    pls.set(i, 0);
                    smallDiploidPLs.add(pls);
                }
            }

            for (final List<List<Integer>> PLsPerSample : Utils.makePermutations(smallDiploidPLs, 5, false)) {
                tests.add(new Object[] { modelType, toGenotypes(PLsPerSample) });
            }
        }
    }

    return tests.toArray(new Object[][] {});
}

From source file: org.apigw.authserver.svc.impl.TokenServicesImplTest.java

@Test
public void testFindOnlyValidTokensByUsername() {
    final TokenServicesImpl tokenServices = new TokenServicesImpl();
    final AuthorizationGrant auth = buildAuthorizationGrantWithLegalGuardian();

    final ResidentServices residentServices = mock(ResidentServices.class);
    final AuthorizationGrantRepository repo = mock(AuthorizationGrantRepository.class);

    tokenServices.setResidentServices(residentServices);
    tokenServices.setAuthorizationGrantRepository(repo);

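    // stub the repository to return the same grant four times; nCopies is a compact way to repeat a single value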
    when(repo.findByResidentIdentificationNumber(anyString())).thenReturn(Collections.nCopies(4, auth));
    when(residentServices.validateLegalGuardian(anyString(), anyString())).thenReturn(true, false, false, true);

    final Collection<OAuth2AccessToken> tokens = tokenServices
            .findTokensByUserName(auth.getResidentIdentificationNumber());

    assertEquals(2, tokens.size());
}

From source file: de.innovationgate.wgpublisher.webtml.form.TMLForm.java

private boolean validateField(TMLFormField field, TMLContext formContext, TMLContext validationContext,
        RhinoExpressionEngine engine) throws WGException, ParseException {
    FieldReg fieldReg = _formInfo.getFieldRegistration(field.getName());
    String fieldname = field.getName();
    String validation = fieldReg.getValidation();
    boolean multiple = fieldReg.isMultiple();

    boolean fieldIsValid = true;

    // if we have a validation, validate field
    if ((validation != null) && (!validation.trim().equals(""))) {

        // do not validate hashedpassword fields if hashed string was posted
        if (fieldReg.getType().equals("hashedpassword")) {
            // if entered and parsed value are equal a hashed string was posted
            String enteredValue = field.getFirstEnteredStringValue();
            Object parsedValue = field.getFirstParsedValue();
            if (enteredValue != null && !enteredValue.trim().equals("")) {
                if (enteredValue.equals(parsedValue)) {
                    return true;
                }
            }
        }

        // set tmlscript variables containing the current field value
        setValidationVariables(validationContext, field, multiple);

        List validationList = new ArrayList();
        List messageList = new ArrayList();

        //if a validationdivider is set for this field, tokenize validation and message
        String msg = fieldReg.getMessage();

        if (fieldReg.getValidationdivider() != null) {
            validationList = WGUtils.deserializeCollection(validation, fieldReg.getValidationdivider());
            if (msg == null) {
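                // no custom message configured: repeat the default validation-failure label once per validation expression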
                messageList = Collections.nCopies(validationList.size(),
                        TMLContext.getThreadMainContext().systemLabel("tmlform", "msg_failed_validation")
                                + fieldReg.getName());
            } else {
                messageList = WGUtils.deserializeCollection(msg, fieldReg.getValidationdivider());
            }
        } else {
            // add single validation and message
            validationList.add(validation);
            if (msg == null) {
                msg = TMLContext.getThreadMainContext().systemLabel("tmlform", "msg_failed_validation")
                        + fieldReg.getName();
            }
            messageList.add(msg);
        }

        //process validations for field               
        for (int i = 0; i < validationList.size(); i++) {

            String expression = (String) validationList.get(i);

            String message = "";
            if (i < messageList.size()) {
                message = (String) messageList.get(i);
                // resolve scriptlets in message
                Map params = new HashMap();
                params.put(RhinoExpressionEngine.SCRIPTLETOPTION_LEVEL, RhinoExpressionEngine.LEVEL_SCRIPTLETS);
                message = engine.resolveScriptlets(message, validationContext, params);
            }

            ExpressionResult result = engine.evaluateExpression(expression, validationContext,
                    ExpressionEngine.TYPE_EXPRESSION, buildValidationExpressionParams(fieldReg));
            if (result.isError() || result.isFalse()) {

                fieldIsValid = false;
                String errorMsg = message;

                // Validation had an error itself, we put out the error cause instead of the validation message
                if (result.isError()) {
                    errorMsg = "Validation-Expression could not be processed. Warning: "
                            + result.getException().getMessage() + " - expression was: " + expression;
                    if (result.getException() != null) {
                        // See if there is a TMLFormValidationException "somewhere down there". If so we take it as negative validation result
                        Throwable cause = result.getException();
                        while (!(cause instanceof TMLFormValidationException) && cause.getCause() != null
                                && cause.getCause() != cause) {
                            cause = cause.getCause();
                        }
                        if (cause instanceof TMLFormValidationException) {
                            errorMsg = cause.getMessage();
                        } else {
                            formContext.addwarning(errorMsg, false);
                            formContext.getlog().error("Error running validation expression",
                                    result.getException());
                        }
                    }
                }

                if (WGUtils.isEmpty(errorMsg)) {
                    formContext.addwarning("No message defined for validation '" + expression + "'", false);
                }

                _log.debug(errorMsg);
                _messages.put(fieldname, errorMsg);

                // clear field if type is hashedpassword
                // to ensure the validation can be executed again
                if (fieldReg.getType().equals("hashedpassword")) {
                    field.clear();
                }
                // clear given dependent fields
                clearFields(fieldReg.getCleariferror());
                break; // stop further validation of this field
            } else if (result.isTrue()) {
                _log.debug("Validation result for field '" + fieldReg.getName() + "' result of '" + expression
                        + "' is '" + result.isTrue() + "'.");
            }
        }
    }

    // In WGA5 behaviour we automatically check for conversion errors and treat them like validation errors
    if (fieldIsValid && !WGUtils.isEmpty(field.getEnteredValues()) && getforminfo().getVersionCompliance()
            .isAtLeast(CSConfig.getComplianceVersion(CSConfig.VERSIONCOMPLIANCE_WGA50))) {
        if (!field.couldBeParsed()) {
            fieldIsValid = false;

            List labelParams = new ArrayList();
            labelParams.add(field.getName());
            labelParams.add(fieldReg.getType());
            labelParams.add(WGUtils.serializeCollection(field.getEnteredValues(), ", "));
            labelParams.add(fieldReg.getFormat());
            String message;
            if (fieldReg.getFormat() != null) {
                message = getFormContext().systemLabel("tmlform", "msg_failed_conversion_formatted",
                        labelParams);
            } else {
                message = getFormContext().systemLabel("tmlform", "msg_failed_conversion", labelParams);
            }

            _log.debug(message);
            _messages.put(fieldname, message);
            // clear given dependent fields
            clearFields(fieldReg.getCleariferror());
        }
    }

    return fieldIsValid;
}

From source file: com.google.uzaygezen.core.BitVectorTest.java

private void checkEvenBitsToBigEndianByteArray(Function<Integer, BitVector> factory) {
    BitVector v = factory.apply(128);
    for (int i = 0; i < 64; ++i) {
        v.set(2 * i);
    }
    byte[] actual = v.toBigEndianByteArray();
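    // setting every even bit yields 0b01010101 = 0x55 in each of the 16 bytes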
    byte[] expected = Bytes.toArray(Collections.nCopies(16, (byte) 0x55));
    assertArrayEquals(expected, actual);
}

From source file: com.google.uzaygezen.core.BitVectorTest.java

private void checkOddBitsToBigEndianByteArray(Function<Integer, BitVector> factory) {
    BitVector v = factory.apply(128);
    for (int i = 0; i < 64; ++i) {
        v.set(2 * i + 1);
    }
    byte[] actual = v.toBigEndianByteArray();
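    // setting every odd bit yields 0b10101010 = 0xAA in each of the 16 bytes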
    byte[] expected = Bytes.toArray(Collections.nCopies(16, (byte) 0xAA));
    assertArrayEquals(expected, actual);
}

From source file: org.broadinstitute.gatk.utils.variant.GATKVariantContextUtils.java

/**
 * Subset the samples in VC to reference only information with ref call alleles
 *
 * Preserves DP if present
 *
 * @param vc the variant context to subset down to
 * @param ploidy ploidy to use if a genotype doesn't have any alleles
 * @return a GenotypesContext
 */
public static GenotypesContext subsetToRefOnly(final VariantContext vc, final int ploidy) {
    if (vc == null)
        throw new IllegalArgumentException("vc cannot be null");
    if (ploidy < 1)
        throw new IllegalArgumentException("ploidy must be >= 1 but got " + ploidy);

    // the genotypes with PLs
    final GenotypesContext oldGTs = vc.getGenotypes();

    // optimization: if no input genotypes, just exit
    if (oldGTs.isEmpty())
        return oldGTs;

    // the new genotypes to create
    final GenotypesContext newGTs = GenotypesContext.create(oldGTs.size());

    final Allele ref = vc.getReference();
    final List<Allele> diploidRefAlleles = Arrays.asList(ref, ref);

    // create the new genotypes
    for (final Genotype g : vc.getGenotypes()) {
        final int gPloidy = g.getPloidy() == 0 ? ploidy : g.getPloidy();
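        // reuse the prebuilt diploid pair in the common case; otherwise nCopies builds a ref-allele list matching the ploidy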
        final List<Allele> refAlleles = gPloidy == 2 ? diploidRefAlleles : Collections.nCopies(gPloidy, ref);
        final GenotypeBuilder gb = new GenotypeBuilder(g.getSampleName(), refAlleles);
        if (g.hasDP())
            gb.DP(g.getDP());
        if (g.hasGQ())
            gb.GQ(g.getGQ());
        newGTs.add(gb.make());
    }

    return newGTs;
}