Example usage for java.util.HashMap computeIfAbsent

Introduction

This page collects usage examples for java.util.HashMap#computeIfAbsent, drawn from open-source projects.

Prototype

@Override
public V computeIfAbsent(K key, Function<? super K, ? extends V> mappingFunction) 

Document

If the specified key is not already associated with a value (or is mapped to null), this method attempts to compute its value using the given mapping function and enters it into the map, unless the computed value is null. On a best-effort basis, the implementation throws a ConcurrentModificationException if it detects that the mapping function modified the map during computation.
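
Before the project examples, here is a minimal self-contained sketch of the idiom most of them use: building a multimap whose buckets are created lazily. The class and data are illustrative only; note that, per the documentation above, the mapping function must not modify the map itself.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;

public class ComputeIfAbsentDemo {
    public static void main(String[] args) {
        HashMap<String, List<String>> index = new HashMap<>();
        // The list for "fruits" is created exactly once, on the first call;
        // the second call reuses it.
        index.computeIfAbsent("fruits", k -> new ArrayList<>()).add("apple");
        index.computeIfAbsent("fruits", k -> new ArrayList<>()).add("pear");
        System.out.println(index); // prints {fruits=[apple, pear]}
    }
}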

Usage

From source file:com.github.anba.es6draft.chakra.ChakraTest.java

private static BiFunction<Path, Path, ChakraTestInfo> createTestFunction() {
    HashMap<Path, Map<String, TestSetting>> settingsMapping = new HashMap<>();

    return (basedir, file) -> {
        ChakraTestInfo testInfo = new ChakraTestInfo(basedir, file);
        Path dir = basedir.resolve(file).getParent();
        Map<String, TestSetting> map = settingsMapping.computeIfAbsent(dir, ChakraTest::readSettings);
        TestSetting setting = map.get(file.getFileName().toString());
        if (setting != null) {
            testInfo.baseline = setting.baseline;
            testInfo.setEnabled(!setting.disabled);
        }
        return testInfo;
    };
}
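
In this example computeIfAbsent acts as a memoizing cache: the settings file of a test directory is parsed at most once (ChakraTest::readSettings runs only on a cache miss), and every subsequent test from the same directory reuses the cached map.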

From source file:com.vmware.admiral.request.compute.ComputeReservationTaskService.java

private void filterSelectedByEndpoint(ComputeReservationTaskState state,
        List<GroupResourcePlacementState> placements, List<String> tenantLinks,
        ComputeDescription computeDesc) {
    if (placements == null) {
        failTask(null, new LocalizableValidationException("No placements found",
                "request.compute.reservation.placements.missing"));
        return;
    }

    HashMap<String, List<GroupResourcePlacementState>> placementsByRpLink = new HashMap<>();
    placements.forEach(
            p -> placementsByRpLink.computeIfAbsent(p.resourcePoolLink, k -> new ArrayList<>()).add(p));
    String endpointLink = getProp(computeDesc.customProperties, ComputeProperties.ENDPOINT_LINK_PROP_NAME);

    EnvironmentQueryUtils.queryEnvironments(getHost(), UriUtils.buildUri(getHost(), getSelfLink()),
            placementsByRpLink.keySet(), endpointLink, tenantLinks, state.networkProfileConstraints,
            (envs, e) -> {
                if (e != null) {
                    failTask("Error retrieving environments for the selected placements: ", e);
                    return;
                }

                EnvironmentComputeDescriptionEnhancer enhancer = new EnvironmentComputeDescriptionEnhancer(
                        getHost(), UriUtils.buildUri(getHost().getPublicUri(), getSelfLink()));
                List<DeferredResult<Pair<ComputeDescription, EnvEntry>>> list = envs.stream()
                        .flatMap(envEntry -> envEntry.envLinks.stream().map(envLink -> {
                            ComputeDescription cloned = Utils.cloneObject(computeDesc);
                            EnhanceContext context = new EnhanceContext();
                            context.environmentLink = envLink;
                            context.imageType = cloned.customProperties
                                    .remove(ComputeConstants.CUSTOM_PROP_IMAGE_ID_NAME);
                            context.skipNetwork = true;
                            context.regionId = envEntry.endpoint.endpointProperties
                                    .get(EndpointConfigRequest.REGION_KEY);

                            DeferredResult<Pair<ComputeDescription, EnvEntry>> r = new DeferredResult<>();
                            enhancer.enhance(context, cloned).whenComplete((cd, t) -> {
                                if (t != null) {
                                    r.complete(Pair.of(cd, null));
                                    return;
                                }
                                String enhancedImage = cd.customProperties
                                        .get(ComputeConstants.CUSTOM_PROP_IMAGE_ID_NAME);
                                if (enhancedImage != null && context.imageType.equals(enhancedImage)) {
                                    r.complete(Pair.of(cd, null));
                                    return;
                                }
                                r.complete(Pair.of(cd, envEntry));
                            });
                            return r;
                        })).collect(Collectors.toList());

                DeferredResult.allOf(list).whenComplete((all, t) -> {
                    if (t != null) {
                        failTask("Error retrieving environments for the selected placements: ", t);
                        return;
                    }

                    List<GroupResourcePlacementState> filteredPlacements = all.stream()
                            .filter(p -> p.getRight() != null)
                            .flatMap(p -> supportsCD(state, placementsByRpLink, p))
                            .collect(Collectors.toList());

                    logInfo("Remaining candidate placements after endpoint filtering: " + filteredPlacements);

                    filterPlacementsByRequirements(state, filteredPlacements, tenantLinks, computeDesc);
                });
            });
}
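
The forEach near the top shows the classic grouping idiom: computeIfAbsent(key, k -> new ArrayList<>()).add(value) builds a multimap of placements per resource pool link. Below is a minimal sketch (with illustrative data, not taken from the project above) of the same idiom next to its Stream-based equivalent, Collectors.groupingBy:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class GroupingDemo {
    public static void main(String[] args) {
        List<String> words = Arrays.asList("ant", "ape", "bee");
        // computeIfAbsent idiom: one bucket per first letter, created lazily.
        HashMap<Character, List<String>> byFirst = new HashMap<>();
        words.forEach(w -> byFirst.computeIfAbsent(w.charAt(0), k -> new ArrayList<>()).add(w));
        // Stream alternative producing an equal map.
        Map<Character, List<String>> viaStream = words.stream()
                .collect(Collectors.groupingBy(w -> w.charAt(0)));
        System.out.println(byFirst.equals(viaStream)); // prints true
    }
}

The explicit loop is a reasonable choice in the method above because it keeps querying and reusing placementsByRpLink afterwards (its keySet() drives the environment query).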

From source file:gedi.atac.Atac.java

public static void normalizationFactors(GenomicRegionStorage<? extends AlignedReadsData> storage,
        GenomicRegionStorage<?> peaks, String out, String peakout, String detailout, String... typePattern)
        throws IOException {

    int cond = storage.getRandomRecord().getNumConditions();
    int[][] allCounts = new int[typePattern.length][cond];
    int[][] peakCounts = new int[typePattern.length][cond];

    Pattern[] types = new Pattern[typePattern.length];
    for (int i = 0; i < types.length; i++)
        types[i] = Pattern.compile(typePattern[i]);

    new LineOrientedFile(detailout).delete();

    Set<ReferenceSequence> refs = new TreeSet<ReferenceSequence>();
    for (ReferenceSequence ref : storage.getReferenceSequences())
        refs.add(ref.toStrandIndependent());

    for (ReferenceSequence ref : refs) {

        int ty = 0;
        for (; ty < types.length && !types[ty].matcher(ref.toPlusMinusString()).find(); ty++)
            ;

        if (ty < types.length)
            System.out.println(ref + " -> " + types[ty]);
        else
            System.out.println("Skipping " + ref);

        HashMap<ImmutableReferenceGenomicRegion<?>, int[]> detail = new HashMap<ImmutableReferenceGenomicRegion<?>, int[]>();

        int tyind = ty;
        Consumer<MutableReferenceGenomicRegion<? extends AlignedReadsData>> adder = new Consumer<MutableReferenceGenomicRegion<? extends AlignedReadsData>>() {

            @Override
            public void accept(MutableReferenceGenomicRegion<? extends AlignedReadsData> mrgr) {

                int f = GenomicRegionPosition.Start.position(ref, mrgr.getRegion(), 4);
                int b = GenomicRegionPosition.Stop.position(ref, mrgr.getRegion(), -4);

                int inpeak = 0;
                if (StreamSupport.stream(peaks.iterateIntersectingMutableReferenceGenomicRegions(
                        ref.toStrandIndependent(), f, f + 1), false).peek(peak -> {
                            int[] c = detail.computeIfAbsent(peak.toImmutable(), x -> new int[cond]);
                            for (int i = 0; i < c.length; i++)
                                c[i] += mrgr.getData().getTotalCountForConditionInt(i, ReadCountMode.All);
                        }).count() > 0)
                    inpeak++;

                if (StreamSupport.stream(peaks.iterateIntersectingMutableReferenceGenomicRegions(
                        ref.toStrandIndependent(), b, b + 1), false).peek(peak -> {
                            int[] c = detail.computeIfAbsent(peak.toImmutable(), x -> new int[cond]);
                            for (int i = 0; i < c.length; i++)
                                c[i] += mrgr.getData().getTotalCountForConditionInt(i, ReadCountMode.All);
                        }).count() > 0)
                    inpeak++;

                for (int i = 0; i < allCounts[tyind].length; i++) {
                    allCounts[tyind][i] += mrgr.getData().getTotalCountForConditionInt(i, ReadCountMode.All);
                    if (inpeak > 0)
                        peakCounts[tyind][i] += mrgr.getData().getTotalCountForConditionInt(i,
                                ReadCountMode.All) * inpeak;
                }
            }

        };
        if (ty < types.length) {
            storage.iterateMutableReferenceGenomicRegions(ref).forEachRemaining(adder);
            storage.iterateMutableReferenceGenomicRegions(ref.toPlusStrand()).forEachRemaining(adder);
            storage.iterateMutableReferenceGenomicRegions(ref.toMinusStrand()).forEachRemaining(adder);
        }

        LineOrientedFile d = new LineOrientedFile(detailout);
        if (d.exists())
            d.startAppending();
        else {
            d.startWriting();
            d.write("Peak\tType");
            for (int i = 0; i < cond; i++)
                d.writef("\t%d", i);
            d.writeLine();
        }

        for (ImmutableReferenceGenomicRegion<?> peak : detail.keySet()) {
            int[] count = detail.get(peak);
            d.writef("%s\t%s", peak.toLocationString(), typePattern[ty]);
            for (int c = 0; c < cond; c++)
                d.writef("\t%d", count[c]);
            d.writeLine();
        }
        d.finishWriting();

    }

    LineOrientedFile o = new LineOrientedFile(out);
    o.startWriting();
    o.write("Type\tCondition Index\tCount\n");
    for (int i = 0; i < types.length; i++) {
        for (int c = 0; c < allCounts[i].length; c++) {
            o.writef("%s\t%d\t%d\n", typePattern[i], c, allCounts[i][c]);
        }
    }
    o.finishWriting();

    o = new LineOrientedFile(peakout);
    o.startWriting();
    o.write("Type\tCondition Index\tCount\n");
    for (int i = 0; i < types.length; i++) {
        for (int c = 0; c < allCounts[i].length; c++) {
            o.writef("%s\t%d\t%d\n", typePattern[i], c, peakCounts[i][c]);
        }
    }
    o.finishWriting();
}
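
Here computeIfAbsent supplies a lazily created int[cond] counter array per peak region (detail.computeIfAbsent(peak.toImmutable(), x -> new int[cond])); the per-condition read counts are then accumulated in place, with no explicit containsKey/put dance.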

From source file:gedi.riboseq.inference.orf.OrfFinder.java

private LinkedList<OrfWithCodons>[] findConnectedOrfs(LinkedList<OrfWithCodons> orfs) {

    HashMap<Codon, HashSet<OrfWithCodons>> cod2Orf = new HashMap<Codon, HashSet<OrfWithCodons>>();
    for (OrfWithCodons orf : orfs)
        for (Codon c : orf.getCodons())
            cod2Orf.computeIfAbsent(c, x -> new HashSet<>()).add(orf);

    UnionFind<OrfWithCodons> uf = new UnionFind<OrfWithCodons>(orfs);
    for (Codon c : cod2Orf.keySet())
        uf.unionAll(cod2Orf.get(c));

    return uf.getGroups(new LinkedList<OrfWithCodons>());
}
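
The codon-to-ORFs multimap built with computeIfAbsent feeds a union-find structure: ORFs sharing at least one codon are unioned, so the method returns the connected components of the ORF overlap graph.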

From source file:gedi.riboseq.inference.orf.OrfFinder.java

private void distributeCodons(LinkedList<OrfWithCodons> orfs) {
    HashMap<Codon, HashSet<OrfWithCodons>> cod2Orf = new HashMap<Codon, HashSet<OrfWithCodons>>();
    for (OrfWithCodons orf : orfs)
        for (Codon c : orf.getCodons())
            cod2Orf.computeIfAbsent(c, x -> new HashSet<>()).add(orf);

    for (OrfWithCodons orf : orfs) {
        ArrayList<Codon> estCodonsUnique = new ArrayList<Codon>();
        ArrayList<Codon> estCodonsEach = new ArrayList<Codon>();
        for (Codon c : orf.getCodons()) {
            estCodonsUnique.add(c.createProportionalUnique(orf, cod2Orf.get(c)));
            estCodonsEach.add(c.createProportionalEach(orf, cod2Orf.get(c)));
        }
        orf.setEstimatedCodons(estCodonsUnique, estCodonsEach);
    }

}
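
The same codon-to-ORFs multimap is rebuilt here so that each codon's activity can be split proportionally among all ORFs containing it, once uniquely and once per ORF.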

From source file:gedi.riboseq.inference.orf.OrfFinder.java

private void inferOverlappingOrfActivitiesEM(List<OrfWithCodons> orfs) {

    HashMap<Codon, HashSet<OrfWithCodons>> cod2Orf = new HashMap<Codon, HashSet<OrfWithCodons>>();
    int numCond = -1;
    for (OrfWithCodons orf : orfs)
        for (Codon c : orf.getCodons()) {
            cod2Orf.computeIfAbsent(c, x -> new HashSet<>()).add(orf);
            numCond = c.getActivity().length;
        }

    // now equivalence classes: gives you all codons that are consistent with a specific combination of orfs
    HashMap<HashSet<OrfWithCodons>, HashSet<Codon>> equi = new HashMap<HashSet<OrfWithCodons>, HashSet<Codon>>();
    for (Codon c : cod2Orf.keySet()) {
        equi.computeIfAbsent(cod2Orf.get(c), x -> new HashSet<>()).add(c);
    }

    OrfWithCodons[][] E = new OrfWithCodons[equi.size()][];
    HashSet<Codon>[] codons = new HashSet[E.length];
    int ind = 0;
    for (HashSet<OrfWithCodons> e : equi.keySet()) {
        codons[ind] = equi.get(e);
        E[ind++] = e.toArray(new OrfWithCodons[0]);
    }

    int dfEach = (numCond - 1) * orfs.size();
    int dfUnique = (numCond - 1);
    double llEach = 0;
    double llUnique = 0;

    double[] alpha = new double[E.length];
    for (int i = 0; i < alpha.length; i++) {
        for (Codon codon : codons[i])
            alpha[i] += codon.getTotalActivity();
    }
    double sum = EI.wrap(alpha).sum();

    // TODO not quite right, divide by effective lengths, then go through all equiv classes and sum the weighted alphas
    llUnique = new EquivalenceClassCountEM<OrfWithCodons>(E, alpha, orf -> orf.getEffectiveLength())
            .compute(miniter, maxiter, (orf, pi) -> orf.setEstimatedTotalActivity(pi * sum, pi));

    for (int c = 0; c < numCond; c++) {
        Arrays.fill(alpha, 0);
        for (int i = 0; i < alpha.length; i++)
            for (Codon codon : codons[i])
                alpha[i] += codon.getActivity()[c];
        int uc = c;

        double csum = EI.wrap(alpha).sum();
        double lla = new EquivalenceClassCountEM<OrfWithCodons>(E, alpha, orf -> orf.getEffectiveLength())
                .compute(miniter, maxiter, (orf, pi) -> orf.setEstimatedTotalActivity(uc, pi * csum, pi));
        if (!Double.isNaN(lla))
            llEach += lla;
    }

    double p = ChiSquare.cumulative(2 * llEach - 2 * llUnique, dfEach - dfUnique, false, false);

    for (OrfWithCodons o : orfs)
        o.setUniqueProportionPval(p);

}
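
Note the two-level use of the idiom: the first computeIfAbsent builds the codon-to-ORFs map, and the second inverts it into equivalence classes, using a HashSet<OrfWithCodons> itself as the map key. This is safe only because the sets are not mutated after insertion; changing a key's hash code afterwards would corrupt the HashMap.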

From source file:gedi.riboseq.inference.orf.OrfFinder.java

private void overlapUniqueCoverage(List<OrfWithCodons> orfs) {

    HashMap<Codon, HashSet<OrfWithCodons>> cod2Orf = new HashMap<Codon, HashSet<OrfWithCodons>>();
    int numCond = -1;
    for (OrfWithCodons orf : orfs)
        for (Codon c : orf.getCodons()) {
            cod2Orf.computeIfAbsent(c, x -> new HashSet<>()).add(orf);
            numCond = c.getActivity().length;
        }

    // now equivalence classes: gives you all codons that are consistent with a specific combination of orfs
    HashMap<HashSet<OrfWithCodons>, HashSet<Codon>> equi = new HashMap<HashSet<OrfWithCodons>, HashSet<Codon>>();
    for (Codon c : cod2Orf.keySet()) {
        equi.computeIfAbsent(cod2Orf.get(c), x -> new HashSet<>()).add(c);
    }

    // compute equi regions for their length
    HashMap<HashSet<OrfWithCodons>, Integer> equiLengths = new HashMap<HashSet<OrfWithCodons>, Integer>();
    for (HashSet<OrfWithCodons> e : equi.keySet()) {
        LinkedList<ArrayGenomicRegion> equiCodons = null;
        for (OrfWithCodons orf : e) {
            if (equiCodons == null) {
                equiCodons = new LinkedList<ArrayGenomicRegion>();
                for (int i = 0; i < orf.getRegion().getTotalLength(); i += 3)
                    equiCodons.add(orf.getRegion().map(new ArrayGenomicRegion(i, i + 3)));
            } else {
                Iterator<ArrayGenomicRegion> it = equiCodons.iterator();
                while (it.hasNext()) {
                    ArrayGenomicRegion cod = it.next();
                    if (!orf.getRegion().containsUnspliced(cod)
                            || orf.getRegion().induce(cod.getStart()) % 3 != 0)
                        it.remove();
                }
            }
        }
        for (OrfWithCodons orf : orfs) {
            if (!e.contains(orf)) {

                Iterator<ArrayGenomicRegion> it = equiCodons.iterator();
                while (it.hasNext()) {
                    ArrayGenomicRegion cod = it.next();
                    if (orf.getRegion().containsUnspliced(cod)
                            && orf.getRegion().induce(cod.getStart()) % 3 == 0)
                        it.remove();
                }
            }

        }
        equiLengths.put(e, equiCodons.size());
    }

    HashMap<OrfWithCodons, double[]> total = estimateByCoverage(equi, equiLengths, c -> c.getTotalActivity());
    double sum = EI.wrap(total.values()).mapToDouble(a -> a[0]).sum();
    for (OrfWithCodons orf : total.keySet())
        orf.setEstimatedTotalActivity(total.get(orf)[0], total.get(orf)[0] / sum);

    for (int i = 0; i < numCond; i++) {
        int ei = i;
        total = estimateByCoverage(equi, equiLengths, c -> c.getActivity()[ei]);
        sum = EI.wrap(total.values()).mapToDouble(a -> a[0]).sum();
        for (OrfWithCodons orf : total.keySet())
            orf.setEstimatedTotalActivity(i, total.get(orf)[0], total.get(orf)[0] / sum);
    }

}
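
This method reuses the equivalence-class construction of the previous example, but instead of an EM fit it estimates per-ORF activity from coverage, weighting each class by the number of in-frame codons unique to it (equiLengths).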

From source file:gedi.riboseq.inference.codon.ReadsXCodonMatrix.java

public double regularize3(Codon codon) {

    double deltaLL = 0;
    HashMap<Read, double[]> rs = M.get(codon);
    HashMap<Codon, MutableDouble> ntotal = new HashMap<Codon, MutableDouble>();

    for (Read r : rs.keySet()) {
        double[] d = rs.get(r);
        if (d[1] == 0)
            continue;

        // try to redistribute d[1] to other codons
        HashMap<Codon, double[]> cs = I.get(r);
        double s = 0;
        for (Codon c : cs.keySet()) {
            double[] d2 = cs.get(c);
            if (d2 != d)
                s += d2[1];
        }
        if (s == 0)
            return Double.NEGATIVE_INFINITY; // cannot distribute read to another codon!

        double beforesum = 0;
        for (Codon c : cs.keySet()) {
            double[] d2 = cs.get(c);
            beforesum += c.totalActivity * d2[0];
            if (d2 != d) {
                ntotal.computeIfAbsent(c, x -> new MutableDouble(c.totalActivity)).N += d[1] * d2[1] / s;
                d2[1] += d[1] * d2[1] / s;
            }
        }
        deltaLL += r.totalCount * (-Math.log(beforesum));
    }
    for (Read r : rs.keySet()) {
        double[] d = rs.get(r);
        if (d[1] == 0)
            continue;

        HashMap<Codon, double[]> cs = I.get(r);
        double aftersum = 0;
        for (Codon c : cs.keySet()) {
            double[] d2 = cs.get(c);
            if (d2 != d) {
                aftersum += ntotal.get(c).N * d2[0];
            }
        }
        deltaLL += r.totalCount * (Math.log(aftersum));

        d[1] = 0;
    }

    //      double deltaparam = -total; 
    //      return 2*deltaparam-2*deltaLL; // == AIC_after - AIC_before, i.e. regularization is successful if this is negative
    return deltaLL;//codon.totalActivity;
}
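
Here computeIfAbsent pairs with a mutable accumulator: ntotal.computeIfAbsent(c, x -> new MutableDouble(c.totalActivity)).N += ... seeds the running total with the codon's current activity on first access and accumulates into the same object on every later hit.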

From source file:gedi.util.ArrayUtils.java

public static <A, K, V, C extends Collection<? super V>> HashMap<K, C> indexMulti(Iterator<A> it,
        Function<? super A, ? extends K> toKey, Function<? super A, ? extends V> toVal,
        Function<? super K, ? extends C> multi) {
    HashMap<K, C> re = new HashMap<K, C>();
    while (it.hasNext()) {
        A n = it.next();
        K key = toKey.apply(n);
        V val = toVal.apply(n);
        re.computeIfAbsent(key, multi).add(val);
    }
    return re;
}
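
This generic helper turns any iterator into a multimap in a single pass, with the caller choosing the bucket type through the multi factory. A hypothetical call site (the word list and grouping are illustrative; it assumes gedi.util.ArrayUtils is on the classpath):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.function.Function;

List<String> words = Arrays.asList("ant", "bee", "horse");
HashMap<Integer, List<String>> byLength = ArrayUtils.indexMulti(
        words.iterator(),
        String::length,          // key: word length
        Function.identity(),     // value: the word itself
        k -> new ArrayList<>()); // bucket factory, invoked once per new key
// byLength is {3=[ant, bee], 5=[horse]}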

From source file:org.apache.lens.cube.parse.PruneCauses.java

private HashMap<String, List<CandidateTablePruneCause>> computeCompact() {
    HashMap<String, List<CandidateTablePruneCause>> detailedMessage = Maps.newHashMap();
    for (Map.Entry<CandidateTablePruneCause, List<T>> entry : getReversed().entrySet()) {
        String key = StringUtils.join(entry.getValue(), ",");
        detailedMessage.computeIfAbsent(key, k -> new ArrayList<>()).add(entry.getKey());
    }
    return detailedMessage;
}
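
Here the idiom inverts a map: prune causes whose affected tables join to the same comma-separated key are grouped under one entry, again via computeIfAbsent-then-add (Maps.newHashMap is Guava, StringUtils.join is Apache Commons Lang).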