List of usage examples for com.google.common.collect.Multimap.asMap()
Map<K, Collection<V>> asMap();
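Before the project examples below, here is a minimal, self-contained sketch of the asMap() contract: it returns a live Map view with one entry per distinct key, mapped to that key's collection of values. The class name and sample data are hypothetical, invented for illustration; the Guava calls themselves (ArrayListMultimap.create, put, asMap) are the standard API.

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;

import java.util.Collection;
import java.util.Map;

public class MultimapAsMapDemo {
    public static void main(String[] args) {
        Multimap<String, Integer> scores = ArrayListMultimap.create();
        scores.put("alice", 10);
        scores.put("alice", 12);
        scores.put("bob", 7);

        // One entry per distinct key; the value is that key's collection.
        Map<String, Collection<Integer>> view = scores.asMap();
        for (Map.Entry<String, Collection<Integer>> e : view.entrySet()) {
            System.out.println(e.getKey() + " -> " + e.getValue()); // alice -> [10, 12], bob -> [7]
        }

        // The view is live and writes through: removing a key here removes
        // its entries from the underlying multimap (put is unsupported).
        view.remove("bob");
        System.out.println(scores.containsKey("bob")); // false
    }
}

Most of the examples below follow the same pattern: build up a Multimap, then iterate asMap().entrySet() to process each key's values as a group.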
From source file:sadl.models.pdrta.StateStatistic.java
/**
 * Calculates the {@link LikelihoodValue} of the histogram bin distributions for splitting a transition. This is
 * done by splitting the set of {@link TimedTail}s in a {@link PDRTAState}.
 *
 * @param s
 *            The {@link PDRTAState} for splitting
 * @param mHist
 *            The set of {@link TimedTail}s to be split apart, clustered by histogram index
 * @return The {@link LikelihoodValue} of the histogram bin distributions for splitting a transition
 */
public static LikelihoodValue getLikelihoodRatioTime(PDRTAState s, Multimap<Integer, TimedTail> mHist,
        boolean advancedPooling, CalcRatio cr) {
    final StateStatistic st = s.getStat();
    final PDRTA a = s.getPDRTA();
    final int minData = PDRTA.getMinData();
    if (!st.trainMode) {
        throw new UnsupportedOperationException();
    }
    // LRT_FIX : Operator for calculation interruption (thesis: AND, impl: OR, own: AND)
    if (SimplePDRTALearner.bOp[2].eval((st.totalOutCount - mHist.size()) < minData, mHist.size() < minData)) {
        return new LikelihoodValue();
    }
    final int[] part1TimeCount = Arrays.copyOf(st.timeCount, st.timeCount.length);
    final int[] part2TimeCount = new int[st.timeCount.length];
    for (final Entry<Integer, Collection<TimedTail>> eCol : mHist.asMap().entrySet()) {
        part1TimeCount[eCol.getKey().intValue()] -= eCol.getValue().size();
        part2TimeCount[eCol.getKey().intValue()] += eCol.getValue().size();
    }
    return calcInterimLRT(a, part1TimeCount, part2TimeCount, advancedPooling, cr);
}
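Note the two different sizes in play above: mHist.size() counts every key-value pair (every TimedTail across all bins), while mHist.asMap().entrySet() yields one entry per distinct histogram index, whose value collection holds that bin's tails.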
From source file:com.eucalyptus.blockstorage.SnapshotUpdateEventListener.java
@Override
public Boolean call() throws Exception {
    try {
        try {
            final Multimap<String, Snapshot> snapshots = ArrayListMultimap.create();
            for (Snapshot s : Snapshots.list()) {
                snapshots.put(s.getPartition(), s);
            }
            final Map<String, Collection<Snapshot>> snapshotsByPartition = ImmutableMap
                    .copyOf(snapshots.asMap());
            final Map<String, Supplier<Map<String, StorageSnapshot>>> scSnapshotsByPartition = Maps
                    .newHashMap();
            for (final String partition : snapshotsByPartition.keySet()) {
                scSnapshotsByPartition.put(partition, getSnapshotsInPartition(partition));
            }
            for (final String partition : snapshotsByPartition.keySet()) {
                try {
                    final Map<String, StorageSnapshot> storageSnapshots = scSnapshotsByPartition.get(partition)
                            .get();
                    for (final Snapshot snapshot : snapshotsByPartition.get(partition)) {
                        final StorageSnapshot storageSnapshot = storageSnapshots
                                .remove(snapshot.getDisplayName());
                        updateSnapshot(snapshot, storageSnapshot);
                    }
                    for (StorageSnapshot unknownSnapshot : storageSnapshots.values()) {
                        LOG.trace("SnapshotStateUpdate: found unknown snapshot: " + unknownSnapshot.getSnapshotId()
                                + " " + unknownSnapshot.getStatus());
                    }
                } catch (Exception ex) {
                    LOG.error(ex);
                    Logs.extreme().error(ex, ex);
                }
            }
        } catch (Exception ex) {
            LOG.error(ex);
            Logs.extreme().error(ex, ex);
        }
    } finally {
        ready.set(true);
    }
    return true;
}
From source file:sadl.models.pdrta.StateStatistic.java
/**
 * Calculates the {@link LikelihoodValue} of the symbol distributions for splitting a transition. This is done by
 * splitting the set of {@link TimedTail}s in a {@link PDRTAState}.
 *
 * @param s
 *            The {@link PDRTAState} for splitting
 * @param mSym
 *            The set of {@link TimedTail}s to be split apart, clustered by symbol index
 * @return The {@link LikelihoodValue} of the symbol distributions for splitting a transition
 */
public static LikelihoodValue getLikelihoodRatioSym(PDRTAState s, Multimap<Integer, TimedTail> mSym,
        boolean advancedPooling, CalcRatio cr) {
    final StateStatistic st = s.getStat();
    final PDRTA a = s.getPDRTA();
    final int minData = PDRTA.getMinData();
    if (!st.trainMode) {
        throw new UnsupportedOperationException();
    }
    // LRT_FIX : Operator for calculation interruption (thesis: AND, impl: OR, own: AND)
    if (SimplePDRTALearner.bOp[2].eval((st.totalOutCount - mSym.size()) < minData, mSym.size() < minData)) {
        return new LikelihoodValue();
    }
    final int[] part1SymCount = Arrays.copyOf(st.symbolCount, st.symbolCount.length);
    final int[] part2SymCount = new int[st.symbolCount.length];
    for (final Entry<Integer, Collection<TimedTail>> eCol : mSym.asMap().entrySet()) {
        part1SymCount[eCol.getKey().intValue()] -= eCol.getValue().size();
        part2SymCount[eCol.getKey().intValue()] += eCol.getValue().size();
    }
    return calcInterimLRT(a, part1SymCount, part2SymCount, advancedPooling, cr);
}
From source file:org.opentestsystem.shared.security.domain.SbacUser.java
private Map<String, Collection<GrantedAuthority>> calculateAuthoritiesByTenantId(
        final Multimap<String, SbacRole> inUserRoles) {
    Multimap<String, GrantedAuthority> ret = ArrayListMultimap.create();
    if (inUserRoles != null) {
        for (SbacRole role : inUserRoles.values()) {
            if (role.isApplicableToComponent() && role.getEffectiveTenant() != null
                    && role.getPermissions() != null && role.getPermissions().size() > 0) {
                ret.putAll(role.getEffectiveTenant().getId(), role.getPermissions());
            }
        }
    }
    return ret.asMap();
}
From source file:dmg.util.command.TextHelpPrinter.java
@Override
public String getHelp(Object instance) {
    Class<?> clazz = instance.getClass();
    Command command = clazz.getAnnotation(Command.class);
    StringWriter out = new StringWriter();
    PrintWriter writer = new PrintWriter(out);
    writer.println(heading("NAME"));
    writer.append(" ").append(literal(command.name()));
    if (!command.hint().isEmpty()) {
        writer.append(" -- ").append(command.hint());
    }
    writer.println();
    writer.println();
    writer.println(heading("SYNOPSIS"));
    writer.append(Strings.wrap(" ", literal(command.name()) + " " + getSignature(clazz), WIDTH));
    writer.println();
    if (clazz.getAnnotation(Deprecated.class) != null) {
        writer.append(Strings.wrap(" ", "This command is deprecated and will be removed in a future release.",
                WIDTH));
        writer.println();
    }
    if (!command.description().isEmpty()) {
        writer.println(heading("DESCRIPTION"));
        writer.append(Strings.wrap(" ", command.description(), WIDTH));
    }
    writer.println();
    List<Field> arguments = AnnotatedCommandUtils.getArguments(clazz);
    if (!arguments.isEmpty() && any(arguments, shouldBeDocumented)) {
        writer.println(heading("ARGUMENTS"));
        for (Field field : arguments) {
            Argument argument = field.getAnnotation(Argument.class);
            writer.append(" ").println(getMetaVar(field, argument));
            String help = argument.usage();
            if (!argument.required()) {
                help = Joiner.on(' ').join(help, getDefaultDescription(instance, field));
            }
            if (field.getAnnotation(ExpandWith.class) != null) {
                help = Joiner.on(' ').join(help, "Glob patterns will be expanded.");
            }
            if (!help.isEmpty()) {
                writer.append(Strings.wrap(" ", help, WIDTH));
            }
        }
        writer.println();
    }
    Multimap<String, Field> options = AnnotatedCommandUtils.getOptionsByCategory(clazz);
    if (!options.isEmpty()) {
        writer.println(heading("OPTIONS"));
        for (Map.Entry<String, Collection<Field>> category : options.asMap().entrySet()) {
            if (!category.getKey().isEmpty()) {
                writer.println();
                writer.append(" ").println(heading(category.getKey() + ":"));
            }
            for (Field field : category.getValue()) {
                Class<?> type = field.getType();
                Option option = field.getAnnotation(Option.class);
                if (option != null) {
                    writer.append(" ").append(literal(" -" + option.name()));
                    if (!type.isArray()) {
                        if (!Boolean.class.equals(type) && !Boolean.TYPE.equals(type)) {
                            writer.append("=").append(getMetaVar(type, option));
                        }
                    } else if (option.separator().isEmpty()) {
                        writer.append("=").append(getMetaVar(type.getComponentType(), option));
                        writer.append(value("..."));
                    } else {
                        String metaVar = getMetaVar(type.getComponentType(), option);
                        writer.append("=").append(metaVar);
                        writer.append("[").append(option.separator()).append(metaVar).append("]");
                        writer.append(value("..."));
                    }
                    writer.println();
                    String usage = option.usage();
                    if (!option.required()) {
                        usage = Joiner.on(' ').join(usage, getDefaultDescription(instance, field));
                    }
                    if (!usage.isEmpty()) {
                        writer.append(Strings.wrap(" ", usage, WIDTH));
                    }
                }
                CommandLine cmd = field.getAnnotation(CommandLine.class);
                if (cmd != null && cmd.allowAnyOption()) {
                    writer.append(" ").append(valuespec(cmd.valueSpec())).println();
                    String usage = cmd.usage();
                    if (!usage.isEmpty()) {
                        writer.append(Strings.wrap(" ", usage, WIDTH));
                    }
                }
            }
        }
    }
    writer.flush();
    return out.toString();
}
From source file:org.dllearner.utilities.sparql.RedundantTypeTriplePatternRemover.java
@Override
public void visit(ElementTriplesBlock el) {
    // get all rdf:type triple patterns
    Multimap<Node, Triple> subject2TypeTriples = HashMultimap.create();
    for (Iterator<Triple> iterator = el.patternElts(); iterator.hasNext();) {
        Triple t = iterator.next();
        if (t.getPredicate().matches(RDF.type.asNode())) {
            subject2TypeTriples.put(t.getSubject(), t);
        }
    }
    // check for semantically redundant triple patterns
    Set<Triple> redundantTriples = new HashSet<>();
    for (Entry<Node, Collection<Triple>> entry : subject2TypeTriples.asMap().entrySet()) {
        Collection<Triple> triples = entry.getValue();
        // get all super classes
        Set<Node> superClasses = new HashSet<>();
        for (Triple triple : triples) {
            Node cls = triple.getObject();
            superClasses.addAll(getSuperClasses(cls));
        }
        for (Triple triple : triples) {
            Node cls = triple.getObject();
            if (superClasses.contains(cls)) {
                redundantTriples.add(triple);
            }
        }
    }
    // remove redundant triple patterns
    for (Iterator<Triple> iterator = el.patternElts(); iterator.hasNext();) {
        Triple t = iterator.next();
        if (redundantTriples.contains(t)) {
            iterator.remove();
        }
    }
}
From source file:org.ldp4j.application.kernel.engine.DefaultExistingPublicResource.java
@Override
protected void configureValidationConstraints(ValidatorBuilder builder, Individual<?, ?> individual,
        DataSet metadata) {
    builder.withPropertyConstraint(
            ValidationConstraintFactory.mandatoryPropertyValues(individual.property(RDF.TYPE.as(URI.class))));
    Multimap<URI, AttachedTemplate> attachmentMap = LinkedHashMultimap.create();
    for (AttachedTemplate attachedTemplate : template().attachedTemplates()) {
        URI propertyId = attachedTemplate.predicate().or(HAS_ATTACHMENT);
        attachmentMap.put(propertyId, attachedTemplate);
    }
    for (Entry<URI, Collection<AttachedTemplate>> entry : attachmentMap.asMap().entrySet()) {
        URI propertyId = entry.getKey();
        Property property = individual.property(propertyId);
        if (property != null) {
            configurePropertyValidationConstraints(builder, individual, metadata, property, entry.getValue());
        } else {
            builder.withPropertyConstraint(
                    ValidationConstraintFactory.readOnlyProperty(individual.id(), propertyId));
        }
    }
}
From source file:com.palantir.atlasdb.sweep.SweepTaskRunner.java
private Multimap<Cell, Long> getCellTsPairsToSweep(Multimap<Cell, Long> cellTsMappings,
        PeekingIterator<RowResult<Value>> values, long sweepTimestamp, SweepStrategy sweepStrategy,
        @Output Set<Cell> sentinelsToAdd) {
    Multimap<Cell, Long> cellTsMappingsToSweep = HashMultimap.create();
    Map<Long, Long> startTsToCommitTs = transactionService.get(cellTsMappings.values());
    for (Map.Entry<Cell, Collection<Long>> entry : cellTsMappings.asMap().entrySet()) {
        Cell cell = entry.getKey();
        Collection<Long> timestamps = entry.getValue();
        boolean sweepLastCommitted = isLatestValueEmpty(cell, values);
        Iterable<? extends Long> timestampsToSweep = getTimestampsToSweep(cell, timestamps, startTsToCommitTs,
                sentinelsToAdd, sweepTimestamp, sweepLastCommitted, sweepStrategy);
        cellTsMappingsToSweep.putAll(entry.getKey(), timestampsToSweep);
    }
    return cellTsMappingsToSweep;
}
From source file:com.hosniah.grid.ArtMiner_bgrt.java
public double computeConditionalTriadicSupport(int cardinalOfSimilarTC) {
    int incidence = 0;
    int tasksGroupSize = this.tasks.size();
    Iterator<Integer> keyIterator = this.extractionContext.keySet().iterator();
    while (keyIterator.hasNext()) {
        Integer key = keyIterator.next();
        Collection<String> values = this.extractionContext.get(key);
        Multimap<Integer, String> occurence_map = ArrayListMultimap.create();
        for (String occurence : values) {
            String[] file2site = occurence.split("-");
            // use sites as keys and files as values
            occurence_map.put(Integer.parseInt(file2site[1]), file2site[0]);
        }
        /* Context is now formatted as {S1=[F2, F3, F4], S2=[F2, F4], S3=[F3, F4], S4=[F1, F3, F5]} */
        for (Collection<String> col : occurence_map.asMap().values()) {
            Object[] arr1 = col.toArray();
            Set<Object> set1 = new TreeSet<Object>();
            Set<String> set2 = new TreeSet<String>(String.CASE_INSENSITIVE_ORDER);
            set1.addAll(Arrays.asList(arr1));
            set2.addAll(Arrays.asList(this.filesOFRA));
            if (set1.containsAll(set2)) {
                incidence++;
            }
        }
    }
    // cast to double to avoid integer division truncating the support to 0
    double support = (double) incidence / tasksGroupSize;
    double supp_c = support / cardinalOfSimilarTC;
    System.out.println("support count..." + incidence);
    System.out.println("task count..." + tasksGroupSize);
    System.out.println("Similar TC count..." + cardinalOfSimilarTC);
    System.out.println(" ----- Support_c = " + supp_c);
    this.conditionalTriadicSupport = supp_c;
    return this.conditionalTriadicSupport;
}
From source file:com.flexive.sqlParser.Brace.java
/**
 * Group the conditions in the current brace using the given function. If no regrouping was performed,
 * {@code this} is returned, otherwise a new Brace instance is created.
 *
 * @param fun the function to perform the grouping (conditions with equal values will be grouped)
 * @return a re-grouped brace, or {@code this} when no actions were performed
 */
public Brace groupConditions(GroupFunction fun) throws SqlParserException {
    final Multimap<Object, Condition> groupedConditions = HashMultimap.create();
    for (BraceElement be : conditions) {
        if (be instanceof Condition) {
            groupedConditions.put(fun.apply((Condition) be), (Condition) be);
        }
    }
    Brace newBrace = null;
    final Map<Object, Collection<Condition>> grouped = groupedConditions.asMap();
    for (Collection<Condition> subConditions : grouped.values()) {
        if (subConditions.size() > 1 && subConditions.size() < conditions.size()) {
            // create a subcondition with the grouped conditions, prevent recursion
            // when all conditions are in the same group
            final Brace subBrace = new Brace(stmt);
            subBrace.setType(type);
            subBrace.addElements(subConditions.toArray(new BraceElement[subConditions.size()]));
            // add subcondition to newBrace
            if (newBrace == null) {
                newBrace = new Brace(stmt);
                newBrace.setType(type);
            }
            newBrace.addElement(subBrace);
        }
    }
    if (newBrace != null) {
        // reorg, add rest of the statement
        for (Collection<Condition> subConditions : grouped.values()) {
            if (subConditions.size() == 1) {
                // add all conditions not added before
                newBrace.addElement(subConditions.iterator().next());
            }
        }
        for (BraceElement be : conditions) {
            if (!(be instanceof Condition)) {
                newBrace.addElement(be);
            }
        }
    }
    return newBrace == null ? this : newBrace;
}