List of usage examples for com.google.common.collect Multimap values
Collection<V> values();
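Before the per-project examples, here is a minimal, self-contained sketch (the class name MultimapValuesDemo and the sample data are illustrative, not taken from any of the sources below) showing what values() gives you: a single live view over every value in the multimap, across all keys.

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;

import java.util.Collection;

public class MultimapValuesDemo {
    public static void main(String[] args) {
        Multimap<String, Integer> scores = ArrayListMultimap.create();
        scores.put("alice", 10);
        scores.put("alice", 20);
        scores.put("bob", 5);

        // values() flattens every value across all keys into one collection.
        Collection<Integer> all = scores.values();
        System.out.println(all.size()); // 3

        // The collection is a live view: removing from it removes the backing entry.
        all.remove(10);
        System.out.println(scores.get("alice")); // [20]
    }
}

Several of the examples below rely on exactly this flattening, for instance passing dependencies.values() or allInvoiceItems.values() to code that only needs the values and not the keys.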
From source file:com.android.tools.idea.templates.RepositoryUrlManager.java
/**
 * Resolves multiple dynamic dependencies on artifacts distributed in the SDK.
 *
 * <p>This method doesn't check any remote repositories, just the already downloaded SDK "extras" repositories.
 */
public List<GradleCoordinate> resolveDynamicSdkDependencies(
        @NotNull Multimap<String, GradleCoordinate> dependencies, @Nullable String supportLibVersionFilter,
        @NotNull AndroidSdkData sdk, @NotNull FileOp fileOp) {
    List<GradleCoordinate> result = Lists.newArrayListWithCapacity(dependencies.size());
    String supportFilter = findExistingExplicitVersion(dependencies.values());
    if (supportFilter != null) {
        supportLibVersionFilter = supportFilter;
    }

    for (String key : dependencies.keySet()) {
        GradleCoordinate highest = Collections.max(dependencies.get(key), COMPARE_PLUS_LOWER);
        if (highest.getGroupId() == null || highest.getArtifactId() == null) {
            return null;
        }

        // For test consistency, don't depend on installed SDK state while testing
        if (myForceRepositoryChecksInTests || !ApplicationManager.getApplication().isUnitTestMode()) {
            // If this coordinate points to an artifact in one of our repositories, check to see if there is
            // a static version that we can add instead of a plus revision.
            String filter = highest.getRevision();
            if (filter.endsWith("+")) {
                filter = filter.length() > 1 ? filter.substring(0, filter.length() - 1) : null;
                boolean includePreviews = false;
                if (filter == null && ImportModule.SUPPORT_GROUP_ID.equals(highest.getGroupId())) {
                    filter = supportLibVersionFilter;
                    includePreviews = true;
                }
                String version = getLibraryRevision(highest.getGroupId(), highest.getArtifactId(), filter,
                        includePreviews, sdk.getLocation(), fileOp);
                if (version == null && filter != null) {
                    // No library found at the support lib version filter level, so look for any match
                    version = getLibraryRevision(highest.getGroupId(), highest.getArtifactId(), null,
                            includePreviews, sdk.getLocation(), fileOp);
                }
                if (version == null && !includePreviews) {
                    // Still no library found, check preview versions
                    version = getLibraryRevision(highest.getGroupId(), highest.getArtifactId(), null, true,
                            sdk.getLocation(), fileOp);
                }
                if (version != null) {
                    String libraryCoordinate = highest.getId() + ":" + version;
                    GradleCoordinate available = GradleCoordinate.parseCoordinateString(libraryCoordinate);
                    if (available != null) {
                        File archiveFile = getArchiveForCoordinate(available, sdk.getLocation(), fileOp);
                        if (((archiveFile != null && fileOp.exists(archiveFile))
                                // Not a known library hardcoded in RepositoryUrlManager?
                                || SupportLibrary.forGradleCoordinate(available) == null)
                                && COMPARE_PLUS_LOWER.compare(available, highest) >= 0) {
                            highest = available;
                        }
                    }
                }
            }
        }
        result.add(highest);
    }
    return result;
}
From source file:co.mitro.analysis.StatsGenerator.java
/**
 * Generate statistics and return newly created objects that have not been committed.
 * @param outDir directory in which to write summary files. Subdirectories outDir/users
 *               and outDir/orgs must exist. Supply null for no output.
 */
public static Snapshot generateStatistics(String outDir, Manager manager)
        throws SQLException, IOException, MitroServletException {
    final long runTimestampMs = System.currentTimeMillis();
    Snapshot output = new Snapshot();
    // TODO: don't do this in one gigantic transaction.
    Multimap<Integer, Link> countToFile = TreeMultimap.create(Ordering.natural().reverse(), Ordering.natural());

    // get all orgs.
    Map<Integer, GroupInfo> orgIdToOrg = Maps.newHashMap();
    for (DBGroup o : DBGroup.getAllOrganizations(manager)) {
        GroupInfo newGi = new GroupInfo();
        newGi.autoDelete = o.isAutoDelete();
        newGi.groupId = o.getId();
        newGi.isTopLevelOrg = true;
        newGi.name = o.getName();
        Set<String> users = Sets.newHashSet();
        for (DBGroup orgGroup : o.getAllOrgGroups(manager)) {
            users.add(orgGroup.getName());
        }
        newGi.users = Lists.newArrayList(users);
        orgIdToOrg.put(newGi.groupId, newGi);
    }

    int numPeople = 0;
    for (DBIdentity id : manager.identityDao.queryForAll()) {
        ++numPeople;
        try {
            logger.info(id.getName() + ": " + id.getGuidCookie());
            DBHistoricalUserState userState = getHistoricalUserState(manager, runTimestampMs, orgIdToOrg, id);
            output.userStateObjects.add(userState);
            String filename = id.getName() + ".html";
            renderIfOutputEnabled(outDir, "/users/" + filename, userStateTemplate, userState);
            countToFile.put(userState.numSecrets, new Link(id.getName(), filename, userState.numSecrets));
        } catch (MitroServletException e) {
            logger.error("UNKNOWN ERROR", e);
        }
    }
    renderIfOutputEnabled(outDir, "/users/index.html", indexTemplate, countToFile.values());
    countToFile.clear();

    int numOrgs = 0;
    // now do the orgs
    for (DBGroup org : DBGroup.getAllOrganizations(manager)) {
        ++numOrgs;
        // hack to make this work
        Set<Integer> admins = Sets.newHashSet();
        org.putDirectUsersIntoSet(admins, DBAcl.adminAccess());
        int userId = admins.iterator().next();
        DBIdentity dbi = manager.identityDao.queryForId(userId);
        MitroRequestContext context = new MitroRequestContext(dbi, null, manager, null);
        GetOrganizationStateResponse resp = GetOrganizationState.doOperation(context, org.getId());
        DBHistoricalOrgState orgState = new DBHistoricalOrgState(resp, org.getId(), runTimestampMs);
        output.orgStateObjects.add(orgState);
        String filename = org.getId() + ".html";
        renderIfOutputEnabled(outDir, "/orgs/" + filename, orgStateTemplate, orgState);
        countToFile.put(orgState.numMembers + orgState.numAdmins, new Link(org.getName() + org.getId(),
                org.getId() + ".html", orgState.numAdmins + orgState.numMembers));
    }
    renderIfOutputEnabled(outDir, "/orgs/index.html", indexTemplate, countToFile.values());
    renderIfOutputEnabled(outDir, "/index.html", indexTemplate,
            ImmutableList.of(new Link("organizations", "orgs/index.html", numOrgs),
                    new Link("users", "users/index.html", numPeople)));
    return output;
}
From source file:io.github.jonestimd.swing.table.model.BeanListMultimapTableModel.java
public void setBeans(Multimap<G, T> beans) {
    groups.clear();
    groups.putAll(beans);
    sortedGroups.clear();
    sortedGroups.addAll(groups.keySet());
    sortedGroups.sort(groupOrdering);
    groupOffsets = new int[sortedGroups.size() + GROWTH_FACTOR];
    int groupIndex = 0;
    for (G group : sortedGroups) {
        groupOffsets[groupIndex + 1] = groupOffsets[groupIndex] + groups.get(group).size() + 1;
        groupIndex++;
    }
    fireTableDataChanged();
    beanTableAdapter.setBeans(beans.values());
}
From source file:com.google.devtools.build.lib.query2.RdepsBoundedVisitor.java
@Override
protected Visit getVisitResult(Iterable<DepAndRdepAtDepth> depAndRdepAtDepths) throws InterruptedException {
    Map<SkyKey, Integer> shallowestRdepDepthMap = new HashMap<>();
    depAndRdepAtDepths.forEach(depAndRdepAtDepth -> shallowestRdepDepthMap
            .merge(depAndRdepAtDepth.depAndRdep.rdep, depAndRdepAtDepth.rdepDepth, Integer::min));

    Collection<SkyKey> validRdeps = new ArrayList<>();

    // Multimap of dep to all the reverse deps in this visitation. Used to filter out the
    // disallowed deps.
    Multimap<SkyKey, SkyKey> reverseDepMultimap = ArrayListMultimap.create();
    for (DepAndRdepAtDepth depAndRdepAtDepth : depAndRdepAtDepths) {
        // The "roots" of our visitation (see #preprocessInitialVisit) have a null 'dep' field.
        if (depAndRdepAtDepth.depAndRdep.dep == null) {
            validRdeps.add(depAndRdepAtDepth.depAndRdep.rdep);
        } else {
            reverseDepMultimap.put(depAndRdepAtDepth.depAndRdep.dep, depAndRdepAtDepth.depAndRdep.rdep);
        }
    }

    Multimap<SkyKey, SkyKey> packageKeyToTargetKeyMap = env
            .makePackageKeyToTargetKeyMap(Iterables.concat(reverseDepMultimap.values()));
    Set<PackageIdentifier> pkgIdsNeededForTargetification = packageKeyToTargetKeyMap.keySet().stream()
            .map(SkyQueryEnvironment.PACKAGE_SKYKEY_TO_PACKAGE_IDENTIFIER).collect(toImmutableSet());
    packageSemaphore.acquireAll(pkgIdsNeededForTargetification);

    try {
        // Filter out disallowed deps. We cannot defer the targetification any further as we do not
        // want to retrieve the rdeps of unwanted nodes (targets).
        if (!reverseDepMultimap.isEmpty()) {
            Collection<Target> filteredTargets = env.filterRawReverseDepsOfTransitiveTraversalKeys(
                    reverseDepMultimap.asMap(), packageKeyToTargetKeyMap);
            filteredTargets.stream().map(SkyQueryEnvironment.TARGET_TO_SKY_KEY).forEachOrdered(validRdeps::add);
        }
    } finally {
        packageSemaphore.releaseAll(pkgIdsNeededForTargetification);
    }

    ImmutableList<SkyKey> uniqueValidRdeps = validRdeps.stream()
            .filter(validRdep -> validRdepMinDepthUniquifier
                    .uniqueAtDepthLessThanOrEqualTo(validRdep, shallowestRdepDepthMap.get(validRdep)))
            .collect(ImmutableList.toImmutableList());

    // Don't bother getting the rdeps of the rdeps that are already at the depth bound.
    Iterable<SkyKey> uniqueValidRdepsBelowDepthBound = Iterables.filter(uniqueValidRdeps,
            uniqueValidRdep -> shallowestRdepDepthMap.get(uniqueValidRdep) < depth);

    // Retrieve the reverse deps as SkyKeys and defer the targetification and filtering to next
    // recursive visitation.
    Map<SkyKey, Iterable<SkyKey>> unfilteredRdepsOfRdeps = env.graph
            .getReverseDeps(uniqueValidRdepsBelowDepthBound);

    ImmutableList.Builder<DepAndRdepAtDepth> depAndRdepAtDepthsToVisitBuilder = ImmutableList.builder();
    unfilteredRdepsOfRdeps.entrySet().forEach(entry -> {
        SkyKey rdep = entry.getKey();
        int depthOfRdepOfRdep = shallowestRdepDepthMap.get(rdep) + 1;
        Streams.stream(entry.getValue()).filter(Predicates.and(SkyQueryEnvironment.IS_TTV, universe))
                .forEachOrdered(rdepOfRdep -> {
                    depAndRdepAtDepthsToVisitBuilder
                            .add(new DepAndRdepAtDepth(new DepAndRdep(rdep, rdepOfRdep), depthOfRdepOfRdep));
                });
    });

    return new Visit(/*keysToUseForResult=*/ uniqueValidRdeps,
            /*keysToVisit=*/ depAndRdepAtDepthsToVisitBuilder.build());
}
From source file:org.killbill.billing.plugin.analytics.dao.factory.BusinessInvoiceFactory.java
private BusinessInvoiceItemBaseModelDao createBusinessInvoiceItem(
        final BusinessContextFactory businessContextFactory, final InvoiceItem invoiceItem,
        final Multimap<UUID, InvoiceItem> allInvoiceItems, final Map<UUID, Invoice> invoiceIdToInvoiceMappings,
        final Account account, final Map<UUID, SubscriptionBundle> bundles,
        final CurrencyConverter currencyConverter, final AuditLog creationAuditLog, final Long accountRecordId,
        final Long tenantRecordId, final ReportGroup reportGroup) throws AnalyticsRefreshException {
    final Invoice invoice = invoiceIdToInvoiceMappings.get(invoiceItem.getInvoiceId());
    final Collection<InvoiceItem> otherInvoiceItems = Collections2.filter(allInvoiceItems.values(),
            new Predicate<InvoiceItem>() {
                @Override
                public boolean apply(final InvoiceItem input) {
                    return input.getId() != null && !input.getId().equals(invoiceItem.getId());
                }
            });
    return createBusinessInvoiceItem(businessContextFactory, account, invoice, invoiceItem, otherInvoiceItems,
            bundles, currencyConverter, creationAuditLog, accountRecordId, tenantRecordId, reportGroup);
}
From source file:com.facebook.presto.execution.BenchmarkNodeScheduler.java
@Benchmark
@OperationsPerInvocation(SPLITS)
public Object benchmark(BenchmarkData data) throws Throwable {
    List<RemoteTask> remoteTasks = ImmutableList.copyOf(data.getTaskMap().values());
    Iterator<MockRemoteTaskFactory.MockRemoteTask> finishingTask = Iterators.cycle(data.getTaskMap().values());
    Iterator<Split> splits = data.getSplits().iterator();
    Set<Split> batch = new HashSet<>();
    while (splits.hasNext() || !batch.isEmpty()) {
        Multimap<Node, Split> assignments = data.getNodeSelector().computeAssignments(batch, remoteTasks)
                .getAssignments();
        for (Node node : assignments.keySet()) {
            MockRemoteTaskFactory.MockRemoteTask remoteTask = data.getTaskMap().get(node);
            remoteTask.addSplits(ImmutableMultimap.<PlanNodeId, Split>builder()
                    .putAll(new PlanNodeId("sourceId"), assignments.get(node)).build());
            remoteTask.startSplits(MAX_SPLITS_PER_NODE);
        }
        if (assignments.size() == batch.size()) {
            batch.clear();
        } else {
            batch.removeAll(assignments.values());
        }
        while (batch.size() < SPLIT_BATCH_SIZE && splits.hasNext()) {
            batch.add(splits.next());
        }
        finishingTask.next().finishSplits((int) Math.ceil(MAX_SPLITS_PER_NODE / 50.0));
    }
    return remoteTasks;
}
From source file:io.prestosql.execution.BenchmarkNodeScheduler.java
@Benchmark
@OperationsPerInvocation(SPLITS)
public Object benchmark(BenchmarkData data) {
    List<RemoteTask> remoteTasks = ImmutableList.copyOf(data.getTaskMap().values());
    Iterator<MockRemoteTaskFactory.MockRemoteTask> finishingTask = Iterators.cycle(data.getTaskMap().values());
    Iterator<Split> splits = data.getSplits().iterator();
    Set<Split> batch = new HashSet<>();
    while (splits.hasNext() || !batch.isEmpty()) {
        Multimap<Node, Split> assignments = data.getNodeSelector().computeAssignments(batch, remoteTasks)
                .getAssignments();
        for (Node node : assignments.keySet()) {
            MockRemoteTaskFactory.MockRemoteTask remoteTask = data.getTaskMap().get(node);
            remoteTask.addSplits(ImmutableMultimap.<PlanNodeId, Split>builder()
                    .putAll(new PlanNodeId("sourceId"), assignments.get(node)).build());
            remoteTask.startSplits(MAX_SPLITS_PER_NODE);
        }
        if (assignments.size() == batch.size()) {
            batch.clear();
        } else {
            batch.removeAll(assignments.values());
        }
        while (batch.size() < SPLIT_BATCH_SIZE && splits.hasNext()) {
            batch.add(splits.next());
        }
        finishingTask.next().finishSplits((int) Math.ceil(MAX_SPLITS_PER_NODE / 50.0));
    }
    return remoteTasks;
}
From source file:org.opennms.features.topology.plugins.topo.linkd.internal.CdpLinkStatusProvider.java
@Override
protected List<EdgeAlarmStatusSummary> getEdgeAlarmSummaries(List<Integer> linkIds) {
    List<CdpTopologyLink> cdpLinks = getCdpLinkDao().findLinksForTopologyByIds(linkIds.toArray(new Integer[0]));

    Multimap<String, EdgeAlarmStatusSummary> summaryMap = HashMultimap.create();
    for (CdpTopologyLink link : cdpLinks) {
        summaryMap.put(link.getSrcNodeId() + ":" + link.getSrcIfIndex(),
                new EdgeAlarmStatusSummary(link.getSourceId(), link.getTargetId(), null));
    }

    List<OnmsAlarm> alarms = getLinkDownAlarms();

    for (OnmsAlarm alarm : alarms) {
        String key = alarm.getNodeId() + ":" + alarm.getIfIndex();
        if (summaryMap.containsKey(key)) {
            Collection<EdgeAlarmStatusSummary> summaries = summaryMap.get(key);
            for (EdgeAlarmStatusSummary summary : summaries) {
                summary.setEventUEI(alarm.getUei());
            }
        }
    }
    return new ArrayList<>(summaryMap.values());
}
From source file:com.google.devtools.build.lib.query2.output.PreciseAspectResolver.java
@Override
public Set<Label> computeBuildFileDependencies(Package pkg, BuildFileDependencyMode mode)
        throws InterruptedException {
    Set<Label> result = new LinkedHashSet<>();
    result.addAll(mode.getDependencies(pkg));

    Set<PackageIdentifier> dependentPackages = new LinkedHashSet<>();
    // First compute which packages can possibly affect the aspect attributes of this package:
    // Iterate over all rules...
    for (Target target : pkg.getTargets()) {
        if (!(target instanceof Rule)) {
            continue;
        }

        // ...figure out which direct dependencies can possibly have aspects attached to them...
        Multimap<Attribute, Label> depsWithPossibleAspects = ((Rule) target)
                .getTransitions(new BinaryPredicate<Rule, Attribute>() {
                    @Override
                    public boolean apply(@Nullable Rule rule, Attribute attribute) {
                        for (Aspect aspectWithParameters : attribute.getAspects(rule)) {
                            if (!aspectWithParameters.getDefinition().getAttributes().isEmpty()) {
                                return true;
                            }
                        }
                        return false;
                    }
                });

        // ...and add the package of the aspect.
        for (Label depLabel : depsWithPossibleAspects.values()) {
            dependentPackages.add(depLabel.getPackageIdentifier());
        }
    }

    // Then add all the subinclude labels of the packages thus found to the result.
    for (PackageIdentifier packageIdentifier : dependentPackages) {
        try {
            result.add(Label.create(packageIdentifier, "BUILD"));
            Package dependentPackage = packageProvider.getPackage(eventHandler, packageIdentifier);
            result.addAll(mode.getDependencies(dependentPackage));
        } catch (NoSuchPackageException e) {
            // If the package is not found, just add its BUILD file, which is already done above.
            // Hopefully this error is not raised when there is a syntax error in a subincluded file
            // or something.
        } catch (LabelSyntaxException e) {
            throw new IllegalStateException(e);
        }
    }
    return result;
}
From source file:org.eclipse.xtext.serializer.sequencer.AssignmentFinder.java
protected Set<AbstractElement> findValidAssignmentsForContainmentRef(EObject semanticObj,
        Multimap<AbstractElement, ISerializationContext> assignments, EObject value) {
    Multimap<ISerializationContext, AbstractElement> children = ArrayListMultimap.create();
    for (Entry<AbstractElement, Collection<ISerializationContext>> e : assignments.asMap().entrySet()) {
        AbstractElement ele = e.getKey();
        if (ele instanceof RuleCall) {
            EClassifier classifier = ((RuleCall) ele).getRule().getType().getClassifier();
            if (!classifier.isInstance(value))
                continue;
        }
        for (ISerializationContext container : e.getValue()) {
            ISerializationContext child = SerializationContext.forChild(container, ele, value);
            children.put(child, ele);
        }
    }
    if (children.size() < 2)
        return Sets.newHashSet(children.values());
    Set<ISerializationContext> found = contextFinder.findByContents(value, children.keySet());
    Set<AbstractElement> result = Sets.newLinkedHashSet();
    for (ISerializationContext ctx : children.keySet())
        if (found.contains(ctx))
            result.addAll(children.get(ctx));
    return result;
}