List of usage examples for com.google.common.collect Maps uniqueIndex
public static <K, V> ImmutableMap<K, V> uniqueIndex(Iterable<V> values, Function<? super V, K> keyFunction)
public static <K, V> ImmutableMap<K, V> uniqueIndex(Iterator<V> values, Function<? super V, K> keyFunction)
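Both overloads return an ImmutableMap that keys each value by the result of applying keyFunction to it. The call fails fast: IllegalArgumentException if two values yield the same key, NullPointerException if any value or computed key is null. A minimal sketch of that contract; the User record is hypothetical and assumes Java 16+ records and Guava 21+, where method references satisfy Guava's Function:

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import java.util.List;

public class UniqueIndexDemo {
    // Hypothetical value type, used only for this sketch.
    record User(String email, String name) {}

    public static void main(String[] args) {
        List<User> users = List.of(
                new User("alice@example.com", "Alice"),
                new User("bob@example.com", "Bob"));

        // Index each user by its (assumed unique) email address.
        ImmutableMap<String, User> byEmail = Maps.uniqueIndex(users, User::email);
        System.out.println(byEmail.get("bob@example.com").name()); // prints "Bob"

        // A duplicate key fails fast: the next line, if uncommented, throws
        // IllegalArgumentException ("Multiple entries with same key ...").
        // Maps.uniqueIndex(List.of(users.get(0), users.get(0)), User::email);
    }
}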
From source file:org.opendaylight.netconf.impl.osgi.NetconfCapabilityMonitoringService.java
private void onCapabilitiesAdded(final Set<Capability> addedCaps) {
    this.capabilities.putAll(Maps.uniqueIndex(setupCapabilities(addedCaps), CAPABILITY_TO_URI));
}
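The snippet feeds a freshly built index straight into an existing map with putAll, using a reusable Function constant (CAPABILITY_TO_URI) as the key extractor. A rough sketch of the same pattern with hypothetical stand-in names:

import com.google.common.base.Function;
import com.google.common.collect.Maps;

import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;

public class CapabilityRegistrySketch {
    // Hypothetical stand-ins for the Capability type and CAPABILITY_TO_URI constant above.
    record Capability(String uri) {}

    static final Function<Capability, String> CAPABILITY_TO_URI = Capability::uri;

    private final Map<String, Capability> capabilities = new ConcurrentHashMap<>();

    void onCapabilitiesAdded(Set<Capability> addedCaps) {
        // uniqueIndex materializes the whole batch first, so a duplicate URI
        // within the batch fails fast before anything reaches the shared map.
        capabilities.putAll(Maps.uniqueIndex(addedCaps, CAPABILITY_TO_URI));
    }

    public static void main(String[] args) {
        CapabilityRegistrySketch registry = new CapabilityRegistrySketch();
        registry.onCapabilitiesAdded(Set.of(new Capability("urn:ietf:params:netconf:base:1.1")));
        System.out.println(registry.capabilities.keySet());
    }
}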
From source file:com.facebook.buck.maven.Pom.java
private void updateModel(Artifact mavenCoordinates, Iterable<Artifact> deps) {
    model.setGroupId(mavenCoordinates.getGroupId());
    model.setArtifactId(mavenCoordinates.getArtifactId());
    model.setVersion(mavenCoordinates.getVersion());
    if (Strings.isNullOrEmpty(model.getName())) {
        model.setName(mavenCoordinates.getArtifactId()); // better than nothing
    }

    // Dependencies
    ImmutableMap<DepKey, Dependency> depIndex =
        Maps.uniqueIndex(getModel().getDependencies(), DepKey::new);
    for (Artifact artifactDep : deps) {
        DepKey key = new DepKey(artifactDep);
        Dependency dependency = depIndex.get(key);
        if (dependency == null) {
            dependency = key.createDependency();
            getModel().addDependency(dependency);
        }
        updateDependency(dependency, artifactDep);
    }
}
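Pom.java uses the index as a lookup table for a merge: existing dependencies are found by key, misses create new entries, hits are updated in place. The shape of that idiom, with hypothetical Dep and DepKey types:

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;

import java.util.ArrayList;
import java.util.List;

public class MergeByKeySketch {
    // Hypothetical stand-ins for Dependency and DepKey above.
    record Dep(String groupId, String artifactId, String version) {}
    record DepKey(String groupId, String artifactId) {
        DepKey(Dep dep) { this(dep.groupId(), dep.artifactId()); }
    }

    public static void main(String[] args) {
        List<Dep> existing = new ArrayList<>(List.of(new Dep("com.google.guava", "guava", "31.0-jre")));
        List<Dep> incoming = List.of(
                new Dep("com.google.guava", "guava", "32.1.3-jre"), // updates the existing entry
                new Dep("junit", "junit", "4.13.2"));               // creates a new entry

        // Snapshot the current entries, keyed by coordinates; keys must be distinct.
        ImmutableMap<DepKey, Dep> depIndex = Maps.uniqueIndex(existing, DepKey::new);

        for (Dep dep : incoming) {
            Dep current = depIndex.get(new DepKey(dep));
            if (current == null) {
                existing.add(dep);                            // miss: brand-new dependency
            } else {
                existing.set(existing.indexOf(current), dep); // hit: replace with updated version
            }
        }
        System.out.println(existing);
    }
}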
From source file:org.jclouds.compute.config.BaseComputeServiceContextModule.java
@Provides
@Singleton
protected Supplier<Map<String, ? extends Hardware>> provideSizeMap(
        @Memoized Supplier<Set<? extends Hardware>> sizes) {
    return Suppliers.compose(new Function<Set<? extends Hardware>, Map<String, ? extends Hardware>>() {
        @Override
        public Map<String, ? extends Hardware> apply(Set<? extends Hardware> from) {
            return Maps.uniqueIndex(from, new Function<Hardware, String>() {
                @Override
                public String apply(Hardware from) {
                    return from.getId();
                }
            });
        }
    }, sizes);
}
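The jclouds module composes uniqueIndex with a memoized supplier, so the id-indexed map is rebuilt only when the underlying set is recomputed. Roughly the same wiring with lambdas and a hypothetical Hardware record:

import com.google.common.base.Supplier;
import com.google.common.base.Suppliers;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Maps;

import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;

public class IndexedSupplierSketch {
    // Hypothetical stand-in for org.jclouds.compute.domain.Hardware.
    record Hardware(String id, int cores) {}

    public static void main(String[] args) {
        // Pretend this is the expensive, memoized upstream call.
        Supplier<Set<Hardware>> sizes = Suppliers.memoizeWithExpiration(
                () -> ImmutableSet.of(new Hardware("m1.small", 1), new Hardware("m1.large", 4)),
                60, TimeUnit.SECONDS);

        // Compose: whenever `sizes` is (re)computed, re-index the set by id.
        Supplier<Map<String, Hardware>> sizeMap =
                Suppliers.compose(from -> Maps.uniqueIndex(from, Hardware::id), sizes);

        System.out.println(sizeMap.get().get("m1.large")); // Hardware[id=m1.large, cores=4]
    }
}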
From source file:com.twitter.aurora.scheduler.http.SchedulerzJob.java
private static Map<String, SchedulingDetails> buildSchedulingTable(Iterable<IAssignedTask> tasks) {
    Map<Integer, ITaskConfig> byInstance = Maps.transformValues(
        Maps.uniqueIndex(tasks, Tasks.ASSIGNED_TO_INSTANCE_ID), Tasks.ASSIGNED_TO_INFO);
    Map<Integer, SchedulingDetails> detailsByInstance =
        Maps.transformValues(byInstance, CONFIG_TO_DETAILS);
    Multimap<SchedulingDetails, Integer> instancesByDetails = Multimaps.invertFrom(
        Multimaps.forMap(detailsByInstance), HashMultimap.<SchedulingDetails, Integer>create());
    Map<SchedulingDetails, String> instanceStringsByDetails =
        Maps.transformValues(instancesByDetails.asMap(), TransformationUtils.INSTANCES_TOSTRING);
    return HashBiMap.create(instanceStringsByDetails).inverse();
}
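SchedulerzJob chains uniqueIndex with transformValues and an inversion into a Multimap, turning a task-by-instance-id index into a details-to-instance-ids grouping. The core of that chain in isolation, with hypothetical Task and details types:

import com.google.common.collect.HashMultimap;
import com.google.common.collect.Maps;
import com.google.common.collect.Multimap;
import com.google.common.collect.Multimaps;

import java.util.List;
import java.util.Map;

public class IndexInvertSketch {
    // Hypothetical stand-in for IAssignedTask; details plays the role of SchedulingDetails.
    record Task(int instanceId, String details) {}

    public static void main(String[] args) {
        List<Task> tasks = List.of(
                new Task(0, "2 cores"), new Task(1, "2 cores"), new Task(2, "4 cores"));

        // 1. Index by instance id (ids must be unique), 2. map each task to its details.
        Map<Integer, String> detailsByInstance =
                Maps.transformValues(Maps.uniqueIndex(tasks, Task::instanceId), Task::details);

        // 3. Invert: group instance ids under shared details.
        Multimap<String, Integer> instancesByDetails = Multimaps.invertFrom(
                Multimaps.forMap(detailsByInstance), HashMultimap.<String, Integer>create());

        System.out.println(instancesByDetails); // {2 cores=[0, 1], 4 cores=[2]}
    }
}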
From source file:controllers.SearchController.java
protected Result renderSearch(String q, String rangeType, int relative, String from, String to,
        String keyword, String interval, int page, String savedSearchId, String fields,
        int displayWidth, SearchSort sort, Stream stream, String filter) {
    UniversalSearch search;
    try {
        search = getSearch(q, filter, rangeType, relative, from, to, keyword, page, sort);
    } catch (InvalidRangeParametersException e2) {
        return status(400, views.html.errors.error.render("Invalid range parameters provided.", e2, request()));
    } catch (IllegalArgumentException e1) {
        return status(400, views.html.errors.error.render("Invalid range type provided.", e1, request()));
    }

    SearchResult searchResult;
    DateHistogramResult histogramResult;
    SavedSearch savedSearch = null;
    Set<String> selectedFields = getSelectedFields(fields);
    String formattedHistogramResults;
    Set<StreamDescription> streams;
    Set<InputDescription> inputs = Sets.newHashSet();
    Map<String, NodeDescription> nodes = Maps.newHashMap();
    nodes.putAll(Maps.transformEntries(serverNodes.asMap(),
            new Maps.EntryTransformer<String, Node, NodeDescription>() {
                @Override
                public NodeDescription transformEntry(@Nullable String key, @Nullable Node value) {
                    return new NodeDescription(value);
                }
            }));

    try {
        if (savedSearchId != null && !savedSearchId.isEmpty()) {
            savedSearch = savedSearchService.get(savedSearchId);
        }

        searchResult = search.search();

        // TODO create a bulk call to get stream and input details (and restrict the fields that come back)
        final Set<String> streamIds = Sets.newHashSet();
        final HashMultimap<String, String> usedInputIds = HashMultimap.create();
        final HashMultimap<String, String> usedRadioIds = HashMultimap.create();

        for (MessageResult messageResult : searchResult.getMessages()) {
            streamIds.addAll(messageResult.getStreamIds());
            usedInputIds.put(messageResult.getSourceNodeId(), messageResult.getSourceInputId());
            if (messageResult.getSourceRadioId() != null) {
                usedRadioIds.put(messageResult.getSourceRadioId(), messageResult.getSourceRadioInputId());
            }
        }

        // resolve all stream information in the result set
        final HashSet<Stream> allStreams = Sets.newHashSet(streamService.all().iterator());
        streams = Sets.newHashSet(Collections2.transform(Sets.filter(allStreams, new Predicate<Stream>() {
            @Override
            public boolean apply(Stream input) {
                return streamIds.contains(input.getId());
            }
        }), new Function<Stream, StreamDescription>() {
            @Nullable
            @Override
            public StreamDescription apply(@Nullable Stream stream) {
                return StreamDescription.of(stream);
            }
        }));

        // resolve all used inputs and nodes from the result set
        final Map<String, Node> nodeMap = serverNodes.asMap();
        for (final String nodeId : usedInputIds.keySet()) {
            final Node node = nodeMap.get(nodeId);
            if (node != null) {
                final HashSet<Input> allInputs = Sets.newHashSet(node.getInputs());
                inputs = Sets.newHashSet(Collections2.transform(Sets.filter(allInputs, new Predicate<Input>() {
                    @Override
                    public boolean apply(Input input) {
                        final Set<String> inputIds = usedInputIds.get(nodeId);
                        return inputIds != null && inputIds.contains(input.getId());
                    }
                }), new Function<Input, InputDescription>() {
                    @Nullable
                    @Override
                    public InputDescription apply(Input input) {
                        return new InputDescription(input);
                    }
                }));
            }
        }

        // resolve radio inputs
        for (final String radioId : usedRadioIds.keySet()) {
            try {
                final Radio radio = nodeService.loadRadio(radioId);
                nodes.put(radio.getId(), new NodeDescription(radio));
                final HashSet<Input> allRadioInputs = Sets.newHashSet(radio.getInputs());
                inputs.addAll(Collections2.transform(Sets.filter(allRadioInputs, new Predicate<Input>() {
                    @Override
                    public boolean apply(Input input) {
                        return usedRadioIds.get(radioId).contains(input.getId());
                    }
                }), new Function<Input, InputDescription>() {
                    @Override
                    public InputDescription apply(Input input) {
                        return new InputDescription(input);
                    }
                }));
            } catch (NodeService.NodeNotFoundException e) {
                Logger.warn("Could not load radio node " + radioId, e);
            }
        }

        searchResult.setAllFields(getAllFields());

        // histogram resolution (strangely aka interval)
        if (interval == null || interval.isEmpty() || !SearchTools.isAllowedDateHistogramInterval(interval)) {
            interval = determineHistogramResolution(searchResult);
        }
        histogramResult = search.dateHistogram(interval);
        formattedHistogramResults = formatHistogramResults(histogramResult.getResults(), displayWidth);
    } catch (IOException e) {
        return status(504, views.html.errors.error.render(ApiClient.ERROR_MSG_IO, e, request()));
    } catch (APIException e) {
        if (e.getHttpCode() == 400) {
            try {
                QueryParseError qpe = objectMapper.readValue(e.getResponseBody(), QueryParseError.class);
                return ok(views.html.search.queryerror.render(currentUser(), q, qpe, savedSearch, fields, stream));
            } catch (IOException ioe) {
                // Ignore
            }
        }
        String message = "There was a problem with your search. We expected HTTP 200, but got a HTTP "
                + e.getHttpCode() + ".";
        return status(504, views.html.errors.error.render(message, e, request()));
    }

    return ok(views.html.search.index.render(currentUser(), q, search, page, savedSearch, selectedFields,
            searchResult, histogramResult, formattedHistogramResults, nodes,
            Maps.uniqueIndex(streams, new Function<StreamDescription, String>() {
                @Nullable
                @Override
                public String apply(@Nullable StreamDescription stream) {
                    return stream == null ? null : stream.getId();
                }
            }),
            Maps.uniqueIndex(inputs, new Function<InputDescription, String>() {
                @Nullable
                @Override
                public String apply(@Nullable InputDescription input) {
                    return input == null ? null : input.getId();
                }
            }),
            stream));
}
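Note the @Nullable key functions in the final uniqueIndex calls above: uniqueIndex itself rejects null values and null computed keys with a NullPointerException, so a key function that can return null only works if no element actually triggers it. A safer sketch filters nulls out before indexing, here via the Iterator overload; the Item record is hypothetical:

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;

import java.util.Arrays;
import java.util.List;
import java.util.Objects;

public class NullSafeIndexSketch {
    // Hypothetical element type whose id may be missing.
    record Item(String id, String label) {}

    public static void main(String[] args) {
        List<Item> items = Arrays.asList(
                new Item("a", "first"), null, new Item("b", "second"));

        // uniqueIndex throws NullPointerException on a null value or a null
        // computed key, so drop both before indexing.
        ImmutableMap<String, Item> byId = Maps.uniqueIndex(
                items.stream()
                        .filter(Objects::nonNull)
                        .filter(item -> item.id() != null)
                        .iterator(),
                Item::id);

        System.out.println(byId.keySet()); // [a, b]
    }
}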
From source file:com.facebook.buck.features.python.PythonUtil.java
static PythonPackageComponents getAllComponents(CellPathResolver cellPathResolver, BuildTarget buildTarget,
        ProjectFilesystem projectFilesystem, BuildRuleParams params, ActionGraphBuilder graphBuilder,
        SourcePathRuleFinder ruleFinder, Iterable<BuildRule> deps, PythonPackageComponents packageComponents,
        PythonPlatform pythonPlatform, CxxBuckConfig cxxBuckConfig, CxxPlatform cxxPlatform,
        ImmutableList<? extends Arg> extraLdflags, NativeLinkStrategy nativeLinkStrategy,
        ImmutableSet<BuildTarget> preloadDeps) {
    PythonPackageComponents.Builder allComponents = new PythonPackageComponents.Builder(buildTarget);

    Map<BuildTarget, CxxPythonExtension> extensions = new LinkedHashMap<>();
    Map<BuildTarget, NativeLinkable> nativeLinkableRoots = new LinkedHashMap<>();

    OmnibusRoots.Builder omnibusRoots = OmnibusRoots.builder(cxxPlatform, preloadDeps, graphBuilder);

    // Add the top-level components.
    allComponents.addComponent(packageComponents, buildTarget);

    // Walk all our transitive deps to build our complete package that we'll
    // turn into an executable.
    new AbstractBreadthFirstTraversal<BuildRule>(
            Iterables.concat(deps, graphBuilder.getAllRules(preloadDeps))) {
        private final ImmutableList<BuildRule> empty = ImmutableList.of();

        @Override
        public Iterable<BuildRule> visit(BuildRule rule) {
            Iterable<BuildRule> deps = empty;
            if (rule instanceof CxxPythonExtension) {
                CxxPythonExtension extension = (CxxPythonExtension) rule;
                NativeLinkTarget target = ((CxxPythonExtension) rule).getNativeLinkTarget(pythonPlatform);
                extensions.put(target.getBuildTarget(), extension);
                omnibusRoots.addIncludedRoot(target);
                List<BuildRule> cxxpydeps = new ArrayList<>();
                for (BuildRule dep : extension.getPythonPackageDeps(pythonPlatform, cxxPlatform, graphBuilder)) {
                    if (dep instanceof PythonPackagable) {
                        cxxpydeps.add(dep);
                    }
                }
                deps = cxxpydeps;
            } else if (rule instanceof PythonPackagable) {
                PythonPackagable packagable = (PythonPackagable) rule;
                PythonPackageComponents comps =
                        packagable.getPythonPackageComponents(pythonPlatform, cxxPlatform, graphBuilder);
                allComponents.addComponent(comps, rule.getBuildTarget());
                if (packagable.doesPythonPackageDisallowOmnibus() || comps.hasNativeCode(cxxPlatform)) {
                    for (BuildRule dep : packagable.getPythonPackageDeps(pythonPlatform, cxxPlatform,
                            graphBuilder)) {
                        if (dep instanceof NativeLinkable) {
                            NativeLinkable linkable = (NativeLinkable) dep;
                            nativeLinkableRoots.put(linkable.getBuildTarget(), linkable);
                            omnibusRoots.addExcludedRoot(linkable);
                        }
                    }
                }
                deps = packagable.getPythonPackageDeps(pythonPlatform, cxxPlatform, graphBuilder);
            } else if (rule instanceof NativeLinkable) {
                NativeLinkable linkable = (NativeLinkable) rule;
                nativeLinkableRoots.put(linkable.getBuildTarget(), linkable);
                omnibusRoots.addPotentialRoot(linkable);
            }
            return deps;
        }
    }.start();

    // For the merged strategy, build up the lists of included native linkable roots, and the
    // excluded native linkable roots.
    if (nativeLinkStrategy == NativeLinkStrategy.MERGED) {
        OmnibusRoots roots = omnibusRoots.build();
        OmnibusLibraries libraries = Omnibus.getSharedLibraries(buildTarget, projectFilesystem, params,
                cellPathResolver, graphBuilder, ruleFinder, cxxBuckConfig, cxxPlatform, extraLdflags,
                roots.getIncludedRoots().values(), roots.getExcludedRoots().values());

        // Add all the roots from the omnibus link. If it's an extension, add it as a module.
        // Otherwise, add it as a native library.
        for (Map.Entry<BuildTarget, OmnibusRoot> root : libraries.getRoots().entrySet()) {
            CxxPythonExtension extension = extensions.get(root.getKey());
            if (extension != null) {
                allComponents.addModule(extension.getModule(), root.getValue().getPath(), root.getKey());
            } else {
                NativeLinkTarget target = Preconditions.checkNotNull(
                        roots.getIncludedRoots().get(root.getKey()),
                        "%s: linked unexpected omnibus root: %s", buildTarget, root.getKey());
                NativeLinkTargetMode mode = target.getNativeLinkTargetMode(cxxPlatform);
                String soname = Preconditions.checkNotNull(mode.getLibraryName().orElse(null),
                        "%s: omnibus library for %s was built without soname", buildTarget, root.getKey());
                allComponents.addNativeLibraries(Paths.get(soname), root.getValue().getPath(), root.getKey());
            }
        }

        // Add all remaining libraries as native libraries.
        for (OmnibusLibrary library : libraries.getLibraries()) {
            allComponents.addNativeLibraries(Paths.get(library.getSoname()), library.getPath(), buildTarget);
        }
    } else {
        // For regular linking, add all extensions via the package components interface.
        Map<BuildTarget, NativeLinkable> extensionNativeDeps = new LinkedHashMap<>();
        for (Map.Entry<BuildTarget, CxxPythonExtension> entry : extensions.entrySet()) {
            allComponents.addComponent(
                    entry.getValue().getPythonPackageComponents(pythonPlatform, cxxPlatform, graphBuilder),
                    entry.getValue().getBuildTarget());
            extensionNativeDeps.putAll(Maps.uniqueIndex(
                    entry.getValue().getNativeLinkTarget(pythonPlatform)
                            .getNativeLinkTargetDeps(cxxPlatform, graphBuilder),
                    NativeLinkable::getBuildTarget));
        }

        // Add all the native libraries.
        ImmutableMap<BuildTarget, NativeLinkable> nativeLinkables = NativeLinkables
                .getTransitiveNativeLinkables(cxxPlatform, graphBuilder,
                        Iterables.concat(nativeLinkableRoots.values(), extensionNativeDeps.values()));
        for (NativeLinkable nativeLinkable : nativeLinkables.values()) {
            NativeLinkable.Linkage linkage = nativeLinkable.getPreferredLinkage(cxxPlatform, graphBuilder);
            if (nativeLinkableRoots.containsKey(nativeLinkable.getBuildTarget())
                    || linkage != NativeLinkable.Linkage.STATIC) {
                ImmutableMap<String, SourcePath> libs =
                        nativeLinkable.getSharedLibraries(cxxPlatform, graphBuilder);
                for (Map.Entry<String, SourcePath> ent : libs.entrySet()) {
                    allComponents.addNativeLibraries(Paths.get(ent.getKey()), ent.getValue(),
                            nativeLinkable.getBuildTarget());
                }
            }
        }
    }

    return allComponents.build();
}
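PythonUtil calls uniqueIndex once per extension and folds each result into a mutable LinkedHashMap. Within a single uniqueIndex call duplicate keys are an error, but across putAll calls later entries simply overwrite earlier ones. A compact sketch of the difference, with a hypothetical Dep type standing in for NativeLinkable:

import com.google.common.collect.Maps;

import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class AccumulatedIndexSketch {
    // Hypothetical stand-in for NativeLinkable, keyed by target name.
    record Dep(String target, String version) {}

    public static void main(String[] args) {
        List<List<Dep>> perExtensionDeps = List.of(
                List.of(new Dep("//lib:a", "1"), new Dep("//lib:b", "1")),
                List.of(new Dep("//lib:b", "2"))); // same target, different batch

        Map<String, Dep> all = new LinkedHashMap<>();
        for (List<Dep> batch : perExtensionDeps) {
            // Duplicates *within* one batch would throw IllegalArgumentException;
            // duplicates *across* batches just overwrite in the mutable map.
            all.putAll(Maps.uniqueIndex(batch, Dep::target));
        }
        System.out.println(all); // //lib:b maps to Dep[target=//lib:b, version=2]
    }
}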
From source file:org.eclipse.sw360.datahandler.common.CommonUtils.java
public static Map<String, User> getStringUserMap(UserService.Iface userClient) throws TException {
    Map<String, User> userMap;
    userMap = Maps.uniqueIndex(userClient.getAllUsers(), new Function<User, String>() {
        @Override
        public String apply(User input) {
            return input.getEmail();
        }
    });
    return userMap;
}
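Since Guava 21, com.google.common.base.Function extends java.util.function.Function, so the anonymous class above collapses to a method reference. A sketch of the equivalent shorter form, assuming the same getAllUsers/getEmail API as the snippet:

public static Map<String, User> getStringUserMap(UserService.Iface userClient) throws TException {
    // Same behavior as above: fails if two users share an email address.
    return Maps.uniqueIndex(userClient.getAllUsers(), User::getEmail);
}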
From source file:com.twitter.aurora.scheduler.state.CronJobManager.java
/**
 * Triggers execution of a cron job, depending on the cron collision policy for the job.
 *
 * @param config The config of the job to be triggered.
 */
@VisibleForTesting
void cronTriggered(SanitizedConfiguration config) {
    IJobConfiguration job = config.getJobConfig();
    LOG.info(String.format("Cron triggered for %s at %s with policy %s", JobKeys.toPath(job), new Date(),
            job.getCronCollisionPolicy()));
    cronJobsTriggered.incrementAndGet();

    ImmutableMap.Builder<Integer, ITaskConfig> builder = ImmutableMap.builder();
    final Query.Builder activeQuery = Query.jobScoped(job.getKey()).active();
    Set<IScheduledTask> activeTasks = Storage.Util.consistentFetchTasks(storage, activeQuery);

    if (activeTasks.isEmpty()) {
        builder.putAll(config.getTaskConfigs());
    } else {
        // Assign a default collision policy.
        CronCollisionPolicy collisionPolicy = orDefault(job.getCronCollisionPolicy());

        switch (collisionPolicy) {
        case KILL_EXISTING:
            try {
                schedulerCore.killTasks(activeQuery, CRON_USER);
                // Check immediately if the tasks are gone. This could happen if the existing tasks
                // were pending.
                if (!hasTasks(activeQuery)) {
                    builder.putAll(config.getTaskConfigs());
                } else {
                    delayedRun(activeQuery, config);
                }
            } catch (ScheduleException e) {
                LOG.log(Level.SEVERE, "Failed to kill job.", e);
            }
            break;

        case CANCEL_NEW:
            break;

        case RUN_OVERLAP:
            Map<Integer, IScheduledTask> byInstance =
                    Maps.uniqueIndex(activeTasks, Tasks.SCHEDULED_TO_INSTANCE_ID);
            Map<Integer, ScheduleStatus> existingTasks = Maps.transformValues(byInstance, Tasks.GET_STATUS);
            if (existingTasks.isEmpty()) {
                builder.putAll(config.getTaskConfigs());
            } else if (Iterables.any(existingTasks.values(), Predicates.equalTo(PENDING))) {
                LOG.info("Job " + JobKeys.toPath(job) + " has pending tasks, suppressing run.");
            } else {
                // To safely overlap this run, we need to adjust the instance IDs of the overlapping
                // run (maintaining the role/job/instance UUID invariant).
                int instanceOffset = Ordering.natural().max(existingTasks.keySet()) + 1;
                LOG.info("Adjusting instance IDs of " + JobKeys.toPath(job) + " by " + instanceOffset
                        + " for overlapping cron run.");
                for (Map.Entry<Integer, ITaskConfig> entry : config.getTaskConfigs().entrySet()) {
                    builder.put(entry.getKey() + instanceOffset, entry.getValue());
                }
            }
            break;

        default:
            LOG.severe("Unrecognized cron collision policy: " + job.getCronCollisionPolicy());
        }
    }

    Map<Integer, ITaskConfig> newTasks = builder.build();
    if (!newTasks.isEmpty()) {
        stateManager.insertPendingTasks(newTasks);
    }
}
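The RUN_OVERLAP branch builds a per-instance status view by composing uniqueIndex with transformValues, then derives the next free instance id from the highest existing key. That offset arithmetic in isolation, with hypothetical types:

import com.google.common.collect.Maps;
import com.google.common.collect.Ordering;

import java.util.List;
import java.util.Map;

public class InstanceOffsetSketch {
    enum Status { PENDING, RUNNING }
    // Hypothetical stand-in for IScheduledTask.
    record ScheduledTask(int instanceId, Status status) {}

    public static void main(String[] args) {
        List<ScheduledTask> active = List.of(
                new ScheduledTask(0, Status.RUNNING), new ScheduledTask(1, Status.RUNNING));

        // Index active tasks by instance id, then view just their statuses.
        Map<Integer, Status> existing = Maps.transformValues(
                Maps.uniqueIndex(active, ScheduledTask::instanceId), ScheduledTask::status);

        // The new overlapping run starts right after the highest existing instance id.
        int instanceOffset = Ordering.natural().max(existing.keySet()) + 1;
        System.out.println(instanceOffset); // 2
    }
}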
From source file:org.ambraproject.rhino.service.taxonomy.impl.TaxonomyClassificationServiceImpl.java
private void persistCategories(List<WeightedTerm> terms, Article article) {
    Set<String> termStrings = terms.stream().map(WeightedTerm::getPath).collect(Collectors.toSet());

    Collection<Category> existingCategories = hibernateTemplate.execute(session -> {
        Query query = session.createQuery("FROM Category WHERE path IN (:terms)");
        query.setParameterList("terms", termStrings);
        return (Collection<Category>) query.list();
    });

    Map<String, Category> existingCategoryMap = Maps.uniqueIndex(existingCategories, Category::getPath);

    Collection<ArticleCategoryAssignment> existingAssignments = getAssignmentsForArticle(article);
    Map<Category, ArticleCategoryAssignment> assignmentMap =
        Maps.uniqueIndex(existingAssignments, ArticleCategoryAssignment::getCategory);
    assignmentMap = new HashMap<>(assignmentMap); // Make it mutable. We will remove assignments as they are updated.

    for (WeightedTerm term : terms) {
        Category category = existingCategoryMap.get(term.getPath());
        if (category == null) {
            /*
             * A new category from the taxonomy server, which is not yet persisted in our system. Create it now.
             *
             * This risks a race condition if two articles are being populated concurrently and both have the same new
             * category, which can cause a "MySQLIntegrityConstraintViolationException: Duplicate entry" error.
             */
            category = new Category();
            category.setPath(term.getPath());
            hibernateTemplate.save(category);
        }
        ArticleCategoryAssignment assignment = assignmentMap.remove(category);
        if (assignment == null) {
            hibernateTemplate.save(new ArticleCategoryAssignment(category, article, term.getWeight()));
        } else {
            assignment.setWeight(term.getWeight());
            hibernateTemplate.update(assignment);
        }
    }

    // Each assignment that was not removed from assignmentMap is not among the new terms, so it should be deleted.
    assignmentMap.values().forEach(hibernateTemplate::delete);
}
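Because uniqueIndex returns an ImmutableMap, the service above copies it into a HashMap so entries can be removed as they are processed; whatever survives the loop is the deletion set. That reconcile-by-removal idiom in miniature, with a hypothetical Assignment type:

import com.google.common.collect.Maps;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

public class ReconcileSketch {
    // Hypothetical stand-in for ArticleCategoryAssignment.
    record Assignment(String categoryPath, double weight) {}

    public static void main(String[] args) {
        List<Assignment> stored = List.of(
                new Assignment("/biology", 0.9), new Assignment("/chemistry", 0.4));
        Set<String> incomingPaths = Set.of("/biology", "/physics");

        // Copy the immutable index into a HashMap so we can remove matches.
        Map<String, Assignment> remaining =
                new HashMap<>(Maps.uniqueIndex(stored, Assignment::categoryPath));

        for (String path : incomingPaths) {
            Assignment existing = remaining.remove(path);
            System.out.println(existing == null
                    ? "create " + path   // /physics: new assignment
                    : "update " + path); // /biology: keep and update
        }
        // Anything left was not in the incoming set, so it should be deleted.
        System.out.println("delete " + remaining.keySet()); // [/chemistry]
    }
}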
From source file:org.geogig.osm.cli.commands.OSMHistoryImport.java
private void ensureTypesExist(GeogigCLI cli) throws IOException {
    Repository repo = cli.getGeogig().getRepository();
    WorkingTree workingTree = repo.workingTree();
    ImmutableMap<String, NodeRef> featureTypeTrees =
        Maps.uniqueIndex(workingTree.getFeatureTypeTrees(), (nr) -> nr.path());
    if (!featureTypeTrees.containsKey(WAY_TYPE_NAME)) {
        workingTree.createTypeTree(WAY_TYPE_NAME, wayType());
    }
    if (!featureTypeTrees.containsKey(NODE_TYPE_NAME)) {
        workingTree.createTypeTree(NODE_TYPE_NAME, nodeType());
    }
    repo.command(AddOp.class).call();
}
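One caveat worth noting for this ensure-exists pattern: uniqueIndex produces a point-in-time snapshot, so trees created inside the method do not appear in featureTypeTrees; the code is correct because each key is checked exactly once. A tiny demonstration with a hypothetical Tree type:

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;

import java.util.ArrayList;
import java.util.List;

public class SnapshotSketch {
    // Hypothetical stand-in for NodeRef.
    record Tree(String path) {}

    public static void main(String[] args) {
        List<Tree> trees = new ArrayList<>(List.of(new Tree("node")));

        // Snapshot of the current trees, keyed by path.
        ImmutableMap<String, Tree> byPath = Maps.uniqueIndex(trees, Tree::path);

        if (!byPath.containsKey("way")) {
            trees.add(new Tree("way")); // create the missing tree
        }

        // The index is not a live view: it still reflects the original list.
        System.out.println(byPath.containsKey("way")); // false
        System.out.println(trees.size());              // 2
    }
}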