List of usage examples for com.google.common.collect.Multimap.get
Collection<V> get(@Nullable K key);
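Before the project examples, a minimal self-contained sketch of how get behaves (the class name and sample data below are invented for illustration): the returned collection is a never-null view of the values mapped to the key, it is empty for an absent key, and because it is a live view, additions to it write through to the multimap.

import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;

import java.util.Collection;

public class MultimapGetSketch {
    public static void main(String[] args) {
        Multimap<String, String> dcEndpoints = HashMultimap.create();
        dcEndpoints.put("dc1", "10.0.0.1");
        dcEndpoints.put("dc1", "10.0.0.2");
        dcEndpoints.put("dc2", "10.0.1.1");

        // get(key) returns a view of the values mapped to the key, never null.
        Collection<String> dc1 = dcEndpoints.get("dc1");
        System.out.println(dc1.size()); // 2

        // For an absent key the view is empty rather than null.
        Collection<String> dc3 = dcEndpoints.get("dc3");
        System.out.println(dc3.isEmpty()); // true

        // The view is live: adding to it writes through to the multimap.
        dc3.add("10.0.2.1");
        System.out.println(dcEndpoints.containsKey("dc3")); // true
    }
}

The write-through property of the returned view is what the GraphCommand example below relies on when it calls mmap.get(source).add(target).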
From source file:org.apache.cassandra.service.ActiveRepairService.java
/**
 * Return all of the neighbors with whom we share the provided range.
 *
 * @param keyspaceName keyspace to repair
 * @param toRepair token to repair
 * @param dataCenters the data centers to involve in the repair
 *
 * @return neighbors with whom we share the provided range
 */
public static Set<InetAddress> getNeighbors(String keyspaceName, Range<Token> toRepair,
        Collection<String> dataCenters, Collection<String> hosts) {
    StorageService ss = StorageService.instance;
    Map<Range<Token>, List<InetAddress>> replicaSets = ss.getRangeToAddressMap(keyspaceName);
    Range<Token> rangeSuperSet = null;
    for (Range<Token> range : ss.getLocalRanges(keyspaceName)) {
        if (range.contains(toRepair)) {
            rangeSuperSet = range;
            break;
        } else if (range.intersects(toRepair)) {
            throw new IllegalArgumentException(
                    "Requested range intersects a local range but is not fully contained in one; this would lead to imprecise repair");
        }
    }
    if (rangeSuperSet == null || !replicaSets.containsKey(rangeSuperSet))
        return Collections.emptySet();

    Set<InetAddress> neighbors = new HashSet<>(replicaSets.get(rangeSuperSet));
    neighbors.remove(FBUtilities.getBroadcastAddress());

    if (dataCenters != null && !dataCenters.isEmpty()) {
        TokenMetadata.Topology topology = ss.getTokenMetadata().cloneOnlyTokenMap().getTopology();
        Set<InetAddress> dcEndpoints = Sets.newHashSet();
        Multimap<String, InetAddress> dcEndpointsMap = topology.getDatacenterEndpoints();
        for (String dc : dataCenters) {
            Collection<InetAddress> c = dcEndpointsMap.get(dc);
            if (c != null)
                dcEndpoints.addAll(c);
        }
        return Sets.intersection(neighbors, dcEndpoints);
    } else if (hosts != null && !hosts.isEmpty()) {
        Set<InetAddress> specifiedHost = new HashSet<>();
        for (final String host : hosts) {
            try {
                final InetAddress endpoint = InetAddress.getByName(host.trim());
                if (endpoint.equals(FBUtilities.getBroadcastAddress()) || neighbors.contains(endpoint))
                    specifiedHost.add(endpoint);
            } catch (UnknownHostException e) {
                throw new IllegalArgumentException("Unknown host specified " + host, e);
            }
        }

        if (!specifiedHost.contains(FBUtilities.getBroadcastAddress()))
            throw new IllegalArgumentException("The current host must be part of the repair");

        if (specifiedHost.size() <= 1) {
            String msg = "Repair requires at least two endpoints that are neighbours before it can continue, the endpoint used for this repair is %s, "
                    + "other available neighbours are %s but these neighbours were not part of the supplied list of hosts to use during the repair (%s).";
            throw new IllegalArgumentException(String.format(msg, specifiedHost, neighbors, hosts));
        }

        specifiedHost.remove(FBUtilities.getBroadcastAddress());
        return specifiedHost;
    }

    return neighbors;
}
From source file:com.google.acai.Dependencies.java
/**
 * Returns the set of services which {@code testingService} depends upon.
 */
private static ImmutableSet<TestingService> getDependencies(TestingService testingService,
        Multimap<Class<? extends TestingService>, TestingService> servicesByClass) {
    if (!testingService.getClass().isAnnotationPresent(DependsOn.class)) {
        return ImmutableSet.of();
    }
    ImmutableSet.Builder<TestingService> dependencies = ImmutableSet.builder();
    DependsOn dependsOn = testingService.getClass().getAnnotation(DependsOn.class);
    for (Class<? extends TestingService> serviceClass : dependsOn.value()) {
        dependencies.addAll(servicesByClass.get(serviceClass));
    }
    return dependencies.build();
}
From source file:org.icgc.dcc.release.job.summarize.function.CreateGeneSummary.java
/**
 * @return <pre>
 * {
 *   "_project_id": "P1",
 *   "_summary": {
 *     "_affected_donor_count": 4,
 *     "_available_data_type": [
 *       "ssm",
 *       "sgv"
 *     ]
 *   }
 * }
 * </pre>
 */
private static ObjectNode createProjectEntry(String projectId, Map<String, Integer> projectToDonors,
        Multimap<String, FeatureType> projectAvailableTypes) {
    val projectEntry = createObject();
    projectEntry.put(GENE_PROJECT_PROJECT_ID, projectId);
    val summaryEntry = projectEntry.with(GENE_PROJECT_SUMMARY);
    val donorCount = projectToDonors.get(projectId);
    summaryEntry.put(AFFECTED_DONOR_COUNT, donorCount);

    val availableTypes = summaryEntry.withArray(AVAILABLE_DATA_TYPES);
    for (val type : newTreeSet(projectAvailableTypes.get(projectId))) {
        availableTypes.add(type.getId());
    }

    return projectEntry;
}
From source file:es.usc.citius.composit.cli.command.GraphCommand.java
public static Multimap<Operation<Concept>, Operation<Concept>> buildOperationGraph(
        ServiceProvider<Concept> provider, SetMatchFunction<Concept, ?> matcher) {
    Multimap<Operation<Concept>, Operation<Concept>> mmap = HashMultimap.create();
    // Find match between all services
    for (Operation<Concept> source : provider.getOperations()) {
        for (Operation<Concept> target : provider.getOperations()) {
            if (source.getID().equals(target.getID()))
                continue;
            // Output -> Input match?
            if (!matcher.partialMatch(source.getSignature().getOutputs(), target.getSignature().getInputs())
                    .getMatchTable().isEmpty()) {
                // Multimap.get returns a live view, so adding to it records the source -> target edge in mmap.
                mmap.get(source).add(target);
            }
        }
    }
    return mmap;
}
From source file:co.cask.cdap.internal.app.runtime.plugin.PluginClassLoaders.java
/**
 * Returns a {@link ClassLoader} that only allows loading of plugin classes and plugin exported classes.
 * It should only be used in context when a single ClassLoader is needed to load all different kinds of user classes
 * (e.g. in MapReduce/Spark).
 */
public static ClassLoader createFilteredPluginsClassLoader(Map<String, Plugin> plugins,
        @Nullable PluginInstantiator pluginInstantiator) {
    if (plugins.isEmpty() || pluginInstantiator == null) {
        return new CombineClassLoader(null, ImmutableList.<ClassLoader>of());
    }

    try {
        // Gather all explicitly used plugin class names. It is needed for external plugin case.
        Multimap<Plugin, String> artifactPluginClasses = getArtifactPluginClasses(plugins);
        List<ClassLoader> pluginClassLoaders = new ArrayList<>();
        for (Plugin plugin : plugins.values()) {
            ClassLoader pluginClassLoader = pluginInstantiator.getArtifactClassLoader(plugin.getArtifactId());
            if (pluginClassLoader instanceof PluginClassLoader) {
                // A ClassLoader to allow loading of all plugin classes used by the program.
                Collection<String> pluginClasses = artifactPluginClasses.get(plugin);
                if (!pluginClasses.isEmpty()) {
                    pluginClassLoaders.add(createClassFilteredClassLoader(pluginClasses, pluginClassLoader));
                }

                // A ClassLoader to allow all export package classes to be loadable.
                pluginClassLoaders.add(((PluginClassLoader) pluginClassLoader).getExportPackagesClassLoader());
            }
        }
        return new CombineClassLoader(null, pluginClassLoaders);
    } catch (IOException e) {
        throw Throwables.propagate(e);
    }
}
From source file:org.apache.hadoop.hive.ql.exec.FunctionTask.java
public static void addFunctionResources(FunctionResource[] resources) throws HiveException {
    if (resources != null) {
        Multimap<SessionState.ResourceType, String> mappings = HashMultimap.create();
        for (FunctionResource res : resources) {
            mappings.put(res.getResourceType(), res.getResourceURI());
        }
        for (SessionState.ResourceType type : mappings.keys()) {
            SessionState.get().add_resources(type, mappings.get(type));
        }
    }
}
From source file:org.sonar.java.checks.verifier.CheckVerifier.java
private static void validateIssue(Multimap<Integer, Map<IssueAttribute, String>> expected,
        List<Integer> unexpectedLines, AnalyzerMessage issue, @Nullable RemediationFunction remediationFunction) {
    int line = issue.getLine();
    if (expected.containsKey(line)) {
        Map<IssueAttribute, String> attrs = Iterables.getLast(expected.get(line));
        assertEquals(issue.getMessage(), attrs, IssueAttribute.MESSAGE);

        Double cost = issue.getCost();
        if (cost != null) {
            Preconditions.checkState(remediationFunction != RemediationFunction.CONST,
                    "Rule with constant remediation function shall not provide cost");
            assertEquals(Integer.toString(cost.intValue()), attrs, IssueAttribute.EFFORT_TO_FIX);
        } else if (remediationFunction == RemediationFunction.LINEAR) {
            Fail.fail("A cost should be provided for a rule with linear remediation function");
        }

        validateAnalyzerMessage(attrs, issue);
        expected.remove(line, attrs);
    } else {
        unexpectedLines.add(line);
    }
}
From source file:com.android.tools.lint.checks.ResourceCycleDetector.java
@Nullable
private static List<String> dfs(@NonNull Multimap<String, String> map, @NonNull String from,
        @NonNull Set<String> visiting) {
    visiting.add(from);
    Collection<String> targets = map.get(from);
    if (targets != null && !targets.isEmpty()) {
        for (String target : targets) {
            if (visiting.contains(target)) {
                List<String> chain = Lists.newArrayList();
                chain.add(target);
                chain.add(from);
                return chain;
            }
            List<String> chain = dfs(map, target, visiting);
            if (chain != null) {
                chain.add(from);
                return chain;
            }
        }
    }
    visiting.remove(from);
    return null;
}
From source file:org.lealone.cluster.dht.RangeStreamer.java
/**
 * @param rangesWithSources The ranges we want to fetch (key) and their potential sources (value)
 * @param sourceFilters A (possibly empty) collection of source filters to apply.
 *                      In addition to any filters given here, we always exclude ourselves.
 * @param dbName database name
 * @return Map of source endpoint to collection of ranges
 */
private static Multimap<InetAddress, Range<Token>> getRangeFetchMap(
        Multimap<Range<Token>, InetAddress> rangesWithSources, Collection<ISourceFilter> sourceFilters,
        String dbName) {
    Multimap<InetAddress, Range<Token>> rangeFetchMapMap = HashMultimap.create();
    for (Range<Token> range : rangesWithSources.keySet()) {
        boolean foundSource = false;

        outer: for (InetAddress address : rangesWithSources.get(range)) {
            if (address.equals(Utils.getBroadcastAddress())) {
                // If localhost is a source, we have found one, but we don't add it to the map to avoid streaming locally
                foundSource = true;
                continue;
            }

            for (ISourceFilter filter : sourceFilters) {
                if (!filter.shouldInclude(address))
                    continue outer;
            }

            rangeFetchMapMap.put(address, range);
            foundSource = true;
            break; // ensure we only stream from one other node for each range
        }

        if (!foundSource)
            throw new IllegalStateException("unable to find sufficient sources for streaming range " + range
                    + " in database " + dbName);
    }
    return rangeFetchMapMap;
}
From source file:org.apache.cassandra.dht.RangeStreamer.java
/**
 * @param rangesWithSources The ranges we want to fetch (key) and their potential sources (value)
 * @param sourceFilters A (possibly empty) collection of source filters to apply. In addition to any filters given
 *                      here, we always exclude ourselves.
 * @param keyspace keyspace name
 * @return Map of source endpoint to collection of ranges
 */
private static Multimap<InetAddress, Range<Token>> getRangeFetchMap(
        Multimap<Range<Token>, InetAddress> rangesWithSources, Collection<ISourceFilter> sourceFilters,
        String keyspace) {
    Multimap<InetAddress, Range<Token>> rangeFetchMapMap = HashMultimap.create();
    for (Range<Token> range : rangesWithSources.keySet()) {
        boolean foundSource = false;

        outer: for (InetAddress address : rangesWithSources.get(range)) {
            if (address.equals(FBUtilities.getBroadcastAddress())) {
                // If localhost is a source, we have found one, but we don't add it to the map to avoid streaming locally
                foundSource = true;
                continue;
            }

            for (ISourceFilter filter : sourceFilters) {
                if (!filter.shouldInclude(address))
                    continue outer;
            }

            rangeFetchMapMap.put(address, range);
            foundSource = true;
            break; // ensure we only stream from one other node for each range
        }

        if (!foundSource)
            throw new IllegalStateException("unable to find sufficient sources for streaming range " + range
                    + " in keyspace " + keyspace);
    }
    return rangeFetchMapMap;
}