List of usage examples for com.google.common.collect Multimap get
Collection<V> get(@Nullable K key);
From source file:org.jclouds.chef.predicates.CookbookVersionPredicates.java
/** * Note that the default recipe of a cookbook is its name. Otherwise, you * prefix the recipe with the name of the cookbook. ex. {@code apache2} will * be the default recipe where {@code apache2::mod_proxy} is a specific one * in the cookbook./*from www . ja v a 2 s . com*/ * * @param recipes * names of the recipes. * @return true if the cookbook version contains a recipe in the list. */ public static Predicate<CookbookVersion> containsRecipes(String... recipes) { checkNotNull(recipes, "recipes must be defined"); final Multimap<String, String> search = LinkedListMultimap.create(); for (String recipe : recipes) { if (recipe.indexOf("::") != -1) { Iterable<String> nameRecipe = Splitter.on("::").split(recipe); search.put(get(nameRecipe, 0), get(nameRecipe, 1) + ".rb"); } else { search.put(recipe, "default.rb"); } } return new Predicate<CookbookVersion>() { @Override public boolean apply(final CookbookVersion cookbookVersion) { return search.containsKey(cookbookVersion.getCookbookName()) && any(search.get(cookbookVersion.getCookbookName()), new Predicate<String>() { @Override public boolean apply(final String recipeName) { return any(cookbookVersion.getRecipes(), new Predicate<Resource>() { @Override public boolean apply(Resource resource) { return resource.getName().equals(recipeName); } }); } }); } @Override public String toString() { return "containsRecipes(" + search + ")"; } }; }
From source file:org.apache.hadoop.hive.ql.optimizer.GlobalLimitOptimizer.java
/** * Check the limit number in all sub queries * * @return if there is one and only one limit for all subqueries, return the limit * if there is no limit, return 0 * otherwise, return null/* w w w . jav a2 s. c o m*/ */ private static LimitOperator checkQbpForGlobalLimit(TableScanOperator ts) { Set<Class<? extends Operator<?>>> searchedClasses = new ImmutableSet.Builder<Class<? extends Operator<?>>>() .add(ReduceSinkOperator.class).add(GroupByOperator.class).add(FilterOperator.class) .add(LimitOperator.class).build(); Multimap<Class<? extends Operator<?>>, Operator<?>> ops = OperatorUtils.classifyOperators(ts, searchedClasses); // To apply this optimization, in the input query: // - There cannot exist any order by/sort by clause, // thus existsOrdering should be false. // - There cannot exist any distribute by clause, thus // existsPartitioning should be false. // - There cannot exist any cluster by clause, thus // existsOrdering AND existsPartitioning should be false. for (Operator<?> op : ops.get(ReduceSinkOperator.class)) { ReduceSinkDesc reduceSinkConf = ((ReduceSinkOperator) op).getConf(); if (reduceSinkConf.isOrdering() || reduceSinkConf.isPartitioning()) { return null; } } // - There cannot exist any (distinct) aggregate. for (Operator<?> op : ops.get(GroupByOperator.class)) { GroupByDesc groupByConf = ((GroupByOperator) op).getConf(); if (groupByConf.isAggregate() || groupByConf.isDistinct()) { return null; } } // - There cannot exist any sampling predicate. for (Operator<?> op : ops.get(FilterOperator.class)) { FilterDesc filterConf = ((FilterOperator) op).getConf(); if (filterConf.getIsSamplingPred()) { return null; } } // If there is one and only one limit starting at op, return the limit // If there is no limit, return 0 // Otherwise, return null Collection<Operator<?>> limitOps = ops.get(LimitOperator.class); if (limitOps.size() == 1) { return (LimitOperator) limitOps.iterator().next(); } else if (limitOps.size() == 0) { return null; } return null; }
From source file:org.apache.streams.datasift.csdl.DatasiftCsdlUtil.java
public static String csdlMultifieldMatch(Multimap<String, String> require1, Multimap<String, String> require2, Multimap<String, String> exclude) throws Exception { StringBuilder csdlBuilder = new StringBuilder(); String require1String; String require2String = null; String excludeString = null;// w ww . j a v a2 s .co m List<String> require1clauses = Lists.newArrayList(); for (String includeField : require1.keySet()) { StringBuilder clauseBuilder = new StringBuilder(); Collection<String> values = require1.get(includeField); String match_clause = null; if (values.size() > 1) match_clause = "contains_any"; else if (values.size() == 1) match_clause = "contains"; if (match_clause != null) { clauseBuilder.append(includeField + " " + match_clause + " \""); Joiner.on(",").skipNulls().appendTo(clauseBuilder, values); clauseBuilder.append("\""); require1clauses.add(clauseBuilder.toString()); } } require1String = "(\n" + Joiner.on("\nOR\n").skipNulls().join(require1clauses) + "\n)\n"; if (require2 != null && require2.keySet().size() > 0) { List<String> require2clauses = Lists.newArrayList(); for (String includeField : require2.keySet()) { StringBuilder clauseBuilder = new StringBuilder(); Collection<String> values = require2.get(includeField); String match_clause = null; if (values.size() > 1) match_clause = "contains_any"; else if (values.size() == 1) match_clause = "contains"; if (match_clause != null) { clauseBuilder.append(includeField + " " + match_clause + " \""); Joiner.on(",").skipNulls().appendTo(clauseBuilder, values); clauseBuilder.append("\""); require2clauses.add(clauseBuilder.toString()); } } require2String = "(\n" + Joiner.on("\nOR\n").skipNulls().join(require2clauses) + "\n)\n"; } if (exclude != null && exclude.keySet().size() > 0) { List<String> excludeclauses = Lists.newArrayList(); for (String includeField : exclude.keySet()) { StringBuilder clauseBuilder = new StringBuilder(); Collection<String> values = exclude.get(includeField); String match_clause = 
null; if (values.size() > 1) match_clause = "contains_any"; else if (values.size() == 1) match_clause = "contains"; if (match_clause != null) { clauseBuilder.append(includeField + " " + match_clause + " \""); Joiner.on(",").skipNulls().appendTo(clauseBuilder, values); clauseBuilder.append("\""); excludeclauses.add(clauseBuilder.toString()); } } excludeString = "(\n" + Joiner.on("\nOR\n").skipNulls().join(excludeclauses) + "\n)\n"; } Joiner.on("AND\n").skipNulls().appendTo(csdlBuilder, require1String, require2String); csdlBuilder.append("AND NOT\n" + excludeString); log.debug(csdlBuilder.toString()); return csdlBuilder.toString(); }
From source file:org.gradle.model.internal.manage.binding.DefaultStructBindingsStore.java
private static <T> ManagedProperty<T> createManagedProperty(StructBindingExtractionContext<?> extractionContext, String propertyName, ModelSchema<T> propertySchema, Multimap<PropertyAccessorType, StructMethodBinding> accessors) { boolean writable = accessors.containsKey(SETTER); boolean declaredAsUnmanaged = isDeclaredAsHavingUnmanagedType(accessors.get(GET_GETTER)) || isDeclaredAsHavingUnmanagedType(accessors.get(IS_GETTER)); boolean internal = !extractionContext.getPublicSchema().hasProperty(propertyName); validateManagedProperty(extractionContext, propertyName, propertySchema, writable, declaredAsUnmanaged); return new ManagedProperty<T>(propertyName, propertySchema.getType(), writable, declaredAsUnmanaged, internal);//from www. j a v a2 s . c o m }
From source file:com.eucalyptus.cloudwatch.common.internal.domain.metricdata.MetricManager.java
/**
 * Persists all metric entities, batching by entity class: each class gets its
 * own transactions of at most METRIC_DATA_NUM_DB_OPERATIONS_PER_TRANSACTION
 * entities, with a session flush/clear every
 * METRIC_DATA_NUM_DB_OPERATIONS_UNTIL_SESSION_FLUSH operations.
 *
 * @param metricMap metric entities grouped by their entity class
 */
private static void addManyMetrics(Multimap<Class, MetricEntity> metricMap) {
    for (Class entityClass : metricMap.keySet()) {
        for (List<MetricEntity> batch : Iterables.partition(metricMap.get(entityClass),
                METRIC_DATA_NUM_DB_OPERATIONS_PER_TRANSACTION)) {
            try (final TransactionResource db = Entities.transactionFor(entityClass)) {
                int opCount = 0;
                for (MetricEntity entity : batch) {
                    opCount++;
                    // Flush/clear BEFORE the persist that crosses the threshold,
                    // matching the original operation order.
                    if (opCount % METRIC_DATA_NUM_DB_OPERATIONS_UNTIL_SESSION_FLUSH == 0) {
                        Entities.flushSession(entityClass);
                        Entities.clearSession(entityClass);
                    }
                    Entities.persist(entity);
                }
                db.commit();
            }
        }
    }
}
From source file:com.android.tools.idea.gradle.project.ProjectDiagnostics.java
/**
 * Detects modules whose module files live in the same directory and reports a
 * project-structure error for each offending group of modules.
 *
 * <p>When the "project subset" setting is enabled and cached module data is
 * available, the error is reported with a hyperlink that lets the user add or
 * remove the clashing modules; otherwise a plain error message is added.
 *
 * @param project the project whose module layout is checked
 */
public static void findAndReportStructureIssues(@NotNull Project project) {
    // Group modules by the directory containing their module file.
    Multimap<String, Module> modulesByPath = ArrayListMultimap.create();
    ModuleManager moduleManager = ModuleManager.getInstance(project);
    for (Module module : moduleManager.getModules()) {
        File moduleFilePath = new File(toSystemDependentName(module.getModuleFilePath()));
        File moduleDirPath = moduleFilePath.getParentFile();
        if (moduleDirPath != null) {
            modulesByPath.put(moduleDirPath.getPath(), module);
        }
    }
    for (String modulePath : modulesByPath.keySet()) {
        Collection<Module> modules = modulesByPath.get(modulePath);
        if (modules.size() <= 1) {
            continue; // unique path — nothing to report
        }
        ProjectSyncMessages messages = ProjectSyncMessages.getInstance(project);
        StringBuilder msg = new StringBuilder("The modules ");
        Set<String> moduleNames = Sets.newHashSet();
        int i = 0;
        for (Module module : modules) {
            if (i++ != 0) {
                msg.append(", ");
            }
            String name = module.getName();
            moduleNames.add(name);
            msg.append("'").append(name).append("'");
        }
        // Fixed grammar in the user-facing message: "the same directory"
        // (was "same directory").
        msg.append(" point to the same directory in the file system.");
        String[] lines = { msg.toString(), "Each module has to have a unique path." };
        Message message = new Message(PROJECT_STRUCTURE_ISSUES, Message.Type.ERROR, lines);
        // Collect the cached module nodes matching the clashing modules so the
        // error can offer an add/remove-modules quick fix.
        List<DataNode<ModuleData>> modulesToDisplayInDialog = Lists.newArrayList();
        if (ProjectSubset.isSettingEnabled()) {
            ProjectSubset subset = ProjectSubset.getInstance(project);
            Collection<DataNode<ModuleData>> cachedModules = subset.getCachedModuleData();
            if (cachedModules != null) {
                for (DataNode<ModuleData> moduleNode : cachedModules) {
                    if (moduleNames.contains(moduleNode.getData().getExternalName())) {
                        modulesToDisplayInDialog.add(moduleNode);
                    }
                }
            }
        }
        if (modulesToDisplayInDialog.isEmpty()) {
            messages.add(message);
        } else {
            messages.add(message, new AddOrRemoveModulesHyperlink());
        }
    }
}
From source file:com.foundationdb.util.Strings.java
public static <T> String toString(Multimap<T, ?> map) { StringBuilder sb = new StringBuilder(); for (Iterator<T> keysIter = map.keySet().iterator(); keysIter.hasNext();) { T key = keysIter.next();//from w w w. j a v a 2 s. c o m sb.append(key).append(" => "); for (Iterator<?> valsIter = map.get(key).iterator(); valsIter.hasNext();) { sb.append(valsIter.next()); if (valsIter.hasNext()) sb.append(", "); } if (keysIter.hasNext()) sb.append(nl()); } return sb.toString(); }
From source file:ubicrypt.core.FileSynchronizer.java
/**
 * Returns only the entries that are fully in conflict: an entry is kept when
 * every one of its file versions conflicts with at least one other version of
 * the same entry (i.e. no version is conflict-free).
 *
 * @param all all file versions, keyed by file id
 * @return the conflicting entries, preserving iteration order
 */
static Multimap<UUID, FileProvenience> conflicts(final Multimap<UUID, FileProvenience> all) {
    return all.asMap().entrySet().stream()
            // Keep an entry only when no version is conflict-free: the inner
            // noneMatch is true for a version without conflicts, so the outer
            // noneMatch requires every version to have at least one conflict.
            .filter(entry -> entry.getValue().stream()
                    .noneMatch(fp -> entry.getValue().stream()
                            .noneMatch(
                                    fp2 -> fp.getFile().compare(fp2.getFile()) == VClock.Comparison.conflict)))
            .collect(LinkedHashMultimap::create,
                    (result, entry) -> result.putAll(entry.getKey(), all.get(entry.getKey())),
                    (m1, m2) -> m1.putAll(m2));
}
From source file:eu.interedition.collatex.medite.Matches.java
/**
 * Finds token matches between the variant graph and the witness tokens
 * indexed by the suffix tree, grouping consecutive token matches into
 * phrases and sorting them with the maximal-unique-match ordering.
 *
 * @param vertices       variant graph vertices grouped by rank
 * @param suffixTree     suffix tree over the witness tokens
 * @param matchEvaluator scoring function used by the final ordering
 * @return the sorted matches
 */
public static Matches between(VariantGraph.Vertex[][] vertices, SuffixTree<Token> suffixTree,
        Function<SortedSet<VertexMatch.WithTokenIndex>, Integer> matchEvaluator) {
    // Match threads that end at each rank of the variant graph.
    final Multimap<Integer, MatchThreadElement> matchThreads = HashMultimap.create();
    for (int rank = 0; rank < vertices.length; rank++) {
        // Start a new thread at every vertex of this rank that yields a match.
        for (VariantGraph.Vertex vertex : vertices[rank]) {
            final MatchThreadElement matchThreadElement = new MatchThreadElement(suffixTree).advance(vertex,
                    rank);
            if (matchThreadElement != null) {
                matchThreads.put(rank, matchThreadElement);
            }
        }
        // Try to extend every thread from the previous rank into this rank.
        for (MatchThreadElement matchThreadElement : matchThreads.get(rank - 1)) {
            for (VariantGraph.Vertex vertex : vertices[rank]) {
                final MatchThreadElement advanced = matchThreadElement.advance(vertex, rank);
                if (advanced != null) {
                    matchThreads.put(rank, advanced);
                }
            }
        }
    }
    // Convert each thread into phrases of consecutive token indices.
    final Matches matches = new Matches(matchThreads.size());
    for (MatchThreadElement matchThreadElement : matchThreads.values()) {
        final List<SortedSet<VertexMatch.WithTokenIndex>> threadPhrases = Lists.newArrayList();
        boolean firstElement = true;
        for (MatchThreadElement threadElement : matchThreadElement.thread()) {
            final SuffixTree<Token>.EquivalenceClass equivalenceClass = threadElement.cursor.matchedClass();
            for (int mc = 0; mc < equivalenceClass.length; mc++) {
                final int tokenCandidate = equivalenceClass.members[mc];
                if (firstElement) {
                    // Each token of the thread's first element seeds its own phrase.
                    final SortedSet<VertexMatch.WithTokenIndex> phrase = new TreeSet<VertexMatch.WithTokenIndex>();
                    phrase.add(new VertexMatch.WithTokenIndex(threadElement.vertex, threadElement.vertexRank,
                            tokenCandidate));
                    threadPhrases.add(phrase);
                } else {
                    // Extend only phrases whose last token directly precedes this candidate.
                    for (SortedSet<VertexMatch.WithTokenIndex> phrase : threadPhrases) {
                        if ((phrase.last().token + 1) == tokenCandidate) {
                            phrase.add(new VertexMatch.WithTokenIndex(threadElement.vertex,
                                    threadElement.vertexRank, tokenCandidate));
                        }
                    }
                }
            }
            firstElement = false;
        }
        matches.addAll(threadPhrases);
    }
    Collections.sort(matches, maximalUniqueMatchOrdering(matchEvaluator));
    return matches;
}
From source file:org.sonar.server.permission.ws.GroupsAction.java
/**
 * Builds the groups WS response from the current page of groups, their
 * permissions, and the paging information.
 *
 * @param groups           groups of the current page
 * @param groupPermissions permissions to attach to their respective groups
 * @param paging           paging details echoed back in the response
 * @return the assembled protobuf response
 */
private static WsGroupsResponse buildResponse(List<GroupDto> groups, List<GroupPermissionDto> groupPermissions,
        Paging paging) {
    // TreeMultimap keeps each group's permissions sorted and de-duplicated.
    Multimap<Integer, String> permissionsByGroupId = TreeMultimap.create();
    for (GroupPermissionDto permission : groupPermissions) {
        permissionsByGroupId.put(permission.getGroupId(), permission.getRole());
    }

    WsGroupsResponse.Builder response = WsGroupsResponse.newBuilder();
    for (GroupDto group : groups) {
        Group.Builder wsGroup = response.addGroupsBuilder().setName(group.getName());
        // An id of 0 is deliberately not exposed in the response
        // (presumably the virtual "anyone" group — confirm against callers).
        if (group.getId() != 0) {
            wsGroup.setId(String.valueOf(group.getId()));
        }
        setNullable(group.getDescription(), wsGroup::setDescription);
        wsGroup.addAllPermissions(permissionsByGroupId.get(group.getId()));
    }

    response.getPagingBuilder()
            .setPageIndex(paging.pageIndex())
            .setPageSize(paging.pageSize())
            .setTotal(paging.total());
    return response.build();
}