List of usage examples for java.util.Set.size()
int size();
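Before the project examples below, a minimal self-contained sketch (class name and set contents are hypothetical) of what size() reports: the number of distinct elements currently in the set, with duplicates counted once.

import java.util.HashSet;
import java.util.Set;

public class SetSizeDemo {
    public static void main(String[] args) {
        Set<String> names = new HashSet<>();
        System.out.println(names.size());   // 0 -- an empty set

        names.add("alice");
        names.add("bob");
        names.add("alice");                  // duplicate, not stored again
        System.out.println(names.size());   // 2 -- duplicates are counted once
    }
}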
From source file:net.big_oh.common.utils.CollectionsUtil.java
/**
 * @param <T>
 * @param originalSet
 *            The set of original objects from which combinations of size k
 *            would be generated.
 * @param k
 *            The size of combinations that would be generated.
 * @return Returns the number of possible ways to construct subsets of size
 *         k from the originalSet.
 * @throws IllegalArgumentException
 *             thrown if k is less than zero or greater than the size of the
 *             original set.
 */
public static <T> BigInteger countCombinations(Set<T> originalSet, int k) throws IllegalArgumentException {
    return countCombinations(originalSet.size(), k);
}
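The int-based overload countCombinations(int, int) is not shown above. Assuming it computes the binomial coefficient C(n, k), a hedged sketch of that calculation with BigInteger (class name hypothetical) could look like this:

import java.math.BigInteger;

public class CombinationsSketch {
    // Hypothetical sketch of the int-based overload: C(n, k), the number of
    // k-element subsets of an n-element set.
    public static BigInteger countCombinations(int n, int k) {
        if (k < 0 || k > n) {
            throw new IllegalArgumentException("k must be between 0 and n (inclusive)");
        }
        BigInteger result = BigInteger.ONE;
        for (int i = 1; i <= k; i++) {
            // Multiply by (n - k + i), then divide by i; the division is exact at every step.
            result = result.multiply(BigInteger.valueOf(n - k + i))
                           .divide(BigInteger.valueOf(i));
        }
        return result;
    }

    public static void main(String[] args) {
        System.out.println(countCombinations(5, 2)); // 10
    }
}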
From source file:de.metas.ui.web.config.SwaggerConfig.java
@SuppressWarnings("unused")
private static final Predicate<RequestHandler> basePackages(final Class<?>... classes) {
    final Set<Predicate<RequestHandler>> predicates = new HashSet<>(classes.length);
    for (final Class<?> clazz : classes) {
        final String packageName = clazz.getPackage().getName();
        predicates.add(RequestHandlerSelectors.basePackage(packageName));
    }

    if (predicates.size() == 1) {
        return predicates.iterator().next();
    }
    return Predicates.or(predicates);
}
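The size() == 1 check is a shortcut that returns the lone predicate instead of wrapping it in Predicates.or. The same pattern can be sketched with plain java.util.function.Predicate, independent of springfox and Guava (class and method names below are hypothetical):

import java.util.LinkedHashSet;
import java.util.Set;
import java.util.function.Predicate;

public class PredicateCombiner {
    // Hypothetical sketch: OR together a set of predicates, skipping the
    // wrapper when the set holds exactly one element (same idea as above).
    static <T> Predicate<T> anyOf(Set<Predicate<T>> predicates) {
        if (predicates.size() == 1) {
            return predicates.iterator().next();
        }
        return predicates.stream().reduce(x -> false, Predicate::or);
    }

    public static void main(String[] args) {
        Set<Predicate<String>> predicates = new LinkedHashSet<>();
        predicates.add(s -> s.startsWith("a"));
        predicates.add(s -> s.endsWith("z"));
        System.out.println(anyOf(predicates).test("alpha")); // true
        System.out.println(anyOf(predicates).test("none"));  // false
    }
}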
From source file:org.cloudfoundry.reconfiguration.play.Configurer.java
static void configure(ApplicationConfiguration applicationConfiguration, Cloud cloud,
        PropertySetter propertySetter) {
    propertySetter.setCloudProperties();

    Set<String> databaseNames = applicationConfiguration.getDatabaseNames();
    propertySetter.setDatabaseProperties(databaseNames);

    if (databaseNames.isEmpty()) {
        LOGGER.info("No databases found. Skipping auto-reconfiguration.");
    } else if (databaseNames.size() > 1) {
        LOGGER.warning(String.format("Multiple (%d) databases found. Skipping auto-reconfiguration.",
                databaseNames.size()));
    } else {
        processDatabase(applicationConfiguration, cloud, databaseNames.iterator().next());
    }
}
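Stripped of the Cloud Foundry specifics, the zero/one/many decision on a Set could be sketched like this (class, method, and messages are hypothetical):

import java.util.Set;

public class DatabaseSelection {
    // Hypothetical sketch: act only when exactly one candidate is present.
    static void pickSingleDatabase(Set<String> databaseNames) {
        if (databaseNames.isEmpty()) {
            System.out.println("No databases found, nothing to reconfigure");
        } else if (databaseNames.size() > 1) {
            System.out.printf("Multiple (%d) databases found, skipping%n", databaseNames.size());
        } else {
            System.out.println("Reconfiguring " + databaseNames.iterator().next());
        }
    }

    public static void main(String[] args) {
        pickSingleDatabase(Set.of("users-db"));
        pickSingleDatabase(Set.of("users-db", "orders-db"));
    }
}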
From source file:it.geosolutions.geostore.services.rest.impl.RESTServiceImpl.java
/**
 * Given a group Set, returns a List that contains all the group names.
 *
 * @param groups
 * @return
 */
public static List<String> extratcGroupNames(Set<UserGroup> groups) {
    List<String> groupNames = new ArrayList<>(groups.size() + 1);
    for (UserGroup ug : groups) {
        groupNames.add(ug.getGroupName());
    }
    return groupNames;
}
From source file:grails.plugin.searchable.internal.lucene.LuceneUtils.java
/**
 * Returns an array of {@link Term}s by parsing the given query string. Since Lucene's query parser is used,
 * special query characters and words (OR / AND) are not included in the returned terms.
 *
 * @param defaultField The default term field, cannot be null
 * @param queryString the query string to parse, cannot be null
 * @param analyzer the Analyzer instance, may be null in which case Lucene's StandardAnalyzer is used
 * @return the Term array (field + term pairs)
 * @throws org.apache.lucene.queryParser.ParseException if the query has invalid syntax
 */
public static Term[] realTermsForQueryString(String defaultField, String queryString, Analyzer analyzer)
        throws ParseException {
    Assert.notNull(defaultField, "defaultField cannot be null");
    Assert.notNull(queryString, "queryString cannot be null");
    if (analyzer == null) {
        analyzer = new StandardAnalyzer();
    }
    QueryParser queryParser = new QueryParser(defaultField, analyzer);
    Query query = queryParser.parse(queryString);

    Set terms = new ListNotSet();
    query.extractTerms(terms);

    Term[] termsArray = new Term[terms.size()];
    int i = 0;
    for (Iterator iter = terms.iterator(); iter.hasNext();) {
        Term term = (Term) iter.next();
        termsArray[i++] = term;
    }
    return termsArray;
}
From source file:grails.plugin.searchable.internal.lucene.LuceneUtils.java
/**
 * Returns a list of terms by parsing the given query string - special query characters and words (OR/AND) are
 * not included in the returned list.
 *
 * @param queryString the query string to parse
 * @param analyzer the Analyzer instance, may be null in which case Lucene's StandardAnalyzer is used
 * @return a list of text terms
 * @throws org.apache.lucene.queryParser.ParseException if the query is invalid
 */
public static String[] termsForQueryString(String queryString, Analyzer analyzer) throws ParseException {
    if (analyzer == null) {
        analyzer = new StandardAnalyzer();
    }
    final String defaultField = "$termsForQueryString_defaultField$";
    QueryParser queryParser = new QueryParser(defaultField, analyzer);
    Query query = queryParser.parse(queryString);

    Set terms = new ListNotSet();
    query.extractTerms(terms);

    String[] termsArray = new String[terms.size()];
    int i = 0;
    for (Iterator iter = terms.iterator(); iter.hasNext();) {
        termsArray[i++] = ((Term) iter.next()).text();
    }
    return termsArray;
}
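Both helpers above allocate the result array with terms.size() and copy element by element. With a typed Set the same copy can also be expressed through Collection.toArray, as in this small hypothetical sketch:

import java.util.LinkedHashSet;
import java.util.Set;

public class SetToArrayDemo {
    public static void main(String[] args) {
        Set<String> terms = new LinkedHashSet<>();
        terms.add("lucene");
        terms.add("query");

        // Equivalent to allocating new String[terms.size()] and copying in a loop.
        String[] termsArray = terms.toArray(new String[0]);
        System.out.println(termsArray.length); // 2
    }
}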
From source file:io.lavagna.service.CardDataRepository.java
private static List<String> toStringList(Set<?> s) {
    List<String> r = new ArrayList<>(s.size());
    for (Object e : s) {
        r.add(e.toString());
    }
    return r;
}
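A hypothetical call site for a helper like this; pre-sizing the ArrayList with s.size() means the copy never triggers a resize:

import java.util.ArrayList;
import java.util.EnumSet;
import java.util.List;
import java.util.Set;

public class ToStringListUsage {
    enum Status { OPEN, IN_PROGRESS, CLOSED }

    // Same idea as above: pre-size the list with s.size() so the copy never resizes.
    static List<String> toStringList(Set<?> s) {
        List<String> r = new ArrayList<>(s.size());
        for (Object e : s) {
            r.add(e.toString());
        }
        return r;
    }

    public static void main(String[] args) {
        // Hypothetical call site converting an enum set to its string labels.
        List<String> labels = toStringList(EnumSet.of(Status.OPEN, Status.CLOSED));
        System.out.println(labels); // [OPEN, CLOSED]
    }
}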
From source file:com.datatorrent.contrib.kafka.KafkaMetadataUtil.java
/**
 * @param brokerSet
 * @param topic
 * @return TopicMetadata for this specific topic via the brokerList<br>
 *         null if topic is not found
 */
public static TopicMetadata getTopicMetadata(Set<String> brokerSet, String topic) {
    SimpleConsumer mdConsumer = null;
    if (brokerSet == null || brokerSet.size() == 0) {
        return null;
    }
    try {
        for (Iterator<String> iterator = brokerSet.iterator(); iterator.hasNext();) {
            String broker = iterator.next();
            logger.debug("Try to get Metadata for topic {} broker {}", topic, broker);
            try {
                mdConsumer = new SimpleConsumer(broker.split(":")[0], Integer.parseInt(broker.split(":")[1]),
                        timeout, bufferSize, mdClientId);
                List<String> topics = new ArrayList<String>(1);
                topics.add(topic);
                kafka.javaapi.TopicMetadataRequest req = new kafka.javaapi.TopicMetadataRequest(topics);
                TopicMetadataResponse resp = mdConsumer.send(req);
                List<TopicMetadata> metaData = resp.topicsMetadata();
                for (TopicMetadata item : metaData) {
                    // There is at most 1 topic for this method
                    return item;
                }
            } catch (NumberFormatException e) {
                throw new IllegalArgumentException("Wrong format for broker url, should be \"broker1:port1\"");
            } catch (Exception e) {
                logger.warn("Broker {} is unavailable or in bad state!", broker);
                // skip and try next broker
            }
        }
        return null;
    } finally {
        if (mdConsumer != null) {
            mdConsumer.close();
        }
    }
}
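The guard at the top (null or size() == 0) is a defensive check before iterating; a self-contained sketch of the same pattern (class name and broker strings are hypothetical):

import java.util.Collections;
import java.util.Set;

public class BrokerGuardDemo {
    // Hypothetical sketch of the guard above: treat null and empty broker sets the same way.
    static String firstBrokerOrNull(Set<String> brokerSet) {
        if (brokerSet == null || brokerSet.size() == 0) {  // brokerSet.isEmpty() is equivalent
            return null;
        }
        return brokerSet.iterator().next();
    }

    public static void main(String[] args) {
        System.out.println(firstBrokerOrNull(null));                    // null
        System.out.println(firstBrokerOrNull(Collections.emptySet()));  // null
        System.out.println(firstBrokerOrNull(Set.of("kafka01:9092")));  // kafka01:9092
    }
}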
From source file:com.inclouds.hbase.utils.RegionServerPoker.java
/**
 * Selects a random region.
 *
 * @param set
 * @return
 */
private static HRegionInfo select(Set<HRegionInfo> set) {
    Random r = new Random();
    int i = r.nextInt(set.size());
    return (HRegionInfo) set.toArray()[i];
}
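set.toArray() copies the whole set just to read one element; advancing an iterator index-by-index selects the same random element without the copy. A hedged sketch with a plain String set (class and element names are hypothetical):

import java.util.Iterator;
import java.util.Random;
import java.util.Set;

public class RandomElementDemo {
    // Hypothetical sketch: pick a uniformly random element without copying the set to an array.
    static <T> T pickRandom(Set<T> set) {
        int index = new Random().nextInt(set.size()); // throws IllegalArgumentException if the set is empty
        Iterator<T> it = set.iterator();
        for (int i = 0; i < index; i++) {
            it.next();
        }
        return it.next();
    }

    public static void main(String[] args) {
        System.out.println(pickRandom(Set.of("region-a", "region-b", "region-c")));
    }
}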
From source file:com.vmware.admiral.compute.container.volume.VolumeUtil.java
/**
 * Creates additional affinity rules between container descriptions which share
 * local volumes. Each container group should be deployed on a single host.
 */
public static void applyLocalNamedVolumeConstraints(Collection<ComponentDescription> componentDescriptions) {

    Map<String, ContainerVolumeDescription> volumes = filterDescriptions(ContainerVolumeDescription.class,
            componentDescriptions);

    List<String> localVolumes = volumes.values().stream().filter(v -> DEFAULT_VOLUME_DRIVER.equals(v.driver))
            .map(v -> v.name).collect(Collectors.toList());

    if (localVolumes.isEmpty()) {
        return;
    }

    Map<String, ContainerDescription> containers = filterDescriptions(ContainerDescription.class,
            componentDescriptions);

    // sort containers by local volume: each set is a group of container names
    // that share a particular local volume
    List<Set<String>> localVolumeContainers = localVolumes.stream()
            .map(v -> filterByVolume(v, containers.values())).filter(s -> !s.isEmpty())
            .collect(Collectors.toList());

    if (localVolumeContainers.isEmpty()) {
        return;
    }

    /** Merge sets of containers sharing local volumes
     *
     *  C1  C2  C3  C4  C5  C6
     *    \ /  \ /   |    \ /
     *    L1    L2   L3    L4
     *
     *  Input: [C1, C2], [C2, C3], [C4], [C5, C6]
     *  Output: [C1, C2, C3], [C4], [C5, C6]
     */
    localVolumeContainers = mergeSets(localVolumeContainers);

    Map<String, List<ContainerVolumeDescription>> containerToVolumes = containers.values().stream()
            .collect(Collectors.toMap(cd -> cd.name, cd -> filterVolumes(cd, volumes.values())));

    Map<String, Integer> containerToDriverCount = containerToVolumes.entrySet().stream()
            .collect(Collectors.toMap(e -> e.getKey(),
                    e -> e.getValue().stream().map(vd -> vd.driver).collect(Collectors.toSet()).size()));

    for (Set<String> s : localVolumeContainers) {
        if (s.size() > 1) {
            // find the container with the highest number of required drivers
            int max = s.stream().map(cn -> containerToDriverCount.get(cn))
                    .max((vc1, vc2) -> Integer.compare(vc1, vc2)).get();
            Set<String> maxDrivers = s.stream().filter(cn -> containerToDriverCount.get(cn) == max)
                    .collect(Collectors.toSet());

            String maxCont = maxDrivers.iterator().next();
            s.remove(maxCont);

            s.stream().forEach(cn -> addAffinity(maxCont, containers.get(cn)));
        }
    }
}
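mergeSets is not shown above. Assuming it unions every group of sets that share at least one container name, as the diagram in the comment suggests, a minimal hypothetical sketch could be:

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class MergeSetsSketch {
    // Hypothetical sketch: union sets that share at least one element,
    // so [C1, C2], [C2, C3], [C4], [C5, C6] becomes [C1, C2, C3], [C4], [C5, C6].
    static List<Set<String>> mergeSets(List<Set<String>> input) {
        List<Set<String>> merged = new ArrayList<>();
        for (Set<String> current : input) {
            Set<String> target = new HashSet<>(current);
            // Pull in (and remove) every already-merged set that overlaps the current one.
            merged.removeIf(existing -> {
                if (!Collections.disjoint(existing, target)) {
                    target.addAll(existing);
                    return true;
                }
                return false;
            });
            merged.add(target);
        }
        return merged;
    }

    public static void main(String[] args) {
        List<Set<String>> groups = List.of(
                Set.of("C1", "C2"),
                Set.of("C2", "C3"),
                Set.of("C4"),
                Set.of("C5", "C6"));
        System.out.println(mergeSets(groups)); // e.g. [[C1, C2, C3], [C4], [C5, C6]] (element order may vary)
    }
}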