List of usage examples for java.util HashSet size
public int size()
From source file:org.apache.hadoop.hdfs.server.blockmanagement.BlockInfo.java
/** * Returns an array of storages where the replicas are stored *///from ww w . ja v a 2s .c o m protected DatanodeStorageInfo[] getStorages(DatanodeManager datanodeMgr, List<? extends ReplicaBase> replicas, final DatanodeStorage.State state) { int numLocations = replicas.size(); HashSet<DatanodeStorageInfo> set = new HashSet<>(); for (int i = numLocations - 1; i >= 0; i--) { DatanodeStorageInfo desc = datanodeMgr.getStorage(replicas.get(i).getStorageId()); if (desc != null && desc.getState().equals(state)) { set.add(desc); } else { replicas.remove(i); } } DatanodeStorageInfo[] storages = new DatanodeStorageInfo[set.size()]; return set.toArray(storages); }
From source file:org.eurekastreams.server.persistence.DomainGroupMapper.java
/** * Get a String representation the Person.id of all of the Person.ids for coordinators and followers of the input * group.// ww w .j ava 2s. c o m * * @param domainGroup * the DomainGroup to find coordinators and followers for * @return an array of all of the Person.ids for coordinators and followers of the input group */ @SuppressWarnings("unchecked") public Long[] getFollowerAndCoordinatorPersonIds(final DomainGroup domainGroup) { // use a set to eliminate duplicates HashSet<Long> peopleIds = new HashSet<Long>(); Query q = getEntityManager() .createQuery("SELECT pk.followerId FROM GroupFollower WHERE followingId=:groupId") .setParameter("groupId", domainGroup.getId()); peopleIds.addAll(q.getResultList()); q = getEntityManager().createQuery( "SELECT p.id FROM Person p, DomainGroup g WHERE p MEMBER OF g.coordinators AND g.id=:groupId") .setParameter("groupId", domainGroup.getId()); peopleIds.addAll(q.getResultList()); return peopleIds.toArray(new Long[peopleIds.size()]); }
From source file:edu.uga.cs.fluxbuster.clustering.ClusterGenerator.java
/** * Compute a distance matrix from a list of candidate flux domains with * a maximum number of calculation threads. * * @param cfds the list of candidate flux domains * @param maxnumthreads the thread ceiling * @return the vector of values in the distance matrix in row major * order//from w w w . j av a 2s .c o m */ private Vector<Float> computeDistanceMatrixMultiThreaded(List<CandidateFluxDomain> cfds, int maxnumthreads) { Vector<Float> retval = new Vector<Float>(); ThreadFactory tf = Executors.defaultThreadFactory(); double gamma = Double.parseDouble(localprops.getProperty(GAMMAKEY)); ArrayList<Thread> threads = new ArrayList<Thread>(); ArrayList<HashSet<Integer>> threadrows = new ArrayList<HashSet<Integer>>(); int interval = (int) Math.ceil((cfds.size() - 1) / (double) maxnumthreads); int left = 0; int right = cfds.size() - 2; HashSet<Integer> curset = null; boolean addLeftFirst = true; while (left <= right) { if (curset == null) { curset = new HashSet<Integer>(); } if (curset.size() == interval) { threadrows.add(curset); curset = null; } else { if (addLeftFirst) { curset.add(left++); } else { curset.add(right--); } addLeftFirst = !addLeftFirst; if (curset.size() == interval) { continue; } if (addLeftFirst) { curset.add(left++); } else { curset.add(right--); } } } if (curset != null && curset.size() > 0) { threadrows.add(curset); } ArrayList<Vector<Float>> resultsList = new ArrayList<Vector<Float>>(cfds.size()); // this is necessary to make sure that the proper indexes exist in // resultsList before being accessed by the threads for (int i = 0; i < cfds.size() - 1; i++) { resultsList.add(null); } for (int i = 0; i < threadrows.size(); i++) { Thread t = tf.newThread(new DistanceMatrixCalculator(gamma, threadrows.get(i), cfds, resultsList)); threads.add(t); } for (Thread t : threads) { t.start(); } for (Thread t : threads) { try { t.join(); } catch (InterruptedException e) { e.printStackTrace(); } } for (int i = 0; i < resultsList.size(); i++) { 
retval.addAll(resultsList.get(i)); } return retval; }
From source file:org.apache.phoenix.jdbc.SecureUserConnectionsIT.java
/**
 * Verifies that repeated connections from the same Kerberos login produce
 * equivalent ConnectionInfo objects (the HashSet never grows past 1).
 */
@Test
public void testMultipleConnectionsAsSameUser() throws Exception {
    final HashSet<ConnectionInfo> connections = new HashSet<>();
    final String princ1 = getUserPrincipal(1);
    final File keytab1 = getUserKeytabFile(1);
    final String url = joinUserAuthentication(BASE_URL, princ1, keytab1);
    UserGroupInformation.loginUserFromKeytab(princ1, keytab1.getPath());
    // Using the same UGI should result in two equivalent ConnectionInfo objects
    connections.add(ConnectionInfo.create(url).normalize(ReadOnlyProps.EMPTY_PROPS, EMPTY_PROPERTIES));
    assertEquals(1, connections.size()); // Sanity check
    verifyAllConnectionsAreKerberosBased(connections);
    // The second ConnectionInfo is built from the same (still logged-in)
    // UGI, so it is equal to the first and the set size must remain 1.
    // (The original comment claimed the UGIs were "unique" — that text was
    // copied from a sibling test; the assertion below shows equality.)
    connections.add(ConnectionInfo.create(url).normalize(ReadOnlyProps.EMPTY_PROPS, EMPTY_PROPERTIES));
    assertEquals(1, connections.size());
}
From source file:com.evolveum.midpoint.model.common.stringpolicy.ValuePolicyProcessor.java
/**
 * Checks that the password contains at least the policy's minimum number
 * of distinct characters. On failure, a localizable message is recorded
 * both as a fatal subresult on {@code result} and appended to
 * {@code message}. A policy without a minimum is a no-op.
 */
private void testMinimalUniqueCharacters(String password, LimitationsType limitations, OperationResult result,
        List<LocalizableMessage> message) {
    final Integer minUnique = limitations.getMinUniqueChars();
    if (minUnique == null) {
        // No uniqueness constraint configured; nothing to check.
        return;
    }
    final HashSet<String> uniqueChars = new HashSet<>(StringPolicyUtils.stringTokenizer(password));
    if (minUnique > uniqueChars.size()) {
        final LocalizableMessage failure = new LocalizableMessageBuilder()
                .key("ValuePolicy.minimalUniqueCharactersNotMet")
                .arg(minUnique)
                .arg(uniqueChars.size())
                .build();
        result.addSubresult(new OperationResult("Check minimal count of unique chars",
                OperationResultStatus.FATAL_ERROR, failure));
        message.add(failure);
    }
}
From source file:fr.aliasource.webmail.indexing.SearchAction.java
/**
 * Handles a paged search request: runs the query through the account's
 * SearchDirector, resolves each hit on the requested page to a cached
 * conversation reference (or a chat payload), and sends the page back to
 * the client. Hits that are missing from the cache are logged and
 * subtracted from the reported total.
 */
public void execute(IProxy p, IParameterSource req, IResponder responder) {
    long time = System.currentTimeMillis();
    String query = req.getParameter("query");
    int page = Integer.parseInt(req.getParameter("page"));
    int pageLength = Integer.parseInt(req.getParameter("pageLength"));
    SearchDirector sd = p.getAccount().getSearchDirector();
    // Pages are 1-based; convert to a 0-based offset into the hit list.
    int startIdx = (page - 1) * pageLength;
    List<Hit> results = sd.findByType(p.getAccount().getUserId(), query);
    if (logger.isInfoEnabled()) {
        time = System.currentTimeMillis() - time;
        logger.info("[" + p.getAccount().getUserId() + "] " + query + " p: " + page + " l: " + pageLength
                + " => " + results.size() + " result(s) in " + time + "ms.");
    }
    int endIdx = Math.min(results.size(), startIdx + pageLength);
    // Total reported to the client; decremented below for cache misses.
    int resultsSize = results.size();
    VersionnedList<ConversationReference> resultPage = new VersionnedList<ConversationReference>();
    if (startIdx < results.size()) {
        HashSet<String> notFoundInCache = new HashSet<String>();
        ConversationCache cc = p.getAccount().getCache().getConversationCache();
        for (int i = startIdx; i < endIdx; i++) {
            Map<String, Object> payload = results.get(i).getPayload();
            String convId = payload.get("id").toString();
            ConversationReference cr = null;
            // NOTE(review): ids containing "/" are assumed to be
            // conversation ids; anything else is treated as a chat —
            // confirm against the indexer's id format.
            if (convId.contains("/")) {
                cr = cc.find(convId);
            } else {
                // chat ?
                cr = loadChat(payload);
            }
            if (cr != null) {
                resultPage.add(cr);
            } else {
                notFoundInCache.add(convId);
            }
        }
        if (notFoundInCache.size() > 0) {
            // Index and cache are out of sync: warn with the stale ids and
            // shrink the advertised total accordingly.
            StringBuilder sb = new StringBuilder();
            sb.append("Messages with ids[");
            for (Iterator<String> it = notFoundInCache.iterator(); it.hasNext();) {
                sb.append(it.next());
                if (it.hasNext()) {
                    sb.append(", ");
                }
            }
            sb.append("] found by solr are not in cache");
            logger.warn(sb.toString());
            resultsSize -= notFoundInCache.size();
        }
    }
    ConversationReferenceList ret = new ConversationReferenceList(resultPage, resultsSize);
    responder.sendConversationsPage(ret);
}
From source file:org.apache.nutch.crawl.CrawlDb.java
/**
 * Command-line entry point for updating a CrawlDb from one or more
 * segments. Parses the flags, collects the segment directories, and runs
 * the update job.
 *
 * @param args crawldb path followed by segment paths and option flags
 *             (see the usage text below)
 * @return 0 on success, -1 on bad usage or on update failure
 */
public int run(String[] args) throws Exception {
    if (args.length < 2) {
        System.err.println(
                "Usage: CrawlDb <crawldb> (-dir <segments> | <seg1> <seg2> ...) [-force] [-normalize] [-filter] [-noAdditions]");
        System.err.println("\tcrawldb\tCrawlDb to update");
        System.err.println("\t-dir segments\tparent directory containing all segments to update from");
        System.err.println("\tseg1 seg2 ...\tlist of segment names to update from");
        System.err.println("\t-force\tforce update even if CrawlDb appears to be locked (CAUTION advised)");
        System.err
                .println("\t-normalize\tuse URLNormalizer on urls in CrawlDb and segment (usually not needed)");
        System.err.println("\t-filter\tuse URLFilters on urls in CrawlDb and segment");
        System.err.println(
                "\t-noAdditions\tonly update already existing URLs, don't add any newly discovered URLs");
        return -1;
    }
    boolean normalize = false;
    boolean filter = false;
    boolean force = false;
    final FileSystem fs = FileSystem.get(getConf());
    // Config default may be overridden by -noAdditions below.
    boolean additionsAllowed = getConf().getBoolean(CRAWLDB_ADDITIONS_ALLOWED, true);
    HashSet<Path> dirs = new HashSet<Path>();
    // args[0] is the crawldb path; everything after it is flags/segments.
    for (int i = 1; i < args.length; i++) {
        if (args[i].equals("-normalize")) {
            normalize = true;
        } else if (args[i].equals("-filter")) {
            filter = true;
        } else if (args[i].equals("-force")) {
            force = true;
        } else if (args[i].equals("-noAdditions")) {
            additionsAllowed = false;
        } else if (args[i].equals("-dir")) {
            // -dir consumes the NEXT argument (note the ++i) as a parent
            // directory; every child directory becomes a segment.
            FileStatus[] paths = fs.listStatus(new Path(args[++i]), HadoopFSUtil.getPassDirectoriesFilter(fs));
            dirs.addAll(Arrays.asList(HadoopFSUtil.getPaths(paths)));
        } else {
            // Any non-flag argument is an individual segment path.
            dirs.add(new Path(args[i]));
        }
    }
    try {
        update(new Path(args[0]), dirs.toArray(new Path[dirs.size()]), normalize, filter, additionsAllowed,
                force);
        return 0;
    } catch (Exception e) {
        LOG.fatal("CrawlDb update: " + StringUtils.stringifyException(e));
        return -1;
    }
}
From source file:io.stallion.tools.ExportToHtml.java
/**
 * Scans rendered HTML for asset references — script src, stylesheet link
 * href, and img src — and returns the subset that resolves to a
 * site-relative path. CDN- and site-absolute URLs are rewritten to
 * relative form; anything still absolute after rewriting is dropped.
 */
public Set<String> findAssetsInHtml(String html) {
    HashSet<String> assets = new HashSet<>();
    Jerry jerry = Jerry.jerry(html);
    for (Jerry j : jerry.find("script")) {
        String src = j.attr("src");
        Log.info("SCRIPT TAG HTML {0} {1}", j.htmlAll(true), src);
        if (empty(src)) {
            // Inline scripts have no src attribute.
            continue;
        }
        Log.info("Add asset {0}", src);
        assets.add(src);
    }
    for (Jerry j : jerry.find("link")) {
        Log.info("LINK TAG HTML {0}", j.htmlAll(true));
        // Only stylesheet links are assets; skip icons, canonical, etc.
        if (!"stylesheet".equals(j.attr("rel"))) {
            continue;
        }
        String src = j.attr("href");
        if (empty(src)) {
            continue;
        }
        assets.add(src);
    }
    for (Jerry j : jerry.find("img")) {
        String src = j.attr("src");
        if (empty(src)) {
            continue;
        }
        assets.add(src);
    }
    // Second pass: normalize absolute URLs to site-relative paths and keep
    // only those.
    HashSet<String> filteredAssets = new HashSet<>();
    Log.info("CDN URL {0}", Settings.instance().getCdnUrl());
    Log.info("Site URL {0}", Settings.instance().getSiteUrl());
    // NOTE(review): if getCdnUrl() or getSiteUrl() can return an empty
    // string, startsWith("") is true for every src and the replace is a
    // no-op — confirm settings are always non-empty here.
    for (String src : assets) {
        if (src.startsWith(Settings.instance().getCdnUrl())) {
            src = StringUtils.replace(src, Settings.instance().getCdnUrl(), "");
            if (!src.startsWith("/")) {
                src = "/" + src;
            }
        }
        // NOTE(review): unlike the CDN branch, this branch does not force a
        // leading "/" after stripping — a site URL without a trailing slash
        // would leave a relative path that fails the filter below; verify.
        if (src.startsWith(Settings.instance().getSiteUrl())) {
            src = StringUtils.replace(src, Settings.instance().getSiteUrl(), "");
        }
        if (src.startsWith("/")) {
            filteredAssets.add(src);
        }
    }
    Log.info("Asset count {0}", filteredAssets.size());
    return filteredAssets;
}
From source file:org.apache.bookkeeper.client.DefaultEnsemblePlacementPolicy.java
/**
 * Recomputes cluster membership when the sets of writable and read-only
 * bookies change. Returns the bookies that disappeared entirely (neither
 * writable nor read-only), i.e. those presumed dead. All mutation happens
 * under the write lock since knownBookies and bookieInfoMap are shared
 * with readers.
 */
@Override
public Set<BookieSocketAddress> onClusterChanged(Set<BookieSocketAddress> writableBookies,
        Set<BookieSocketAddress> readOnlyBookies) {
    rwLock.writeLock().lock();
    try {
        // Dead = previously known, no longer writable...
        HashSet<BookieSocketAddress> deadBookies;
        deadBookies = new HashSet<BookieSocketAddress>(knownBookies);
        deadBookies.removeAll(writableBookies);
        // readonly bookies should not be treated as dead bookies
        deadBookies.removeAll(readOnlyBookies);
        if (this.isWeighted) {
            // Drop weight info for dead bookies, seed defaults for new ones.
            for (BookieSocketAddress b : deadBookies) {
                this.bookieInfoMap.remove(b);
            }
            @SuppressWarnings("unchecked")
            Collection<BookieSocketAddress> newBookies = CollectionUtils.subtract(writableBookies, knownBookies);
            for (BookieSocketAddress b : newBookies) {
                this.bookieInfoMap.put(b, new BookieInfo());
            }
            // Only rebuild the weighted-selection map if membership changed.
            if (deadBookies.size() > 0 || newBookies.size() > 0) {
                this.weightedSelection.updateMap(this.bookieInfoMap);
            }
        }
        knownBookies = writableBookies;
        return deadBookies;
    } finally {
        rwLock.writeLock().unlock();
    }
}
From source file:org.acmsl.queryj.customsql.xml.SqlXmlParserImpl.java
/**
 * Post-processes given items by removing duplicates.
 * Note: the result order follows HashSet iteration order, not the input
 * order.
 * @param items the items.
 * @param <T> the type of the items.
 * @return the de-duplicated items.
 */
@NotNull
protected <T extends IdentifiableElement<String>> List<T> postProcess(@NotNull final List<T> items) {
    // Same initial capacity as before so the set's iteration order (and
    // therefore the returned list) is unchanged.
    final HashSet<T> unique = new HashSet<>(items.size());
    unique.addAll(items);
    return new ArrayList<>(unique);
}