List of usage examples for java.util.LinkedHashSet.size()
int size();
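Returns the number of elements in the set. Because a LinkedHashSet stores each element at most once and preserves insertion order, size() counts distinct elements only. A minimal, self-contained sketch (the class name and sample values are illustrative):

import java.util.LinkedHashSet;

public class SizeDemo {
    public static void main(String[] args) {
        LinkedHashSet<String> hosts = new LinkedHashSet<>();
        hosts.add("10.0.0.1");
        hosts.add("10.0.0.2");
        hosts.add("10.0.0.1"); // duplicate, not stored again

        System.out.println(hosts.size()); // prints 2
        System.out.println(hosts);        // [10.0.0.1, 10.0.0.2] -- insertion order kept
    }
}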
From source file:org.openhab.binding.network.service.NetworkService.java
/**
 * Starts the DiscoveryThread for each IP on the networks.
 *
 * @param networkIPs the set of IP addresses to probe
 */
private static void startDiscovery(final LinkedHashSet<String> networkIPs,
        final DiscoveryCallback discoveryCallback, ScheduledExecutorService scheduledExecutorService) {
    final int PING_TIMEOUT_IN_MS = 500;
    ExecutorService executorService = Executors
            .newFixedThreadPool(Runtime.getRuntime().availableProcessors() * 10);
    for (Iterator<String> it = networkIPs.iterator(); it.hasNext();) {
        final String ip = it.next();
        executorService.execute(new Runnable() {
            @Override
            public void run() {
                if (ip != null) {
                    try {
                        if (Ping.checkVitality(ip, 0, PING_TIMEOUT_IN_MS)) {
                            discoveryCallback.newDevice(ip);
                        }
                    } catch (IOException e) {
                        // unreachable hosts are expected during discovery; ignore
                    }
                }
            }
        });
    }
    // initiate shutdown before waiting: awaitTermination alone never terminates
    // the pool. The timeout scales with the number of IPs in the set.
    executorService.shutdown();
    try {
        executorService.awaitTermination(PING_TIMEOUT_IN_MS * networkIPs.size(), TimeUnit.MILLISECONDS);
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt(); // restore the interrupt flag
    }
}
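The interesting use of size() here is the timeout: the total wait is bounded by the per-ping timeout multiplied by the number of distinct IPs, so larger networks get proportionally more time. Ping and DiscoveryCallback are openHAB internals; the sketch below uses only JDK types to show the same size-scaled shutdown pattern:

import java.util.LinkedHashSet;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class ScaledTimeoutDemo {
    public static void main(String[] args) throws InterruptedException {
        final long TASK_TIMEOUT_MS = 500;
        LinkedHashSet<String> ips = new LinkedHashSet<>();
        ips.add("192.168.0.1");
        ips.add("192.168.0.2");

        ExecutorService pool = Executors.newFixedThreadPool(4);
        for (final String ip : ips) {
            pool.execute(() -> System.out.println("probing " + ip));
        }

        // shut down first, then bound the wait by the number of queued tasks
        pool.shutdown();
        pool.awaitTermination(TASK_TIMEOUT_MS * ips.size(), TimeUnit.MILLISECONDS);
    }
}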
From source file:cz.cas.lib.proarc.authentication.Authenticators.java
public List<Authenticator> getAuthenticators() {
    List<Object> authenticatorIds = conf.getList(PROPERTY_AUTHENTICATORS);
    LinkedHashSet<Object> ids = new LinkedHashSet<Object>(authenticatorIds);
    // ensure the ProArc authenticator is used as a last resort:
    // removing and re-adding moves it to the end of the insertion order
    ids.remove(TYPE_PROARC);
    ids.add(TYPE_PROARC);
    ArrayList<Authenticator> authenticators = new ArrayList<Authenticator>(ids.size());
    for (Object id : ids) {
        if (TYPE_PROARC.equals(id)) {
            authenticators.add(new ProArcAuthenticator());
        } else if (TYPE_DESA.equals(id)) {
            authenticators.add(new DESAAuthenticator());
        } else {
            LOG.warning("Unknown authenticator: " + id);
        }
    }
    return authenticators;
}
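Two LinkedHashSet properties carry this example: the remove/add pair exploits insertion order to move one element to the end, and size() presizes the result list exactly because duplicates are already collapsed. A standalone sketch of the same idiom (the id values are illustrative):

import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;

public class LastResortDemo {
    public static void main(String[] args) {
        LinkedHashSet<String> ids = new LinkedHashSet<>(List.of("proarc", "desa", "ldap"));

        // move "proarc" to the end of the iteration order
        ids.remove("proarc");
        ids.add("proarc");

        // size() is exact, so the list never needs to grow while copying
        List<String> ordered = new ArrayList<>(ids.size());
        ordered.addAll(ids);
        System.out.println(ordered); // [desa, ldap, proarc]
    }
}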
From source file:com.geewhiz.pacify.TestCheckTargetFileExist.java
@Test
public void checkPFileForCorrect() {
    String testFolder = "checkTargetFileExistTest/correct/file";
    LinkedHashSet<Defect> defects = createPrepareAndExecuteValidator(testFolder,
            createPropertyResolveManager(Collections.<String, String>emptyMap()), new CheckTargetFileExist());
    Assert.assertEquals(0, defects.size());
}
From source file:com.geewhiz.pacify.TestCheckTargetFileExist.java
@Test
public void checkPFileForNotCorrect() {
    String testFolder = "checkTargetFileExistTest/wrong/file";
    LinkedHashSet<Defect> defects = createPrepareAndExecuteValidator(testFolder,
            createPropertyResolveManager(Collections.<String, String>emptyMap()), new CheckTargetFileExist());
    Assert.assertEquals(1, defects.size());
}
From source file:com.geewhiz.pacify.TestCheckTargetFileExist.java
@Test
public void checkArchiveCorrect() {
    String testFolder = "checkTargetFileExistTest/correct/archive";
    LinkedHashSet<Defect> defects = createPrepareAndExecuteValidator(testFolder,
            createPropertyResolveManager(Collections.<String, String>emptyMap()), new CheckTargetFileExist());
    Assert.assertEquals(0, defects.size());
}
From source file:com.geewhiz.pacify.TestCheckTargetFileExist.java
@Test
public void checkArchiveForNotCorrect() {
    String testFolder = "checkTargetFileExistTest/wrong/archive";
    LinkedHashSet<Defect> defects = createPrepareAndExecuteValidator(testFolder,
            createPropertyResolveManager(Collections.<String, String>emptyMap()), new CheckTargetFileExist());
    Assert.assertEquals(1, defects.size());
}
From source file:com.geewhiz.pacify.TestCheckTargetFileExist.java
@Test
public void checkRegExForNotCorrectPFile() {
    String testFolder = "checkTargetFileExistTest/wrong/regExFile";
    LinkedHashSet<Defect> defects = createPrepareAndExecuteValidator(testFolder,
            createPropertyResolveManager(Collections.<String, String>emptyMap()), new CheckTargetFileExist());
    Assert.assertEquals(1, defects.size());
}
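All five tests follow one pattern: the validator returns a LinkedHashSet<Defect> and the assertion checks defects.size() against the expected defect count. Because a set deduplicates, the assertion counts distinct defects, which is usually what a validator test wants. A minimal sketch of the pattern with a hypothetical stand-in for pacify's Defect type:

import java.util.LinkedHashSet;
import org.junit.Assert;
import org.junit.Test;

public class DefectCountTest {
    // hypothetical stand-in for the real Defect type; only identity matters here
    static class Defect {
    }

    @Test
    public void sizeCountsDistinctDefects() {
        LinkedHashSet<Defect> defects = new LinkedHashSet<>();
        Defect d = new Defect();
        defects.add(d);
        defects.add(d); // same instance, not counted twice

        Assert.assertEquals(1, defects.size());
    }
}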
From source file:org.loklak.api.server.SuggestServlet.java
@Override
protected void doGet(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    RemoteAccess.Post post = RemoteAccess.evaluate(request);

    // manage DoS
    if (post.isDoS_blackout()) {
        response.sendError(503, "your request frequency is too high");
        return;
    }

    String callback = post.get("callback", "");
    boolean jsonp = callback != null && callback.length() > 0;
    boolean minified = post.get("minified", false);
    int requestkey = post.hashCode();
    Map<String, Object> m = post.isDoS_servicereduction() ? cache.get(requestkey) : null;
    if (m == null) {
        boolean local = post.isLocalhostAccess();
        boolean delete = post.get("delete", false);
        int count = post.get("count", 10); // number of queries
        String query = post.get("q", ""); // to get a list of queries which match; to get all latest: leave q empty
        String source = post.get("source", "all"); // values: all,query,geo
        String orders = post.get("order", query.length() == 0 ? "desc" : "asc").toUpperCase();
        SortOrder order = SortOrder.valueOf(orders);
        String orderby = post.get("orderby", query.length() == 0 ? "retrieval_next" : "query_count");
        int timezoneOffset = post.get("timezoneOffset", 0);
        Date since = post.get("since", "").equals("now") ? new Date()
                : post.get("since", (Date) null, timezoneOffset);
        Date until = post.get("until", "").equals("now") ? new Date()
                : post.get("until", (Date) null, timezoneOffset);
        String selectby = post.get("selectby", "retrieval_next");

        ResultList<QueryEntry> queryList = new ResultList<>();
        if ((source.equals("all") || source.equals("query")) && query.length() >= 0) {
            long start = System.currentTimeMillis();
            queryList = DAO.SearchLocalQueries(query, count, orderby, "long", order, since, until, selectby);
            post.recordEvent("localqueries_time", System.currentTimeMillis() - start);
        }
        if (delete && local && queryList.size() > 0) {
            long start = System.currentTimeMillis();
            for (QueryEntry qe : queryList)
                DAO.deleteQuery(qe.getQuery(), qe.getSourceType());
            queryList.clear();
            queryList = DAO.SearchLocalQueries(query, count, orderby, "long", order, since, until, selectby);
            post.recordEvent("localquerydelete_time", System.currentTimeMillis() - start);
        }
        if (source.equals("all") || source.equals("geo")) {
            long start = System.currentTimeMillis();
            LinkedHashSet<String> suggestions = DAO.geoNames.suggest(query, count, 0);
            if (suggestions.size() < count && query.length() > 2)
                suggestions.addAll(DAO.geoNames.suggest(query, count, 1));
            if (suggestions.size() < count && query.length() > 5)
                suggestions.addAll(DAO.geoNames.suggest(query, count, 2));
            for (String s : suggestions) {
                QueryEntry qe = new QueryEntry(s, 0, Long.MAX_VALUE, SourceType.IMPORT, false);
                queryList.add(qe);
            }
            post.recordEvent("suggestionsquery_time", System.currentTimeMillis() - start);
        }

        long start = System.currentTimeMillis();
        post.setResponse(response, "application/javascript");

        List<Object> queries = new ArrayList<>();
        if (queryList != null)
            for (QueryEntry t : queryList)
                queries.add(t.toMap());

        int random = post.get("random", -1);
        if (random > 0 && random < queries.size()) {
            // take the given number from the result list and use random to choose
            List<Object> random_queries = new ArrayList<>();
            Random r = new Random(System.currentTimeMillis());
            while (random-- > 0) {
                random_queries.add(queries.remove(r.nextInt(queries.size())));
                int shrink = Math.max(queries.size() / 2, random * 10);
                while (queries.size() > shrink)
                    queries.remove(queries.size() - 1); // prefer from top
            }
            queries = random_queries;
        }

        // generate json
        m = new LinkedHashMap<String, Object>();
        Map<String, Object> metadata = new LinkedHashMap<String, Object>();
        metadata.put("count", queryList == null ? "0" : Integer.toString(queries.size()));
        metadata.put("hits", queryList.getHits());
        metadata.put("query", query);
        metadata.put("order", orders);
        metadata.put("orderby", orderby);
        if (since != null)
            metadata.put("since", AbstractIndexEntry.utcFormatter.print(since.getTime()));
        if (until != null)
            metadata.put("until", AbstractIndexEntry.utcFormatter.print(until.getTime()));
        if (since != null || until != null)
            metadata.put("selectby", selectby);
        metadata.put("client", post.getClientHost());
        m.put("search_metadata", metadata);
        m.put("queries", queries);
        post.recordEvent("postprocessing_time", System.currentTimeMillis() - start);
    }

    // write json
    response.setCharacterEncoding("UTF-8");
    PrintWriter sos = response.getWriter();
    if (jsonp)
        sos.print(callback + "(");
    sos.print(minified ? new ObjectMapper().writer().writeValueAsString(m)
            : new ObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(m));
    if (jsonp)
        sos.println(");");
    sos.println();
    post.finalize();
}
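The geo branch shows a common LinkedHashSet pattern: keep widening the search only while suggestions.size() is still below the requested count. Because the set deduplicates, each retry can safely re-add overlapping results without inflating the count, and insertion order keeps the earliest (best) matches first. A minimal sketch, where suggest() is a hypothetical stand-in for DAO.geoNames.suggest:

import java.util.LinkedHashSet;
import java.util.List;

public class FillToCountDemo {
    // hypothetical suggester whose broader levels may return overlapping results
    static List<String> suggest(String q, int level) {
        return level == 0 ? List.of("berlin") : List.of("berlin", "bern");
    }

    public static void main(String[] args) {
        int count = 2;
        LinkedHashSet<String> suggestions = new LinkedHashSet<>(suggest("ber", 0));
        for (int level = 1; suggestions.size() < count && level <= 2; level++) {
            suggestions.addAll(suggest("ber", level)); // duplicates collapse, order kept
        }
        System.out.println(suggestions); // [berlin, bern]
    }
}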
From source file:de.andreasschoknecht.LS3.Query.java
/**
 * Calculate term frequencies for the query model. Term frequencies for terms that do not occur in the
 * query model but appear in other models of the collection are set to 0. Terms that occur only in the
 * query model are not represented: they are left out because the goal is to place the query model in
 * the vector space of the Term-Document Matrix of the document collection.
 *
 * @param allTerms the set of all terms contained in the whole model collection
 */
void calculateTermFrequencies(LinkedHashSet<String> allTerms) {
    // allTermsArray contains all the terms of the Term-Document Matrix of the document collection
    String[] allTermsArray = allTerms.toArray(new String[allTerms.size()]);
    termFrequencies = new double[allTermsArray.length];
    for (int i = 0; i < allTermsArray.length; i++) {
        if (this.getTermCollection().contains(allTermsArray[i])) {
            String tmp = allTermsArray[i];
            double count = this.getTermCollection().count(tmp);
            termFrequencies[i] = count;
        } else {
            termFrequencies[i] = 0;
        }
    }
}
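Here toArray(new String[allTerms.size()]) freezes the set's iteration order into an index space, so position i in the frequency vector always refers to the same term across documents. A self-contained sketch of that alignment (the sample terms are illustrative):

import java.util.LinkedHashSet;
import java.util.List;

public class TermVectorDemo {
    public static void main(String[] args) {
        LinkedHashSet<String> allTerms = new LinkedHashSet<>(List.of("order", "ship", "invoice"));
        List<String> queryTerms = List.of("ship", "ship", "order");

        // the set's iteration order defines the vector's index space
        String[] terms = allTerms.toArray(new String[allTerms.size()]);
        double[] tf = new double[terms.length];
        for (int i = 0; i < terms.length; i++) {
            final String t = terms[i];
            tf[i] = queryTerms.stream().filter(t::equals).count();
        }
        System.out.println(java.util.Arrays.toString(tf)); // [1.0, 2.0, 0.0]
    }
}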
From source file:org.nuxeo.ecm.webengine.loader.store.ResourceStoreClassLoader.java
protected ResourceStoreClassLoader(final ClassLoader pParent, LinkedHashSet<ResourceStore> cp) {
    super(pParent);
    this.cp = cp;
    if (!cp.isEmpty()) {
        stores = cp.toArray(new ResourceStore[cp.size()]);
    }
}
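Passing a pre-sized array lets the set fill exactly the array it is given; the alternative, toArray(new ResourceStore[0]), lets the collection allocate a correctly sized array itself and is generally considered at least as fast on modern JVMs. A small sketch of both variants:

import java.util.LinkedHashSet;
import java.util.List;

public class ToArrayDemo {
    public static void main(String[] args) {
        LinkedHashSet<String> cp = new LinkedHashSet<>(List.of("a", "b", "c"));

        // pre-sized: the set fills exactly the array it is given
        String[] sized = cp.toArray(new String[cp.size()]);

        // zero-length: the set allocates a correctly sized array itself
        String[] zero = cp.toArray(new String[0]);

        System.out.println(sized.length + " " + zero.length); // 3 3
    }
}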