List of usage examples for java.util.LinkedHashSet.size()
int size();
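As a quick reference before the project examples, here is a minimal, self-contained sketch (not taken from any of the source files below) showing that size() returns the number of distinct elements currently in a LinkedHashSet, with duplicates ignored and insertion order preserved:

import java.util.LinkedHashSet;

public class LinkedHashSetSizeExample {
    public static void main(String[] args) {
        LinkedHashSet<String> names = new LinkedHashSet<String>();
        names.add("alice");
        names.add("bob");
        names.add("alice"); // duplicate, not added again

        // size() counts distinct elements only
        System.out.println(names.size()); // prints 2

        names.clear();
        System.out.println(names.size()); // prints 0
    }
}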
From source file:org.loklak.api.search.SuggestServlet.java
@Override
protected void doGet(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    Query post = RemoteAccess.evaluate(request);

    // manage DoS
    if (post.isDoS_blackout()) {
        response.sendError(503, "your request frequency is too high");
        return;
    }

    String callback = post.get("callback", "");
    boolean jsonp = callback != null && callback.length() > 0;
    boolean minified = post.get("minified", false);
    int requestkey = post.hashCode();
    JSONObject m = post.isDoS_servicereduction() ? cache.get(requestkey) : null;
    if (m == null) {
        boolean local = post.isLocalhostAccess();
        boolean delete = post.get("delete", false);
        int count = post.get("count", 10); // number of queries
        String query = post.get("q", ""); // to get a list of queries which match; to get all latest: leave q empty
        String source = post.get("source", "all"); // values: all,query,geo
        String orders = post.get("order", query.length() == 0 ? "desc" : "asc").toUpperCase();
        SortOrder order = SortOrder.valueOf(orders);
        String orderby = post.get("orderby", query.length() == 0 ? "retrieval_next" : "query_count");
        int timezoneOffset = post.get("timezoneOffset", 0);
        Date since = post.get("since", "").equals("now") ? new Date()
                : post.get("since", (Date) null, timezoneOffset);
        Date until = post.get("until", "").equals("now") ? new Date()
                : post.get("until", (Date) null, timezoneOffset);
        String selectby = post.get("selectby", "retrieval_next");

        ResultList<QueryEntry> queryList = new ResultList<>();
        if ((source.equals("all") || source.equals("query")) && query.length() >= 0) {
            long start = System.currentTimeMillis();
            queryList = DAO.SearchLocalQueries(query, count, orderby, "long", order, since, until, selectby);
            post.recordEvent("localqueries_time", System.currentTimeMillis() - start);
        }

        if (delete && local && queryList.size() > 0) {
            long start = System.currentTimeMillis();
            for (QueryEntry qe : queryList)
                DAO.deleteQuery(qe.getQuery(), qe.getSourceType());
            queryList.clear();
            queryList = DAO.SearchLocalQueries(query, count, orderby, "long", order, since, until, selectby);
            post.recordEvent("localquerydelete_time", System.currentTimeMillis() - start);
        }

        if (source.equals("all") || source.equals("geo")) {
            long start = System.currentTimeMillis();
            LinkedHashSet<String> suggestions = DAO.geoNames.suggest(query, count, 0);
            if (suggestions.size() < count && query.length() > 2)
                suggestions.addAll(DAO.geoNames.suggest(query, count, 1));
            if (suggestions.size() < count && query.length() > 5)
                suggestions.addAll(DAO.geoNames.suggest(query, count, 2));
            for (String s : suggestions) {
                QueryEntry qe = new QueryEntry(s, 0, Long.MAX_VALUE, SourceType.TWITTER, false);
                queryList.add(qe);
            }
            post.recordEvent("suggestionsquery_time", System.currentTimeMillis() - start);
        }

        long start = System.currentTimeMillis();
        post.setResponse(response, "application/javascript");

        List<Object> queries = new ArrayList<>();
        if (queryList != null)
            for (QueryEntry t : queryList)
                queries.add(t.toJSON().toMap());

        int random = post.get("random", -1);
        if (random > 0 && random < queries.size()) {
            // take the given number from the result list and use random to choose
            List<Object> random_queries = new ArrayList<>();
            Random r = new Random(System.currentTimeMillis());
            while (random-- > 0) {
                random_queries.add(queries.remove(r.nextInt(queries.size())));
                int shrink = Math.max(queries.size() / 2, random * 10);
                while (queries.size() > shrink)
                    queries.remove(queries.size() - 1); // prefer from top
            }
            queries = random_queries;
        }

        // generate json
        m = new JSONObject(true);
        JSONObject metadata = new JSONObject(true);
        metadata.put("count", queryList == null ? "0" : Integer.toString(queries.size()));
        metadata.put("hits", queryList.getHits());
        metadata.put("query", query);
        metadata.put("order", orders);
        metadata.put("orderby", orderby);
        if (since != null)
            metadata.put("since", AbstractObjectEntry.utcFormatter.print(since.getTime()));
        if (until != null)
            metadata.put("until", AbstractObjectEntry.utcFormatter.print(until.getTime()));
        if (since != null || until != null)
            metadata.put("selectby", selectby);
        metadata.put("client", post.getClientHost());
        m.put("search_metadata", metadata);
        m.put("queries", queries);
        post.recordEvent("postprocessing_time", System.currentTimeMillis() - start);
    }

    // write json
    response.setCharacterEncoding("UTF-8");
    PrintWriter sos = response.getWriter();
    if (jsonp)
        sos.print(callback + "(");
    sos.print(m.toString(minified ? 0 : 2));
    if (jsonp)
        sos.println(");");
    sos.println();
    post.finalize();
}
From source file:com.geewhiz.pacify.TestCheckTargetFileExist.java
@Test
public void checkRegExDoesNotMatchInArchive() throws ArchiveException, IOException {
    Logger logger = LogManager.getLogger(TestArchive.class.getName());
    LoggingUtils.setLogLevel(logger, Level.INFO);

    String testFolder = "checkTargetFileExistTest/wrong/regExArchive";

    LinkedHashSet<Defect> defects = createPrepareAndExecuteValidator(testFolder,
            createPropertyResolveManager(Collections.<String, String>emptyMap()), new CheckTargetFileExist());

    Assert.assertEquals("We should get a defect.", 1, defects.size());
    Assert.assertEquals("We expect FileDoesNotExistDefect", FileDoesNotExistDefect.class,
            defects.iterator().next().getClass());
}
From source file:org.pentaho.reporting.engine.classic.core.modules.misc.datafactory.AbstractScriptableDataFactory.java
public final String[] getReferencedFields(final String query, final DataRow parameter) {
    try {
        final String[] additionalFields = scriptingSupport.computeAdditionalQueryFields(query, parameter);
        if (additionalFields == null) {
            return null;
        }

        final String realQuery = scriptingSupport.computeQuery(query, parameter);
        if (realQuery == null) {
            throw new ReportDataFactoryException("Query '" + query + "' is not recognized."); //$NON-NLS-1$ //$NON-NLS-2$
        }

        String[] referencedFieldsInternal = getReferencedFieldsInternal(realQuery, parameter);
        if (referencedFieldsInternal == null) {
            return null;
        }

        final LinkedHashSet<String> fields = new LinkedHashSet<String>();
        fields.addAll(Arrays.asList(referencedFieldsInternal));
        fields.addAll(Arrays.asList(additionalFields));
        return fields.toArray(new String[fields.size()]);
    } catch (final ReportDataFactoryException rx) {
        logger.debug("Failed to compute referenced fields", rx); // NON-NLS
        return null;
    }
}
From source file:com.haulmont.cuba.core.DataManagerDistinctResultsTest.java
@Test
public void testDistinctResults() {
    checkSetup();

    LinkedHashSet<User> set;

    AppBeans.get(Configuration.class).getConfig(ServerConfig.class).setInMemoryDistinct(false);

    set = load(0, 10, QUERY);
    assertEquals(5, set.size());
    assertEquals("user00", Iterables.getFirst(set, null).getLoginLowerCase());
    assertEquals("user04", Iterables.getLast(set, null).getLoginLowerCase());

    set = load(0, 10, DISTINCT_QUERY);
    assertEquals(10, set.size());
    assertEquals("user00", Iterables.getFirst(set, null).getLoginLowerCase());
    assertEquals("user09", Iterables.getLast(set, null).getLoginLowerCase());

    AppBeans.get(Configuration.class).getConfig(ServerConfig.class).setInMemoryDistinct(true);

    set = load(0, 10, QUERY);
    assertEquals(5, set.size());
    assertEquals("user00", Iterables.getFirst(set, null).getLoginLowerCase());
    assertEquals("user04", Iterables.getLast(set, null).getLoginLowerCase());

    set = load(0, 10, DISTINCT_QUERY);
    assertEquals(10, set.size());
    assertEquals("user00", Iterables.getFirst(set, null).getLoginLowerCase());
    assertEquals("user09", Iterables.getLast(set, null).getLoginLowerCase());

    set = load(0, 20, DISTINCT_QUERY);
    assertEquals(17, set.size());
    assertEquals("user00", Iterables.getFirst(set, null).getLoginLowerCase());
    assertEquals("user16", Iterables.getLast(set, null).getLoginLowerCase());

    set = load(0, 17, DISTINCT_QUERY);
    assertEquals(17, set.size());
    assertEquals("user00", Iterables.getFirst(set, null).getLoginLowerCase());
    assertEquals("user16", Iterables.getLast(set, null).getLoginLowerCase());

    set = load(5, 5, DISTINCT_QUERY);
    assertEquals(5, set.size());
    assertEquals("user05", Iterables.getFirst(set, null).getLoginLowerCase());
    assertEquals("user09", Iterables.getLast(set, null).getLoginLowerCase());

    set = load(10, 5, DISTINCT_QUERY);
    assertEquals(5, set.size());
    assertEquals("user10", Iterables.getFirst(set, null).getLoginLowerCase());
    assertEquals("user14", Iterables.getLast(set, null).getLoginLowerCase());

    set = load(15, 5, DISTINCT_QUERY);
    assertEquals(2, set.size());
    assertEquals("user15", Iterables.getFirst(set, null).getLoginLowerCase());
    assertEquals("user16", Iterables.getLast(set, null).getLoginLowerCase());
}
From source file:jenkins.plugins.ivyreport.IvyAccess.java
String[] expandConfs(String[] requested) {
    if (moduleDescriptor == null) {
        recomputeModuleDescriptor();
        if (moduleDescriptor == null) {
            return requested;
        }
    }
    String[] expanded = ConfigurationUtils.replaceWildcards(requested, moduleDescriptor);
    LinkedHashSet<String> result = new LinkedHashSet<String>();
    Collections.addAll(result, expanded);
    result.retainAll(Arrays.asList(moduleDescriptor.getConfigurationsNames()));
    return result.toArray(new String[result.size()]);
}
From source file:com.geewhiz.pacify.TestArchive.java
@Test
public void checkUnkownArchiveType() {
    String testFolder = "testArchive/wrong/unkownArchiveType";

    LinkedHashSet<Defect> defects = createPrepareValidateAndReplace(testFolder,
            createPropertyResolveManager(propertiesToUseWhileResolving));

    Assert.assertEquals("We should get a defect.", 1, defects.size());
    Assert.assertEquals("We expect ArchiveTypeNotImplementedDefect", ArchiveTypeNotImplementedDefect.class,
            defects.iterator().next().getClass());
}
From source file:com.geewhiz.pacify.TestArchive.java
@Test
public void checkDuplicateArchiveEntry() {
    String testFolder = "testArchive/wrong/duplicateEntry";

    LinkedHashSet<Defect> defects = createPrepareAndExecuteValidator(testFolder,
            createPropertyResolveManager(propertiesToUseWhileResolving));

    Assert.assertEquals("We should get a defect.", 1, defects.size());
    Assert.assertEquals("We expect ArchiveDuplicateDefinedInPMarkerDefect",
            ArchiveDuplicateDefinedInPMarkerDefect.class, defects.iterator().next().getClass());
}
From source file:com.geewhiz.pacify.TestArchive.java
@Test
public void checkJarInEar() {
    String testFolder = "testArchive/correct/jarInEar";

    LinkedHashSet<Defect> defects = createPrepareValidateAndReplace(testFolder,
            createPropertyResolveManager(propertiesToUseWhileResolving));

    Assert.assertEquals("We shouldnt get any defects.", 0, defects.size());
    checkIfResultIsAsExpected(testFolder);
}
From source file:com.geewhiz.pacify.TestArchive.java
@Test
public void checkTar() {
    String testFolder = "testArchive/correct/tar";

    LinkedHashSet<Defect> defects = createPrepareValidateAndReplace(testFolder,
            createPropertyResolveManager(propertiesToUseWhileResolving));

    Assert.assertEquals("We shouldnt get any defects.", 0, defects.size());
    checkIfResultIsAsExpected(testFolder);
}
From source file:com.geewhiz.pacify.TestArchive.java
@Test
public void checkZip() {
    String testFolder = "testArchive/correct/zip";

    LinkedHashSet<Defect> defects = createPrepareValidateAndReplace(testFolder,
            createPropertyResolveManager(propertiesToUseWhileResolving));

    Assert.assertEquals("We shouldnt get any defects.", 0, defects.size());
    checkIfResultIsAsExpected(testFolder);
}