List of usage examples for java.util.Collections.synchronizedList
public static <T> List<T> synchronizedList(List<T> list)
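Before the full examples below, a minimal sketch of the method's contract (the class and variable names here are made up for illustration): individual operations on the returned view are synchronized, but iterating over it is not atomic, so callers must synchronize on the returned list themselves, as documented for the JDK method.

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

public class SynchronizedListSketch {
  public static void main(String[] args) {
    // Wrap a plain ArrayList; all access must go through the returned wrapper.
    List<String> list = Collections.synchronizedList(new ArrayList<>());
    list.add("a"); // individual operations are synchronized internally
    list.add("b");
    // Iteration is NOT atomic: synchronize on the list while traversing it.
    synchronized (list) {
      for (String s : list) {
        System.out.println(s);
      }
    }
  }
}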
From source file:org.languagetool.server.TextChecker.java
void checkText(AnnotatedText aText, HttpExchange httpExchange, Map<String, String> parameters,
    ErrorRequestLimiter errorRequestLimiter, String remoteAddress) throws Exception {
  checkParams(parameters);
  long timeStart = System.currentTimeMillis();
  UserLimits limits = ServerTools.getUserLimits(parameters, config);
  // logging information
  String agent = parameters.get("useragent") != null ? parameters.get("useragent") : "-";
  Long agentId = null, userId = null;
  if (logger.isLogging()) {
    DatabaseAccess db = DatabaseAccess.getInstance();
    agentId = db.getOrCreateClientId(parameters.get("useragent"));
    userId = limits.getPremiumUid();
  }
  String referrer = httpExchange.getRequestHeaders().getFirst("Referer");
  String userAgent = httpExchange.getRequestHeaders().getFirst("User-Agent");
  if (aText.getPlainText().length() > limits.getMaxTextLength()) {
    String msg = "limit: " + limits.getMaxTextLength() + ", size: " + aText.getPlainText().length();
    logger.log(new DatabaseAccessLimitLogEntry("MaxCharacterSizeExceeded", logServerId, agentId, userId, msg,
        referrer, userAgent));
    ServerMetricsCollector.getInstance().logRequestError(ServerMetricsCollector.RequestErrorType.MAX_TEXT_SIZE);
    throw new TextTooLongException("Your text exceeds the limit of " + limits.getMaxTextLength()
        + " characters (it's " + aText.getPlainText().length() + " characters). Please submit a shorter text.");
  }
  UserConfig userConfig = new UserConfig(
      limits.getPremiumUid() != null ? getUserDictWords(limits.getPremiumUid()) : Collections.emptyList(),
      new HashMap<>(), config.getMaxSpellingSuggestions());
  // NOTE: at the moment, feedback for A/B-Tests is only delivered from this client, so only run tests there
  if (agent != null && agent.equals("ltorg")) {
    userConfig.setAbTest(config.getAbTest());
  }
  //print("Check start: " + text.length() + " chars, " + langParam);
  boolean autoDetectLanguage = getLanguageAutoDetect(parameters);
  List<String> preferredVariants = getPreferredVariants(parameters);
  if (parameters.get("noopLanguages") != null && !autoDetectLanguage) {
    ServerMetricsCollector.getInstance().logRequestError(ServerMetricsCollector.RequestErrorType.INVALID_REQUEST);
    throw new IllegalArgumentException("You can specify 'noopLanguages' only when also using 'language=auto'");
  }
  List<String> noopLangs = parameters.get("noopLanguages") != null
      ? Arrays.asList(parameters.get("noopLanguages").split(",")) : Collections.emptyList();
  List<String> preferredLangs = parameters.get("preferredLanguages") != null
      ? Arrays.asList(parameters.get("preferredLanguages").split(",")) : Collections.emptyList();
  DetectedLanguage detLang = getLanguage(aText.getPlainText(), parameters, preferredVariants, noopLangs,
      preferredLangs);
  Language lang = detLang.getGivenLanguage();
  Integer count = languageCheckCounts.get(lang.getShortCodeWithCountryAndVariant());
  if (count == null) {
    count = 1;
  } else {
    count++;
  }
  //print("Starting check: " + aText.getPlainText().length() + " chars, #" + count);
  String motherTongueParam = parameters.get("motherTongue");
  Language motherTongue = motherTongueParam != null
      ? Languages.getLanguageForShortCode(motherTongueParam) : null;
  boolean useEnabledOnly = "yes".equals(parameters.get("enabledOnly"))
      || "true".equals(parameters.get("enabledOnly"));
  List<Language> altLanguages = new ArrayList<>();
  if (parameters.get("altLanguages") != null) {
    String[] altLangParams = parameters.get("altLanguages").split(",\\s*");
    for (String langCode : altLangParams) {
      Language altLang = Languages.getLanguageForShortCode(langCode);
      altLanguages.add(altLang);
      if (altLang.hasVariant() && !altLang.isVariant()) {
        ServerMetricsCollector.getInstance()
            .logRequestError(ServerMetricsCollector.RequestErrorType.INVALID_REQUEST);
        throw new IllegalArgumentException("You specified altLanguage '" + langCode
            + "', but for this language you need to specify a variant, e.g. 'en-GB' instead of just 'en'");
      }
    }
  }
  List<String> enabledRules = getEnabledRuleIds(parameters);
  List<String> disabledRules = getDisabledRuleIds(parameters);
  List<CategoryId> enabledCategories = getCategoryIds("enabledCategories", parameters);
  List<CategoryId> disabledCategories = getCategoryIds("disabledCategories", parameters);
  if ((disabledRules.size() > 0 || disabledCategories.size() > 0) && useEnabledOnly) {
    ServerMetricsCollector.getInstance().logRequestError(ServerMetricsCollector.RequestErrorType.INVALID_REQUEST);
    throw new IllegalArgumentException("You cannot specify disabled rules or categories using enabledOnly=true");
  }
  if (enabledRules.isEmpty() && enabledCategories.isEmpty() && useEnabledOnly) {
    ServerMetricsCollector.getInstance().logRequestError(ServerMetricsCollector.RequestErrorType.INVALID_REQUEST);
    throw new IllegalArgumentException("You must specify enabled rules or categories when using enabledOnly=true");
  }
  boolean useQuerySettings = enabledRules.size() > 0 || disabledRules.size() > 0
      || enabledCategories.size() > 0 || disabledCategories.size() > 0;
  boolean allowIncompleteResults = "true".equals(parameters.get("allowIncompleteResults"));
  boolean enableHiddenRules = "true".equals(parameters.get("enableHiddenRules"));
  JLanguageTool.Mode mode = ServerTools.getMode(parameters);
  String callback = parameters.get("callback");
  QueryParams params = new QueryParams(altLanguages, enabledRules, disabledRules, enabledCategories,
      disabledCategories, useEnabledOnly, useQuerySettings, allowIncompleteResults, enableHiddenRules, mode,
      callback);
  Long textSessionId = null;
  try {
    if (parameters.containsKey("textSessionId")) {
      String textSessionIdStr = parameters.get("textSessionId");
      if (textSessionIdStr.contains(":")) {
        // transitioning to new format used in chrome addon
        // format: "{random number in 0..99999}:{unix time}"
        long random, timestamp;
        int sepPos = textSessionIdStr.indexOf(':');
        random = Long.valueOf(textSessionIdStr.substring(0, sepPos));
        timestamp = Long.valueOf(textSessionIdStr.substring(sepPos + 1));
        // use random number to choose a slice in possible range of values
        // then choose position in slice by timestamp
        long maxRandom = 100000;
        long randomSegmentSize = (Long.MAX_VALUE - maxRandom) / maxRandom;
        long segmentOffset = random * randomSegmentSize;
        if (timestamp > randomSegmentSize) {
          print(String.format("Could not transform textSessionId '%s'", textSessionIdStr));
        }
        textSessionId = segmentOffset + timestamp;
      } else {
        textSessionId = Long.valueOf(textSessionIdStr);
      }
      userConfig.setTextSessionId(textSessionId);
    }
  } catch (NumberFormatException ex) {
    print("Could not parse textSessionId '" + parameters.get("textSessionId") + "' as long: " + ex.getMessage());
  }
  int textSize = aText.getPlainText().length();
  List<RuleMatch> ruleMatchesSoFar = Collections.synchronizedList(new ArrayList<>());
  Future<List<RuleMatch>> future = executorService.submit(new Callable<List<RuleMatch>>() {
    @Override
    public List<RuleMatch> call() throws Exception {
      // use to fake OOM in thread for testing:
      /*if (Math.random() < 0.1) {
        throw new OutOfMemoryError();
      }*/
      return getRuleMatches(aText, lang, motherTongue, parameters, params, userConfig,
          f -> ruleMatchesSoFar.add(f));
    }
  });
  String incompleteResultReason = null;
  List<RuleMatch> matches;
  try {
    if (limits.getMaxCheckTimeMillis() < 0) {
      matches = future.get();
    } else {
      matches = future.get(limits.getMaxCheckTimeMillis(), TimeUnit.MILLISECONDS);
    }
  } catch (ExecutionException e) {
    future.cancel(true);
    if (ExceptionUtils.getRootCause(e) instanceof ErrorRateTooHighException) {
      ServerMetricsCollector.getInstance().logRequestError(ServerMetricsCollector.RequestErrorType.TOO_MANY_ERRORS);
      logger.log(new DatabaseCheckErrorLogEntry("ErrorRateTooHigh", logServerId, agentId, userId, lang,
          detLang.getDetectedLanguage(), textSize, "matches: " + ruleMatchesSoFar.size()));
    }
    if (params.allowIncompleteResults && ExceptionUtils.getRootCause(e) instanceof ErrorRateTooHighException) {
      print(e.getMessage() + " - returning " + ruleMatchesSoFar.size()
          + " matches found so far. Detected language: " + detLang);
      matches = new ArrayList<>(ruleMatchesSoFar); // threads might still be running, so make a copy
      incompleteResultReason = "Results are incomplete: " + ExceptionUtils.getRootCause(e).getMessage();
    } else if (e.getCause() != null && e.getCause() instanceof OutOfMemoryError) {
      throw (OutOfMemoryError) e.getCause();
    } else {
      throw new RuntimeException(e.getMessage() + ", detected: " + detLang, e);
    }
  } catch (TimeoutException e) {
    boolean cancelled = future.cancel(true);
    Path loadFile = Paths.get("/proc/loadavg"); // works in Linux only(?)
    String loadInfo = loadFile.toFile().exists() ? Files.readAllLines(loadFile).toString() : "(unknown)";
    if (errorRequestLimiter != null) {
      errorRequestLimiter.logAccess(remoteAddress, httpExchange.getRequestHeaders(), parameters);
    }
    String message = "Text checking took longer than allowed maximum of " + limits.getMaxCheckTimeMillis()
        + " milliseconds (cancelled: " + cancelled + ", lang: " + lang.getShortCodeWithCountryAndVariant()
        + ", detected: " + detLang + ", #" + count + ", " + aText.getPlainText().length()
        + " characters of text" + ", mode: " + mode.toString().toLowerCase() + ", h: "
        + reqCounter.getHandleCount() + ", r: " + reqCounter.getRequestCount() + ", system load: "
        + loadInfo + ")";
    if (params.allowIncompleteResults) {
      print(message + " - returning " + ruleMatchesSoFar.size() + " matches found so far");
      matches = new ArrayList<>(ruleMatchesSoFar); // threads might still be running, so make a copy
      incompleteResultReason = "Results are incomplete: text checking took longer than allowed maximum of "
          + String.format(Locale.ENGLISH, "%.2f", limits.getMaxCheckTimeMillis() / 1000.0) + " seconds";
    } else {
      ServerMetricsCollector.getInstance().logRequestError(ServerMetricsCollector.RequestErrorType.MAX_CHECK_TIME);
      logger.log(new DatabaseCheckErrorLogEntry("MaxCheckTimeExceeded", logServerId, agentId,
          limits.getPremiumUid(), lang, detLang.getDetectedLanguage(), textSize, "load: " + loadInfo));
      throw new RuntimeException(message, e);
    }
  }
  setHeaders(httpExchange);
  List<RuleMatch> hiddenMatches = new ArrayList<>();
  if (config.getHiddenMatchesServer() != null && params.enableHiddenRules
      && config.getHiddenMatchesLanguages().contains(lang)) {
    if (config.getHiddenMatchesServerFailTimeout() > 0 && lastHiddenMatchesServerTimeout != -1
        && System.currentTimeMillis() - lastHiddenMatchesServerTimeout < config
            .getHiddenMatchesServerFailTimeout()) {
      ServerMetricsCollector.getInstance().logHiddenServerStatus(false);
      print("Warn: Skipped querying hidden matches server at " + config.getHiddenMatchesServer()
          + " because of recent error/timeout (timeout=" + config.getHiddenMatchesServerFailTimeout() + "ms).");
    } else {
      ResultExtender resultExtender = new ResultExtender(config.getHiddenMatchesServer(),
          config.getHiddenMatchesServerTimeout());
      try {
        long start = System.currentTimeMillis();
        List<RemoteRuleMatch> extensionMatches = resultExtender.getExtensionMatches(aText.getPlainText(),
            parameters);
        hiddenMatches = resultExtender.getFilteredExtensionMatches(matches, extensionMatches);
        long end = System.currentTimeMillis();
        print("Hidden matches: " + extensionMatches.size() + " -> " + hiddenMatches.size() + " in "
            + (end - start) + "ms for " + lang.getShortCodeWithCountryAndVariant());
        ServerMetricsCollector.getInstance().logHiddenServerStatus(true);
        lastHiddenMatchesServerTimeout = -1;
      } catch (Exception e) {
        ServerMetricsCollector.getInstance().logHiddenServerStatus(false);
        print("Warn: Failed to query hidden matches server at " + config.getHiddenMatchesServer() + ": "
            + e.getClass() + ": " + e.getMessage());
        lastHiddenMatchesServerTimeout = System.currentTimeMillis();
      }
    }
  }
  int compactMode = Integer.parseInt(parameters.getOrDefault("c", "0"));
  String response = getResponse(aText, detLang, motherTongue, matches, hiddenMatches, incompleteResultReason,
      compactMode);
  if (params.callback != null) {
    // JSONP - still needed today for the special case of hosting your own on-premise LT without SSL
    // and using it from a local MS Word (not Online Word) - issue #89 in the add-in repo:
    response = params.callback + "(" + response + ");";
  }
  String messageSent = "sent";
  String languageMessage = lang.getShortCodeWithCountryAndVariant();
  try {
    httpExchange.sendResponseHeaders(HttpURLConnection.HTTP_OK, response.getBytes(ENCODING).length);
    httpExchange.getResponseBody().write(response.getBytes(ENCODING));
    ServerMetricsCollector.getInstance().logResponse(HttpURLConnection.HTTP_OK);
  } catch (IOException exception) {
    // the client is disconnected
    messageSent = "notSent: " + exception.getMessage();
  }
  if (motherTongue != null) {
    languageMessage += " (mother tongue: " + motherTongue.getShortCodeWithCountryAndVariant() + ")";
  }
  if (autoDetectLanguage) {
    languageMessage += "[auto]";
  }
  languageCheckCounts.put(lang.getShortCodeWithCountryAndVariant(), count);
  int computationTime = (int) (System.currentTimeMillis() - timeStart);
  String version = parameters.get("v") != null ? ", v:" + parameters.get("v") : "";
  print("Check done: " + aText.getPlainText().length() + " chars, " + languageMessage + ", #" + count + ", "
      + referrer + ", " + matches.size() + " matches, " + computationTime + "ms, agent:" + agent + version
      + ", " + messageSent + ", q:" + (workQueue != null ? workQueue.size() : "?") + ", h:"
      + reqCounter.getHandleCount() + ", dH:" + reqCounter.getDistinctIps() + ", m:"
      + mode.toString().toLowerCase());
  int matchCount = matches.size();
  Map<String, Integer> ruleMatchCount = new HashMap<>();
  for (RuleMatch match : matches) {
    String ruleId = match.getRule().getId();
    ruleMatchCount.put(ruleId, ruleMatchCount.getOrDefault(ruleId, 0) + 1);
  }
  ServerMetricsCollector.getInstance().logCheck(lang, computationTime, textSize, matchCount, mode, agent,
      ruleMatchCount);
  if (!config.isSkipLoggingChecks()) {
    DatabaseCheckLogEntry logEntry = new DatabaseCheckLogEntry(userId, agentId, logServerId, textSize,
        matchCount, lang, detLang.getDetectedLanguage(), computationTime, textSessionId, mode.toString());
    logEntry.setRuleMatches(new DatabaseRuleMatchLogEntry(
        config.isSkipLoggingRuleMatches() ? Collections.emptyMap() : ruleMatchCount));
    logger.log(logEntry);
  }
}
From source file:com.google.cloud.hadoop.gcsio.GoogleCloudStorageImpl.java
@Override
public void createEmptyObjects(List<StorageResourceId> resourceIds, CreateObjectOptions options)
    throws IOException {
  // TODO(user): This method largely follows a pattern similar to
  // deleteObjects(List<StorageResourceId>); extract a generic method for both.
  log.debug("createEmptyObjects(%s)", resourceIds);
  // Validate that all the elements represent StorageObjects.
  for (StorageResourceId resourceId : resourceIds) {
    Preconditions.checkArgument(resourceId.isStorageObject(),
        "Expected full StorageObject names only, got: '%s'", resourceId);
  }
  // Gather exceptions to wrap in a composite exception at the end.
  final List<IOException> innerExceptions = Collections.synchronizedList(new ArrayList<IOException>());
  final CountDownLatch latch = new CountDownLatch(resourceIds.size());
  for (final StorageResourceId resourceId : resourceIds) {
    final Storage.Objects.Insert insertObject = prepareEmptyInsert(resourceId, options);
    manualBatchingThreadPool.execute(new Runnable() {
      @Override
      public void run() {
        try {
          insertObject.execute();
          log.debug("Successfully inserted %s", resourceId.toString());
        } catch (IOException ioe) {
          innerExceptions.add(wrapException(ioe, "Error inserting", resourceId.getBucketName(),
              resourceId.getObjectName()));
        } catch (Throwable t) {
          innerExceptions.add(wrapException(new IOException(t), "Error inserting", resourceId.getBucketName(),
              resourceId.getObjectName()));
        } finally {
          latch.countDown();
        }
      }
    });
  }
  try {
    latch.await();
  } catch (InterruptedException ie) {
    throw new IOException(ie);
  }
  if (!innerExceptions.isEmpty()) {
    throw GoogleCloudStorageExceptions.createCompositeException(innerExceptions);
  }
}
From source file:org.pentaho.reporting.engine.classic.core.testsupport.gold.GoldTestBase.java
protected void runAllGoldReportsSerial() throws Exception {
  initializeTestEnvironment();
  List<Throwable> errors = Collections.synchronizedList(new ArrayList<Throwable>());
  List<ExecuteReportRunner> reports = new ArrayList<ExecuteReportRunner>();
  reports.addAll(collectReports("reports", ReportProcessingMode.legacy, errors));
  reports.addAll(collectReports("reports", ReportProcessingMode.migration, errors));
  reports.addAll(collectReports("reports", ReportProcessingMode.current, errors));
  reports.addAll(collectReports("reports-4.0", ReportProcessingMode.migration, errors));
  reports.addAll(collectReports("reports-4.0", ReportProcessingMode.current, errors));
  for (ExecuteReportRunner report : reports) {
    report.run();
  }
  if (errors.isEmpty() == false) {
    Log log = LogFactory.getLog(GoldTestBase.class);
    for (Throwable throwable : errors) {
      log.error("Failed", throwable);
    }
    Assert.fail();
  }
  System.out.println(findMarker());
}
From source file:org.apache.hadoop.ipc.TestAsyncIPC.java
/**
 * Tests that client generates a unique sequential call ID for each RPC call,
 * even if multiple threads are using the same client.
 *
 * @throws InterruptedException
 * @throws ExecutionException
 */
@Test(timeout = 60000)
public void testUniqueSequentialCallIds() throws IOException, InterruptedException, ExecutionException {
  int serverThreads = 10, callerCount = 100, perCallerCallCount = 100;
  TestServer server = new TestIPC.TestServer(serverThreads, false, conf);
  // Attach a listener that tracks every call ID received by the server. This
  // list must be synchronized, because multiple server threads will add to it.
  final List<Integer> callIds = Collections.synchronizedList(new ArrayList<Integer>());
  server.callListener = new Runnable() {
    @Override
    public void run() {
      callIds.add(Server.getCallId());
    }
  };
  Client client = new Client(LongWritable.class, conf);
  try {
    InetSocketAddress addr = NetUtils.getConnectAddress(server);
    server.start();
    AsyncCaller[] callers = new AsyncCaller[callerCount];
    for (int i = 0; i < callerCount; ++i) {
      callers[i] = new AsyncCaller(client, addr, perCallerCallCount);
      callers[i].start();
    }
    for (int i = 0; i < callerCount; ++i) {
      callers[i].join();
      callers[i].assertReturnValues();
    }
  } finally {
    client.stop();
    server.stop();
  }
  int expectedCallCount = callerCount * perCallerCallCount;
  assertEquals(expectedCallCount, callIds.size());
  // It is not guaranteed that the server executes requests in sequential order
  // of client call ID, so we must sort the call IDs before checking that it
  // contains every expected value.
  Collections.sort(callIds);
  final int startID = callIds.get(0).intValue();
  for (int i = 0; i < expectedCallCount; ++i) {
    assertEquals(startID + i, callIds.get(i).intValue());
  }
}
From source file:net.sf.ehcache.store.DiskStore.java
/**
 * Remove all of the elements from the store.
 * <p/>
 * If there are registered <code>CacheEventListener</code>s they are notified of the expiry or removal
 * of the <code>Element</code> as each is removed.
 */
public final synchronized void removeAll() {
  if (persistent == false) {
    return;
  }
  try {
    checkActive();
    // Ditch all the elements, and truncate the file
    spool = Collections.synchronizedMap(new HashMap());
    diskElements = Collections.synchronizedMap(new HashMap());
    freeSpace = Collections.synchronizedList(new ArrayList());
    totalSize = 0;
    randomAccessFile.setLength(0);
    if (persistent) {
      indexFile.delete();
      indexFile.createNewFile();
    }
  } catch (Exception e) {
    // Clean up
    LOG.error(name + " Cache: Could not rebuild disk store. Initial cause was " + e.getMessage(), e);
    dispose();
  }
}
From source file:jp.co.acroquest.endosnipe.perfdoctor.rule.RuleManager.java
/**
 * Rolls back the rule set to the state captured in the given serialized rules.
 *
 * @param serializedRules
 *            the serialized rule data to restore
 */
@SuppressWarnings("unchecked")
public synchronized void rollbackRuleSet(final SerializedRules serializedRules) {
  byte[] ruleSetConfigMapData = serializedRules.getRuleSetConfigMapData();
  byte[] ruleMapData = serializedRules.getRuleMapData();
  if (ruleSetConfigMapData == null || ruleSetConfigMapData.length == 0 || ruleMapData == null
      || ruleMapData.length == 0) {
    return;
  }
  this.ruleSetConfigMap_ = (HashMap<String, RuleSetConfig>) SerializationUtils
      .deserialize(ruleSetConfigMapData);
  this.ruleSetMap_ = (HashMap<String, RuleSetDef>) SerializationUtils.deserialize(ruleMapData);
  this.removeList_ = Collections.synchronizedList(new ArrayList<RuleSetConfig>());
}
From source file:org.springframework.osgi.extender.internal.activator.ContextLoaderListener.java
/**
 * Shutdown the extender and all bundles managed by it. Shutdown of contexts
 * is in the topological order of the dependency graph formed by the service
 * references.
 */
protected void shutdown() {
  synchronized (monitor) {
    // if already closed, bail out
    if (isClosed)
      return;
    else
      isClosed = true;
  }
  log.info("Stopping [" + bundleContext.getBundle().getSymbolicName() + "] bundle v.[" + extenderVersion + "]");
  // first stop the watchdog
  stopTimer();
  // remove the bundle listeners (we are closing down)
  if (contextListener != null) {
    bundleContext.removeBundleListener(contextListener);
    contextListener = null;
  }
  if (nsListener != null) {
    bundleContext.removeBundleListener(nsListener);
    nsListener = null;
  }
  // destroy bundles
  Bundle[] bundles = new Bundle[managedContexts.size()];
  int i = 0;
  for (Iterator it = managedContexts.values().iterator(); it.hasNext();) {
    ConfigurableOsgiBundleApplicationContext context = (ConfigurableOsgiBundleApplicationContext) it.next();
    bundles[i++] = context.getBundle();
  }
  bundles = shutdownDependencySorter.computeServiceDependencyGraph(bundles);
  boolean debug = log.isDebugEnabled();
  StringBuffer buffer = new StringBuffer();
  if (debug) {
    buffer.append("Shutdown order is: {");
    for (i = 0; i < bundles.length; i++) {
      buffer.append("\nBundle [" + bundles[i].getSymbolicName() + "]");
      ServiceReference[] services = bundles[i].getServicesInUse();
      HashSet usedBundles = new HashSet();
      if (services != null) {
        for (int j = 0; j < services.length; j++) {
          if (BundleDependencyComparator.isSpringManagedService(services[j])) {
            Bundle used = services[j].getBundle();
            if (!used.equals(bundleContext.getBundle()) && !usedBundles.contains(used)) {
              usedBundles.add(used);
              buffer.append("\n Using [" + used.getSymbolicName() + "]");
            }
          }
        }
      }
    }
    buffer.append("\n}");
    log.debug(buffer);
  }
  final List taskList = new ArrayList(managedContexts.size());
  final List closedContexts = Collections.synchronizedList(new ArrayList());
  final Object[] contextClosingDown = new Object[1];
  for (i = 0; i < bundles.length; i++) {
    Long id = new Long(bundles[i].getBundleId());
    final ConfigurableOsgiBundleApplicationContext context = (ConfigurableOsgiBundleApplicationContext) managedContexts
        .get(id);
    if (context != null) {
      closedContexts.add(context);
      // add a new runnable
      taskList.add(new Runnable() {
        private final String toString = "Closing runnable for context " + context.getDisplayName();

        public void run() {
          contextClosingDown[0] = context;
          // eliminate context
          closedContexts.remove(context);
          if (log.isDebugEnabled())
            log.debug("Closing appCtx " + context.getDisplayName());
          context.close();
        }

        public String toString() {
          return toString;
        }
      });
    }
  }
  // tasks
  final Runnable[] tasks = (Runnable[]) taskList.toArray(new Runnable[taskList.size()]);
  // start the ripper >:)
  for (int j = 0; j < tasks.length; j++) {
    if (RunnableTimedExecution.execute(tasks[j], extenderConfiguration.getShutdownWaitTime(),
        shutdownTaskExecutor)) {
      if (debug) {
        log.debug(contextClosingDown[0] + " context did not close successfully; forcing shutdown...");
      }
    }
  }
  this.managedContexts.clear();
  // clear the namespace registry
  nsManager.destroy();
  // release listeners
  if (applicationListeners != null) {
    applicationListeners = null;
    try {
      applicationListenersCleaner.destroy();
    } catch (Exception ex) {
      log.warn("exception thrown while releasing OSGi event listeners", ex);
    }
  }
  // release multicaster
  if (multicaster != null) {
    multicaster.removeAllListeners();
    multicaster = null;
  }
  // before bailing out; wait for the threads that might be left by
  // the task executor
  stopTaskExecutor();
  extenderConfiguration.destroy();
}
From source file:com.smartitengineering.cms.spi.impl.content.GroovyGeneratorTest.java
@Test
public void testMultiGroovyRepGeneration() throws IOException {
  TypeRepresentationGenerator generator = new GroovyRepresentationGenerator();
  final RepresentationTemplate template = mockery.mock(RepresentationTemplate.class);
  WorkspaceAPIImpl impl = new WorkspaceAPIImpl() {
    @Override
    public RepresentationTemplate getRepresentationTemplate(WorkspaceId id, String name) {
      return template;
    }
  };
  impl.setRepresentationGenerators(Collections.singletonMap(TemplateType.GROOVY, generator));
  final RepresentationProvider provider = new RepresentationProviderImpl();
  final WorkspaceAPI api = impl;
  registerBeanFactory(api);
  final Content content = mockery.mock(Content.class);
  final Field field = mockery.mock(Field.class);
  final FieldValue value = mockery.mock(FieldValue.class);
  final ContentType type = mockery.mock(ContentType.class);
  final Map<String, RepresentationDef> reps = mockery.mock(Map.class, "repMap");
  final RepresentationDef def = mockery.mock(RepresentationDef.class);
  final int threadCount = new Random().nextInt(100);
  logger.info("Number of parallel threads " + threadCount);
  mockery.checking(new Expectations() {
    {
      exactly(threadCount).of(template).getTemplateType();
      will(returnValue(TemplateType.GROOVY));
      exactly(threadCount).of(template).getTemplate();
      final byte[] toByteArray = IOUtils.toByteArray(getClass().getClassLoader()
          .getResourceAsStream("scripts/groovy/GroovyTestRepresentationGenerator.groovy"));
      will(returnValue(toByteArray));
      exactly(threadCount).of(template).getName();
      will(returnValue(REP_NAME));
      for (int i = 0; i < threadCount; ++i) {
        exactly(1).of(value).getValue();
        will(returnValue(String.valueOf(i)));
      }
      exactly(threadCount).of(field).getValue();
      will(returnValue(value));
      exactly(threadCount).of(content).getField(with(Expectations.<String>anything()));
      will(returnValue(field));
      exactly(threadCount).of(content).getContentDefinition();
      will(returnValue(type));
      final ContentId contentId = mockery.mock(ContentId.class);
      exactly(2 * threadCount).of(content).getContentId();
      will(returnValue(contentId));
      final WorkspaceId wId = mockery.mock(WorkspaceId.class);
      exactly(threadCount).of(contentId).getWorkspaceId();
      will(returnValue(wId));
      exactly(2 * threadCount).of(type).getRepresentationDefs();
      will(returnValue(reps));
      exactly(2 * threadCount).of(reps).get(with(REP_NAME));
      will(returnValue(def));
      exactly(threadCount).of(def).getParameters();
      will(returnValue(Collections.emptyMap()));
      exactly(threadCount).of(def).getMIMEType();
      will(returnValue(GroovyGeneratorTest.MIME_TYPE));
      final ResourceUri rUri = mockery.mock(ResourceUri.class);
      exactly(threadCount).of(def).getResourceUri();
      will(returnValue(rUri));
      exactly(threadCount).of(rUri).getValue();
      will(returnValue("iUri"));
    }
  });
  final Set<String> set = Collections.synchronizedSet(new LinkedHashSet<String>(threadCount));
  final List<String> list = Collections.synchronizedList(new ArrayList<String>(threadCount));
  final AtomicInteger integer = new AtomicInteger(0);
  Threads group = new Threads();
  for (int i = 0; i < threadCount; ++i) {
    group.addThread(new Thread(new Runnable() {
      public void run() {
        Representation representation = provider.getRepresentation(REP_NAME, type, content);
        Assert.assertNotNull(representation);
        Assert.assertEquals(REP_NAME, representation.getName());
        final String rep = StringUtils.newStringUtf8(representation.getRepresentation());
        list.add(rep);
        set.add(rep);
        Assert.assertEquals(GroovyGeneratorTest.MIME_TYPE, representation.getMimeType());
        integer.addAndGet(1);
      }
    }));
  }
  group.start();
  try {
    group.join();
  } catch (Exception ex) {
    logger.error(ex.getMessage(), ex);
  }
  logger.info("Generated reps list: " + list);
  logger.info("Generated reps set: " + set);
  Assert.assertEquals(threadCount, integer.get());
  Assert.assertEquals(threadCount, list.size());
  Assert.assertEquals(threadCount, set.size());
}
From source file:org.codice.ddf.catalog.ui.util.EndpointUtil.java
public CqlQueryResponse executeCqlQuery(CqlRequest cqlRequest)
    throws UnsupportedQueryException, SourceUnavailableException, FederationException {
  QueryRequest request = cqlRequest.createQueryRequest(catalogFramework.getId(), filterBuilder);
  Stopwatch stopwatch = Stopwatch.createStarted();
  List<QueryResponse> responses = Collections.synchronizedList(new ArrayList<>());
  List<Result> results;
  if (cqlRequest.getCount() == 0) {
    results = retrieveHitCount(request, responses);
  } else {
    results = retrieveResults(cqlRequest, request, responses);
  }
  QueryResponse response = new QueryResponseImpl(request, results, true,
      responses.stream().filter(Objects::nonNull).map(QueryResponse::getHits).findFirst().orElse(-1L),
      responses.stream().filter(Objects::nonNull).map(QueryResponse::getProperties).findFirst()
          .orElse(Collections.emptyMap()));
  stopwatch.stop();
  return new CqlQueryResponse(cqlRequest.getId(), request, response, cqlRequest.getSourceResponseString(),
      stopwatch.elapsed(TimeUnit.MILLISECONDS), cqlRequest.isNormalize(), filterAdapter, actionRegistry,
      descriptors);
}
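A pattern running through most of the examples above is using Collections.synchronizedList as a thread-safe sink that worker threads append to, with the coordinating thread copying or sorting the list only after the workers have finished (or while holding the list's lock). Below is a minimal, self-contained sketch of that pattern; the class name, thread pool size, and workload are invented for illustration and are not taken from any of the projects above.

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class ResultSinkSketch {
  public static void main(String[] args) throws InterruptedException {
    // Thread-safe sink that many workers append to concurrently.
    final List<Integer> results = Collections.synchronizedList(new ArrayList<>());
    ExecutorService pool = Executors.newFixedThreadPool(4);
    for (int i = 0; i < 100; i++) {
      final int id = i;
      pool.execute(() -> results.add(id)); // add() alone needs no extra locking
    }
    pool.shutdown();
    pool.awaitTermination(1, TimeUnit.MINUTES);
    // Workers are done, so copying and sorting the list is now race-free.
    List<Integer> snapshot = new ArrayList<>(results);
    Collections.sort(snapshot);
    System.out.println("collected " + snapshot.size() + " results");
  }
}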