List of usage examples for java.util.concurrent Future cancel
boolean cancel(boolean mayInterruptIfRunning);
From source file: org.apache.helix.messaging.handling.HelixTaskExecutor.java
/**
 * Attempts to cancel a running message task.
 *
 * Under {@code _lock}: cancels the task's timeout timer (if any), removes the
 * message from the task/future bookkeeping maps, then tries to cancel the
 * execution future with interruption.  The task is removed from
 * {@code _taskMap} only when {@code Future.cancel(true)} succeeds; otherwise
 * the failure is logged and {@code false} is returned.
 *
 * @param task the task to cancel
 * @return {@code true} only if the underlying future was actually cancelled
 */
@Override
public boolean cancelTask(MessageTask task) {
    Message message = task.getMessage();
    NotificationContext notificationContext = task.getNotificationContext();
    String taskId = task.getTaskId();
    synchronized (_lock) {
        if (_taskMap.containsKey(taskId)) {
            MessageTaskInfo taskInfo = _taskMap.get(taskId);
            // cancel timeout task
            if (taskInfo._timerTask != null) {
                taskInfo._timerTask.cancel();
            }
            // cancel task
            Future<HelixTaskResult> future = taskInfo.getFuture();
            // NOTE: the message is removed from the task/future map even if the
            // cancel below fails; only _taskMap removal is conditional on success.
            removeMessageFromTaskAndFutureMap(message);
            _statusUpdateUtil.logInfo(message, HelixTaskExecutor.class, "Canceling task: " + taskId,
                    notificationContext.getManager());
            // If the thread is still running it will be interrupted if cancel(true)
            // is called. So state transition callbacks should implement logic to
            // return if it is interrupted.
            if (future.cancel(true)) {
                _statusUpdateUtil.logInfo(message, HelixTaskExecutor.class, "Canceled task: " + taskId,
                        notificationContext.getManager());
                _taskMap.remove(taskId);
                return true;
            } else {
                // Future could not be cancelled (already completed or already cancelled).
                _statusUpdateUtil.logInfo(message, HelixTaskExecutor.class, "fail to cancel task: " + taskId,
                        notificationContext.getManager());
            }
        } else {
            // Task id unknown — nothing to cancel.
            _statusUpdateUtil.logWarning(message, HelixTaskExecutor.class,
                    "fail to cancel task: " + taskId + ", future not found",
                    notificationContext.getManager());
        }
    }
    return false;
}
From source file: com.splicemachine.compactions.SpliceDefaultCompactor.java
/**
 * Runs this compaction request as a distributed (Spark) job via the Olap
 * client, falling back to the local superclass implementation when Spark is
 * not allowed, the store is a system table, or the Olap submission is
 * rejected.
 *
 * The submitted job is polled every {@code olapClientTickTime} ms; on each
 * poll timeout the region's write status is checked so a closed region aborts
 * the compaction instead of waiting forever.
 *
 * @return the paths of the compacted store files produced by the job
 * @throws IOException on interruption, job failure, or region close
 */
@Override
public List<Path> compact(CompactionRequest request, CompactionThroughputController throughputController,
        User user) throws IOException {
    // Fall back to the normal RegionServer compaction path when Spark is
    // disabled or this is a system table.
    if (!allowSpark || store.getRegionInfo().isSystemTable())
        return super.compact(request, throughputController, user);
    if (LOG.isTraceEnabled())
        SpliceLogUtils.trace(LOG, "compact(): request=%s", request);
    assert request instanceof SpliceCompactionRequest;
    smallestReadPoint = store.getSmallestReadPoint();
    FileDetails fd = getFileDetails(request.getFiles(), request.isAllFiles());
    this.progress = new CompactionProgress(fd.maxKeyCount);
    // Collect the input store-file paths for the remote job.
    List<String> files = new ArrayList<>();
    for (StoreFile sf : request.getFiles()) {
        files.add(sf.getPath().toString());
    }
    String regionLocation = getRegionLocation(store);
    SConfiguration config = HConfiguration.getConfiguration();
    DistributedCompaction jobRequest = new DistributedCompaction(getCompactionFunction(), files,
            getJobDetails(request), getJobGroup(request, regionLocation), getJobDescription(request),
            getPoolName(), getScope(request), regionLocation, config.getOlapCompactionMaximumWait());
    CompactionResult result = null;
    Future<CompactionResult> futureResult = EngineDriver.driver().getOlapClient().submit(jobRequest);
    // Poll the remote job until it completes, fails, or the region closes.
    while (result == null) {
        try {
            result = futureResult.get(config.getOlapClientTickTime(), TimeUnit.MILLISECONDS);
        } catch (InterruptedException e) {
            // we were interrupted processing, so we're shutting down.
            // Nothing to be done, just die gracefully
            Thread.currentThread().interrupt();
            throw new IOException(e);
        } catch (ExecutionException e) {
            if (e.getCause() instanceof RejectedExecutionException) {
                LOG.warn("Spark compaction execution rejected, falling back to RegionServer execution",
                        e.getCause());
                return super.compact(request, throughputController, user);
            }
            throw Exceptions.rawIOException(e.getCause());
        } catch (TimeoutException e) {
            // check region write status
            if (!store.areWritesEnabled()) {
                futureResult.cancel(true);
                progress.cancel();
                // TODO should we cleanup files written by Spark?
                throw new IOException("Region has been closed, compaction aborted");
            }
        }
    }
    List<String> sPaths = result.getPaths();
    if (LOG.isTraceEnabled())
        SpliceLogUtils.trace(LOG, "Paths Returned: %s", sPaths);
    this.progress.complete();
    ScanType scanType = request.isRetainDeleteMarkers() ? ScanType.COMPACT_RETAIN_DELETES
            : ScanType.COMPACT_DROP_DELETES;
    // trigger MemstoreAwareObserver
    postCreateCoprocScanner(request, scanType, null, user);
    SpliceCompactionRequest scr = (SpliceCompactionRequest) request;
    scr.preStorefilesRename();
    // Convert the returned string paths into HBase Path objects.
    List<Path> paths = new ArrayList<>();
    for (String spath : sPaths) {
        paths.add(new Path(spath));
    }
    return paths;
}
From source file: org.zodiark.service.action.ActionServiceImpl.java
/**
 * {@inheritDoc}
 *
 * Starts an action countdown: looks up the publisher endpoint for the
 * envelope's uuid, then (1) schedules a 1-second fixed-rate tick that pushes
 * the remaining seconds to both publisher and subscriber, and (2) schedules a
 * one-shot task after the action's duration that cancels the tick, notifies
 * both sides with "OK" results, and finally fires STREAMING_COMPLETE_ACTION.
 */
public void actionStarted(Envelope e) {
    try {
        final PublisherResults results = mapper.readValue(e.getMessage().getData(), PublisherResults.class);
        eventBus.message(RETRIEVE_PUBLISHER, results.getUuid(), new Reply<PublisherEndpoint, String>() {
            @Override
            public void ok(final PublisherEndpoint p) {
                // Remaining seconds, decremented once per tick.
                final AtomicInteger time = new AtomicInteger(p.action().time());
                final AtmosphereResource publisher = p.resource();
                final AtmosphereResource subscriber = p.action().subscriber().resource();
                // Per-second countdown broadcast to both parties.
                final Future<?> timerFuture = timer.scheduleAtFixedRate(new Runnable() {
                    @Override
                    public void run() {
                        if (time.get() == 0)
                            return; // countdown exhausted; completion task will cancel us
                        Message m = new Message();
                        m.setPath(ACTION_TIMER);
                        try {
                            m.setData(mapper.writeValueAsString(time.getAndDecrement()));
                            Envelope e = Envelope.newPublisherMessage(p.uuid(), m);
                            String w = mapper.writeValueAsString(e);
                            publisher.write(w);
                            e = Envelope.newSubscriberMessage(p.uuid(), m);
                            w = mapper.writeValueAsString(e);
                            subscriber.write(w);
                        } catch (JsonProcessingException e1) {
                            logger.error("", e1);
                        }
                    }
                }, 1, 1, TimeUnit.SECONDS);
                // One-shot completion after the action's full duration.
                timer.schedule(new Runnable() {
                    @Override
                    public void run() {
                        timerFuture.cancel(false); // stop the countdown tick
                        Message m = new Message();
                        m.setPath(ACTION_COMPLETED);
                        try {
                            m.setData(mapper.writeValueAsString(new PublisherResults("OK")));
                            Envelope e = Envelope.newPublisherMessage(p.uuid(), m);
                            String w = mapper.writeValueAsString(e);
                            publisher.write(w);
                            m.setData(mapper.writeValueAsString(new SubscriberResults("OK")));
                            e = Envelope.newSubscriberMessage(p.uuid(), m);
                            w = mapper.writeValueAsString(e);
                            subscriber.write(w);
                        } catch (JsonProcessingException e1) {
                            logger.error("", e1);
                        } finally {
                            // Always advance the workflow, even if a write failed.
                            eventBus.message(STREAMING_COMPLETE_ACTION, p);
                        }
                    }
                }, p.action().time(), TimeUnit.SECONDS);
            }

            @Override
            public void fail(ReplyException replyException) {
                // NOTE(review): "Publishere" is a typo in the log message; left
                // as-is since runtime strings must not change in a doc-only pass.
                logger.error("Unable to retrieve Publishere for {}", results.getUuid());
            }
        });
    } catch (IOException e1) {
        logger.error("", e1);
    }
}
From source file: com.powers.wsexplorer.gui.WSExplorer.java
/**
 * Requests cancellation of an in-flight SOAP send.
 *
 * @param pendingSend the future tracking the send operation; cancelled with
 *                    interruption so a blocked sender thread is woken up
 */
public void cancelSoapSend(Future<SoapResponse> pendingSend) {
    pendingSend.cancel(true);
}
From source file: de.blizzy.documentr.search.PageFinder.java
/**
 * Searches pages matching {@code searchText}, visible to the given
 * authentication, returning the requested result page.
 *
 * The search itself runs on the task executor while this thread computes a
 * search-text suggestion against the same acquired IndexSearcher; the two are
 * then combined. ExecutionException causes are unwrapped back into the
 * method's declared exception types.
 *
 * @param searchText the query text (must be non-empty)
 * @param page 1-based result page number
 * @param authentication caller identity used for permission filtering
 * @throws TimeoutException if the search exceeds INTERACTIVE_TIMEOUT seconds
 */
public SearchResult findPages(final String searchText, final int page, final Authentication authentication)
        throws ParseException, IOException, TimeoutException {

    Assert.hasLength(searchText);
    Assert.isTrue(page >= 1);
    Assert.notNull(authentication);

    IndexSearcher searcher = null;
    Future<SearchResult> findFuture = null;
    try {
        searcher = searcherManager.acquire();
        final IndexSearcher indexSearcher = searcher;
        // Run the actual page search asynchronously...
        Callable<SearchResult> findCallable = new Callable<SearchResult>() {
            @Override
            public SearchResult call() throws ParseException, IOException, TimeoutException {
                return findPages(searchText, page, authentication, indexSearcher);
            }
        };
        findFuture = taskExecutor.submit(findCallable);

        // ...while computing the "did you mean" suggestion on this thread.
        SearchTextSuggestion suggestion = getSearchTextSuggestion(searchText, authentication, indexSearcher);

        SearchResult result = findFuture.get(DocumentrConstants.INTERACTIVE_TIMEOUT, TimeUnit.SECONDS);
        result.setSuggestion(suggestion);
        return result;
    } catch (InterruptedException e) {
        throw new RuntimeException(e);
    } catch (ExecutionException e) {
        // Re-throw the task's real failure under its original type.
        Throwable cause = e.getCause();
        if (cause instanceof ParseException) {
            throw (ParseException) cause;
        } else if (cause instanceof IOException) {
            throw (IOException) cause;
        } else if (cause instanceof TimeoutException) {
            throw (TimeoutException) cause;
        } else {
            throw Util.toRuntimeException(cause);
        }
    } finally {
        // Best-effort cancel (no interrupt) and always release the searcher.
        if (findFuture != null) {
            findFuture.cancel(false);
        }
        if (searcher != null) {
            searcherManager.release(searcher);
        }
    }
}
From source file: ee.ria.xroad.proxy.clientproxy.ClientMessageProcessor.java
/**
 * Processes one client request: SOAP parsing is delegated to a background
 * handler thread while this thread verifies the client and sends the request;
 * on any failure the request input is closed and the handler is interrupted
 * via {@code Future.cancel(true)} so it does not block forever.
 */
@Override
public void process() throws Exception {
    log.trace("process()");

    updateOpMonitoringClientSecurityServerAddress();

    // Parse the SOAP request concurrently on the handler executor.
    Future<?> soapHandler = SOAP_HANDLER_EXECUTOR.submit(this::handleSoap);

    try {
        // Wait for the request SOAP message to be parsed before we can
        // start sending stuff.
        waitForSoapMessage();

        // If the handler thread excepted, do not continue.
        checkError();

        // Verify that the client is registered
        verifyClientStatus();

        // Check client authentication mode
        verifyClientAuthentication();

        processRequest();

        if (response != null) {
            sendResponse();
        }
    } catch (Exception e) {
        if (reqIns != null) {
            reqIns.close();
        }

        // Let's interrupt the handler thread so that it won't
        // block forever waiting for us to do something.
        soapHandler.cancel(true);

        throw e;
    } finally {
        if (response != null) {
            response.consume();
        }
    }
}
From source file: com.hygenics.parser.GetImages.java
private void getImages() { // controls the web process from a removed method log.info("Setting Up Pull"); String[] proxyarr = (proxies == null) ? null : proxies.split(","); // cleanup//from w w w. ja va 2 s . c o m if (cleanup) { cleanupDir(fpath); } // image grab CookieManager cm = new CookieManager(); cm.setCookiePolicy(CookiePolicy.ACCEPT_ALL); CookieHandler.setDefault(cm); int numimages = 0; InputStream is; byte[] bytes; int iter = 0; int found = 0; // set proxy if needed if (proxyuser != null) { proxy(proxyhost, proxyport, https, proxyuser, proxypass); } int i = 0; ArrayList<String> postImages = new ArrayList<String>(); ForkJoinPool fjp = new ForkJoinPool(Runtime.getRuntime().availableProcessors()); Set<Callable<String>> pulls = new HashSet<Callable<String>>(); Set<Callable<ArrayList<String>>> sqls = new HashSet<Callable<ArrayList<String>>>(); List<Future<String>> imageFutures; ArrayList<String> images; int chunksize = (int) Math.ceil(commitsize / numqueries); log.info("Chunksize: " + chunksize); if (baseurl != null || baseurlcolumn != null) { do { log.info("Offset: " + offset); log.info("Getting Images"); images = new ArrayList<String>(commitsize); log.info("Getting Columns"); for (int n = 0; n < numqueries; n++) { String tempsql = sql + " WHERE " + idString + " >= " + offset + " AND " + idString + " < " + (offset + chunksize); if (conditions != null) { tempsql += conditions; } sqls.add(new QueryDatabase( ((extracondition != null) ? 
tempsql + " " + extracondition : tempsql))); offset += chunksize; } List<Future<ArrayList<String>>> futures = fjp.invokeAll(sqls); int w = 0; while (fjp.isQuiescent() && fjp.getActiveThreadCount() > 0) { w++; } for (Future<ArrayList<String>> f : futures) { try { ArrayList<String> fjson; fjson = f.get(); if (fjson.size() > 0) { images.addAll(fjson); } if (f.isDone() == false) { f.cancel(true); } } catch (InterruptedException e) { e.printStackTrace(); } catch (ExecutionException e) { e.printStackTrace(); } } log.info(Integer.toString(images.size()) + " image links found. Pulling."); ArrayList<String> tempproxies = new ArrayList<String>(); if (proxyarr != null) { for (String proxy : proxyarr) { tempproxies.add(proxy.trim()); } } if (maxproxies > 0) { maxproxies -= 1;// 0 and 1 should be equivalent conditions // --num is not like most 0 based still due // to >= } // get images for (int num = 0; num < images.size(); num++) { String icols = images.get(num); int proxnum = (int) Math.random() * (tempproxies.size() - 1); String proxy = (tempproxies.size() == 0) ? 
null : tempproxies.get(proxnum); // add grab pulls.add(new ImageGrabber(icols, proxy)); if (proxy != null) { tempproxies.remove(proxy); } // check for execution if (num + 1 == images.size() || pulls.size() >= commitsize || tempproxies.size() == 0) { if (tempproxies.size() == 0 && proxies != null) { tempproxies = new ArrayList<String>(proxyarr.length); for (String p : proxyarr) { tempproxies.add(p.trim()); } } imageFutures = fjp.invokeAll(pulls); w = 0; while (fjp.isQuiescent() == false && fjp.getActiveThreadCount() > 0) { w++; } for (Future<String> f : imageFutures) { String add; try { add = f.get(); if (add != null) { postImages.add(add); } } catch (InterruptedException e) { e.printStackTrace(); } catch (ExecutionException e) { e.printStackTrace(); } } imageFutures = null;// garbage collect elligible pulls = new HashSet<Callable<String>>(commitsize); } if (postImages.size() >= commitsize && addtoDB == true) { if (addtoDB) { log.info("Posting to Database"); log.info("Found " + postImages.size() + " images"); numimages += postImages.size(); int size = (int) Math.floor(postImages.size() / numqueries); for (int n = 0; n < numqueries; n++) { if (((n + 1) * size) < postImages.size() && (n + 1) < numqueries) { fjp.execute(new ImagePost(postImages.subList(n * size, (n + 1) * size))); } else { fjp.execute(new ImagePost(postImages.subList(n * size, postImages.size() - 1))); } } w = 0; while (fjp.isQuiescent() && fjp.getActiveThreadCount() > 0) { w++; } } found += postImages.size(); postImages.clear(); } } if (postImages.size() > 0 && addtoDB == true) { log.info("Posting to Database"); numimages += postImages.size(); int size = (int) Math.floor(postImages.size() / numqueries); for (int n = 0; n < numqueries; n++) { if (((n + 1) * size) < postImages.size()) { fjp.execute(new ImagePost(postImages.subList(n * size, (n + 1) * size))); } else { fjp.execute(new ImagePost(postImages.subList(n * size, postImages.size()))); } } w = 0; while (fjp.isQuiescent() && 
fjp.getActiveThreadCount() > 0) { w++; } found += postImages.size(); postImages.clear(); } // handle iterations specs iter += 1; log.info("Iteration: " + iter); if ((iter < iterations && found < images.size()) || tillfound == true) { log.info("Not All Images Obtained Trying Iteration " + iter + " of " + iterations); offset -= commitsize; } else if ((iter < iterations && found >= images.size()) && tillfound == false) { log.info("Images Obtained in " + iter + " iterations. Continuing."); iter = 0; } else { // precautionary log.info("Images Obtained in " + iter + " iterations. Continuing"); iter = 0; } } while (images.size() > 0 && iter < iterations); if (fjp.isShutdown()) { fjp.shutdownNow(); } } log.info("Complete. Check for Errors \n " + numimages + " Images Found"); }
From source file: de.unisb.cs.st.javalanche.mutation.runtime.testDriver.MutationTestDriver.java
/**
 * Runs the given test in a single-threaded executor with the configured
 * timeout and records any failure on the runnable itself.
 *
 * Any throwable raised while waiting (interrupt, execution failure, timeout)
 * is captured and reported via {@code r.setFailed(...)}; a timeout gets the
 * dedicated message "Mutation causes test timeout". If the future still is
 * not done afterwards, the mutation is switched off as a last resort.
 *
 * @param r the test to be run
 * @return the wall-clock time in ms spent executing the test
 */
protected long runWithTimeoutOld(MutationTestRunnable r) {
    // ArrayList<Thread> threadsPre = ThreadUtil.getThreads();
    ExecutorService service = Executors.newSingleThreadExecutor();
    Future<?> future = service.submit(r);
    StopWatch stopWatch = new StopWatch();
    stopWatch.start();
    // No new tasks accepted; the submitted one keeps running.
    service.shutdown();
    String exceptionMessage = null;
    Throwable capturedThrowable = null;
    try {
        logger.debug("Start test: ");
        boolean terminated = service.awaitTermination(timeout, TimeUnit.SECONDS);
        logger.debug("First timeout");
        long time1 = stopWatch.getTime();
        if (!terminated) {
            // Primary timeout elapsed — interrupt the test thread.
            service.shutdownNow();
        }
        // Grace period: give the task one more second to surface its result.
        future.get(1, TimeUnit.SECONDS);
        logger.debug("Second timeout");
        long time2 = stopWatch.getTime();
        if (time2 - time1 > 1000) {
            logger.info("Process got some extra time: " + (time2 - time1) + " " + time2);
        }
        future.cancel(true);
    } catch (InterruptedException e) {
        capturedThrowable = e;
    } catch (ExecutionException e) {
        capturedThrowable = e;
    } catch (TimeoutException e) {
        exceptionMessage = "Mutation causes test timeout";
        capturedThrowable = e;
    } catch (Throwable t) {
        // Deliberately broad: any mutant-induced error must be recorded,
        // not allowed to kill the driver.
        capturedThrowable = t;
    } finally {
        if (capturedThrowable != null) {
            if (exceptionMessage == null) {
                exceptionMessage = "Exception caught during test execution.";
            }
            r.setFailed(exceptionMessage, capturedThrowable);
        }
    }
    if (!future.isDone()) {
        // cancel(true) did not stop the thread — disable the mutation so the
        // runaway thread can no longer affect later tests.
        r.setFailed("Mutated Thread is still running after timeout.", null);
        switchOfMutation(future);
    }
    stopWatch.stop();
    if (!r.hasFinished()) {
        shutDown(r, stopWatch);
    }
    logger.debug("End timed test, it took " + stopWatch.getTime() + " ms");
    return stopWatch.getTime();
}
From source file: org.languagetool.server.TextChecker.java
/**
 * Handles one text-check request end to end: validates parameters and limits,
 * resolves the language, runs the rule check on a worker thread under a time
 * budget, optionally merges "hidden" matches from a remote server, writes the
 * HTTP response, and records metrics/log entries.
 *
 * On ErrorRateTooHigh or timeout with {@code allowIncompleteResults}, the
 * matches collected so far are returned with an explanatory reason instead of
 * failing the request.
 */
void checkText(AnnotatedText aText, HttpExchange httpExchange, Map<String, String> parameters,
        ErrorRequestLimiter errorRequestLimiter, String remoteAddress) throws Exception {
    checkParams(parameters);
    long timeStart = System.currentTimeMillis();
    UserLimits limits = ServerTools.getUserLimits(parameters, config);
    // logging information
    String agent = parameters.get("useragent") != null ? parameters.get("useragent") : "-";
    Long agentId = null, userId = null;
    if (logger.isLogging()) {
        DatabaseAccess db = DatabaseAccess.getInstance();
        agentId = db.getOrCreateClientId(parameters.get("useragent"));
        userId = limits.getPremiumUid();
    }
    String referrer = httpExchange.getRequestHeaders().getFirst("Referer");
    String userAgent = httpExchange.getRequestHeaders().getFirst("User-Agent");
    // Reject over-long texts up front.
    if (aText.getPlainText().length() > limits.getMaxTextLength()) {
        String msg = "limit: " + limits.getMaxTextLength() + ", size: " + aText.getPlainText().length();
        logger.log(new DatabaseAccessLimitLogEntry("MaxCharacterSizeExceeded", logServerId, agentId, userId,
                msg, referrer, userAgent));
        ServerMetricsCollector.getInstance()
                .logRequestError(ServerMetricsCollector.RequestErrorType.MAX_TEXT_SIZE);
        throw new TextTooLongException(
                "Your text exceeds the limit of " + limits.getMaxTextLength() + " characters (it's "
                        + aText.getPlainText().length() + " characters). Please submit a shorter text.");
    }
    UserConfig userConfig = new UserConfig(
            limits.getPremiumUid() != null ? getUserDictWords(limits.getPremiumUid()) : Collections.emptyList(),
            new HashMap<>(), config.getMaxSpellingSuggestions());
    // NOTE: at the moment, feedback for A/B-Tests is only delivered from this client, so only run tests there
    if (agent != null && agent.equals("ltorg")) {
        userConfig.setAbTest(config.getAbTest());
    }
    //print("Check start: " + text.length() + " chars, " + langParam);
    boolean autoDetectLanguage = getLanguageAutoDetect(parameters);
    List<String> preferredVariants = getPreferredVariants(parameters);
    if (parameters.get("noopLanguages") != null && !autoDetectLanguage) {
        ServerMetricsCollector.getInstance()
                .logRequestError(ServerMetricsCollector.RequestErrorType.INVALID_REQUEST);
        throw new IllegalArgumentException(
                "You can specify 'noopLanguages' only when also using 'language=auto'");
    }
    List<String> noopLangs = parameters.get("noopLanguages") != null
            ? Arrays.asList(parameters.get("noopLanguages").split(",")) : Collections.emptyList();
    List<String> preferredLangs = parameters.get("preferredLanguages") != null
            ? Arrays.asList(parameters.get("preferredLanguages").split(",")) : Collections.emptyList();
    DetectedLanguage detLang = getLanguage(aText.getPlainText(), parameters, preferredVariants, noopLangs,
            preferredLangs);
    Language lang = detLang.getGivenLanguage();
    // Per-language request counter (used for logging only).
    Integer count = languageCheckCounts.get(lang.getShortCodeWithCountryAndVariant());
    if (count == null) {
        count = 1;
    } else {
        count++;
    }
    //print("Starting check: " + aText.getPlainText().length() + " chars, #" + count);
    String motherTongueParam = parameters.get("motherTongue");
    Language motherTongue = motherTongueParam != null
            ? Languages.getLanguageForShortCode(motherTongueParam) : null;
    boolean useEnabledOnly = "yes".equals(parameters.get("enabledOnly"))
            || "true".equals(parameters.get("enabledOnly"));
    List<Language> altLanguages = new ArrayList<>();
    if (parameters.get("altLanguages") != null) {
        String[] altLangParams = parameters.get("altLanguages").split(",\\s*");
        for (String langCode : altLangParams) {
            Language altLang = Languages.getLanguageForShortCode(langCode);
            altLanguages.add(altLang);
            if (altLang.hasVariant() && !altLang.isVariant()) {
                ServerMetricsCollector.getInstance()
                        .logRequestError(ServerMetricsCollector.RequestErrorType.INVALID_REQUEST);
                throw new IllegalArgumentException("You specified altLanguage '" + langCode
                        + "', but for this language you need to specify a variant, e.g. 'en-GB' instead of just 'en'");
            }
        }
    }
    List<String> enabledRules = getEnabledRuleIds(parameters);
    List<String> disabledRules = getDisabledRuleIds(parameters);
    List<CategoryId> enabledCategories = getCategoryIds("enabledCategories", parameters);
    List<CategoryId> disabledCategories = getCategoryIds("disabledCategories", parameters);
    // enabledOnly is mutually exclusive with disabling and requires something enabled.
    if ((disabledRules.size() > 0 || disabledCategories.size() > 0) && useEnabledOnly) {
        ServerMetricsCollector.getInstance()
                .logRequestError(ServerMetricsCollector.RequestErrorType.INVALID_REQUEST);
        throw new IllegalArgumentException(
                "You cannot specify disabled rules or categories using enabledOnly=true");
    }
    if (enabledRules.isEmpty() && enabledCategories.isEmpty() && useEnabledOnly) {
        ServerMetricsCollector.getInstance()
                .logRequestError(ServerMetricsCollector.RequestErrorType.INVALID_REQUEST);
        throw new IllegalArgumentException(
                "You must specify enabled rules or categories when using enabledOnly=true");
    }
    boolean useQuerySettings = enabledRules.size() > 0 || disabledRules.size() > 0
            || enabledCategories.size() > 0 || disabledCategories.size() > 0;
    boolean allowIncompleteResults = "true".equals(parameters.get("allowIncompleteResults"));
    boolean enableHiddenRules = "true".equals(parameters.get("enableHiddenRules"));
    JLanguageTool.Mode mode = ServerTools.getMode(parameters);
    String callback = parameters.get("callback");
    QueryParams params = new QueryParams(altLanguages, enabledRules, disabledRules, enabledCategories,
            disabledCategories, useEnabledOnly, useQuerySettings, allowIncompleteResults, enableHiddenRules,
            mode, callback);
    Long textSessionId = null;
    try {
        if (parameters.containsKey("textSessionId")) {
            String textSessionIdStr = parameters.get("textSessionId");
            if (textSessionIdStr.contains(":")) { // transitioning to new format used in chrome addon
                // format: "{random number in 0..99999}:{unix time}"
                long random, timestamp;
                int sepPos = textSessionIdStr.indexOf(':');
                random = Long.valueOf(textSessionIdStr.substring(0, sepPos));
                timestamp = Long.valueOf(textSessionIdStr.substring(sepPos + 1));
                // use random number to choose a slice in possible range of values
                // then choose position in slice by timestamp
                long maxRandom = 100000;
                long randomSegmentSize = (Long.MAX_VALUE - maxRandom) / maxRandom;
                long segmentOffset = random * randomSegmentSize;
                if (timestamp > randomSegmentSize) {
                    print(String.format("Could not transform textSessionId '%s'", textSessionIdStr));
                }
                textSessionId = segmentOffset + timestamp;
            } else {
                textSessionId = Long.valueOf(textSessionIdStr);
            }
            userConfig.setTextSessionId(textSessionId);
        }
    } catch (NumberFormatException ex) {
        print("Could not parse textSessionId '" + parameters.get("textSessionId") + "' as long: "
                + ex.getMessage());
    }
    int textSize = aText.getPlainText().length();
    // Matches are accumulated here as they are found so partial results can be
    // returned on error/timeout.
    List<RuleMatch> ruleMatchesSoFar = Collections.synchronizedList(new ArrayList<>());
    Future<List<RuleMatch>> future = executorService.submit(new Callable<List<RuleMatch>>() {
        @Override
        public List<RuleMatch> call() throws Exception {
            // use to fake OOM in thread for testing:
            /*if (Math.random() < 0.1) {
              throw new OutOfMemoryError();
            }*/
            return getRuleMatches(aText, lang, motherTongue, parameters, params, userConfig,
                    f -> ruleMatchesSoFar.add(f));
        }
    });
    String incompleteResultReason = null;
    List<RuleMatch> matches;
    try {
        if (limits.getMaxCheckTimeMillis() < 0) {
            // No time budget configured: wait indefinitely.
            matches = future.get();
        } else {
            matches = future.get(limits.getMaxCheckTimeMillis(), TimeUnit.MILLISECONDS);
        }
    } catch (ExecutionException e) {
        future.cancel(true);
        if (ExceptionUtils.getRootCause(e) instanceof ErrorRateTooHighException) {
            ServerMetricsCollector.getInstance()
                    .logRequestError(ServerMetricsCollector.RequestErrorType.TOO_MANY_ERRORS);
            logger.log(new DatabaseCheckErrorLogEntry("ErrorRateTooHigh", logServerId, agentId, userId, lang,
                    detLang.getDetectedLanguage(), textSize, "matches: " + ruleMatchesSoFar.size()));
        }
        if (params.allowIncompleteResults
                && ExceptionUtils.getRootCause(e) instanceof ErrorRateTooHighException) {
            print(e.getMessage() + " - returning " + ruleMatchesSoFar.size()
                    + " matches found so far. Detected language: " + detLang);
            matches = new ArrayList<>(ruleMatchesSoFar); // threads might still be running, so make a copy
            incompleteResultReason = "Results are incomplete: " + ExceptionUtils.getRootCause(e).getMessage();
        } else if (e.getCause() != null && e.getCause() instanceof OutOfMemoryError) {
            throw (OutOfMemoryError) e.getCause();
        } else {
            throw new RuntimeException(e.getMessage() + ", detected: " + detLang, e);
        }
    } catch (TimeoutException e) {
        boolean cancelled = future.cancel(true);
        Path loadFile = Paths.get("/proc/loadavg"); // works in Linux only(?)
        String loadInfo = loadFile.toFile().exists() ? Files.readAllLines(loadFile).toString() : "(unknown)";
        if (errorRequestLimiter != null) {
            errorRequestLimiter.logAccess(remoteAddress, httpExchange.getRequestHeaders(), parameters);
        }
        String message = "Text checking took longer than allowed maximum of " + limits.getMaxCheckTimeMillis()
                + " milliseconds (cancelled: " + cancelled + ", lang: "
                + lang.getShortCodeWithCountryAndVariant() + ", detected: " + detLang + ", #" + count + ", "
                + aText.getPlainText().length() + " characters of text" + ", mode: "
                + mode.toString().toLowerCase() + ", h: " + reqCounter.getHandleCount() + ", r: "
                + reqCounter.getRequestCount() + ", system load: " + loadInfo + ")";
        if (params.allowIncompleteResults) {
            print(message + " - returning " + ruleMatchesSoFar.size() + " matches found so far");
            matches = new ArrayList<>(ruleMatchesSoFar); // threads might still be running, so make a copy
            incompleteResultReason = "Results are incomplete: text checking took longer than allowed maximum of "
                    + String.format(Locale.ENGLISH, "%.2f", limits.getMaxCheckTimeMillis() / 1000.0)
                    + " seconds";
        } else {
            ServerMetricsCollector.getInstance()
                    .logRequestError(ServerMetricsCollector.RequestErrorType.MAX_CHECK_TIME);
            logger.log(new DatabaseCheckErrorLogEntry("MaxCheckTimeExceeded", logServerId, agentId,
                    limits.getPremiumUid(), lang, detLang.getDetectedLanguage(), textSize,
                    "load: " + loadInfo));
            throw new RuntimeException(message, e);
        }
    }
    setHeaders(httpExchange);
    // Optionally enrich results from the "hidden matches" server, skipping it
    // for a while after a recent failure.
    List<RuleMatch> hiddenMatches = new ArrayList<>();
    if (config.getHiddenMatchesServer() != null && params.enableHiddenRules
            && config.getHiddenMatchesLanguages().contains(lang)) {
        if (config.getHiddenMatchesServerFailTimeout() > 0 && lastHiddenMatchesServerTimeout != -1
                && System.currentTimeMillis() - lastHiddenMatchesServerTimeout < config
                        .getHiddenMatchesServerFailTimeout()) {
            ServerMetricsCollector.getInstance().logHiddenServerStatus(false);
            print("Warn: Skipped querying hidden matches server at " + config.getHiddenMatchesServer()
                    + " because of recent error/timeout (timeout=" + config.getHiddenMatchesServerFailTimeout()
                    + "ms).");
        } else {
            ResultExtender resultExtender = new ResultExtender(config.getHiddenMatchesServer(),
                    config.getHiddenMatchesServerTimeout());
            try {
                long start = System.currentTimeMillis();
                List<RemoteRuleMatch> extensionMatches = resultExtender
                        .getExtensionMatches(aText.getPlainText(), parameters);
                hiddenMatches = resultExtender.getFilteredExtensionMatches(matches, extensionMatches);
                long end = System.currentTimeMillis();
                print("Hidden matches: " + extensionMatches.size() + " -> " + hiddenMatches.size() + " in "
                        + (end - start) + "ms for " + lang.getShortCodeWithCountryAndVariant());
                ServerMetricsCollector.getInstance().logHiddenServerStatus(true);
                lastHiddenMatchesServerTimeout = -1;
            } catch (Exception e) {
                ServerMetricsCollector.getInstance().logHiddenServerStatus(false);
                print("Warn: Failed to query hidden matches server at " + config.getHiddenMatchesServer()
                        + ": " + e.getClass() + ": " + e.getMessage());
                lastHiddenMatchesServerTimeout = System.currentTimeMillis();
            }
        }
    }
    int compactMode = Integer.parseInt(parameters.getOrDefault("c", "0"));
    String response = getResponse(aText, detLang, motherTongue, matches, hiddenMatches,
            incompleteResultReason, compactMode);
    if (params.callback != null) {
        // JSONP - still needed today for the special case of hosting your own on-premise LT without SSL
        // and using it from a local MS Word (not Online Word) - issue #89 in the add-in repo:
        response = params.callback + "(" + response + ");";
    }
    String messageSent = "sent";
    String languageMessage = lang.getShortCodeWithCountryAndVariant();
    try {
        httpExchange.sendResponseHeaders(HttpURLConnection.HTTP_OK, response.getBytes(ENCODING).length);
        httpExchange.getResponseBody().write(response.getBytes(ENCODING));
        ServerMetricsCollector.getInstance().logResponse(HttpURLConnection.HTTP_OK);
    } catch (IOException exception) {
        // the client is disconnected
        messageSent = "notSent: " + exception.getMessage();
    }
    if (motherTongue != null) {
        languageMessage += " (mother tongue: " + motherTongue.getShortCodeWithCountryAndVariant() + ")";
    }
    if (autoDetectLanguage) {
        languageMessage += "[auto]";
    }
    languageCheckCounts.put(lang.getShortCodeWithCountryAndVariant(), count);
    int computationTime = (int) (System.currentTimeMillis() - timeStart);
    String version = parameters.get("v") != null ? ", v:" + parameters.get("v") : "";
    print("Check done: " + aText.getPlainText().length() + " chars, " + languageMessage + ", #" + count + ", "
            + referrer + ", " + matches.size() + " matches, " + computationTime + "ms, agent:" + agent
            + version + ", " + messageSent + ", q:" + (workQueue != null ? workQueue.size() : "?") + ", h:"
            + reqCounter.getHandleCount() + ", dH:" + reqCounter.getDistinctIps() + ", m:"
            + mode.toString().toLowerCase());
    int matchCount = matches.size();
    // Per-rule match histogram for metrics/logging.
    Map<String, Integer> ruleMatchCount = new HashMap<>();
    for (RuleMatch match : matches) {
        String ruleId = match.getRule().getId();
        ruleMatchCount.put(ruleId, ruleMatchCount.getOrDefault(ruleId, 0) + 1);
    }
    ServerMetricsCollector.getInstance().logCheck(lang, computationTime, textSize, matchCount, mode, agent,
            ruleMatchCount);
    if (!config.isSkipLoggingChecks()) {
        DatabaseCheckLogEntry logEntry = new DatabaseCheckLogEntry(userId, agentId, logServerId, textSize,
                matchCount, lang, detLang.getDetectedLanguage(), computationTime, textSessionId,
                mode.toString());
        logEntry.setRuleMatches(new DatabaseRuleMatchLogEntry(
                config.isSkipLoggingRuleMatches() ? Collections.emptyMap() : ruleMatchCount));
        logger.log(logEntry);
    }
}