List of usage examples for java.lang.Thread.interrupted()
public static boolean interrupted()
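Thread.interrupted() reports whether the *current* thread has been interrupted and, unlike Thread.currentThread().isInterrupted(), clears the interrupt status as a side effect. Before the real-world usages below, here is a minimal standalone sketch (not taken from any of the listed projects) illustrating that clearing behavior:

public class InterruptedDemo {
    public static void main(String[] args) throws Exception {
        Thread worker = new Thread(() -> {
            while (true) {
                if (Thread.interrupted()) { // returns true once, then clears the flag
                    // the status was cleared by the call above, so this prints false
                    System.out.println("still interrupted? " + Thread.currentThread().isInterrupted());
                    return;
                }
            }
        });
        worker.start();
        worker.interrupt(); // asynchronously sets the worker's interrupt status
        worker.join();
    }
}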
From source file:org.archive.modules.recrawl.wbm.WbmPersistLoadProcessor.java
protected InputStream getCDX(String qurl) throws InterruptedException, IOException {
    final String url = buildURL(qurl);
    HttpGet m = new HttpGet(url);
    m.setConfig(RequestConfig.custom().setConnectTimeout(connectionTimeout)
            .setSocketTimeout(socketTimeout).build());
    HttpEntity entity = null;
    int attempts = 0;
    do {
        if (Thread.interrupted())
            throw new InterruptedException("interrupted while GET " + url);
        if (attempts > 0) {
            Thread.sleep(5000);
        }
        try {
            long t0 = System.currentTimeMillis();
            HttpResponse resp = getHttpClient().execute(m);
            cumulativeFetchTime.addAndGet(System.currentTimeMillis() - t0);
            StatusLine sl = resp.getStatusLine();
            if (sl.getStatusCode() != 200) {
                log.error("GET " + url + " failed with status=" + sl.getStatusCode() + " "
                        + sl.getReasonPhrase());
                entity = resp.getEntity();
                entity.getContent().close();
                entity = null;
                continue;
            }
            entity = resp.getEntity();
        } catch (IOException ex) {
            log.error("GET " + url + " failed with error " + ex.getMessage());
        } catch (Exception ex) {
            log.error("GET " + url + " failed with error ", ex);
        }
    } while (entity == null && ++attempts < 3);
    if (entity == null) {
        throw new IOException("giving up on GET " + url + " after " + attempts + " attempts");
    }
    return entity.getContent();
}
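In this Heritrix loader, Thread.interrupted() is polled at the top of each retry attempt so a pending interrupt becomes an InterruptedException before the thread sleeps or issues another HTTP request. Since the method rethrows as InterruptedException, clearing the flag here is harmless.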
From source file:org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler.java
@Override
protected void serviceStart() throws Exception {
    if (timelineClient != null) {
        timelineClient.start();
    }
    eventHandlingThread = new Thread(new Runnable() {
        @Override
        public void run() {
            JobHistoryEvent event = null;
            while (!stopped && !Thread.currentThread().isInterrupted()) {
                // Log the size of the history-event-queue every so often.
                if (eventCounter != 0 && eventCounter % 1000 == 0) {
                    eventCounter = 0;
                    LOG.info("Size of the JobHistory event queue is " + eventQueue.size());
                } else {
                    eventCounter++;
                }
                try {
                    event = eventQueue.take();
                } catch (InterruptedException e) {
                    LOG.info("EventQueue take interrupted. Returning");
                    return;
                }
                // If an event has been removed from the queue, handle it.
                // The rest of the queue is handled via stop().
                // Clear the interrupt status if it's set before calling handleEvent,
                // and restore it afterwards if it was set.
                // Interrupts received from other threads during handleEvent cannot be
                // dealt with - Shell.runCommand() ignores them.
                synchronized (lock) {
                    boolean isInterrupted = Thread.interrupted();
                    handleEvent(event);
                    if (isInterrupted) {
                        LOG.debug("Event handling interrupted");
                        Thread.currentThread().interrupt();
                    }
                }
            }
        }
    }, "eventHandlingThread");
    eventHandlingThread.start();
    super.serviceStart();
}
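This Hadoop handler is a textbook save-and-restore: Thread.interrupted() clears the status so handleEvent() runs undisturbed (the comment notes that Shell.runCommand() ignores interrupts anyway), and Thread.currentThread().interrupt() re-asserts the flag afterwards if it had been set.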
From source file:org.apache.hadoop.hive.druid.DruidStorageHandler.java
public void publishSegments(Table table, boolean overwrite) throws MetaException {
    if (MetaStoreUtils.isExternalTable(table)) {
        return;
    }
    Lifecycle lifecycle = new Lifecycle();
    LOG.info("Committing table {} to the druid metastore", table.getDbName());
    final Path tableDir = getSegmentDescriptorDir();
    try {
        List<DataSegment> segmentList = DruidStorageHandlerUtils.getPublishedSegments(tableDir, getConf());
        LOG.info("Found {} segments under path {}", segmentList.size(), tableDir);
        final String dataSourceName = table.getParameters().get(Constants.DRUID_DATA_SOURCE);
        final String segmentDirectory =
                table.getParameters().get(Constants.DRUID_SEGMENT_DIRECTORY) != null
                        ? table.getParameters().get(Constants.DRUID_SEGMENT_DIRECTORY)
                        : HiveConf.getVar(getConf(), HiveConf.ConfVars.DRUID_SEGMENT_DIRECTORY);
        DruidStorageHandlerUtils.publishSegments(connector, druidMetadataStorageTablesConfig,
                dataSourceName, segmentList, overwrite, segmentDirectory, getConf());
        final String coordinatorAddress = HiveConf.getVar(getConf(),
                HiveConf.ConfVars.HIVE_DRUID_COORDINATOR_DEFAULT_ADDRESS);
        int maxTries = HiveConf.getIntVar(getConf(), HiveConf.ConfVars.HIVE_DRUID_MAX_TRIES);
        LOG.info("checking load status from coordinator {}", coordinatorAddress);
        String coordinatorResponse = null;
        try {
            coordinatorResponse = RetryUtils.retry(new Callable<String>() {
                @Override
                public String call() throws Exception {
                    return DruidStorageHandlerUtils.getURL(getHttpClient(),
                            new URL(String.format("http://%s/status", coordinatorAddress)));
                }
            }, new Predicate<Throwable>() {
                @Override
                public boolean apply(@Nullable Throwable input) {
                    return input instanceof IOException;
                }
            }, maxTries);
        } catch (Exception e) {
            console.printInfo("Will skip waiting for data loading");
            return;
        }
        if (Strings.isNullOrEmpty(coordinatorResponse)) {
            console.printInfo("Will skip waiting for data loading");
            return;
        }
        console.printInfo(String.format("Waiting for the loading of [%s] segments", segmentList.size()));
        long passiveWaitTimeMs = HiveConf.getLongVar(getConf(),
                HiveConf.ConfVars.HIVE_DRUID_PASSIVE_WAIT_TIME);
        ImmutableSet<URL> setOfUrls = FluentIterable.from(segmentList)
                .transform(new Function<DataSegment, URL>() {
                    @Override
                    public URL apply(DataSegment dataSegment) {
                        try {
                            // Need to make sure that we are using UTC since most of the druid cluster use UTC by default
                            return new URL(String.format(
                                    "http://%s/druid/coordinator/v1/datasources/%s/segments/%s",
                                    coordinatorAddress, dataSourceName,
                                    DataSegment.makeDataSegmentIdentifier(dataSegment.getDataSource(),
                                            new DateTime(dataSegment.getInterval().getStartMillis(),
                                                    DateTimeZone.UTC),
                                            new DateTime(dataSegment.getInterval().getEndMillis(),
                                                    DateTimeZone.UTC),
                                            dataSegment.getVersion(), dataSegment.getShardSpec())));
                        } catch (MalformedURLException e) {
                            Throwables.propagate(e);
                        }
                        return null;
                    }
                }).toSet();
        int numRetries = 0;
        while (numRetries++ < maxTries && !setOfUrls.isEmpty()) {
            setOfUrls = ImmutableSet.copyOf(Sets.filter(setOfUrls, new Predicate<URL>() {
                @Override
                public boolean apply(URL input) {
                    try {
                        String result = DruidStorageHandlerUtils.getURL(getHttpClient(), input);
                        LOG.debug("Checking segment {} response is {}", input, result);
                        return Strings.isNullOrEmpty(result);
                    } catch (IOException e) {
                        LOG.error(String.format("Error while checking URL [%s]", input), e);
                        return true;
                    }
                }
            }));
            try {
                if (!setOfUrls.isEmpty()) {
                    Thread.sleep(passiveWaitTimeMs);
                }
            } catch (InterruptedException e) {
                Thread.interrupted();
                Throwables.propagate(e);
            }
        }
        if (!setOfUrls.isEmpty()) {
            // We are not throwing an exception since it might be a transient issue that is blocking loading
            console.printError(String.format(
                    "Wait time exhausted and we have [%s] out of [%s] segments not loaded yet",
                    setOfUrls.size(), segmentList.size()));
        }
    } catch (IOException e) {
        LOG.error("Exception while commit", e);
        Throwables.propagate(e);
    } finally {
        cleanWorkingDir();
        lifecycle.stop();
    }
}
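Here Thread.interrupted() appears in the sleep's catch block only to clear the pending status before Throwables.propagate(e) rethrows the InterruptedException; the more conventional idiom is Thread.currentThread().interrupt(), which preserves the flag for callers.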
From source file:net.sf.jasperreports.engine.export.HtmlExporter.java
protected void exportReportToWriter() throws JRException, IOException {
    HtmlExporterConfiguration configuration = getCurrentConfiguration();
    String htmlHeader = configuration.getHtmlHeader();
    String betweenPagesHtml = configuration.getBetweenPagesHtml();
    String htmlFooter = configuration.getHtmlFooter();
    boolean flushOutput = configuration.isFlushOutput(); //FIXMEEXPORT maybe move flush flag to output
    if (htmlHeader == null) {
        writer.write("<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\">\n");
        writer.write("<html>\n");
        writer.write("<head>\n");
        writer.write("  <title></title>\n");
        writer.write("  <meta http-equiv=\"Content-Type\" content=\"text/html; charset=");
        writer.write(JRStringUtil.encodeXmlAttribute(getExporterOutput().getEncoding()));
        writer.write("\"/>\n");
        writer.write("  <style type=\"text/css\">\n");
        writer.write("    a {text-decoration: none}\n");
        writer.write("  </style>\n");
        writer.write("</head>\n");
        writer.write("<body text=\"#000000\" link=\"#000000\" alink=\"#000000\" vlink=\"#000000\">\n");
        writer.write("<table width=\"100%\" cellpadding=\"0\" cellspacing=\"0\" border=\"0\">\n");
        writer.write("<tr><td width=\"50%\"> </td><td align=\"center\">\n");
        writer.write("\n");
    } else {
        writer.write(htmlHeader);
    }
    List<ExporterInputItem> items = exporterInput.getItems();
    for (reportIndex = 0; reportIndex < items.size(); reportIndex++) {
        ExporterInputItem item = items.get(reportIndex);
        setCurrentExporterInputItem(item);
        List<JRPrintPage> pages = jasperPrint.getPages();
        if (pages != null && pages.size() > 0) {
            PageRange pageRange = getPageRange();
            int startPageIndex = (pageRange == null || pageRange.getStartPageIndex() == null)
                    ? 0 : pageRange.getStartPageIndex();
            int endPageIndex = (pageRange == null || pageRange.getEndPageIndex() == null)
                    ? (pages.size() - 1) : pageRange.getEndPageIndex();
            JRPrintPage page = null;
            for (pageIndex = startPageIndex; pageIndex <= endPageIndex; pageIndex++) {
                if (Thread.interrupted()) {
                    throw new ExportInterruptedException();
                }
                page = pages.get(pageIndex);
                writer.write("<a name=\"" + JR_PAGE_ANCHOR_PREFIX + reportIndex + "_"
                        + (pageIndex + 1) + "\"></a>\n");
                exportPage(page);
                if (reportIndex < items.size() - 1 || pageIndex < endPageIndex) {
                    if (betweenPagesHtml == null) {
                        writer.write("<br/>\n<br/>\n");
                    } else {
                        writer.write(betweenPagesHtml);
                    }
                }
                writer.write("\n");
            }
        }
    }
    ReportContext reportContext = getReportContext();
    if (fontsToProcess != null && fontsToProcess.size() > 0) // when no resourceHandler, fonts are not processed
    {
        if (reportContext == null) {
            @SuppressWarnings("deprecation")
            HtmlResourceHandler resourceHandler = getExporterOutput().getResourceHandler() == null
                    ? getResourceHandler() : getExporterOutput().getResourceHandler();
            for (HtmlFontFamily htmlFontFamily : fontsToProcess.values()) {
                writer.write("<link class=\"jrWebFont\" rel=\"stylesheet\" href=\"" + JRStringUtil
                        .encodeXmlAttribute(resourceHandler.getResourcePath(htmlFontFamily.getId()))
                        + "\">\n");
            }
            // generate script tag on static export only
            writer.write("<!--[if IE]>\n");
            writer.write("<script>\n");
            writer.write("var links = document.querySelectorAll('link.jrWebFont');\n");
            writer.write("setTimeout(function(){ if (links) { for (var i = 0; i < links.length; i++) { links.item(i).href = links.item(i).href; } } }, 0);\n");
            writer.write("</script>\n");
            writer.write("<![endif]-->\n");
        } else {
            reportContext.setParameterValue(JsonExporter.REPORT_CONTEXT_PARAMETER_WEB_FONTS, fontsToProcess);
        }
    }
    // place hyperlinksData on reportContext
    if (hyperlinksData.size() > 0) {
        // reportContext is certainly not null here; otherwise there would be no item in hyperlinksData
        reportContext.setParameterValue("net.sf.jasperreports.html.hyperlinks", hyperlinksData);
    }
    if (htmlFooter == null) {
        writer.write("</td><td width=\"50%\"> </td></tr>\n");
        writer.write("</table>\n");
        writer.write("</body>\n");
        writer.write("</html>\n");
    } else {
        writer.write(htmlFooter);
    }
    if (flushOutput) {
        writer.flush();
    }
}
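JasperReports checks Thread.interrupted() once per exported page and turns a pending interrupt into an ExportInterruptedException, giving long multi-page exports a well-defined cancellation point.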
From source file:fr.msch.wissl.server.Library.java
private Library() {
    this.songs = new ConcurrentLinkedQueue<Song>();
    this.toRead = new ConcurrentHashMap<String, File>();
    this.files = new ConcurrentLinkedQueue<File>();
    this.toInsert = new ConcurrentLinkedQueue<Song>();
    this.hashes = new HashSet<String>();
    this.artworks = new HashMap<String, Map<String, String>>();
    this.artworkFallback = new FileFilter() {
        @Override
        public boolean accept(File pathname) {
            return Pattern.matches(".*[.](jpeg|jpg|png|bmp|gif)$", pathname.getName().toLowerCase());
        }
    };
    Runnable timer = new Runnable() {
        @Override
        public void run() {
            while (!kill) {
                final long t1 = System.currentTimeMillis();
                final List<File> music = new ArrayList<File>();
                for (String path : Config.getMusicPath()) {
                    music.add(new File(path));
                }
                addSongCount = 0;
                skipSongCount = 0;
                failedSongCount = 0;
                fileSearchTime = 0;
                dbCheckTime = 0;
                fileReadTime = 0;
                dbInsertTime = 0;
                resizeTime = 0;
                songs.clear();
                toRead.clear();
                files.clear();
                hashes.clear();
                toInsert.clear();
                artworks.clear();
                songsTodo = 0;
                songsDone = 0;
                working = true;
                stop = false;
                percentDone = 0.0f;
                secondsLeft = -1;
                artworkRegex = Pattern.compile(Config.getArtworkRegex());
                artworkFilter = new FileFilter() {
                    @Override
                    public boolean accept(File pathname) {
                        return (artworkRegex.matcher(pathname.getName().toLowerCase()).matches());
                    }
                };
                // walks filesystem and indexes files that look like music
                fileSearchDone = false;
                Thread fileSearch = new Thread(new Runnable() {
                    public void run() {
                        long f1 = System.currentTimeMillis();
                        for (File f : music) {
                            try {
                                listFiles(f, files);
                            } catch (IOException e) {
                                Logger.error("Failed to add directory to library: "
                                        + f.getAbsolutePath(), e);
                            } catch (InterruptedException e) {
                                return;
                            }
                        }
                        fileSearchDone = true;
                        fileSearchTime = (System.currentTimeMillis() - f1);
                    }
                });
                fileSearch.start();
                // exclude files that are already in DB
                dbCheckDone = false;
                Thread dbCheck = new Thread(new Runnable() {
                    public void run() {
                        while (!stop && !dbCheckDone) {
                            long f1 = System.currentTimeMillis();
                            while (!files.isEmpty()) {
                                File f = files.remove();
                                String hash = new String(md5.digest(f.getAbsolutePath().getBytes()));
                                boolean hasSong = false;
                                try {
                                    hasSong = DB.get().hasSong(hash);
                                } catch (SQLException e) {
                                    Logger.error("Failed to query DB for file "
                                            + f.getAbsolutePath(), e);
                                }
                                if (!hasSong) {
                                    toRead.put(hash, f);
                                } else {
                                    skipSongCount++;
                                }
                                hashes.add(hash);
                            }
                            dbCheckTime += (System.currentTimeMillis() - f1);
                            if (fileSearchDone && files.isEmpty()) {
                                dbCheckDone = true;
                                return;
                            }
                        }
                    }
                });
                dbCheck.start();
                // read file metadata
                fileReadDone = false;
                Thread fileRead = new Thread(new Runnable() {
                    public void run() {
                        while (!stop && !fileReadDone) {
                            long f1 = System.currentTimeMillis();
                            Iterator<Entry<String, File>> it = toRead.entrySet().iterator();
                            while (it.hasNext()) {
                                Entry<String, File> f = it.next();
                                it.remove();
                                try {
                                    Song s = getSong(f.getValue(), f.getKey());
                                    songs.add(s);
                                    addSongCount++;
                                } catch (IOException e) {
                                    Logger.warn("Failed to read music file " + f.getValue(), e);
                                    failedSongCount++;
                                }
                            }
                            fileReadTime += (System.currentTimeMillis() - f1);
                            if (dbCheckDone && toRead.isEmpty()) {
                                fileReadDone = true;
                                return;
                            }
                        }
                    }
                });
                fileRead.start();
                // resize images
                resizeDone = false;
                Thread resize = new Thread(new Runnable() {
                    public void run() {
                        while (!stop && !resizeDone) {
                            long f1 = System.currentTimeMillis();
                            while (!songs.isEmpty()) {
                                Song s = songs.remove();
                                String path = null;
                                Map<String, String> m = artworks.get(s.artist.name);
                                if (m != null && m.containsKey(s.album.name)) {
                                    path = m.get(s.album.name);
                                }
                                if (path != null) {
                                    if (new File(path + "_SCALED.jpg").exists()) {
                                        path = path + "_SCALED.jpg";
                                    } else {
                                        try {
                                            path = resizeArtwork(path);
                                        } catch (IOException e) {
                                            Logger.warn("Failed to resize image", e);
                                        }
                                    }
                                    s.album.artwork_path = path;
                                    s.album.artwork_id = "" + System.currentTimeMillis();
                                }
                                toInsert.add(s);
                            }
                            resizeTime += (System.currentTimeMillis() - f1);
                            if (fileReadDone && songs.isEmpty()) {
                                resizeDone = true;
                                return;
                            }
                        }
                    }
                });
                resize.start();
                // insert Songs in DB
                Thread dbInsert = new Thread(new Runnable() {
                    public void run() {
                        while (!stop) {
                            long f1 = System.currentTimeMillis();
                            while (!toInsert.isEmpty()) {
                                Song s = toInsert.remove();
                                try {
                                    DB.get().addSong(s);
                                } catch (SQLException e) {
                                    Logger.warn("Failed to insert in DB " + s.filepath, e);
                                    failedSongCount++;
                                }
                                songsDone++;
                                percentDone = songsDone / ((float) songsTodo);
                                float songsPerSec = songsDone
                                        / ((System.currentTimeMillis() - t1) / 1000f);
                                secondsLeft = (long) ((songsTodo - songsDone) / songsPerSec);
                            }
                            dbInsertTime += (System.currentTimeMillis() - f1);
                            if (resizeDone && toInsert.isEmpty()) {
                                return;
                            }
                        }
                    }
                });
                dbInsert.start();
                try {
                    dbInsert.join();
                } catch (InterruptedException e3) {
                    Logger.warn("Library indexer interrupted", e3);
                    fileSearch.interrupt();
                    dbCheck.interrupt();
                    fileRead.interrupt();
                    resize.interrupt();
                    dbInsert.interrupt();
                }
                if (Thread.interrupted()) {
                    Logger.warn("Library indexer has been interrupted");
                    continue;
                }
                // remove files from DB that were not found
                int removed = 0;
                long r1 = System.currentTimeMillis();
                try {
                    removed = DB.get().removeSongs(hashes);
                } catch (SQLException e3) {
                    Logger.error("Failed to remove songs", e3);
                }
                long dbRemoveTime = (System.currentTimeMillis() - r1);
                // update statistics
                long u1 = System.currentTimeMillis();
                try {
                    DB.get().updateSongCount();
                } catch (SQLException e1) {
                    Logger.error("Failed to update song count", e1);
                }
                long dbUpdateTime = (System.currentTimeMillis() - u1);
                try {
                    RuntimeStats.get().updateFromDB();
                } catch (SQLException e) {
                    Logger.error("Failed to update runtime statistics", e);
                }
                working = false;
                long t2 = (System.currentTimeMillis() - t1);
                Logger.info("Processed " + songsDone + " files " //
                        + "(add:" + addSongCount + "," //
                        + "skip:" + skipSongCount + "," //
                        + "fail:" + failedSongCount + "," //
                        + "rem:" + removed + ")");
                Logger.info("Indexer took " + t2 + " ("
                        + ((float) songsDone / ((float) t2 / 1000)) + " /s) (" //
                        + "search:" + fileSearchTime + "," //
                        + "check:" + dbCheckTime + "," //
                        + "read:" + fileReadTime + "," //
                        + "resize:" + resizeTime + "," //
                        + "insert:" + dbInsertTime + "," //
                        + "remove:" + dbRemoveTime + "," //
                        + "update:" + dbUpdateTime + ")");
                int seconds = Config.getMusicRefreshRate();
                try {
                    Thread.sleep(seconds * 1000);
                } catch (InterruptedException e) {
                    Logger.warn("Library indexer interrupted", e);
                }
            }
        }
    };
    this.thread = new Thread(timer, "MusicIndexer");
}
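After the dbInsert.join() is interrupted and the worker threads are signalled to stop, the indexer uses Thread.interrupted() to detect and clear any interrupt still pending on its own thread, then restarts the outer loop instead of publishing statistics for a half-finished pass.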
From source file:com.palantir.opensource.sysmon.linux.LinuxIOStatJMXWrapper.java
@Override
public void run() {
    boolean wasInterrupted = Thread.interrupted();
    try {
        do {
            String line = null;
            try {
                if (iostatStdout.ready()) {
                    line = iostatStdout.readLine();
                    if (DEVICE_ONLY.matcher(line).matches()) {
                        // we have broken lines, put them together
                        String remainder = iostatStdout.readLine();
                        if (log.isTraceEnabled()) {
                            log.trace("Joining '" + line + "' and '" + remainder + "'.");
                        }
                        line = line + remainder;
                    }
                }
            } catch (Exception e) {
                line = null;
                if (!shutdown) {
                    log.warn("Caught exception while reading line.", e);
                } else {
                    log.debug("Exception caused by shutdown", e);
                }
            }
            if (line != null) {
                try {
                    processLine(line);
                    continue;
                } catch (LinuxMonitoringException e) {
                    log.error(e, e);
                }
            }
            try {
                Thread.sleep(1000L);
            } catch (InterruptedException e) {
                wasInterrupted = true;
            }
        } while (!shutdown);
    } catch (Exception e) {
        if (!shutdown) {
            log.error("Caught unexpected Exception", e);
        } else {
            log.debug("Shutdown caused exception", e);
        }
    } finally {
        if (wasInterrupted) {
            Thread.currentThread().interrupt();
        }
        cleanup();
    }
}
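This monitor captures the entry interrupt status with Thread.interrupted() (which also clears it, so the reader loop's sleeps proceed normally), folds later sleep interruptions into wasInterrupted, and re-interrupts the thread in the finally block so the status is not lost across cleanup().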
From source file:org.apache.hadoop.hdfs.server.datanode.DWRRBlockReceiver.java
/**
 * While writing to mirrorOut, failure to write to mirror should not
 * affect this datanode unless it is caused by interruption.
 */
private void handleMirrorOutError(IOException ioe) throws IOException {
    String bpid = block.getBlockPoolId();
    LOG.info(datanode.getDNRegistrationForBP(bpid) + ":Exception writing " + block
            + " to mirror " + mirrorAddr, ioe);
    if (Thread.interrupted()) {
        // shut down if the thread is interrupted
        throw ioe;
    } else {
        // encountered an error while writing to mirror;
        // continue to run even if we cannot write to mirror.
        // Notify the client of the error
        // and wait for the client to shut down the pipeline.
        mirrorError = true;
    }
}
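In this HDFS receiver, a pending interrupt turns a mirror-write failure into a fatal one: the IOException is rethrown to shut the receiving thread down, whereas an uninterrupted failure just sets mirrorError and lets the client tear the pipeline down. Note that Thread.interrupted() clears the status as a side effect.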
From source file:org.graphwalker.ModelBasedTesting.java
public String[] getNextStep() throws InterruptedException {
    if (threadSuspended) {
        logger.debug("Execution is now suspended: " + getGraph().getLabelKey());
        synchronized (lock) {
            while (threadSuspended) {
                lock.wait(); // must wait on the monitor we hold
            }
        }
        logger.debug("Execution is now resumed: " + getGraph().getLabelKey());
    }
    if (Thread.interrupted()) {
        throw new InterruptedException();
    }
    if (this.machine == null) {
        getMachine();
    }
    getStatisticsManager();
    Util.AbortIf(getGenerator() == null, "No generator has been defined!");
    PathGenerator backupGenerator = null;
    if (runRandomGeneratorOnce) {
        backupGenerator = getGenerator();
        try {
            setGenerator(Keywords.GENERATOR_RANDOM);
        } catch (GeneratorException e) {
            logger.error(e.getMessage());
            throw new RuntimeException("ERROR: " + e.getMessage(), e);
        }
    }
    try {
        return getGenerator().getNext();
    } catch (RuntimeException e) {
        logger.fatal(e.toString());
        throw new RuntimeException("ERROR: " + e.getMessage(), e);
    } finally {
        if (runRandomGeneratorOnce) {
            runRandomGeneratorOnce = false;
            setGenerator(backupGenerator);
        }
    }
}
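Checking Thread.interrupted() right after the suspension block makes getNextStep() a clean cancellation point: a pending interrupt surfaces as a plain InterruptedException before any path generation is attempted.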