List of usage examples for java.lang.InterruptedException

public InterruptedException()
Constructs an InterruptedException with no detail message.
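Before the project examples below, here is a minimal sketch of the usual pattern, with illustrative class and method names that are not taken from any of the projects that follow: a blocking method converts a pending interrupt into a new InterruptedException(), and a caller that cannot propagate the checked exception restores the interrupt flag instead.

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;

// Illustrative class; names are placeholders, not from the examples below.
public class InterruptAwareWorker {

    private final BlockingQueue<String> queue = new LinkedBlockingQueue<>();

    /** Blocks until an item is available; throws InterruptedException if the thread is interrupted. */
    public String takeNext() throws InterruptedException {
        if (Thread.interrupted()) {
            // Interrupt status was already set; convert it into the checked exception.
            throw new InterruptedException();
        }
        return queue.take(); // itself throws InterruptedException when interrupted while waiting
    }

    /** A caller that cannot propagate the checked exception restores the interrupt flag instead. */
    public String takeNextOrNull() {
        try {
            return takeNext();
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // preserve the interrupt for code further up the stack
            return null;
        }
    }
}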
From source file:org.sonatype.nexus.testsuite.obr.ObrITSupport.java
private void waitFor(final InputStream input, final String expectedLine) throws Exception {
    final long startMillis = System.currentTimeMillis();
    final StringBuilder content = new StringBuilder();
    do {
        final int available = input.available();
        if (available > 0) {
            final byte[] bytes = new byte[available];
            input.read(bytes);
            final String current = new String(bytes);
            System.out.print(current);
            content.append(current);
            Thread.yield();
        } else if (System.currentTimeMillis() - startMillis > 5 * 60 * 1000) {
            throw new InterruptedException(); // waited for more than 5 minutes
        } else {
            try {
                Thread.sleep(100);
            } catch (final InterruptedException e) {
                // continue...
            }
        }
    } while (content.indexOf(expectedLine) == -1);
    System.out.println();
}
From source file:org.jenkinsci.plugins.objectstudio.ObjectStudioRunner.java
/**
 * Run ObjectStudio with Jenkins build instance.
 * @param build Jenkins build
 * @param launcher Jenkins launcher
 * @param listener Jenkins listener
 * @param builder ObjectStudioBuilder
 * @throws AbortException abort build on failure
 * @throws InterruptedException abort build on interrupt
 */
public void run(AbstractBuild<?, ?> build, Launcher launcher, BuildListener listener, ObjectStudioBuilder builder)
        throws AbortException, InterruptedException {
    logger.println("[ObjectStudio] - Get Workdir");
    // Get absolute workspace directory from Jenkins build
    workdir = build.getWorkspace();
    initTempDirectory();
    initNetworkDrives();
    // Change working directory to sub directory if required
    if (builder.getBuildPath() != null && !builder.getBuildPath().isEmpty()) {
        workdir = workdir.child(builder.getBuildPath());
    }
    logger.println("[ObjectStudio] - Get Environment");
    EnvVars envVars = getEnv(build, listener);
    // Get absolute ObjectStudio log file in working directory
    logger.println("[ObjectStudio] - Get Logfile");
    FilePath log = getAbsoluteWorkspacePath(builder.getOstudioLog());
    initLogfile(log);
    initPreloadScript(builder);
    initPostloadScript(builder);
    initOstudioIni(builder);
    initOstudioImage(builder);
    printBuildInfo(builder);
    printEnvInfo(envVars);
    logger.println("[ObjectStudio] - Get Commandline");
    ArgumentListBuilder command = objectStudio.getCommandline(build, builder, this);
    logger.println("[ObjectStudio] - Starting: " + command);
    ByteArrayOutputStream errorStream = new ByteArrayOutputStream();
    ProcStarter procStarter = launcher.decorateByEnv(envVars).launch().pwd(workdir).envs(envs).cmds(command)
            .stderr(errorStream).stdout(listener);
    Proc proc = null;
    try {
        proc = procStarter.start();
    } catch (IOException e) {
        e.printStackTrace();
        throw new AbortException("Error starting ObjectStudio: " + e.getMessage());
    }
    logger.println("[ObjectStudio] - Reading Log: " + log.getRemote());
    try {
        waitForProcess(log, proc);
        readLog(log, proc);
    } catch (IOException e) {
        e.printStackTrace();
        throw new AbortException("Error waiting ObjectStudio: " + e.getMessage());
    }
    logger.println("[ObjectStudio] - Joining");
    int rc;
    try {
        rc = proc.join();
        if (rc != 0) {
            logger.println("Error running command: " + errorStream.toString());
            throw new AbortException(errorStream.toString());
        }
    } catch (IOException e) {
        e.printStackTrace();
        throw new AbortException("Error joining ObjectStudio: " + e.getMessage());
    } catch (InterruptedException e) {
        try {
            proc.kill();
        } catch (IOException e1) {
            e1.printStackTrace();
        }
        throw new InterruptedException();
    }
}
From source file:org.marketcetera.util.except.ExceptUtilsTest.java
@Test
public void interrupt() {
    interruptHelper(new CloneNotSupportedException(), false);
    interruptHelper(new InterruptedException(), true);
    interruptHelper(new InterruptedIOException(), true);
    interruptHelper(new ClosedByInterruptException(), true);
    interruptHelper(new FileLockInterruptionException(), true);
    interruptHelper(new InterruptedNamingException(), true);
    interruptHelper(new I18NInterruptedException(), true);
    interruptHelper(new I18NInterruptedRuntimeException(), true);
}
From source file:org.fusesource.meshkeeper.distribution.provisioner.embedded.StreamPumper.java
private void waitForInput(InputStream is) throws IOException, InterruptedException {
    if (useAvailable) {
        while (!finish && is.available() == 0) {
            if (Thread.interrupted()) {
                throw new InterruptedException();
            }
            synchronized (this) {
                this.wait(POLL_INTERVAL);
            }
        }
    }
}
From source file:eu.stratosphere.runtime.io.gates.InputGate.java
/**
 * Reads a record from one of the associated input channels. Channels are read such that one buffer from a channel is
 * consecutively consumed. The buffers in turn are consumed in the order in which they arrive.
 * Note that this method is not guaranteed to return a record, because the currently available channel data may not always
 * constitute an entire record, when events or partial records are part of the data.
 *
 * When called even though no data is available, this call will block until data is available, so this method should be called
 * when waiting is desired (such as when synchronously consuming a single gate) or only when it is known that data is available
 * (such as when reading a union of multiple input gates).
 *
 * @param target The record object into which to construct the complete record.
 * @return The result indicating whether a complete record is available, an event is available, only incomplete data
 *         is available (NONE), or the gate is exhausted.
 * @throws IOException Thrown when an error occurred in the network stack relating to this channel.
 * @throws InterruptedException Thrown, when the thread working on this channel is interrupted.
 */
public InputChannelResult readRecord(T target) throws IOException, InterruptedException {
    if (this.channelToReadFrom == -1) {
        if (this.isClosed()) {
            return InputChannelResult.END_OF_STREAM;
        }
        if (Thread.interrupted()) {
            throw new InterruptedException();
        }
        this.channelToReadFrom = waitForAnyChannelToBecomeAvailable();
    }

    InputChannelResult result = this.getInputChannel(this.channelToReadFrom).readRecord(target);
    switch (result) {
    case INTERMEDIATE_RECORD_FROM_BUFFER: // full record and we can stay on the same channel
        return InputChannelResult.INTERMEDIATE_RECORD_FROM_BUFFER;

    case LAST_RECORD_FROM_BUFFER: // full record, but we must switch the channel afterwards
        this.channelToReadFrom = -1;
        return InputChannelResult.LAST_RECORD_FROM_BUFFER;

    case END_OF_SUPERSTEP:
        this.channelToReadFrom = -1;
        return InputChannelResult.END_OF_SUPERSTEP;

    case TASK_EVENT: // task event
        this.currentEvent = this.getInputChannel(this.channelToReadFrom).getCurrentEvent();
        this.channelToReadFrom = -1; // event always marks a unit as consumed
        return InputChannelResult.TASK_EVENT;

    case NONE: // internal event or an incomplete record that needs further chunks
        // the current unit is exhausted
        this.channelToReadFrom = -1;
        return InputChannelResult.NONE;

    case END_OF_STREAM: // channel is done
        this.channelToReadFrom = -1;
        return isClosed() ? InputChannelResult.END_OF_STREAM : InputChannelResult.NONE;

    default: // silence the compiler
        throw new RuntimeException();
    }
}
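As a companion to the example above, here is a minimal sketch of how a caller might drive readRecord; the method signature, the generic parameter, and the process()/handleTaskEvent() helpers are assumptions for illustration and are not part of the Stratosphere source shown here.

// Hypothetical driver loop; 'gate', 'record', process() and handleTaskEvent() are illustrative names.
void consume(InputGate<MyRecord> gate, MyRecord record) throws IOException, InterruptedException {
    while (true) {
        switch (gate.readRecord(record)) {    // may block until data arrives; may throw InterruptedException
        case INTERMEDIATE_RECORD_FROM_BUFFER:
        case LAST_RECORD_FROM_BUFFER:
            process(record);                  // a complete record was deserialized into 'record'
            break;
        case TASK_EVENT:
            handleTaskEvent();                // the gate buffered the event in 'currentEvent' above
            break;
        case NONE:
        case END_OF_SUPERSTEP:
            break;                            // nothing consumable right now; call readRecord again
        case END_OF_STREAM:
            return;                           // gate is exhausted
        }
    }
}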
From source file:org.apache.atlas.notification.NotificationHookConsumerTest.java
@Test
public void testConsumerProceedsWithFalseIfInterrupted() throws Exception {
    NotificationHookConsumer notificationHookConsumer = new NotificationHookConsumer(notificationInterface,
            atlasEntityStore, serviceState, instanceConverter, typeRegistry);
    NotificationHookConsumer.HookConsumer hookConsumer = notificationHookConsumer.new HookConsumer(
            mock(NotificationConsumer.class));
    NotificationHookConsumer.Timer timer = mock(NotificationHookConsumer.Timer.class);
    doThrow(new InterruptedException()).when(timer).sleep(NotificationHookConsumer.SERVER_READY_WAIT_TIME_MS);
    when(serviceState.getState()).thenReturn(ServiceState.ServiceStateValue.PASSIVE);

    assertFalse(hookConsumer.serverAvailable(timer));
}
From source file:net.paoding.spdy.client.netty.ResponseFuture.java
private boolean await0(long timeoutNanos, boolean interruptable) throws InterruptedException {
    if (interruptable && Thread.interrupted()) {
        throw new InterruptedException();
    }

    long startTime = timeoutNanos <= 0 ? 0 : System.nanoTime();
    long waitTime = timeoutNanos;
    boolean interrupted = false;

    try {
        synchronized (this) {
            if (done) {
                return done;
            } else if (waitTime <= 0) {
                return done;
            }

            checkDeadLock();
            waiters++;
            try {
                for (;;) {
                    try {
                        this.wait(waitTime / 1000000, (int) (waitTime % 1000000));
                    } catch (InterruptedException e) {
                        if (interruptable) {
                            throw e;
                        } else {
                            interrupted = true;
                        }
                    }

                    if (done) {
                        return true;
                    } else {
                        waitTime = timeoutNanos - (System.nanoTime() - startTime);
                        if (waitTime <= 0) {
                            return done;
                        }
                    }
                }
            } finally {
                waiters--;
            }
        }
    } finally {
        if (interrupted) {
            Thread.currentThread().interrupt();
        }
    }
}
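The await0 method above follows the common "remember and restore" idiom for uninterruptible waits: interrupts are swallowed while waiting and the flag is re-asserted in the finally block. Below is a stripped-down sketch of the same idiom, with illustrative names not taken from the project above.

// Minimal sketch of an uninterruptible wait that preserves the caller's interrupt status.
public class UninterruptibleLatch {
    private boolean done;

    public synchronized void markDone() {
        done = true;
        notifyAll();
    }

    public synchronized void awaitDoneUninterruptibly() {
        boolean interrupted = false;
        try {
            while (!done) {
                try {
                    wait();                     // wait for markDone()
                } catch (InterruptedException e) {
                    interrupted = true;         // remember the interrupt, but keep waiting
                }
            }
        } finally {
            if (interrupted) {
                Thread.currentThread().interrupt(); // restore the flag for code further up the stack
            }
        }
    }
}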
From source file:com.robonobo.eon.DEONConnection.java
/**
 * @return 2 element array - bytebuffer at 0, eonsocketaddress at 1
 */
public Object[] read() throws EONException, InterruptedException {
    receiveLock.lock();
    try {
        while (incomingDataBufs.size() == 0 && state == DEONConnectionState_Open) {
            canReceive.await();
        }
        if (state == DEONConnectionState_Closed)
            throw new InterruptedException();
        ByteBuffer buf = (ByteBuffer) incomingDataBufs.get(0);
        incomingDataBufs.remove(0);
        EonSocketAddress addr = (EonSocketAddress) incomingDataAddrs.get(0);
        Object[] result = new Object[2];
        result[0] = buf;
        result[1] = addr;
        return result;
    } finally {
        receiveLock.unlock();
    }
}
From source file:com.haulmont.cuba.web.gui.components.WebSuggestionField.java
protected List<?> asyncSearch(SearchExecutor<?> searchExecutor, String searchString, Map<String, Object> params)
        throws Exception {
    if (Thread.currentThread().isInterrupted()) {
        throw new InterruptedException();
    }

    log.debug("Search '{}'", searchString);

    List<?> searchResultItems;
    if (searchExecutor instanceof ParametrizedSearchExecutor) {
        //noinspection unchecked
        ParametrizedSearchExecutor<?> pSearchExecutor = (ParametrizedSearchExecutor<?>) searchExecutor;
        searchResultItems = pSearchExecutor.search(searchString, params);
    } else {
        searchResultItems = searchExecutor.search(searchString, Collections.emptyMap());
    }

    return searchResultItems;
}
From source file:org.zuinnote.hadoop.office.format.mapreduce.AbstractSpreadSheetDocumentRecordReader.java
/**
 * Initializes reader
 * @param split Split to use (assumed to be a file split)
 * @param context context of the job
 *
 * @throws java.io.IOException in case of errors reading from the filestream provided by Hadoop
 * @throws java.lang.InterruptedException in case of thread interruption
 */
@Override
public void initialize(InputSplit split, TaskAttemptContext context) throws IOException, InterruptedException {
    try {
        FileSplit fSplit = (FileSplit) split;
        // Initialize start and end of split
        start = fSplit.getStart();
        end = start + fSplit.getLength();
        final Path file = fSplit.getPath();
        codec = new CompressionCodecFactory(context.getConfiguration()).getCodec(file);
        this.hocr.setFileName(file.getName());
        this.readKeyStore(context.getConfiguration());
        this.readTrustStore(context.getConfiguration());
        FSDataInputStream fileIn = file.getFileSystem(conf).open(file);
        // open stream
        if (isCompressedInput()) { // decompress
            decompressor = CodecPool.getDecompressor(codec);
            if (codec instanceof SplittableCompressionCodec) {
                LOG.debug("Reading from a compressed file \"" + file + "\" with splittable compression codec");
                final SplitCompressionInputStream cIn = ((SplittableCompressionCodec) codec).createInputStream(
                        fileIn, decompressor, start, end, SplittableCompressionCodec.READ_MODE.CONTINUOUS);
                officeReader = new OfficeReader(cIn, this.hocr);
                start = cIn.getAdjustedStart();
                end = cIn.getAdjustedEnd();
                filePosition = cIn; // take pos from compressed stream
            } else {
                LOG.debug("Reading from a compressed file \"" + file + "\" with non-splittable compression codec");
                officeReader = new OfficeReader(codec.createInputStream(fileIn, decompressor), this.hocr);
                filePosition = fileIn;
            }
        } else {
            LOG.debug("Reading from an uncompressed file \"" + file + "\"");
            fileIn.seek(start);
            officeReader = new OfficeReader(fileIn, this.hocr);
            filePosition = fileIn;
        }
        // initialize reader
        this.officeReader.parse();
        // read linked workbooks
        if (this.hocr.getReadLinkedWorkbooks()) {
            // get current path
            Path currentPath = fSplit.getPath();
            Path parentPath = currentPath.getParent();
            if (!"".equals(this.hocr.getLinkedWorkbookLocation())) {
                // use a custom location for linked workbooks
                parentPath = new Path(this.hocr.getLinkedWorkbookLocation());
            }
            // read linked workbook filenames
            List<String> linkedWorkbookList = this.officeReader.getCurrentParser().getLinkedWorkbooks();
            LOG.debug(linkedWorkbookList.size());
            this.currentHFR = new HadoopFileReader(context.getConfiguration());
            for (String listItem : linkedWorkbookList) {
                LOG.info("Adding linked workbook \"" + listItem + "\"");
                String sanitizedListItem = new Path(listItem).getName();
                // read file from hadoop file
                Path currentFile = new Path(parentPath, sanitizedListItem);
                InputStream currentIn = this.currentHFR.openFile(currentFile);
                this.officeReader.getCurrentParser().addLinkedWorkbook(listItem, currentIn,
                        this.hocr.getLinkedWBCredentialMap().get(sanitizedListItem));
            }
        }
    } catch (FormatNotUnderstoodException fnue) {
        LOG.error(fnue);
        this.close();
        throw new InterruptedException();
    }
}