List of usage examples for java.lang.InterruptedException

public InterruptedException()
Constructs an InterruptedException with no detail message.

From source file:org.jenkinsci.plugins.objectstudio.ObjectStudioRunner.java
private void waitForProcess(FilePath log, Proc proc) throws InterruptedException, IOException {
    // Wait up to 10 seconds for the log file to appear
    for (int i = 0; i < 10; i++) {
        if (!log.exists()) {
            try {
                logger.println(" Waiting for ObjectStudio...");
                Thread.sleep(1000);
            } catch (InterruptedException e) {
                try {
                    proc.kill();
                } catch (IOException e1) {
                    e1.printStackTrace();
                }
                throw new InterruptedException();
            }
        }
    }
}
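The example above catches the InterruptedException raised by Thread.sleep, kills the external process as cleanup, and then rethrows a fresh InterruptedException(). A minimal, self-contained sketch of the same clean-up-and-rethrow pattern (hypothetical class and method names, not the original Jenkins plugin code):

import java.io.Closeable;
import java.io.IOException;

public class CleanupAndRethrow {

    // Waits for a resource to become ready, polling once per second.
    // If the waiting thread is interrupted, the resource is closed and
    // the InterruptedException is rethrown so callers see the interruption.
    static void waitUntilReady(Closeable resource, ReadyCheck check) throws InterruptedException {
        for (int i = 0; i < 10; i++) {
            if (check.isReady()) {
                return;
            }
            try {
                Thread.sleep(1000L);
            } catch (InterruptedException e) {
                try {
                    resource.close();   // best-effort cleanup
                } catch (IOException ignored) {
                    // cleanup failure should not mask the interruption
                }
                throw e;                // propagate the interruption
            }
        }
    }

    interface ReadyCheck {
        boolean isReady();
    }
}

Rethrowing the caught exception, rather than constructing a new InterruptedException(), preserves the original stack trace; the Jenkins example above creates a new one, which also works but drops that context.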
From source file:io.hops.ha.common.TransactionStateManager.java
private TransactionState getCurrentTransactionState(int rpcId, String callingFuncition, boolean priority)
        throws InterruptedException {
    while (!Thread.interrupted()) {
        int accepted = acceptedRPC.incrementAndGet();
        if (priority || accepted < batchMaxSize) {
            lock.readLock().lock();
            try {
                transactionStateWrapper wrapper = new transactionStateWrapper(
                        (TransactionStateImpl) currentTransactionState, TransactionState.TransactionType.RM,
                        rpcId, callingFuncition);
                wrapper.incCounter(TransactionState.TransactionType.INIT);
                if (rpcId >= 0) {
                    wrapper.addRPCId(rpcId);
                }
                curentRPCs.add(wrapper);
                return wrapper;
            } finally {
                lock.readLock().unlock();
            }
        } else {
            acceptedRPC.decrementAndGet();
            try {
                Thread.sleep(1);
            } catch (InterruptedException e) {
                LOG.warn(e, e);
            }
        }
    }
    throw new InterruptedException();
}
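This example keeps retrying until the thread's interrupted flag is set, then converts the flag into an InterruptedException for the caller. A self-contained sketch of that cooperative-cancellation idiom (hypothetical names; a sketch, not the Hops code):

import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;

public class CooperativeCancellation {

    private final Queue<String> work = new ConcurrentLinkedQueue<>();

    // Polls the queue until an item arrives, backing off briefly between attempts.
    // Thread.interrupted() both tests and clears the flag, so the method converts
    // the cleared flag into an InterruptedException before returning to the caller.
    public String takeNext() throws InterruptedException {
        while (!Thread.interrupted()) {
            String item = work.poll();
            if (item != null) {
                return item;
            }
            Thread.sleep(1L);   // may itself throw InterruptedException
        }
        throw new InterruptedException();
    }
}

Because Thread.interrupted() clears the interrupt status, the loop has to surface the interruption explicitly by throwing; testing with Thread.currentThread().isInterrupted() instead would leave the flag set for the caller to inspect.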
From source file:org.roda_project.commons_ip.model.impl.eark.EARKUtils.java
protected static void addRepresentationDataFilesToZipAndMETS(IPInterface ip, Map<String, ZipEntryInfo> zipEntries,
        MetsWrapper representationMETSWrapper, IPRepresentation representation, String representationId)
        throws IPException, InterruptedException {
    if (representation.getData() != null && !representation.getData().isEmpty()) {
        if (ip instanceof SIP) {
            ((SIP) ip).notifySipBuildRepresentationProcessingStarted(representation.getData().size());
        }

        int i = 0;
        for (IPFile file : representation.getData()) {
            if (Thread.interrupted()) {
                throw new InterruptedException();
            }

            String dataFilePath = IPConstants.DATA_FOLDER
                    + ModelUtils.getFoldersFromList(file.getRelativeFolders()) + file.getFileName();
            FileType fileType = EARKMETSUtils.addDataFileToMETS(representationMETSWrapper, dataFilePath,
                    file.getPath());

            dataFilePath = IPConstants.REPRESENTATIONS_FOLDER + representationId + IPConstants.ZIP_PATH_SEPARATOR
                    + dataFilePath;
            ZIPUtils.addFileTypeFileToZip(zipEntries, file.getPath(), dataFilePath, fileType);

            i++;
            if (ip instanceof SIP) {
                ((SIP) ip).notifySipBuildRepresentationProcessingCurrentStatus(i);
            }
        }

        if (ip instanceof SIP) {
            ((SIP) ip).notifySipBuildRepresentationProcessingEnded();
        }
    }
}
From source file:org.sonar.plugins.javascript.JavaScriptSquidSensorTest.java
@Test
public void cancelled_analysis() throws Exception {
    JavaScriptCheck check = new ExceptionRaisingCheck(new IllegalStateException(new InterruptedException()));
    analyseFileWithException(check, inputFile("cpd/Person.js"), "Analysis cancelled");
}
From source file:com.jeremyhaberman.playgrounds.WebPlaygroundDAO.java
@Override
public Collection<? extends Playground> getWithin(Context context, GeoPoint topLeft, GeoPoint bottomRight,
        int maxQuantity) {

    playgrounds = new ArrayList<Playground>();
    String result = swingset.getResources().getString(R.string.error);
    HttpURLConnection httpConnection = null;
    Log.d(TAG, "getPlaygrounds()");

    try {
        // Check if task has been interrupted
        if (Thread.interrupted()) {
            throw new InterruptedException();
        }

        // Build query
        URL url = new URL("http://swingsetweb.appspot.com/playground?" + TYPE_PARAM + "=" + WITHIN + "&"
                + TOP_LEFT_LATITUDE_PARAM + "=" + topLeft.getLatitudeE6() / 1E6 + "&"
                + TOP_LEFT_LONGITUDE_PARAM + "=" + topLeft.getLongitudeE6() / 1E6 + "&"
                + BOTTOM_RIGHT_LATITUDE_PARAM + "=" + bottomRight.getLatitudeE6() / 1E6 + "&"
                + BOTTOM_RIGHT_LONGITUDE_PARAM + "=" + bottomRight.getLongitudeE6() / 1E6);

        httpConnection = (HttpURLConnection) url.openConnection();
        httpConnection.setConnectTimeout(15000);
        httpConnection.setReadTimeout(15000);

        StringBuilder response = new StringBuilder();
        if (httpConnection.getResponseCode() == HttpURLConnection.HTTP_OK) {
            // Read results from the query
            BufferedReader input = new BufferedReader(
                    new InputStreamReader(httpConnection.getInputStream(), "UTF-8"));
            String strLine = null;
            while ((strLine = input.readLine()) != null) {
                response.append(strLine);
            }
            input.close();
        }

        // Parse to get translated text
        JSONArray jsonPlaygrounds = new JSONArray(response.toString());
        int numOfPlaygrounds = jsonPlaygrounds.length();
        JSONObject jsonPlayground = null;
        for (int i = 0; i < numOfPlaygrounds; i++) {
            jsonPlayground = jsonPlaygrounds.getJSONObject(i);
            playgrounds.add(toPlayground(jsonPlayground));
        }
    } catch (Exception e) {
        Log.e(TAG, "Exception", e);
        Intent errorIntent = new Intent(context, Playgrounds.class);
        errorIntent.putExtra("Exception", e.getLocalizedMessage());
        errorIntent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
        context.startActivity(errorIntent);
    } finally {
        if (httpConnection != null) {
            httpConnection.disconnect();
        }
    }

    Log.d(TAG, " -> returned " + result);
    return playgrounds;
}
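One thing to note in the example above: the broad catch (Exception e) also swallows the InterruptedException thrown at the top of the method, so the thread's interrupt status is lost by the time the method returns. A common remedy, shown here as a hedged sketch with hypothetical names rather than as a fix to the original class, is to restore the interrupt flag when a broad handler catches the interruption:

import java.util.Collections;
import java.util.List;

public class InterruptAwareFetcher {

    // Fetches a page of results, treating interruption as "return nothing"
    // while still letting callers observe that the thread was interrupted.
    public List<String> fetch() {
        try {
            if (Thread.interrupted()) {
                throw new InterruptedException();
            }
            return doNetworkCall();   // hypothetical helper that does the real work
        } catch (InterruptedException e) {
            // Restore the flag cleared by Thread.interrupted() so callers can react.
            Thread.currentThread().interrupt();
            return Collections.emptyList();
        } catch (Exception e) {
            // Handle genuine failures separately from cancellation.
            return Collections.emptyList();
        }
    }

    private List<String> doNetworkCall() throws Exception {
        return Collections.singletonList("result");
    }
}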
From source file:org.apache.giraph.io.hcatalog.HCatGiraphRunner.java
/**
 * process arguments
 *
 * @param args to process
 * @return CommandLine instance
 * @throws ParseException error parsing arguments
 * @throws InterruptedException interrupted
 */
private CommandLine processArguments(String[] args) throws ParseException, InterruptedException {
    Options options = new Options();
    options.addOption("h", "help", false, "Help");
    options.addOption("v", "verbose", false, "Verbose");
    options.addOption("D", "hiveconf", true, "property=value for Hive/Hadoop configuration");
    options.addOption("w", "workers", true, "Number of workers");
    if (vertexClass == null) {
        options.addOption(null, "vertexClass", true, "Giraph Vertex class to use");
    }
    if (vertexInputFormatClass == null) {
        options.addOption(null, "vertexInputFormatClass", true, "Giraph HCatalogVertexInputFormat class to use");
    }
    if (edgeInputFormatClass == null) {
        options.addOption(null, "edgeInputFormatClass", true, "Giraph HCatalogEdgeInputFormat class to use");
    }
    if (vertexOutputFormatClass == null) {
        options.addOption(null, "vertexOutputFormatClass", true, "Giraph HCatalogVertexOutputFormat class to use");
    }
    options.addOption("db", "dbName", true, "Hive database name");
    options.addOption("vi", "vertexInputTable", true, "Vertex input table name");
    options.addOption("VI", "vertexInputFilter", true,
            "Vertex input table filter expression (e.g., \"a<2 AND b='two'\"");
    options.addOption("ei", "edgeInputTable", true, "Edge input table name");
    options.addOption("EI", "edgeInputFilter", true,
            "Edge input table filter expression (e.g., \"a<2 AND b='two'\"");
    options.addOption("o", "outputTable", true, "Output table name");
    options.addOption("O", "outputPartition", true, "Output table partition values (e.g., \"a=1,b=two\")");
    options.addOption("s", "skipOutput", false, "Skip output?");

    addMoreOptions(options);

    CommandLineParser parser = new GnuParser();
    final CommandLine cmdln = parser.parse(options, args);
    if (args.length == 0 || cmdln.hasOption("help")) {
        new HelpFormatter().printHelp(getClass().getName(), options, true);
        throw new InterruptedException();
    }

    // Giraph classes
    if (cmdln.hasOption("vertexClass")) {
        vertexClass = findClass(cmdln.getOptionValue("vertexClass"), Vertex.class);
    }
    if (cmdln.hasOption("vertexInputFormatClass")) {
        vertexInputFormatClass = findClass(cmdln.getOptionValue("vertexInputFormatClass"),
                HCatalogVertexInputFormat.class);
    }
    if (cmdln.hasOption("edgeInputFormatClass")) {
        edgeInputFormatClass = findClass(cmdln.getOptionValue("edgeInputFormatClass"),
                HCatalogEdgeInputFormat.class);
    }
    if (cmdln.hasOption("vertexOutputFormatClass")) {
        vertexOutputFormatClass = findClass(cmdln.getOptionValue("vertexOutputFormatClass"),
                HCatalogVertexOutputFormat.class);
    }
    if (cmdln.hasOption("skipOutput")) {
        skipOutput = true;
    }

    if (vertexClass == null) {
        throw new IllegalArgumentException("Need the Giraph Vertex class name (-vertexClass) to use");
    }
    if (vertexInputFormatClass == null && edgeInputFormatClass == null) {
        throw new IllegalArgumentException("Need at least one of Giraph VertexInputFormat "
                + "class name (-vertexInputFormatClass) and "
                + "EdgeInputFormat class name (-edgeInputFormatClass)");
    }
    if (vertexOutputFormatClass == null) {
        throw new IllegalArgumentException("Need the Giraph VertexOutputFormat "
                + "class name (-vertexOutputFormatClass) to use");
    }
    if (!cmdln.hasOption("workers")) {
        throw new IllegalArgumentException("Need to choose the number of workers (-w)");
    }
    if (!cmdln.hasOption("vertexInputTable") && vertexInputFormatClass != null) {
        throw new IllegalArgumentException("Need to set the vertex input table name (-vi)");
    }
    if (!cmdln.hasOption("edgeInputTable") && edgeInputFormatClass != null) {
        throw new IllegalArgumentException("Need to set the edge input table name (-ei)");
    }
    if (!cmdln.hasOption("outputTable")) {
        throw new IllegalArgumentException("Need to set the output table name (-o)");
    }

    dbName = cmdln.getOptionValue("dbName", "default");
    vertexInputTableName = cmdln.getOptionValue("vertexInputTable");
    vertexInputTableFilterExpr = cmdln.getOptionValue("vertexInputFilter");
    edgeInputTableName = cmdln.getOptionValue("edgeInputTable");
    edgeInputTableFilterExpr = cmdln.getOptionValue("edgeInputFilter");
    outputTableName = cmdln.getOptionValue("outputTable");
    outputTablePartitionValues = HiveUtils.parsePartitionValues(cmdln.getOptionValue("outputPartition"));
    workers = Integer.parseInt(cmdln.getOptionValue("workers"));
    isVerbose = cmdln.hasOption("verbose");

    // pick up -hiveconf arguments
    for (String hiveconf : cmdln.getOptionValues("hiveconf")) {
        String[] keyval = hiveconf.split("=", 2);
        if (keyval.length == 2) {
            String name = keyval[0];
            String value = keyval[1];
            if (name.equals("tmpjars") || name.equals("tmpfiles")) {
                addToStringCollection(conf, name, value);
            } else {
                conf.set(name, value);
            }
        }
    }

    processMoreArguments(cmdln);
    return cmdln;
}
From source file:org.rhq.plugins.cassandra.CassandraNodeComponent.java
protected void waitForNodeToGoDown() throws InterruptedException {
    if (OperatingSystem.getInstance().getName().equals(OperatingSystem.NAME_MACOSX)) {
        // See this thread on VMWare forum: http://communities.vmware.com/message/2187972#2187972
        // Unfortunately there is no work around for this failure on Mac OSX so the method will silently return
        // on this platform.
        return;
    }
    for (ProcessInfoSnapshot processInfoSnapshot = getProcessInfoSnapshot();;
            processInfoSnapshot = getProcessInfoSnapshot()) {
        if (processInfoSnapshot == null || !processInfoSnapshot.isRunning()) {
            // Process not found, so it died, that's fine
            // OR
            // Process info says process is no longer running, that's fine as well
            break;
        }
        if (getResourceContext().getComponentInvocationContext().isInterrupted()) {
            // Operation canceled or timed out
            throw new InterruptedException();
        }
        // Process is still running, wait two seconds and check again
        Thread.sleep(SECONDS.toMillis(2));
    }
}
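This example and the next one share the same shape: poll an external condition, sleep between checks, and translate an out-of-band cancellation signal into an InterruptedException. A generic sketch of that shape, using BooleanSupplier stand-ins and hypothetical names rather than the RHQ plugin API:

import java.util.concurrent.TimeUnit;
import java.util.function.BooleanSupplier;

public final class PollingWaiter {

    private PollingWaiter() {
    }

    // Blocks until 'condition' becomes true, checking every 'pollSeconds' seconds.
    // If 'cancelled' reports true, the wait is abandoned by throwing InterruptedException,
    // mirroring how the plugin code reacts to its invocation context being interrupted.
    public static void waitUntil(BooleanSupplier condition, BooleanSupplier cancelled, long pollSeconds)
            throws InterruptedException {
        while (!condition.getAsBoolean()) {
            if (cancelled.getAsBoolean()) {
                throw new InterruptedException();   // cancellation requested by the caller
            }
            Thread.sleep(TimeUnit.SECONDS.toMillis(pollSeconds));
        }
    }
}

A caller could pass () -> !process.isRunning() as the condition and the operation context's interruption check as the cancellation test.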
From source file:org.rhq.modules.plugins.jbossas7.StandaloneASComponent.java
private boolean waitUntilReloaded() throws InterruptedException {
    boolean reloaded = false;
    while (!reloaded) {
        Operation op = new ReadAttribute(new Address(), "release-version");
        try {
            Result res = getASConnection().execute(op);
            if (res.isSuccess() && !res.isReloadRequired()) {
                reloaded = true;
            }
        } catch (Exception e) {
            // do absolutely nothing
            // if an exception is thrown that means the server is still reloading, so consider this
            // a single failed attempt, equivalent to res.isSuccess == false
        }

        if (!reloaded) {
            if (context.getComponentInvocationContext().isInterrupted()) {
                // Operation canceled or timed out
                throw new InterruptedException();
            }
            Thread.sleep(SECONDS.toMillis(1));
        }
    }
    return reloaded;
}
From source file:com.htmlhifive.tools.jslint.parse.JsParser.java
/**
 * Checks whether the user has cancelled the parse via the progress monitor.
 * If so, clears the current parser and aborts the parse.
 *
 * @throws InterruptedException if the monitor reports cancellation.
 */
private void checkCancel() throws InterruptedException {
    if (monitor.isCanceled()) {
        ParserManager.clearCurrentParser();
        throw new InterruptedException();
    }
}
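This helper is called between parse steps so a long-running task can bail out promptly when the user hits cancel. A small self-contained sketch of the same idea, using a plain interface in place of the Eclipse progress monitor (hypothetical names):

public class CancellableTask {

    // Stand-in for a UI progress monitor such as Eclipse's IProgressMonitor.
    interface CancelSignal {
        boolean isCanceled();
    }

    private final CancelSignal monitor;

    public CancellableTask(CancelSignal monitor) {
        this.monitor = monitor;
    }

    // Called between units of work; throwing keeps cancellation on the
    // normal InterruptedException path that callers already handle.
    private void checkCancel() throws InterruptedException {
        if (monitor.isCanceled()) {
            throw new InterruptedException();
        }
    }

    public void run(int steps) throws InterruptedException {
        for (int i = 0; i < steps; i++) {
            checkCancel();   // abort promptly if the user cancelled
            doStep(i);       // hypothetical unit of work
        }
    }

    private void doStep(int i) {
        // ... real work would go here ...
    }
}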
From source file:org.globus.ftp.vanilla.FTPControlChannel.java
/**
 * Block until one of the conditions is true:
 * <ol>
 *   <li> a reply is available in the control channel,
 *   <li> timeout (maxWait) expired
 *   <li> aborted flag changes to true.
 * </ol>
 * If maxWait == WAIT_FOREVER, never timeout and only check conditions (1) and (3).
 *
 * @param maxWait timeout in milliseconds
 * @param ioDelay frequency of polling the control channel and checking the conditions
 * @param aborted flag indicating wait aborted.
 **/
public void waitFor(Flag aborted, int ioDelay, int maxWait) throws ServerException, IOException, InterruptedException {
    int oldTimeout = this.socket.getSoTimeout();
    try {
        int c = 0;
        if (maxWait != WAIT_FOREVER) {
            this.socket.setSoTimeout(maxWait);
        } else {
            this.socket.setSoTimeout(0);
        }

        c = this.checkSocketDone(aborted, ioDelay, maxWait);

        /*
         * A bug in the server causes it to append \0 to each reply. As the result, we receive
         * this \0 before the next reply. The code below handles this case.
         */
        if (c != 0) {
            // if we're here, the server is healthy
            // and the reply is waiting in the buffer
            return;
        }

        // if we're here, we deal with the buggy server.
        // we discarded the \0 and now resume wait.
        logger.debug("Server sent \\0; resume wait");
        try {
            // gotta read past the 0 we just remarked
            c = ftpIn.read();
            c = this.checkSocketDone(aborted, ioDelay, maxWait);
        } catch (SocketTimeoutException e) {
            throw new ServerException(ServerException.REPLY_TIMEOUT);
        } catch (EOFException e) {
            throw new InterruptedException();
        }
    } finally {
        this.socket.setSoTimeout(oldTimeout);
    }
}
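The last example saves the socket's SO_TIMEOUT, tightens it for the duration of the wait, restores it in a finally block, and maps an unexpected EOF onto InterruptedException so callers treat it as an aborted wait. A reduced sketch of that save/restore-and-translate pattern (hypothetical names; not the Globus FTP client code):

import java.io.IOException;
import java.net.Socket;
import java.net.SocketTimeoutException;

public final class TimedReply {

    private TimedReply() {
    }

    // Reads one byte of a reply within 'maxWaitMillis', restoring the socket's
    // original timeout afterwards. End of stream is reported as InterruptedException
    // so callers treat a vanished connection like an aborted wait.
    public static int readReplyByte(Socket socket, int maxWaitMillis)
            throws IOException, InterruptedException {
        int oldTimeout = socket.getSoTimeout();
        try {
            socket.setSoTimeout(maxWaitMillis);   // 0 would mean "wait forever"
            int c = socket.getInputStream().read();
            if (c == -1) {
                throw new InterruptedException(); // connection closed mid-wait
            }
            return c;
        } catch (SocketTimeoutException e) {
            throw new IOException("no reply within " + maxWaitMillis + " ms", e);
        } finally {
            socket.setSoTimeout(oldTimeout);      // always restore the caller's timeout
        }
    }
}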