List of usage examples for java.lang.InterruptedException.toString()
public String toString()
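InterruptedException does not override toString(); it inherits the implementation from java.lang.Throwable, which returns the exception's class name followed by its detail message when one was set, e.g. "java.lang.InterruptedException: sleep interrupted". Every example below logs the result of toString() from a catch block. Here is a minimal, self-contained sketch of that pattern (the class and logger names are illustrative, not taken from the examples below); unlike most of the excerpts that follow, it also restores the interrupt flag:

import java.util.logging.Logger;

public class InterruptedToStringExample {
    private static final Logger LOG = Logger.getLogger(InterruptedToStringExample.class.getName());

    public static void main(String[] args) {
        try {
            // Simulate a blocking call that can be interrupted.
            Thread.sleep(1000);
        } catch (InterruptedException e) {
            // toString() typically yields "java.lang.InterruptedException: sleep interrupted".
            LOG.warning("Sleep interrupted: " + e.toString());
            // Restore the interrupt flag so callers can still observe the interruption.
            Thread.currentThread().interrupt();
        }
    }
}

Calling Thread.currentThread().interrupt() re-sets the flag that was cleared when the exception was thrown, so code further up the stack can still detect the interruption.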
From source file:com.cisco.oss.foundation.monitoring.RegistryFinder.java
private Registry internalProcessStart(final Configuration configuration, final int port, final String command)
        throws IOException, RemoteException, AccessException {
    String maxHeapArg = "-J-Xmx" + configuration.getInt(FoundationMonitoringConstants.RMIREGISTRY_MAXHEAPSIZE) + "m";
    // start rmiregistry process
    if (System.getProperty("os.name").toLowerCase(Locale.getDefault()).contains("windows")) {
        String[] commandArgsArr = new String[] { command, maxHeapArg, String.valueOf(port) };
        List<String> commandArgs = Arrays.asList(commandArgsArr);
        LOGGER.info("running command: " + commandArgs);
        new ProcessBuilder(commandArgsArr).start();
    } else {
        // support background process to prevent zombies
        String[] commandArgsArr = new String[] { "/bin/sh", "-c", command + maxHeapArg + " " + port + "&" };
        List<String> commandArgs = Arrays.asList(commandArgsArr);
        LOGGER.info("running command: " + commandArgs);
        new ProcessBuilder(commandArgsArr).start();
    }

    try {
        // wait for the process to start properly
        Thread.sleep(1000);
    } catch (InterruptedException e) {
        LOGGER.warn("The sleep for rmi registry to end has been interrupted: " + e.toString());
    }

    // get registry
    final Registry registry = LocateRegistry.getRegistry(port);
    // test registry
    registry.list();
    LOGGER.info("New RMI Registry created using port: " + port);
    return registry;
}
From source file:org.apache.hadoop.hdfs.BlockStorageLocationUtil.java
/**
 * Queries datanodes for the blocks specified in <code>datanodeBlocks</code>,
 * making one RPC to each datanode. These RPCs are made in parallel using a
 * threadpool.
 *
 * @param datanodeBlocks
 *            Map of datanodes to the blocks present on the DN
 * @return metadatas Map of datanodes to block metadata of the DN
 * @throws InvalidBlockTokenException
 *             if client does not have read access on a requested block
 */
static Map<DatanodeInfo, HdfsBlocksMetadata> queryDatanodesForHdfsBlocksMetadata(Configuration conf,
        Map<DatanodeInfo, List<LocatedBlock>> datanodeBlocks, int poolsize, int timeoutMs,
        boolean connectToDnViaHostname, Tracer tracer, SpanId parentSpanId) throws InvalidBlockTokenException {
    List<VolumeBlockLocationCallable> callables = createVolumeBlockLocationCallables(conf, datanodeBlocks,
            timeoutMs, connectToDnViaHostname, tracer, parentSpanId);

    // Use a thread pool to execute the Callables in parallel
    List<Future<HdfsBlocksMetadata>> futures = new ArrayList<>();
    ExecutorService executor = new ScheduledThreadPoolExecutor(poolsize);
    try {
        futures = executor.invokeAll(callables, timeoutMs, TimeUnit.MILLISECONDS);
    } catch (InterruptedException e) {
        // Swallow the exception here, because we can return partial results
    }
    executor.shutdown();

    Map<DatanodeInfo, HdfsBlocksMetadata> metadatas = Maps.newHashMapWithExpectedSize(datanodeBlocks.size());
    // Fill in metadatas with results from DN RPCs, where possible
    for (int i = 0; i < futures.size(); i++) {
        VolumeBlockLocationCallable callable = callables.get(i);
        DatanodeInfo datanode = callable.getDatanodeInfo();
        Future<HdfsBlocksMetadata> future = futures.get(i);
        try {
            HdfsBlocksMetadata metadata = future.get();
            metadatas.put(callable.getDatanodeInfo(), metadata);
        } catch (CancellationException e) {
            LOG.info("Cancelled while waiting for datanode " + datanode.getIpcAddr(false) + ": " + e.toString());
        } catch (ExecutionException e) {
            Throwable t = e.getCause();
            if (t instanceof InvalidBlockTokenException) {
                LOG.warn("Invalid access token when trying to retrieve "
                        + "information from datanode " + datanode.getIpcAddr(false));
                throw (InvalidBlockTokenException) t;
            } else if (t instanceof UnsupportedOperationException) {
                LOG.info("Datanode " + datanode.getIpcAddr(false) + " does not support"
                        + " required #getHdfsBlocksMetadata() API");
                throw (UnsupportedOperationException) t;
            } else {
                LOG.info("Failed to query block locations on datanode " + datanode.getIpcAddr(false) + ": " + t);
            }
            if (LOG.isDebugEnabled()) {
                LOG.debug("Could not fetch information from datanode", t);
            }
        } catch (InterruptedException e) {
            // Shouldn't happen, because invokeAll waits for all Futures to be ready
            LOG.info("Interrupted while fetching HdfsBlocksMetadata");
        }
    }
    return metadatas;
}
From source file:org.apache.ftpserver.socketfactory.FtpSocketFactoryTest.java
private void testCreateServerSocket(final FtpSocketFactory ftpSocketFactory, final int port)
        throws Exception, IOException {
    boolean freePort = false;
    try {
        final ServerSocket testSocket = new ServerSocket(port, 100);
        freePort = testSocket.isBound();
        testSocket.close();
    } catch (Exception exc) {
        // ok
        freePort = true;
    }
    if (freePort) {
        new Thread() {
            public void run() {
                synchronized (this) {
                    try {
                        this.wait(1000);
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                        fail(e.toString());
                    }
                }
                try {
                    Socket socket = new Socket();
                    socket.connect(new InetSocketAddress("localhost", port));
                    socket.getInputStream();
                    socket.getOutputStream();
                    socket.close();
                } catch (UnknownHostException e) {
                    e.printStackTrace();
                    fail(e.toString());
                } catch (IOException e) {
                    e.printStackTrace();
                    fail(e.toString());
                }
            }
        }.start();

        ServerSocket serverSocket = ftpSocketFactory.createServerSocket();
        assertNotNull(serverSocket);
        serverSocket.accept();
    }
}
From source file:info.smartkit.hairy_batman.query.KJsonApiQuery.java
public void query() {
    // KJSON API testing using RestTemplate.
    RestTemplate restTemplate = new RestTemplate();
    // restTemplate.getMessageConverters().add(new StringHttpMessageConverter());
    MappingJackson2HttpMessageConverter converter = new MappingJackson2HttpMessageConverter();
    // @see:
    // http://stackoverflow.com/questions/22329368/spring-android-rest-template-parse-json-data-with-content-type-text-html
    converter.setSupportedMediaTypes(Collections.singletonList(MediaType.TEXT_HTML));
    restTemplate.getMessageConverters().add(converter);
    // Spring batch for CSV reading.
    // WxBar api_query_resutls = new WxBar();
    try {
        /*System.out.println("this.getParameters():" + this.getParameters());
        api_query_resutls = restTemplate.postForObject(GlobalConsts.KJSON_API_URI, this.getParameters(),
                WxBar.class);*/
    } catch (RestClientException e1) {
        // TODO Auto-generated catch block
        e1.printStackTrace();
    }
    if (api_query_resutls.getData() != null) { // FIXME: null JSON exception handler here.
        // WxBar returns = restTemplate.getForObject(GlobalConsts.KJSON_API_URI, WxBar.class);
        ArrayList<WxKJson> api_query_resutls_data = api_query_resutls.getData();
        // System.out.println("ApiQuery result data: " + api_query_resutls_data);
        LOG.info("ApiQuery result data: " + api_query_resutls_data.toString());
        WxKJson wxKJson = api_query_resutls_data.get(0);
        // System.out.println("Parsed ApiQuery results,articleReadNum:" + wxKJson.getRead()
        //         + ",articleLikeNum: " + wxKJson.getLike());
        LOG.info("Parsed ApiQuery results,articleReadNum:" + wxKJson.getRead() + ",articleLikeNum: "
                + wxKJson.getLike());
        // this.readNum = Long.parseLong(wxKJson.getRead());
        this.likeNum = Long.parseLong(wxKJson.getLike());
        this.queriedSubscriber.setArticleReadNum(wxKJson.getRead());
        this.queriedSubscriber.setArticleLikeNum(wxKJson.getLike());
        double likeRate = (double) likeNum / readNum * 100;
        java.math.BigDecimal bigLikeRate = new java.math.BigDecimal(likeRate);
        String bigLikeRateStr = bigLikeRate
                .setScale(GlobalConsts.DEFINITION_PRECISION, java.math.BigDecimal.ROUND_HALF_UP).doubleValue()
                + "%";
        this.queriedSubscriber.setArticleLikeRate(bigLikeRateStr);
        // this.queriedSubscriber.setMoniterTime(GlobalVariables.now());
        this.queriedSubscriber.setArticleUrl(wxKJson.getUrl());
        // GlobalVariables.wxFooListWithOpenIdArticleReadLike.add(this.queriedSubscriber);
        // File reporting...
        new FileReporter(GlobalConsts.REPORT_FILE_OUTPUT_OPENID_ARITICLE_READ_LIKE,
                GlobalVariables.wxFooListWithOpenIdArticleReadLike,
                FileReporter.REPORTER_TYPE.R_T_OPENID_ARTICLE_READ_LIKE,
                FileReporter.REPORTER_FILE_TYPE.EXCEL).write();
        // Save to DB.
        Object[] params = { likeNum.intValue(), readNum.intValue(), this.queriedSubscriber.getArticleLikeRate(),
                this.queriedSubscriber.getArticleUrl() };
        int[] types = { Types.INTEGER, Types.INTEGER, Types.VARCHAR, Types.VARCHAR };
        int rows = GlobalVariables.jdbcTempate.update(GlobalConsts.JDBC_QUERY_UPDATE_OPENID_ARTICLE_READ_LIKE,
                params, types);
        LOG.info("rows(s): " + rows + " updated." + ",likeNum: " + likeNum + ",readNum: " + readNum
                + ",bigLikeRateStr: " + bigLikeRateStr + ",articleUrl: "
                + this.queriedSubscriber.getArticleUrl());
        // LOG.info("GlobalVariables.wxFooListWithOpenIdArticleReadLike(size):"
        //         + GlobalVariables.wxFooListWithOpenIdArticleReadLike.size() + ",raw: "
        //         + GlobalVariables.wxFooListWithOpenIdArticleReadLike.toString());
        // if (this.subscribers.size() > 0) {
        this.query(); // Recursively call.
        GlobalVariables.kjsonQueryCounter++;
        if (GlobalVariables.kjsonQueryCounter >= GlobalConsts.KJSON_API_QPM) {
            try {
                Thread.sleep(30000);
                GlobalVariables.kjsonQueryCounter = 0;
            } catch (InterruptedException e) {
                LOG.error(e.toString());
            }
        }
    } else {
        //
    }
}
From source file:com.cloudera.sqoop.mapreduce.AutoProgressMapper.java
/**
 * Run the mapping process for this task, wrapped in an auto-progress system.
 */
@Override
public void run(Context context) throws IOException, InterruptedException {
    configureAutoProgress(context.getConfiguration());
    ProgressThread thread = this.new ProgressThread(context);

    try {
        thread.setDaemon(true);
        thread.start();

        // use default run() method to actually drive the mapping.
        super.run(context);
    } finally {
        // Tell the progress thread to exit..
        LOG.debug("Instructing auto-progress thread to quit.");
        thread.signalShutdown();
        try {
            // And wait for that to happen.
            LOG.debug("Waiting for progress thread shutdown...");
            thread.join();
            LOG.debug("Progress thread shutdown detected.");
        } catch (InterruptedException ie) {
            LOG.warn("Interrupted when waiting on auto-progress thread: " + ie.toString());
        }
    }
}
From source file:uk.co.modularaudio.util.timing.NanosecondPeriodicThreadedTimer.java
public void stop() {
    if (timerThread == null) {
        log.error("Attempted to stop timer thread without one running!");
    } else {
        timerThread.halt();
        try {
            timerThread.join();
        } catch (final InterruptedException e) {
            if (log.isErrorEnabled()) {
                log.error("Failed during timer thread join: " + e.toString(), e);
            }
        }
        timerThread = null;
    }
}
From source file:org.castor.cpa.test.test07.TestCacheLeakage.java
/**
 * Helper method to run the stress tests once for a cache type.
 */
public void runOnce() throws Exception {
    // clear the table
    _db.begin();
    int del = _db.getJdbcConnection().createStatement().executeUpdate("DELETE FROM test07_race");
    _db.commit();
    LOG.debug("row(s) deleted in table core_race: " + del);

    // set the className and classType to be used
    switch (_cacheType) {
    case COUNT_LIMITED:
        _classType = RaceCount.class;
        break;
    case TIME_LIMITED:
        _classType = RaceTime.class;
        break;
    case NO_CACHE:
        _classType = RaceNone.class;
        break;
    case UNLIMITED:
        _classType = RaceUnlimited.class;
        break;
    default:
        LOG.error("Unknown cache type");
    }

    CreateDeleteThread cdThread = new CreateDeleteThread(this, _jdo, _cacheType, NUM_OF_CREATE_DELETE);

    ReadThread[] rThread = new ReadThread[NUM_OF_READ_THREADS];
    for (int i = 0; i < NUM_OF_READ_THREADS; i++) {
        rThread[i] = new ReadThread(this, cdThread, _jdo, NUM_OF_READ);
        rThread[i].start();
    }

    cdThread.start();

    // Polling the test case to see if it is finished
    try {
        while (!cdThread.isDone()) {
            Thread.sleep(500);
        }
        // Joins all the finished threads
        cdThread.join();
        for (int i = 0; i < NUM_OF_READ_THREADS; i++) {
            rThread[i].join();
        }
    } catch (InterruptedException ex) {
        fail(ex.toString());
    }
}
From source file:com.chinamobile.bcbsp.bspstaff.StaffRunner.java
/**
 * Run the child process.
 * @throws Exception e
 */
private void runChild(String[] args, File dir) throws Exception {
    this.process = Runtime.getRuntime().exec(args, null, dir);
    try {
        int exit_code = process.waitFor();
        if (!killed && exit_code != 0) {
            throw new Exception("Staff process exit with nonzero status of " + exit_code + ".");
        }
    } catch (InterruptedException e) {
        throw new IOException(e.toString());
    } finally {
        kill();
    }
}
From source file:org.j4me.collections.CubbyHole.java
/**
 * Tests that a consumer thread blocks waiting for a producer to add
 * something to the cubby hole.
 */
public void testBlocking() {
    final CubbyHole one = new CubbyHole();
    final CubbyHole two = new CubbyHole();

    class Consumer extends Thread {
        public void run() {
            try {
                // Block waiting for something in the first cubby hole.
                Object consume = one.get();

                // The producer thread should be blocking waiting for
                // this thread to put something into the second cubby hole.
                two.set(consume);
            } catch (Throwable t) {
                fail(t.toString());
            }
        }
    }

    try {
        // Create a consumer thread.
        Consumer consumer = new Consumer();
        consumer.start();

        // Give up the CPU to let the consumer start and block.
        Thread.sleep(0);

        // Put some data into the first cubby hole to unblock the consumer.
        Integer data = new Integer(13);
        one.set(data);

        // Get data from the second cubby hole. This thread will block
        // until the consumer puts something into it.
        Integer result = (Integer) two.get();

        // Verify the consumer thread read our original data from the
        // first cubby hole and put it into the second.
        assertSame("Data integrity verified.", data, result);
    } catch (InterruptedException e) {
        fail(e.toString());
    }
}
From source file:org.smartfrog.avalanche.client.sf.exec.simple.StartComponent.java
public void startApplication() throws IOException {
    Runtime run;
    log.info("Starting Component " + componentName);
    run = Runtime.getRuntime();
    log.info("Starting application " + command);
    int exitVal = 0;
    try {
        System.out.println("Running the command : " + command);
        proc = run.exec(command, env);
        stdInput = new BufferedReader(new InputStreamReader(proc.getInputStream()));
        stdError = new BufferedReader(new InputStreamReader(proc.getErrorStream()));
        readOutput();
        exitVal = proc.waitFor();
        if (exitVal != 0) {
            log.error("Return for the command " + command + " is " + exitVal);
            String err = "Error : ";
            String s = null;
            while ((s = stdError.readLine()) != null) {
                err += s;
            }
            throw new IOException("Return value for the command " + command + " is not 0." + err);
        }
    } catch (IOException e) {
        log.error("Error while executing the command : " + command, e);
        throw new IOException(e.toString());
    } catch (InterruptedException ie) {
        log.error("Error while executing the command : " + command, ie);
        throw new IOException(ie.toString());
    }
}