List of usage examples for java.lang.Thread.join()
public final void join() throws InterruptedException
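Before the full source-file examples below, here is a minimal, self-contained sketch of the pattern they all share: start a worker thread, then call join() so the calling thread blocks until the worker terminates. The class name JoinExample and the summing task are illustrative only and are not taken from any of the listed sources; join() throws InterruptedException if the waiting thread is interrupted while blocked.

public class JoinExample {
    public static void main(String[] args) throws InterruptedException {
        final long[] result = new long[1];
        Thread worker = new Thread(() -> {
            // Worker computes a value; the main thread must not read it before the worker is done.
            long sum = 0;
            for (int i = 1; i <= 1_000_000; i++) {
                sum += i;
            }
            result[0] = sum;
        });
        worker.start();
        worker.join(); // blocks until the worker thread terminates (or this thread is interrupted)
        System.out.println("Sum = " + result[0]);
    }
}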
From source file:io.ingenieux.lambda.shell.LambdaShell.java
private static void runCommandArray(OutputStream os, String... args) throws Exception {
    PrintWriter pw = new PrintWriter(os, true);
    File tempPath = File.createTempFile("tmp-", ".sh");
    IOUtils.write(StringUtils.join(args, " "), new FileOutputStream(tempPath));
    List<String> processArgs = new ArrayList<>(Arrays.asList("/bin/bash", "-x", tempPath.getAbsolutePath()));
    ProcessBuilder psBuilder = new ProcessBuilder(processArgs).redirectErrorStream(true);
    final Process process = psBuilder.start();
    final Thread t = new Thread(() -> {
        try {
            IOUtils.copy(process.getInputStream(), os);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    });
    t.start();
    process.waitFor();
    t.join();
    int resultCode = process.exitValue();
}
From source file:cosmos.results.integration.CosmosIntegrationSetup.java
public static void loadAllWikis() throws Exception {
    List<Thread> threads = Lists.newArrayList();
    threads.add(new Thread(new Runnable() {
        public void run() {
            try {
                CosmosIntegrationSetup.getWiki1();
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        }
    }));
    threads.add(new Thread(new Runnable() {
        public void run() {
            try {
                CosmosIntegrationSetup.getWiki2();
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        }
    }));
    threads.add(new Thread(new Runnable() {
        public void run() {
            try {
                CosmosIntegrationSetup.getWiki3();
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        }
    }));
    threads.add(new Thread(new Runnable() {
        public void run() {
            try {
                CosmosIntegrationSetup.getWiki4();
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        }
    }));
    threads.add(new Thread(new Runnable() {
        public void run() {
            try {
                CosmosIntegrationSetup.getWiki5();
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        }
    }));
    for (Thread t : threads) {
        t.start();
    }
    for (Thread t : threads) {
        t.join();
    }
}
From source file:com.healthmarketscience.rmiio.RemoteIteratorTest.java
public static List<List<TestObject>> mainTest(final boolean sendEmptyList, final boolean doAbort,
        final boolean noDelayAbort, final List<Throwable> clientExceptions,
        final List<AccumulateRemoteStreamMonitor<?>> monitors) throws Exception {
    ObjectServer server = new ObjectServer();
    final RemoteObjectServer stub = (RemoteObjectServer) RemoteStreamServerTest
            .simulateRemote(UnicastRemoteObject.exportObject(server, 0));
    LOG.debug("Server ready");
    LOG.debug("Sleeping 3000 ms...");
    Thread.sleep(3000);
    LOG.debug("Running tests");
    Thread clientThread = new Thread(new Runnable() {
        public void run() {
            clientExceptions.addAll(ObjectClient.main(stub, sendEmptyList, doAbort, noDelayAbort, monitors));
        }
    });
    clientThread.start();
    clientThread.join();
    LOG.debug("Unexporting server");
    UnicastRemoteObject.unexportObject(server, true);
    return server._recvdObjectLists;
}
From source file:com.textocat.textokit.commons.io.ProcessIOUtils.java
/**
 * @param proc process whose input stream will receive bytes from the argument input stream
 * @param in   input stream. Note that it is closed at the end.
 */
public static void feedProcessInput(Process proc, final InputStream in, final boolean closeStdIn)
        throws IOException {
    final OutputStream procStdIn = proc.getOutputStream();
    final List<Exception> exceptions = Lists.newLinkedList();
    Thread writerThread = new Thread(new Runnable() {
        @Override
        public void run() {
            try {
                IOUtils.copy(in, procStdIn);
                if (closeStdIn) {
                    procStdIn.flush();
                    closeQuietly(procStdIn);
                }
            } catch (Exception e) {
                exceptions.add(e);
            } finally {
                closeQuietly(in);
            }
        }
    });
    writerThread.start();
    try {
        writerThread.join();
    } catch (InterruptedException e) {
        // do nothing, just set flag
        Thread.currentThread().interrupt();
    }
    if (!exceptions.isEmpty()) {
        Exception ex = exceptions.get(0);
        throw ex instanceof IOException ? (IOException) ex
                : new IOException("Unexpected exception in writing thread", ex);
    }
}
From source file:com.jkoolcloud.tnt4j.streams.custom.kafka.interceptors.InterceptorsTest.java
/**
 * Runs interceptions test scenario.
 *
 * @throws Exception
 *             if exception occurs while running interceptions test
 */
public static void interceptionsTest() throws Exception {
    String tnt4jCfgPath = System.getProperty(TrackerConfigStore.TNT4J_PROPERTIES_KEY);
    if (StringUtils.isEmpty(tnt4jCfgPath)) {
        URL defaultCfg = InterceptionsManager.getDefaultTrackerConfiguration();
        System.setProperty(TrackerConfigStore.TNT4J_PROPERTIES_KEY, defaultCfg.toExternalForm());
    }

    final Consumer<String, String> consumer = initConsumer();

    Thread pt = new Thread(new Runnable() {
        @Override
        public void run() {
            try {
                produce();
            } catch (Exception exc) {
                exc.printStackTrace();
            }
        }
    });
    Thread ct = new Thread(new Runnable() {
        @Override
        public void run() {
            consume(consumer);
        }
    });

    ct.start();
    pt.start();

    pt.join();
    consumer.wakeup();
    ct.join();
}
From source file:agileinterop.AgileInterop.java
private static JSONObject getPSRData(String body) throws ParseException, InterruptedException, APIException {
    // Parse body as JSON
    JSONParser parser = new JSONParser();
    JSONArray jsonBody = (JSONArray) parser.parse(body);
    Map<String, Object> data = new HashMap<>();

    class GetObjectData implements Runnable {
        private String psrNumber;
        private Map<String, Object> data;
        private IServiceRequest psr;

        public GetObjectData(String psrNumber, Map<String, Object> data) throws APIException, InterruptedException {
            this.psrNumber = psrNumber;
            this.data = data;
            psr = (IServiceRequest) Agile.session.getObject(IServiceRequest.OBJECT_TYPE, psrNumber);
        }

        @Override
        public void run() {
            this.data.put(psrNumber, new HashMap<String, Object>());
            try {
                if (psr != null) {
                    getCellValues();
                    getAttachments();
                    getHistory();
                }
            } catch (APIException ex) {
                Logger.getLogger(AgileInterop.class.getName()).log(Level.SEVERE, null, ex);
            }
        }

        private void getCellValues() throws APIException {
            Map<String, Object> cellValues = new HashMap<>();
            long startTime = System.currentTimeMillis();
            // Get cell values
            ICell[] cells = psr.getCells();
            for (ICell cell : cells) {
                if (cell.getDataType() == DataTypeConstants.TYPE_DATE) {
                    if (cell.getValue() != null) {
                        SimpleDateFormat sdf = new SimpleDateFormat("MM/dd/yyyy hh:mm:ss a zz");
                        sdf.setTimeZone(TimeZone.getTimeZone("Europe/London"));
                        cellValues.put(cell.getName(), sdf.format((Date) cell.getValue()));
                    } else {
                        cellValues.put(cell.getName(), cell.toString());
                    }
                } else {
                    cellValues.put(cell.getName(), cell.toString());
                }
            }
            long endTime = System.currentTimeMillis();
            String logMessage = String.format("%s: getCellValues executed in %d milliseconds", psrNumber,
                    endTime - startTime);
            System.out.println(logMessage);
            ((HashMap<String, Object>) this.data.get(psrNumber)).put("cellValues", cellValues);
        }

        private void getAttachments() throws APIException {
            List<Map<String, String>> attachments = new ArrayList<>();
            long startTime = System.currentTimeMillis();
            // Get attachments information
            ITable table = psr.getTable("Attachments");
            ITwoWayIterator tableIterator = table.getTableIterator();
            while (tableIterator.hasNext()) {
                IRow row = (IRow) tableIterator.next();
                Map<String, String> attachment = new HashMap<>();
                ICell[] cells = row.getCells();
                for (ICell cell : cells) {
                    if (cell.getDataType() == DataTypeConstants.TYPE_DATE) {
                        if (cell.getValue() != null) {
                            SimpleDateFormat sdf = new SimpleDateFormat("MM/dd/yyyy hh:mm:ss a zz");
                            sdf.setTimeZone(TimeZone.getTimeZone("Europe/London"));
                            attachment.put(cell.getName(), sdf.format((Date) cell.getValue()));
                        } else {
                            attachment.put(cell.getName(), cell.toString());
                        }
                    } else {
                        attachment.put(cell.getName(), cell.toString());
                    }
                }
                attachments.add(attachment);
            }
            long endTime = System.currentTimeMillis();
            String logMessage = String.format("%s: getAttachments executed in %d milliseconds", psrNumber,
                    endTime - startTime);
            System.out.println(logMessage);
            ((HashMap<String, Object>) this.data.get(psrNumber)).put("attachments", attachments);
        }

        private void getHistory() throws APIException {
            List<Map<String, String>> histories = new ArrayList<>();
            long startTime = System.currentTimeMillis();
            // Get history information
            ITable table = psr.getTable("History");
            ITwoWayIterator tableIterator = table.getTableIterator();
            while (tableIterator.hasNext()) {
                IRow row = (IRow) tableIterator.next();
                Map<String, String> history = new HashMap<>();
                ICell[] cells = row.getCells();
                for (ICell cell : cells) {
                    if (cell.getDataType() == DataTypeConstants.TYPE_DATE) {
                        if (cell.getValue() != null) {
                            SimpleDateFormat sdf = new SimpleDateFormat("MM/dd/yyyy hh:mm:ss a zz");
                            sdf.setTimeZone(TimeZone.getTimeZone("Europe/London"));
                            history.put(cell.getName(), sdf.format((Date) cell.getValue()));
                        } else {
                            history.put(cell.getName(), cell.toString());
                        }
                    } else {
                        history.put(cell.getName(), cell.toString());
                    }
                }
                histories.add(history);
            }
            long endTime = System.currentTimeMillis();
            String logMessage = String.format("%s: getHistory executed in %d milliseconds", psrNumber,
                    endTime - startTime);
            System.out.println(logMessage);
            ((HashMap<String, Object>) this.data.get(psrNumber)).put("history", histories);
        }
    }

    synchronized (data) {
        // Do something funky with the first one
        Thread t = new Thread(new GetObjectData(jsonBody.get(0).toString(), data));
        t.start();
        t.join();
        ExecutorService executor = Executors.newFixedThreadPool(10);
        for (Object object : jsonBody.subList(1, jsonBody.size() - 1)) {
            executor.execute(new Thread(new GetObjectData(object.toString(), data)));
        }
        executor.shutdown();
        while (!executor.isTerminated()) {
        }
    }

    JSONObject obj = new JSONObject();
    obj.put("data", data);
    return obj;
}
From source file:io.fabric8.tooling.archetype.generator.ArchetypeTest.java
@AfterClass
public static void afterAll() throws Exception {
    // now let invoke the projects
    final int[] resultPointer = new int[1];
    StringWriter sw = new StringWriter();
    Set<String> modules = new HashSet<String>();
    for (final String outDir : outDirs) {
        String module = new File(outDir).getName();
        if (modules.add(module)) {
            sw.append(String.format(" <module>%s</module>\n", module));
        }
    }
    sw.close();

    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    IOUtils.copy(ArchetypeTest.class.getResourceAsStream("/archetypes-test-pom.xml"), baos);
    String pom = new String(baos.toByteArray()).replace(" <!-- to be replaced -->", sw.toString());
    FileWriter modulePom = new FileWriter("target/archetypes-test-pom.xml");
    IOUtils.copy(new StringReader(pom), modulePom);
    modulePom.close();

    final String outDir = new File("target").getCanonicalPath();
    // thread locals are evil (I'm talking to you - org.codehaus.plexus.DefaultPlexusContainer#lookupRealm!)
    Thread t = new Thread(new Runnable() {
        @Override
        public void run() {
            System.out.println("Invoking projects in " + outDir);
            MavenCli maven = new MavenCli();
            resultPointer[0] = maven.doMain(new String[] { "clean", "package", "-f", "archetypes-test-pom.xml" },
                    outDir, System.out, System.out);
            System.out.println("result: " + resultPointer[0]);
        }
    });
    t.start();
    t.join();

    assertEquals("Build of project " + outDir + " failed. Result = " + resultPointer[0], 0, resultPointer[0]);
}
From source file:edu.umass.cs.gnsserver.installer.EC2Runner.java
/**
 * Starts a set of EC2 hosts running GNS that we call a runset.
 *
 * @param runSetName
 */
public static void createRunSetMulti(String runSetName) {
    int timeout = AWSEC2.DEFAULTREACHABILITYWAITTIME;
    System.out.println("EC2 User Name: " + ec2UserName);
    System.out.println("AMI Name: " + amiRecordType.toString());
    System.out.println("Datastore: " + dataStoreType.toString());
    //preferences.put(RUNSETNAME, runSetName); // store the last one
    startAllMonitoringAndGUIProcesses();
    attachShutDownHook(runSetName);

    ArrayList<Thread> threads = new ArrayList<Thread>();
    // use threads to do a bunch of installs in parallel
    do {
        hostsThatDidNotStart.clear();
        //StatusModel.getInstance().queueDeleteAllEntries(); // for gui
        int cnt = STARTINGNODENUMBER;
        for (EC2RegionSpec regionSpec : regionsList) {
            int i;
            for (i = 0; i < regionSpec.getCount(); i++) {
                threads.add(new EC2RunnerThread(runSetName, regionSpec.getRegion(), Integer.toString(cnt),
                        i == 0 ? regionSpec.getIp() : null, timeout));
                cnt = cnt + 1;
            }
        }
        for (Thread thread : threads) {
            thread.start();
        }
        // and wait for all of them to complete
        try {
            for (Thread thread : threads) {
                thread.join();
            }
        } catch (InterruptedException e) {
            System.out.println("Problem joining threads: " + e);
        }
        if (!hostsThatDidNotStart.isEmpty()) {
            System.out.println("Hosts that did not start: " + hostsThatDidNotStart.keySet());
            timeout = (int) ((float) timeout * 1.5);
            System.out.println("Maybe kill them all and try again with timeout " + timeout + "ms?");
            if (showDialog("Hosts that did not start: " + hostsThatDidNotStart.keySet()
                    + "\nKill them all and try again with with timeout " + timeout + "ms?"
                    + "\nIf you don't respond in 10 seconds this will happen.", 10000)) {
                System.out.println("Yes, kill them all and try again with timeout " + timeout + "ms.");
                terminateRunSet(runSetName);
            } else {
                terminateRunSet(runSetName);
                System.out.println("No, kill them all and quit.");
                return;
            }
        }
        threads.clear();
        // keep repeating until everything starts
    } while (!hostsThatDidNotStart.isEmpty());

    // got a complete set running... now on to step 2
    System.out.println(hostTable.toString());
    // after we know all the hosts are up, we run the last part
    System.out.println("Hosts that did not start: " + hostsThatDidNotStart.keySet());
    // write out a config file that the GNS installer can use for this set of EC2 hosts
    writeGNSINstallerConf(configName);
    removeShutDownHook();
    System.out.println("Finished creation of Run Set " + runSetName);
}
From source file:com.asakusafw.runtime.util.hadoop.ConfigurationProvider.java
private static File detectHadoopConfigurationDirectory(File command, File temporary, Map<String, String> envp)
        throws IOException {
    assert command != null;
    assert temporary != null;
    assert envp != null;
    prepareClasspath(temporary, ConfigurationDetecter.class);
    File resultOutput = new File(temporary, PATH_SUBPROC_OUTPUT);

    List<String> arguments = new ArrayList<>();
    arguments.add(command.getAbsolutePath());
    arguments.add(ConfigurationDetecter.class.getName());
    arguments.add(resultOutput.getAbsolutePath());

    ProcessBuilder processBuilder = new ProcessBuilder(arguments);
    processBuilder.environment().clear();
    processBuilder.environment().putAll(envp);
    processBuilder.environment().put(ENV_HADOOP_CLASSPATH, temporary.getPath());

    Process process = processBuilder.start();
    try {
        Thread redirectOut = redirect(process.getInputStream(), System.out);
        Thread redirectErr = redirect(process.getErrorStream(), System.err);
        try {
            int exit = process.waitFor();
            redirectOut.join();
            redirectErr.join();
            if (exit != 0) {
                throw new IOException(MessageFormat.format(
                        "Failed to execute Hadoop command (exitcode={1}): {0}",
                        arguments, String.valueOf(exit)));
            }
        } catch (InterruptedException e) {
            throw (IOException) new InterruptedIOException(
                    MessageFormat.format("Failed to execute Hadoop command (interrupted): {0}", arguments))
                            .initCause(e);
        }
    } finally {
        process.destroy();
    }
    if (resultOutput.isFile() == false) {
        throw new IOException(
                MessageFormat.format("Failed to restore Hadoop configuration path: {0}", resultOutput));
    }
    File path = ConfigurationDetecter.read(resultOutput);
    return path;
}
From source file:net.sf.sahi.util.Utils.java
public static String executeCommand(String[] command) throws Exception {
    StringBuffer sb = new StringBuffer();
    Process p = Runtime.getRuntime().exec(command);
    InputStream stdInput = p.getInputStream();
    InputStream stdError = p.getErrorStream();
    StringBuffer inBuffer = new StringBuffer();
    StringBuffer errBuffer = new StringBuffer();
    Thread inThread = new Thread(new StreamReader(stdInput, inBuffer));
    inThread.start();
    Thread errThread = new Thread(new StreamReader(stdError, errBuffer));
    errThread.start();
    p.waitFor();
    inThread.join();
    errThread.join();
    sb.append(inBuffer);
    sb.append(errBuffer);
    return sb.toString();
}