List of usage examples for the java.lang.ProcessBuilder constructor
public ProcessBuilder(String... command)
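Before the longer, project-specific examples below, here is a minimal self-contained sketch of the varargs constructor. The command ("ls", "-l"), the class name, and the use of redirectErrorStream/waitFor are illustrative assumptions, not taken from any of the listed sources.

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;

public class ProcessBuilderMinimalExample {
    public static void main(String[] args) throws IOException, InterruptedException {
        // Build the command as varargs; "ls -l" is just a placeholder command.
        ProcessBuilder builder = new ProcessBuilder("ls", "-l");
        // Merge stderr into stdout so a single stream can be read.
        builder.redirectErrorStream(true);
        Process process = builder.start();

        // Drain the combined output to avoid blocking the child process.
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(process.getInputStream(), StandardCharsets.UTF_8))) {
            String line;
            while ((line = reader.readLine()) != null) {
                System.out.println(line);
            }
        }

        int exitCode = process.waitFor();
        System.out.println("Exit code: " + exitCode);
    }
}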
From source file:com.kylinolap.common.util.CliCommandExecutor.java
private Pair<Integer, String> runNativeCommand(String command) throws IOException {
    String[] cmd = new String[3];
    String osName = System.getProperty("os.name");
    if (osName.startsWith("Windows")) {
        cmd[0] = "cmd.exe";
        cmd[1] = "/C";
    } else {
        cmd[0] = "/bin/bash";
        cmd[1] = "-c";
    }
    cmd[2] = command;

    ProcessBuilder builder = new ProcessBuilder(cmd);
    builder.redirectErrorStream(true);
    Process proc = builder.start();

    ByteArrayOutputStream buf = new ByteArrayOutputStream();
    IOUtils.copy(proc.getInputStream(), buf);
    String output = buf.toString("UTF-8");

    try {
        int exitCode = proc.waitFor();
        return new Pair<Integer, String>(exitCode, output);
    } catch (InterruptedException e) {
        throw new IOException(e);
    }
}
From source file:it.isislab.dmason.util.SystemManagement.Worker.Updater.java
public static void updateWithGUI(Address FTPaddress, String name, String myTopic, Address address) {
    logger.debug("Update (with GUI) command received");
    FTPIP = FTPaddress.getIPaddress();
    FTPPORT = FTPaddress.getPort();
    jarName = name;
    setSeparator();
    downloadJar(jarName, "upd"); //DOWNLOADED_JAR_PATH+SEPARATOR+

    File fDown = new File(DOWNLOADED_JAR_PATH + SEPARATOR + jarName);
    File fDest = new File(jarName);
    try {
        //FileUtils.copyFile(fDown, fDest);
        copyFile(fDown, fDest);
        try {
            ArrayList<String> command = new ArrayList<String>();
            command.add("java");
            //command.add("-jar");
            command.add("-cp");
            command.add(fDest.getAbsolutePath());
            command.add(DMasonWorkerWithGui.class.getName());
            command.add(address.getIPaddress());
            command.add(address.getPort());
            command.add(myTopic);
            command.add("update");
            logger.info("Restarting with command: " + command.toString());

            ProcessBuilder builder = new ProcessBuilder(command);
            Process process = builder.start();
        } catch (IOException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
        /*Timer timer = new Timer(4000, new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                System.exit(0);
            }
        });
        timer.start();
        */
    } catch (IOException e1) {
        // TODO Auto-generated catch block
        e1.printStackTrace();
    } catch (Exception e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
}
From source file:com.ikanow.aleph2.harvest.script.utils.ScriptUtils.java
/**
 * Creates a ProcessBuilder pointed at the given script path and adds the working dir and environment vars for you.
 * Just runs a process that does "sh <script_file_path>"
 * @param script_file_path
 * @param working_dir
 * @return
 * @throws JsonProcessingException
 * @throws ExecutionException
 * @throws InterruptedException
 */
public static ProcessBuilder createProcessBuilderForScriptFile(final String script_file_path,
        final String working_dir, final Optional<Long> test_requested_num_objects,
        final Optional<Long> test_max_runtime_s, final Map<String, String> user_args,
        final IHarvestContext context, final DataBucketBean bucket, final String aleph_global_root_path)
        throws JsonProcessingException, InterruptedException, ExecutionException {
    _logger.debug("create pb for script file: " + script_file_path);

    ArrayList<String> args = new ArrayList<String>();
    args.add("sh");
    args.add(script_file_path);
    final ProcessBuilder pb = new ProcessBuilder(args);
    pb.directory(new File(working_dir)).redirectErrorStream(true);
    pb.environment().put("JAVA_OPTS", "");
    if (test_requested_num_objects.isPresent())
        pb.environment().put(ENV_TEST_NUM_OBJ, test_requested_num_objects.get().toString());
    if (test_max_runtime_s.isPresent())
        pb.environment().put(ENV_TEST_MAX_RUNTIME_S, test_max_runtime_s.get().toString());
    //add in default env vars
    final String classpath = Stream
            .concat(context.getHarvestContextLibraries(Optional.empty()).stream(),
                    context.getHarvestLibraries(Optional.of(bucket)).get().values().stream())
            .collect(Collectors.joining(":"));
    pb.environment().put(ENV_MODULE_PATH,
            context.getHarvestContextLibraries(Optional.empty()).stream().collect(Collectors.joining(":")));
    pb.environment().put(ENV_LIBRARY_PATH, context.getHarvestLibraries(Optional.of(bucket)).get().values()
            .stream().collect(Collectors.joining(":")));
    pb.environment().put(ENV_CLASS_PATH, classpath);
    pb.environment().put(ENV_BUCKET_HDFS_PATH, aleph_global_root_path + "/data" + bucket.full_name());
    pb.environment().put(ENV_BUCKET_SIGNATURE,
            BucketUtils.getUniqueSignature(bucket.full_name(), Optional.empty()));
    pb.environment().put(ENV_BUCKET_PATH, bucket.full_name());
    pb.environment().put(ENV_BUCKET_STR, BeanTemplateUtils.toJson(bucket).toString());
    //add user args as env vars
    user_args.forEach((k, val) -> pb.environment().put(k, val));

    return pb;
}
From source file:com.ikanow.aleph2.harvest.logstash.utils.LogstashUtils.java
/** Builds a process to execute
 * @param global
 * @param bucket_config
 * @param logstash_config
 * @param requested_docs
 * @param bucket_path if this is present, will log output to /tmp/unique_sig
 * @param context
 * @return
 */
public static ProcessBuilder buildLogstashTest(final LogstashHarvesterConfigBean global,
        final LogstashBucketConfigBean bucket_config, final String logstash_config, final long requested_docs,
        final Optional<String> bucket_path) {

    final String log_file = System.getProperty("java.io.tmpdir") + File.separator
            + BucketUtils.getUniqueSignature(bucket_path.orElse("DNE"), Optional.empty());
    try {
        //(delete log file if it exists)
        new File(log_file).delete();
    } catch (Exception e) {
    }

    ArrayList<String> args = new ArrayList<String>();
    args.addAll(Arrays.asList(global.binary_path(), "-e", logstash_config));
    if (bucket_path.isPresent()) {
        args.addAll(Arrays.asList("-l", log_file));
    }
    if (0L == requested_docs) {
        args.add("-t"); // test mode, much faster
    } //TESTED
    if (bucket_config.debug_verbosity()) {
        args.add("--debug");
    } else {
        args.add("--verbose");
    }

    ProcessBuilder logstashProcessBuilder = new ProcessBuilder(args);
    logstashProcessBuilder = logstashProcessBuilder.directory(new File(global.working_dir()))
            .redirectErrorStream(true);
    logstashProcessBuilder.environment().put("JAVA_OPTS", "");

    return logstashProcessBuilder;
}
From source file:it.polimi.modaclouds.qos.linebenchmark.solver.SolutionEvaluator.java
private void runWithLQNS() {
    StopWatch timer = new StopWatch();
    String solverProgram = "lqns";
    String command = solverProgram + " " + filePath + " -f"; //using the fast option
    logger.info("Launch: " + command);
    //String command = solverProgram+" "+filePath; //without using the fast option
    try {
        ProcessBuilder pb = new ProcessBuilder(splitToCommandArray(command));
        //start counting
        timer.start();
        Process proc = pb.start();
        readStream(proc.getInputStream(), false);
        readStream(proc.getErrorStream(), true);
        int exitVal = proc.waitFor();
        //stop counting
        timer.stop();
        proc.destroy();

        //evaluation error messages
        if (exitVal == LQNS_RETURN_SUCCESS)
            ;
        else if (exitVal == LQNS_RETURN_MODEL_FAILED_TO_CONVERGE) {
            System.err.println(Main.LQNS_SOLVER + " exited with " + exitVal
                    + ": The model failed to converge. Results are most likely inaccurate. ");
            System.err.println("Analysis Result has been written to: " + resultfilePath);
        } else {
            String message = "";
            if (exitVal == LQNS_RETURN_INVALID_INPUT) {
                message = solverProgram + " exited with " + exitVal + ": Invalid Input.";
            } else if (exitVal == LQNS_RETURN_FATAL_ERROR) {
                message = solverProgram + " exited with " + exitVal + ": Fatal error";
            } else {
                message = solverProgram + " returned an unrecognised exit value " + exitVal
                        + ". Key: 0 on success, 1 if the model failed to meet the convergence criteria, 2 if the input was invalid, 4 if a command line argument was incorrect, 8 for file read/write problems and -1 for fatal errors. If multiple input files are being processed, the exit code is the bit-wise OR of the above conditions.";
            }
            System.err.println(message);
        }
    } catch (IOException | InterruptedException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }

    //tell listeners that the evaluation has been performed
    EvaluationCompletedEvent evaluationCompleted = new EvaluationCompletedEvent(this, 0, null);
    evaluationCompleted.setEvaluationTime(timer.getTime());
    evaluationCompleted.setSolverName(solver);
    evaluationCompleted.setModelPath(filePath.getFileName());
    for (ActionListener l : listeners)
        l.actionPerformed(evaluationCompleted);
}
From source file:com.github.greengerong.NgProtractor.java
private void executeCommand(final String cmd, final String logAction) {
    final Log log = getLog();
    try {
        log.info(String.format("execute %s running: %s", logAction, cmd));
        final ProcessBuilder processBuilder = new ProcessBuilder(getCommandAccordingToOS(cmd));
        final Process process = processBuilder.start();
        final String runningInfo = IOUtil.toString(process.getInputStream());
        log.info(runningInfo);
    } catch (IOException e) {
        log.warn(String.format("execute %s running script error", logAction), e);
    }
}
From source file:acromusashi.kafka.log.producer.LinuxLogTailExecutor.java
/**
 * Sends the log lines obtained by tailing to the Kafka broker.
 */
protected void sendTailedLog() {
    String[] tailCommandArgs = this.tailCommandStr.split("\\s+");
    BufferedReader tailReader = null;
    Process tailProcess = null;

    try {
        tailProcess = new ProcessBuilder(tailCommandArgs).start();
        tailReader = new BufferedReader(new InputStreamReader(tailProcess.getInputStream(), this.encoding));

        String tailedLine = null;
        List<String> tailedLineList = Lists.newArrayList();
        int count = 0;

        while ((tailedLine = tailReader.readLine()) != null) {
            tailedLineList.add(tailedLine);
            count++;

            if (count >= this.maxSendSize) {
                List<KeyedMessage<String, String>> messageList = getKeyedMessage(tailedLineList);
                tailedLineList.clear();
                this.producer.send(messageList);
                count = 0;
            }
        }

        List<KeyedMessage<String, String>> messageList = getKeyedMessage(tailedLineList);
        tailedLineList.clear();
        this.producer.send(messageList);
    } catch (Exception e) {
        logger.error("Failed while running command: " + this.tailCommandStr, e);
        if (e instanceof InterruptedException) {
            Thread.currentThread().interrupt();
        }
    } finally {
        if (tailReader != null) {
            IOUtils.closeQuietly(tailReader);
        }
        if (tailProcess != null) {
            tailProcess.destroy();
            try {
                tailProcess.waitFor();
            } catch (InterruptedException ex) {
                Thread.currentThread().interrupt();
            }
        }
    }
}
From source file:com.qhrtech.emr.launcher.TemplateLauncherManager.java
public void startUp() throws Exception {
    Process p = null;
    synchronized (eventLock) {
        doGeneration();
        if (launchCommand != null) {
            ProcessBuilder pb = new ProcessBuilder(launchCommand);
            p = pb.inheritIO().start();
        }
        commandStarted = true;
        eventLock.notifyAll();
    }
    if (p != null) {
        System.exit(p.waitFor());
    }
}
From source file:fr.inria.eventcloud.deployment.cli.launchers.EventCloudsManagementServiceDeployer.java
/**
 * Deploys an EventCloudsRegistry and an EventClouds Management Service in a
 * separate JVM according to the specified parameters.
 *
 * @param onRelease
 *            {@code true} if the latest release of the EventCloud has to
 *            be used, {@code false} to use the latest snapshot version.
 * @param port
 *            the port used to deploy the EventClouds Management Service and
 *            which will also be used to deploy WS-Notification services.
 * @param urlSuffix
 *            the suffix appended to the end of the URL associated to the
 *            EventClouds Management Service to be deployed.
 * @param activateLoggers
 *            {@code true} if the loggers have to be activated,
 *            {@code false} otherwise.
 * @param properties
 *            additional Java properties set to the new JVM.
 *
 * @return the endpoint URL of the EventClouds Management Service.
 *
 * @throws IOException
 *             if an error occurs during the deployment.
 */
public synchronized static String deploy(boolean onRelease, int port, String urlSuffix,
        boolean activateLoggers, String... properties) throws IOException {
    if (eventCloudsManagementServiceProcess == null) {
        String binariesBaseUrl = EVENTCLOUD_BINARIES_URL;

        if (onRelease) {
            binariesBaseUrl += "releases/latest/";
        } else {
            binariesBaseUrl += "snapshots/latest/";
        }

        List<String> cmd = new ArrayList<String>();

        String javaBinaryPath = System.getProperty("java.home") + File.separator + "bin" + File.separator + "java";
        if (System.getProperty("os.name").startsWith("Windows")) {
            javaBinaryPath = javaBinaryPath + ".exe";
        }
        cmd.add(javaBinaryPath);

        cmd.add("-cp");
        cmd.add(addClassPath(binariesBaseUrl + "libs/"));

        cmd.addAll(addProperties(binariesBaseUrl + "resources/", activateLoggers));
        Collections.addAll(cmd, properties);

        cmd.add(EventCloudsManagementServiceDeployer.class.getCanonicalName());
        cmd.add(Integer.toString(port));
        cmd.add(urlSuffix);

        final ProcessBuilder processBuilder = new ProcessBuilder(cmd.toArray(new String[cmd.size()]));
        processBuilder.redirectErrorStream(true);
        eventCloudsManagementServiceProcess = processBuilder.start();

        final BufferedReader reader = new BufferedReader(
                new InputStreamReader(eventCloudsManagementServiceProcess.getInputStream()));

        Thread t = new Thread(new Runnable() {
            @Override
            public void run() {
                String line = null;
                try {
                    while ((line = reader.readLine()) != null) {
                        if (!servicesDeployed.getValue() && line.contains(LOG_MANAGEMENT_WS_DEPLOYED)) {
                            servicesDeployed.setValue(true);
                            synchronized (servicesDeployed) {
                                servicesDeployed.notifyAll();
                            }
                        }
                        System.out.println("ECManagement " + line);
                    }
                } catch (IOException ioe) {
                    ioe.printStackTrace();
                }
            }
        });
        t.setDaemon(true);
        t.start();

        synchronized (servicesDeployed) {
            while (!servicesDeployed.getValue()) {
                try {
                    servicesDeployed.wait();
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
            }
        }

        StringBuilder eventCloudsManagementWsEndpoint = new StringBuilder("http://");
        eventCloudsManagementWsEndpoint.append(ProActiveInet.getInstance().getInetAddress().getHostAddress());
        eventCloudsManagementWsEndpoint.append(':');
        eventCloudsManagementWsEndpoint.append(port);
        eventCloudsManagementWsEndpoint.append('/');
        eventCloudsManagementWsEndpoint.append(urlSuffix);

        return eventCloudsManagementWsEndpoint.toString();
    } else {
        throw new IllegalStateException("EventClouds management process already deployed");
    }
}
From source file:de.tudarmstadt.ukp.csniper.webapp.search.tgrep.TgrepQuery.java
@Override
public List<EvaluationItem> execute() {
    BufferedReader brInput = null;
    BufferedReader brError = null;
    List<String> output = new ArrayList<String>();
    List<String> error = new ArrayList<String>();

    try {
        List<String> cmd = new ArrayList<String>();

        File exe = engine.getTgrepExecutable();
        if (!exe.canExecute()) {
            exe.setExecutable(true);
        }
        cmd.add(exe.getAbsolutePath());

        // specify corpus
        cmd.add("-c");
        cmd.add(engine.getCorpusPath(corpus));

        // only one match per sentence
        cmd.add("-f");

        // print options
        cmd.add("-m");
        // comment
        // full sentence
        // match begin token index
        // match end token index
        cmd.add("%c\\n%tw\\n%ym\\n%zm\\n");

        // pattern to search for
        cmd.add(query);

        if (log.isTraceEnabled()) {
            log.trace("Invoking [" + StringUtils.join(cmd, " ") + "]");
        }

        final ProcessBuilder pb = new ProcessBuilder(cmd);
        tgrep = pb.start();

        brInput = new BufferedReader(new InputStreamReader(tgrep.getInputStream(), "UTF-8"));
        brError = new BufferedReader(new InputStreamReader(tgrep.getErrorStream(), "UTF-8"));

        String line;
        while ((line = brInput.readLine()) != null) {
            if (log.isTraceEnabled()) {
                log.trace("<< " + line);
            }
            output.add(line);
        }

        while ((line = brError.readLine()) != null) {
            if (log.isErrorEnabled()) {
                log.error(line);
            }
            error.add(line);
        }

        if (!error.isEmpty()) {
            throw new IOException(StringUtils.join(error, " "));
        }
    } catch (IOException e) {
        throw new DataAccessResourceFailureException("Unable to start Tgrep process.", e);
    } finally {
        IOUtils.closeQuietly(brInput);
        IOUtils.closeQuietly(brError);
    }

    size = output.size() / LINES_PER_MATCH;

    if (maxResults >= 0 && size > maxResults) {
        return parseOutput(output.subList(0, LINES_PER_MATCH * maxResults));
    } else {
        return parseOutput(output);
    }
}