Usage examples for java.lang.ProcessBuilder#environment(), which returns the process builder's environment as a Map&lt;String, String&gt;.
To view the full source code for any example below, follow the source-file link that precedes it.
From source file:com.chinamobile.bcbsp.pipes.Application.java
/**
 * Starts the given command in a subprocess, optionally extending the child's
 * environment with extra entries.
 *
 * @param command the command and its arguments
 * @param env extra environment entries for the child process, or {@code null}
 *            to inherit the parent environment unchanged
 * @return a handle on the started process
 * @throws IOException if the subprocess cannot be started
 */
static Process runClient(List<String> command, Map<String, String> env) throws IOException {
    ProcessBuilder processBuilder = new ProcessBuilder(command);
    if (env != null) {
        // Merge caller-supplied entries on top of the inherited environment.
        processBuilder.environment().putAll(env);
    }
    Process child = processBuilder.start();
    // Kept for log parity with the original; start() either succeeds or throws.
    if (child == null) {
        LOG.info("Application : result is null");
    } else {
        LOG.info("Application : result is not null");
    }
    return child;
}
From source file:fr.ens.biologie.genomique.eoulsan.actions.HadoopExecAction.java
/**
 * Run Eoulsan in hadoop mode: repackage the application jar for Hadoop, build
 * a {@code hadoop jar ...} command line from the current CLI arguments, launch
 * it with inherited stdio, and exit this JVM with the child's exit code.
 *
 * @param workflowFile workflow file
 * @param designFile design file
 * @param hdfsPath path of data on hadoop file system
 * @param jobDescription job description (may be null; trimmed before use)
 */
private static void run(final File workflowFile, final File designFile, final String hdfsPath,
        final String jobDescription) {

    checkNotNull(workflowFile, "paramFile is null");
    checkNotNull(designFile, "designFile is null");
    checkNotNull(hdfsPath, "hdfsPath is null");

    // Write log entries
    Main.getInstance().flushLog();

    // Repackage application for Hadoop
    System.out.println("Package " + Globals.APP_NAME + " for hadoop mode...");
    final File repackagedJarFile;
    try {
        repackagedJarFile = HadoopJarRepackager.repack();
    } catch (IOException e) {
        // errorExit terminates the JVM, so the return below is unreachable.
        Common.errorExit(e, "Error while repackaging " + Globals.APP_NAME_LOWER_CASE + ": " + e.getMessage());

        // Never called
        return;
    }

    getLogger().info("Launch Eoulsan in Hadoop mode.");

    // Create command line: hadoop jar <repackaged jar> [options...] <action> <args...>
    final List<String> argsList = new ArrayList<>();

    argsList.add("hadoop");
    argsList.add("jar");
    argsList.add(repackagedJarFile.getAbsolutePath());

    // Forward log level / configuration / -s settings from this JVM's CLI.
    final Main main = Main.getInstance();

    if (main.getLogLevelArgument() != null) {
        argsList.add("-loglevel");
        argsList.add(main.getLogLevelArgument());
    }

    if (main.getConfigurationFileArgument() != null) {
        argsList.add("-conf");
        argsList.add(main.getConfigurationFileArgument());
    }

    for (String setting : main.getCommandLineSettings()) {
        argsList.add("-s");
        argsList.add(setting);
    }

    argsList.add(ExecJarHadoopAction.ACTION_NAME);

    if (jobDescription != null) {
        argsList.add("-d");
        argsList.add(jobDescription.trim());
    }

    argsList.add("-e");
    argsList.add("local hadoop cluster");
    argsList.add(workflowFile.toString());
    argsList.add(designFile.toString());
    argsList.add(hdfsPath);

    // execute Hadoop
    System.out.println("Launch " + Globals.APP_NAME + " in hadoop mode...");

    try {
        // Create the process builder for the command line; inheritIO() wires the
        // child's stdin/stdout/stderr straight to this JVM's streams.
        final ProcessBuilder builder = new ProcessBuilder(argsList).inheritIO();

        // Set the JVM arguments for Hadoop in the process builder
        builder.environment().put(HADOOP_CLIENT_OPTS_ENV, getJVMArgs());

        // Execute the hadoop jar command and block until it finishes.
        final int exitCode = builder.start().waitFor();

        // Exit with the same exit code as the child process
        System.exit(exitCode);
    } catch (IOException | InterruptedException e) {
        Common.errorExit(e, "Error while executing " + Globals.APP_NAME_LOWER_CASE + ": " + e.getMessage());
    }
}
From source file:io.mesosphere.mesos.frameworks.cassandra.executor.ProdObjectFactory.java
/**
 * Renders a {@link ProcessBuilder}'s working directory, command line and
 * environment as a human-readable multi-line string for diagnostics.
 *
 * <p>Output format (identical to the previous Guava-based implementation):
 * <pre>ProcessBuilder{
 * directory() = ...,
 * command() = word word ...,
 * environment() = KEY-&gt;VALUE
 * KEY-&gt;VALUE
 * }</pre>
 *
 * @param builder the process builder to describe; never null
 * @return the diagnostic string; never null
 */
private static String processBuilderToString(final ProcessBuilder builder) {
    // Stdlib replacement for Guava's Joiner: String.join for the command and a
    // manual "\n"-joined "key->value" rendering for the environment map.
    final StringBuilder environment = new StringBuilder();
    builder.environment().forEach((key, value) -> {
        if (environment.length() > 0) {
            environment.append('\n');
        }
        environment.append(key).append("->").append(value);
    });
    return "ProcessBuilder{\n"
            + "directory() = " + builder.directory() + ",\n"
            + "command() = " + String.join(" ", builder.command()) + ",\n"
            + "environment() = " + environment + "\n}";
}
From source file:functionaltests2.SchedulerCommandLine.java
/** * Start a Scheduler and Resource Manager. *///www. j a va 2s .c o m public static void startSchedulerCmdLine(boolean restart, File proactiveConf) throws Exception { File schedHome = new File(System.getProperty("pa.scheduler.home")).getCanonicalFile(); File rmHome = new File(System.getProperty("pa.rm.home")).getCanonicalFile(); if (proactiveConf != null) { FileUtils.copyFile(proactiveConf, new File(schedHome, "config" + fs + "proactive" + fs + "ProActiveConfiguration.xml")); } System.out.println(schedHome); p = null; ProcessBuilder pb = new ProcessBuilder(); if (OperatingSystem.getOperatingSystem().equals(OperatingSystem.unix)) { pb.directory(new File(schedHome + fs + "bin" + fs + "unix")); pb.command("/bin/bash", restart ? "scheduler-start" : "scheduler-start-clean", "-Dproactive.communication.protocol=pnp", "-Dproactive.pnp.port=9999"); pb.environment().put("SchedulerTStarter", "SchedulerTStarter"); p = pb.start(); } else { pb.directory(new File(schedHome + fs + "bin" + fs + "windows")); pb.command("cmd.exe", "/c", restart ? "scheduler-start.bat" : "scheduler-start-clean.bat", "-Dproactive.communication.protocol=pnp", "-Dproactive.pnp.port=9999"); pb.environment().put("SchedulerTStarter", "SchedulerTStarter"); p = pb.start(); } IOTools.LoggingThread lt1 = new IOTools.LoggingThread(p.getInputStream(), "[SchedulerTStarter]", System.out); Thread t1 = new Thread(lt1, "SchedulerTStarter"); t1.setDaemon(true); t1.start(); // waiting the initialization RMAuthentication rmAuth = RMConnection.waitAndJoin("pnp://localhost:9999"); System.out.println("RM successfully joined."); SchedulerConnection.waitAndJoin("pnp://localhost:9999"); System.out.println("Scheduler successfully joined."); }
From source file:com.ikanow.aleph2.harvest.logstash.utils.LogstashUtils.java
/** Builds a process to execute * @param global/*from ww w . j av a 2 s.c om*/ * @param bucket_config * @param logstash_config * @param requested_docs * @param bucket_path if this is present, will log output to /tmp/unique_sig * @param context * @return */ public static ProcessBuilder buildLogstashTest(final LogstashHarvesterConfigBean global, final LogstashBucketConfigBean bucket_config, final String logstash_config, final long requested_docs, final Optional<String> bucket_path) { final String log_file = System.getProperty("java.io.tmpdir") + File.separator + BucketUtils.getUniqueSignature(bucket_path.orElse("DNE"), Optional.empty()); try { //(delete log file if it exists) new File(log_file).delete(); } catch (Exception e) { } ArrayList<String> args = new ArrayList<String>(); args.addAll(Arrays.asList(global.binary_path(), "-e", logstash_config)); if (bucket_path.isPresent()) { args.addAll(Arrays.asList("-l", log_file)); } if (0L == requested_docs) { args.add("-t"); // test mode, must faster } //TESTED if (bucket_config.debug_verbosity()) { args.add("--debug"); } else { args.add("--verbose"); } ProcessBuilder logstashProcessBuilder = new ProcessBuilder(args); logstashProcessBuilder = logstashProcessBuilder.directory(new File(global.working_dir())) .redirectErrorStream(true); logstashProcessBuilder.environment().put("JAVA_OPTS", ""); return logstashProcessBuilder; }
From source file:at.ac.tuwien.dsg.cloud.salsa.engine.utils.SystemFunctions.java
public static int executeCommandGetReturnCode(String cmd, String workingDir, String executeFrom) { if (workingDir == null) { workingDir = "/tmp"; }//from w ww .j av a 2 s . c om logger.debug("Execute command: " + cmd + ". Working dir: " + workingDir); try { String[] splitStr = cmd.split("\\s+"); ProcessBuilder pb = new ProcessBuilder(splitStr); pb.directory(new File(workingDir)); pb = pb.redirectErrorStream(true); // this is important to redirect the error stream to output stream, prevent blocking with long output Map<String, String> env = pb.environment(); String path = env.get("PATH"); path = path + File.pathSeparator + "/usr/bin:/usr/sbin"; logger.debug("PATH to execute command: " + pb.environment().get("PATH")); env.put("PATH", path); Process p = pb.start(); BufferedReader reader = new BufferedReader(new InputStreamReader(p.getInputStream())); String line; while ((line = reader.readLine()) != null) { logger.debug(line); } p.waitFor(); int returnCode = p.exitValue(); logger.debug("Execute command done: " + cmd + ". Get return code: " + returnCode); return returnCode; } catch (InterruptedException | IOException e1) { logger.error("Error when execute command. Error: " + e1); } return -1; }
From source file:at.ac.tuwien.dsg.cloud.salsa.engine.utils.SystemFunctions.java
/** * Run a command and wait//from w w w. ja v a 2 s . co m * * @param cmd The command to run * @param workingDir The folder where the command is run * @param executeFrom For logging message to the center of where to execute the command. * @return */ public static String executeCommandGetOutput(String cmd, String workingDir, String executeFrom) { logger.debug("Execute command: " + cmd); if (workingDir == null) { workingDir = "/tmp"; } try { String[] splitStr = cmd.split("\\s+"); ProcessBuilder pb = new ProcessBuilder(splitStr); pb.directory(new File(workingDir)); pb = pb.redirectErrorStream(true); // this is important to redirect the error stream to output stream, prevent blocking with long output Map<String, String> env = pb.environment(); String path = env.get("PATH"); path = path + File.pathSeparator + "/usr/bin:/usr/sbin"; logger.debug("PATH to execute command: " + pb.environment().get("PATH")); env.put("PATH", path); Process p = pb.start(); BufferedReader reader = new BufferedReader(new InputStreamReader(p.getInputStream())); String line; StringBuffer output = new StringBuffer(); int lineCount = 0; while ((line = reader.readLine()) != null) { if (lineCount < 10) { // only get 10 lines to prevent the overflow output.append(line); } lineCount += 1; logger.debug(line); } if (lineCount >= 10) { logger.debug("... there are alot of more output here which is not shown ! ..."); } p.waitFor(); System.out.println("Execute Commang output: " + output.toString().trim()); if (p.exitValue() == 0) { logger.debug("Command exit 0, result: " + output.toString().trim()); return output.toString().trim(); } else { logger.debug("Command return non zero code: " + p.exitValue()); return null; } } catch (InterruptedException | IOException e1) { logger.error("Error when execute command. Error: " + e1); } return null; }
From source file:org.oxymores.chronix.engine.RunnerShell.java
/**
 * Runs the job described by {@code rd} in a subprocess and collects its output
 * into a {@link RunResult}: a small head sample ({@code logStart}), an optional
 * fuller log (first MAX_RETURNED_BIG_LOG_LINES lines plus a bounded tail), an
 * optional on-disk log file, and any environment variables the job emits via
 * lines of the form {@code set NAME=value}.
 *
 * @param rd command, environment and metadata for the job to run
 * @param logFilePath where to write the full log when storeLogFile is true
 * @param storeLogFile when true, write every output line to logFilePath (UTF-8)
 * @param returnFullerLog when true, also populate res.fullerLog (head + tail)
 * @return the populated RunResult; on IOException, returnCode is -1 and
 *         logStart holds the exception message
 */
public static RunResult run(RunDescription rd, String logFilePath, boolean storeLogFile,
        boolean returnFullerLog) {
    RunResult res = new RunResult();
    Process p;
    String nl = System.getProperty("line.separator");
    // Matches "set NAME=value" lines used by jobs to export new variables.
    Pattern pat = Pattern.compile("^set ([a-zA-Z]+[a-zA-Z0-9]*)=(.+)");
    // Dummy initial input; the matcher is reset on each real line below.
    Matcher matcher = pat.matcher("Testing123Testing");
    String encoding = getEncoding(rd);
    log.debug("Encoding is " + encoding);

    // ///////////////////////////
    // Build command
    List<String> argsStrings = buildCommand(rd);

    // /////////////////////////////////////////////////////////////////////////
    // Create a process builder with the command line contained in the array
    ProcessBuilder pb = new ProcessBuilder(argsStrings);

    // Mix stdout and stderr (easier to put errors in context this way)
    pb.redirectErrorStream(true);

    // Create array containing environment
    Map<String, String> env = pb.environment();
    for (int i = 0; i < rd.getEnvNames().size(); i++) {
        env.put(rd.getEnvNames().get(i), rd.getEnvValues().get(i));
    }

    BufferedReader br = null;
    Writer output = null;
    try {
        // Start!
        log.debug("GO (" + rd.getSubMethod() + ")");
        p = pb.start();

        // Read output (err & out), write it to file
        InputStreamReader isr = new InputStreamReader(p.getInputStream(), encoding);
        br = new BufferedReader(isr);

        String line = null;
        int i = 0;
        // Bounded ring buffer holding the last MAX_RETURNED_BIG_LOG_END_LINES
        // lines once the fuller-log head budget is exhausted.
        LinkedHashMap<Integer, String> endBuffer = new LinkedHashMap<Integer, String>() {
            private static final long serialVersionUID = -6773540176968046737L;

            @Override
            protected boolean removeEldestEntry(java.util.Map.Entry<Integer, String> eldest) {
                return this.size() > Constants.MAX_RETURNED_BIG_LOG_END_LINES;
            }
        };

        if (storeLogFile) {
            output = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(logFilePath), "UTF-8"));
        }

        line = br.readLine();
        while (line != null) {
            i++;

            // Local log file gets all lines
            if (storeLogFile) {
                output.write(line + nl);
            }

            // Small log gets first 500 lines or 10000 characters (the smaller of the two)
            if (i < Constants.MAX_RETURNED_SMALL_LOG_LINES
                    && res.logStart.length() < Constants.MAX_RETURNED_SMALL_LOG_CHARACTERS) {
                res.logStart += nl + line;
            }

            // Scheduler internal log gets first line only
            if (i == 1) {
                log.debug(String.format("Job running. First line of output is: %s", line));
            }

            // Fuller log gets first 10k lines, then last 1k lines.
            if (returnFullerLog) {
                if (i < Constants.MAX_RETURNED_BIG_LOG_LINES) {
                    res.fullerLog += line;
                } else {
                    endBuffer.put(i, line);
                }
            }

            // Analysis: there may be a new variable definition in the line
            matcher.reset(line);
            if (matcher.find()) {
                log.debug("Key detected :" + matcher.group(1));
                log.debug("Value detected :" + matcher.group(2));
                res.newEnvVars.put(matcher.group(1), matcher.group(2));
            }

            line = br.readLine();
        }
        IOUtils.closeQuietly(br);

        // Tail fits inside the end buffer: append it without a truncation notice.
        if (i > Constants.MAX_RETURNED_BIG_LOG_LINES
                && i < Constants.MAX_RETURNED_BIG_LOG_LINES + Constants.MAX_RETURNED_BIG_LOG_END_LINES
                && returnFullerLog) {
            res.fullerLog += Arrays.toString(endBuffer.entrySet().toArray());
        }
        // Output exceeded head + tail budgets: mark the gap explicitly.
        if (i >= Constants.MAX_RETURNED_BIG_LOG_LINES + Constants.MAX_RETURNED_BIG_LOG_END_LINES
                && returnFullerLog) {
            res.fullerLog += "\n\n\n*******\n LOG TRUNCATED - See full log on server\n********\n\n\n"
                    + Arrays.toString(endBuffer.entrySet().toArray());
        }

        // Done: close log file
        if (storeLogFile) {
            IOUtils.closeQuietly(output);
            File f = new File(logFilePath);
            res.logSizeBytes = f.length();
        }
    } catch (IOException e) {
        log.error("error occurred while running job", e);
        res.logStart = e.getMessage();
        res.returnCode = -1;
        IOUtils.closeQuietly(br);
        IOUtils.closeQuietly(output);
        return res;
    }

    // Return
    // NOTE(review): exitValue() assumes the process has ended once its output
    // stream hit EOF — usually true, but there is no explicit waitFor(); confirm.
    res.returnCode = p.exitValue();
    res.logPath = logFilePath;
    res.envtUser = System.getProperty("user.name");
    try {
        res.envtServer = InetAddress.getLocalHost().getHostName();
    } catch (UnknownHostException e) {
        res.envtServer = "unknown";
    }
    log.info(String.format("Job ended, RC is %s", res.returnCode));
    return res;
}
From source file:edu.uci.ics.asterix.event.service.AsterixEventServiceUtil.java
public static String executeLocalScript(String path, List<String> args) throws Exception { List<String> pargs = new ArrayList<String>(); pargs.add("/bin/bash"); pargs.add(path);//w w w . ja v a 2s .com if (args != null) { pargs.addAll(args); } ProcessBuilder pb = new ProcessBuilder(pargs); pb.environment().putAll(EventDriver.getEnvironment()); pb.environment().put("IP_LOCATION", EventDriver.CLIENT_NODE.getClusterIp()); Process p = pb.start(); BufferedInputStream bis = new BufferedInputStream(p.getInputStream()); StringWriter writer = new StringWriter(); IOUtils.copy(bis, writer, "UTF-8"); return writer.toString(); }
From source file:at.ac.tuwien.dsg.cloud.salsa.engine.utils.SystemFunctions.java
/** * Run a command and wait// w w w . j a va2 s. c o m * * @param cmd The command to run * @param workingDir The folder where the command is run * @param executeFrom For logging message to the center of where to execute the command. * @return */ public static Process executeCommandAndForget(String cmd, String workingDir, String executeFrom) { logger.debug("Execute command: " + cmd); if (workingDir == null) { workingDir = "/tmp"; } String[] splitStr = cmd.split("\\s+"); ProcessBuilder pb = new ProcessBuilder(splitStr); pb.directory(new File(workingDir)); pb = pb.redirectErrorStream(true); // this is important to redirect the error stream to output stream, prevent blocking with long output pb.redirectOutput(new File("/tmp/salsa.conductor.log")); Map<String, String> env = pb.environment(); String path = env.get("PATH"); path = path + File.pathSeparator + "/usr/bin:/usr/sbin"; logger.debug("PATH to execute command: " + pb.environment().get("PATH")); env.put("PATH", path); Process p; try { p = pb.start(); return p; } catch (IOException ex) { logger.debug("Cannot run the command: " + cmd); return null; } }