Usage examples for java.lang.ProcessBuilder.environment()
Map<String, String> environment()
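Before the project examples below, here is a minimal, self-contained sketch of the basic pattern: obtain the builder's mutable environment map, modify it, and start the process. The command ("printenv") and the variable names are illustrative assumptions, not taken from any example on this page.

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.Map;

public class EnvironmentExample {
    public static void main(String[] args) throws IOException, InterruptedException {
        // Hypothetical command; on Windows you might use "cmd", "/c", "set" instead.
        ProcessBuilder pb = new ProcessBuilder("printenv");

        // environment() returns a mutable copy of the current process environment;
        // changes affect only processes started from this builder.
        Map<String, String> env = pb.environment();
        env.put("MY_APP_HOME", "/opt/myapp");   // illustrative variable
        env.remove("TEMP_DEBUG_FLAG");          // removals are allowed too

        pb.redirectErrorStream(true);
        Process process = pb.start();
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream()))) {
            String line;
            while ((line = reader.readLine()) != null) {
                System.out.println(line);
            }
        }
        System.out.println("Exit code: " + process.waitFor());
    }
}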
From source file:edu.cmu.cs.diamond.android.Filter.java
public Filter(int resourceId, Context context, String name, String[] args, byte[] blob) throws IOException {
    Resources r = context.getResources();
    String resourceName = r.getResourceEntryName(resourceId);
    File f = context.getFileStreamPath(resourceName);
    if (!f.exists()) {
        InputStream ins = r.openRawResource(resourceId);
        byte[] buf = IOUtils.toByteArray(ins);
        FileOutputStream fos = context.openFileOutput(resourceName, Context.MODE_PRIVATE);
        IOUtils.write(buf, fos);
        context.getFileStreamPath(resourceName).setExecutable(true);
        fos.close();
    }
    ProcessBuilder pb = new ProcessBuilder(f.getAbsolutePath());
    Map<String, String> env = pb.environment();
    tempDir = File.createTempFile("filter", null, context.getCacheDir());
    tempDir.delete(); // Delete file and create directory.
    if (!tempDir.mkdir()) {
        throw new IOException("Unable to create temporary directory.");
    }
    env.put("TEMP", tempDir.getAbsolutePath());
    env.put("TMPDIR", tempDir.getAbsolutePath());
    proc = pb.start();
    is = proc.getInputStream();
    os = proc.getOutputStream();
    sendInt(1);
    sendString(name);
    sendStringArray(args);
    sendBinary(blob);
    while (this.getNextToken().tag != TagEnum.INIT)
        ;
    Log.d(TAG, "Filter initialized.");
}
From source file:com.cisco.dvbu.ps.common.util.ScriptExecutor.java
public int executeCommand(String errorFile) {
    int exitValue = -99;
    String prefix = "ScriptExecutor::";
    String command = "";
    try {
        // Print out the command and execution directory
        for (int i = 0; i < scriptArgsList.size(); i++) {
            command = command + scriptArgsList.get(i) + " ";
        }
        if (logger.isDebugEnabled()) {
            logger.debug(prefix + "-------------------------------------------------");
            logger.debug(prefix + "Command: " + CommonUtils.maskCommand(command));
            logger.debug(prefix + "Exec Dir: " + execFromDir.toString());
        }

        // Build a new process to execute
        ProcessBuilder pb = new ProcessBuilder(scriptArgsList);

        // Setup the environment variables
        Map<String, String> env = pb.environment();
        for (int i = 0; i < envList.size(); i++) {
            String envVar = envList.get(i).toString();
            StringTokenizer st = new StringTokenizer(envVar, "=");
            if (st.hasMoreTokens()) {
                String property = st.nextToken();
                String propertyVal = "";
                try {
                    propertyVal = st.nextToken();
                } catch (Exception e) {
                }
                env.put(property, propertyVal);
                if (logger.isDebugEnabled()) {
                    logger.debug(prefix + "Env Var: " + CommonUtils.maskCommand(envVar));
                }
            }
        }
        if (logger.isDebugEnabled()) {
            logger.debug(prefix + "-------------------------------------------------");
        }

        // Setup up the execute from directory
        File execDir = new File(execFromDir);
        pb.directory(execDir);

        if (logger.isDebugEnabled()) {
            logger.debug("");
            logger.debug("ProcessBuilder::pb.command: " + CommonUtils.maskCommand(pb.command().toString()));
            logger.debug("ProcessBuilder::pb.directory: " + pb.directory().toString());
            logger.debug("ProcessBuilder::pb.directory.getAbsolutePath: " + pb.directory().getAbsolutePath());
            logger.debug("ProcessBuilder::pb.directory.getCanonicalPath: " + pb.directory().getCanonicalPath());
            logger.debug("");
            logger.debug("ProcessBuilder::pb.environment: " + CommonUtils.maskCommand(pb.environment().toString()));
            logger.debug(prefix + "-------------------------------------------------");
            logger.debug("");
        }

        // Execute the command
        Process process = pb.start();

        OutputStream stdOutput = process.getOutputStream();
        InputStream inputStream = process.getInputStream();
        InputStream errorStream = process.getErrorStream();

        inputStreamHandler = new ScriptStreamHandler(inputStream, stdOutput);
        errorStreamHandler = new ScriptStreamHandler(errorStream);

        inputStreamHandler.start();
        errorStreamHandler.start();

        exitValue = process.waitFor();
        if (logger.isDebugEnabled()) {
            logger.debug(prefix + "exitValue for process.waitFor is: " + exitValue);
        }

        if (exitValue > 0) {
            logger.error("Error executing command=" + CommonUtils.maskCommand(command));
            logger.error("Error=" + CommonUtils.maskCommand(getStandardErrorFromCommand().toString()));
        } else {
            if (logger.isInfoEnabled()) {
                logger.info("Successfully executed command:\n" + CommonUtils.maskCommand(command));
                logger.info("Output:\n" + getStandardOutputFromCommand().toString());
            }
        }
    } catch (IOException e) {
        CompositeLogger.logException(e, e.getMessage());
        throw new CompositeException(e);
    } catch (InterruptedException e) {
        CompositeLogger.logException(e, e.getMessage());
        throw new CompositeException(e);
    }
    return exitValue;
}
From source file:com.netflix.genie.server.jobmanager.impl.YarnJobManagerImpl.java
/**
 * Set up the process with specific YARN properties.
 *
 * @param processBuilder The process builder to modify.
 * @throws GenieException If there are any issues.
 */
private void setupYarnProcess(final ProcessBuilder processBuilder) throws GenieException {
    // setup the HADOOP specific variables
    final Map<String, String> processEnv = processBuilder.environment();
    processEnv.put("HADOOP_CONF_DIR", this.getJobDir() + "/conf");
    processEnv.put("HADOOP_USER_NAME", this.getJob().getUser());
    processEnv.put("HADOOP_GROUP_NAME", this.getGroupName());

    // set the variables to be added to the core-site xml. Format of this variable is:
    // key1=value1;key2=value2;key3=value3
    final String genieJobIDProp = GENIE_JOB_ID + "=" + this.getJob().getId();
    final String netflixEnvProp = EXECUTION_ENVIRONMENT + "="
            + ConfigurationManager.getConfigInstance().getString("netflix.environment");

    final String lipstickUuidPropName = ConfigurationManager.getConfigInstance()
            .getString("com.netflix.genie.server.lipstick.uuid.prop.name", "lipstick.uuid.prop.name");

    final String lipstickUuidProp;
    if (ConfigurationManager.getConfigInstance().getBoolean("com.netflix.genie.server.lipstick.enable", false)) {
        lipstickUuidProp = lipstickUuidPropName + "=" + GENIE_JOB_ID;
    } else {
        lipstickUuidProp = "";
    }

    processEnv.put("CORE_SITE_XML_ARGS", StringUtils.join(
            new String[] { genieJobIDProp, netflixEnvProp, lipstickUuidProp, },
            JobManagerImpl.SEMI_COLON));

    // if the cluster version is provided, overwrite the HADOOP_HOME
    // environment variable
    String hadoopHome;
    if (this.getCluster().getVersion() != null) {
        String hadoopVersion = this.getCluster().getVersion();
        LOG.debug("Hadoop Version of the cluster: " + hadoopVersion);

        // try extract version first
        hadoopHome = ConfigurationManager.getConfigInstance()
                .getString("com.netflix.genie.server.hadoop." + hadoopVersion + ".home");
        // if not, trim to 3 most significant digits
        if (hadoopHome == null) {
            hadoopVersion = StringUtil.trimVersion(hadoopVersion);
            hadoopHome = ConfigurationManager.getConfigInstance()
                    .getString("com.netflix.genie.server.hadoop." + hadoopVersion + ".home");
        }

        if (hadoopHome == null || !new File(hadoopHome).exists()) {
            final String msg = "This genie instance doesn't support Hadoop version: " + hadoopVersion;
            LOG.error(msg);
            throw new GenieServerException(msg);
        }

        LOG.info("Overriding HADOOP_HOME from cluster config to: " + hadoopHome);
        processEnv.put("HADOOP_HOME", hadoopHome);
    } else {
        // set the default hadoop home
        hadoopHome = ConfigurationManager.getConfigInstance().getString("com.netflix.genie.server.hadoop.home");
        if (hadoopHome == null || !new File(hadoopHome).exists()) {
            final String msg = "Property com.netflix.genie.server.hadoop.home is not set correctly";
            LOG.error(msg);
            throw new GenieServerException(msg);
        }
        processEnv.put("HADOOP_HOME", hadoopHome);
    }

    processEnv.put("CP_TIMEOUT", ConfigurationManager.getConfigInstance()
            .getString("com.netflix.genie.server.hadoop.s3cp.timeout", "1800"));

    final String copyCommand = ConfigurationManager.getConfigInstance().getString(COPY_COMMAND_KEY);
    if (StringUtils.isBlank(copyCommand)) {
        final String msg = "Required property " + COPY_COMMAND_KEY + " isn't set";
        LOG.error(msg);
        throw new GenieServerException(msg);
    }
    processEnv.put("COPY_COMMAND", copyCommand);

    // Force flag to overwrite required in Hadoop2
    processEnv.put("FORCE_COPY_FLAG", "-f");

    final String makeDirCommand = ConfigurationManager.getConfigInstance().getString(MAKE_DIRECTORY_COMMAND_KEY);
    if (StringUtils.isBlank(makeDirCommand)) {
        final String msg = "Required property " + MAKE_DIRECTORY_COMMAND_KEY + " isn't set";
        LOG.error(msg);
        throw new GenieServerException(msg);
    }
    processEnv.put("MKDIR_COMMAND", makeDirCommand);
}
From source file:azkaban.jobExecutor.utils.process.AzkabanProcess.java
/**
 * Execute this process, blocking until it has completed.
 */
public void run() throws IOException {
    if (this.isStarted() || this.isComplete()) {
        throw new IllegalStateException("The process can only be used once.");
    }

    ProcessBuilder builder = new ProcessBuilder(cmd);
    builder.directory(new File(workingDir));
    builder.environment().putAll(env);
    builder.redirectErrorStream(true);
    this.process = builder.start();
    try {
        this.processId = processId(process);
        if (processId == 0) {
            logger.debug("Spawned thread with unknown process id");
        } else {
            logger.debug("Spawned thread with process id " + processId);
        }

        this.startupLatch.countDown();

        LogGobbler outputGobbler = new LogGobbler(new InputStreamReader(process.getInputStream()), logger,
                Level.INFO, 30);
        LogGobbler errorGobbler = new LogGobbler(new InputStreamReader(process.getErrorStream()), logger,
                Level.ERROR, 30);

        outputGobbler.start();
        errorGobbler.start();

        int exitCode = -1;
        try {
            exitCode = process.waitFor();
        } catch (InterruptedException e) {
            logger.info("Process interrupted. Exit code is " + exitCode, e);
        }

        completeLatch.countDown();

        // try to wait for everything to get logged out before exiting
        outputGobbler.awaitCompletion(5000);
        errorGobbler.awaitCompletion(5000);

        if (exitCode != 0) {
            String output = new StringBuilder().append("Stdout:\n").append(outputGobbler.getRecentLog())
                    .append("\n\n").append("Stderr:\n").append(errorGobbler.getRecentLog()).append("\n")
                    .toString();
            throw new ProcessFailureException(exitCode, output);
        }
    } finally {
        IOUtils.closeQuietly(process.getInputStream());
        IOUtils.closeQuietly(process.getOutputStream());
        IOUtils.closeQuietly(process.getErrorStream());
    }
}
From source file:hudson.slaves.CommandLauncher.java
@Override
public void launch(SlaveComputer computer, final TaskListener listener) {
    EnvVars _cookie = null;
    Process _proc = null;
    try {
        Slave node = computer.getNode();
        if (node == null) {
            throw new AbortException("Cannot launch commands on deleted nodes");
        }

        listener.getLogger().println(hudson.model.Messages.Slave_Launching(getTimestamp()));
        if (getCommand().trim().length() == 0) {
            listener.getLogger().println(Messages.CommandLauncher_NoLaunchCommand());
            return;
        }
        listener.getLogger().println("$ " + getCommand());

        ProcessBuilder pb = new ProcessBuilder(Util.tokenize(getCommand()));
        final EnvVars cookie = _cookie = EnvVars.createCookie();
        pb.environment().putAll(cookie);
        pb.environment().put("WORKSPACE",
                StringUtils.defaultString(computer.getAbsoluteRemoteFs(), node.getRemoteFS())); // path for local slave log

        { // system defined variables
            String rootUrl = Jenkins.getInstance().getRootUrl();
            if (rootUrl != null) {
                pb.environment().put("HUDSON_URL", rootUrl); // for backward compatibility
                pb.environment().put("JENKINS_URL", rootUrl);
                pb.environment().put("SLAVEJAR_URL", rootUrl + "/jnlpJars/slave.jar");
            }
        }

        if (env != null) {
            pb.environment().putAll(env);
        }

        final Process proc = _proc = pb.start();

        // capture error information from stderr. this will terminate itself
        // when the process is killed.
        new StreamCopyThread("stderr copier for remote agent on " + computer.getDisplayName(),
                proc.getErrorStream(), listener.getLogger()).start();

        computer.setChannel(proc.getInputStream(), proc.getOutputStream(), listener.getLogger(),
                new Channel.Listener() {
                    @Override
                    public void onClosed(Channel channel, IOException cause) {
                        reportProcessTerminated(proc, listener);
                        try {
                            ProcessTree.get().killAll(proc, cookie);
                        } catch (InterruptedException e) {
                            LOGGER.log(Level.INFO, "interrupted", e);
                        }
                    }
                });

        LOGGER.info("slave agent launched for " + computer.getDisplayName());
    } catch (InterruptedException e) {
        e.printStackTrace(listener.error(Messages.ComputerLauncher_abortedLaunch()));
    } catch (RuntimeException e) {
        e.printStackTrace(listener.error(Messages.ComputerLauncher_unexpectedError()));
    } catch (Error e) {
        e.printStackTrace(listener.error(Messages.ComputerLauncher_unexpectedError()));
    } catch (IOException e) {
        Util.displayIOException(e, listener);

        String msg = Util.getWin32ErrorMessage(e);
        if (msg == null) {
            msg = "";
        } else {
            msg = " : " + msg;
        }
        msg = hudson.model.Messages.Slave_UnableToLaunch(computer.getDisplayName(), msg);
        LOGGER.log(Level.SEVERE, msg, e);
        e.printStackTrace(listener.error(msg));

        if (_proc != null) {
            reportProcessTerminated(_proc, listener);
            try {
                ProcessTree.get().killAll(_proc, _cookie);
            } catch (InterruptedException x) {
                x.printStackTrace(listener.error(Messages.ComputerLauncher_abortedLaunch()));
            }
        }
    }
}
From source file:org.springframework.cloud.deployer.spi.local.AbstractLocalDeployerSupport.java
/**
 * Builds the process builder.
 *
 * @param request the request
 * @param appInstanceEnv the instance environment variables
 * @param appProperties the app properties
 * @return the process builder
 */
protected ProcessBuilder buildProcessBuilder(AppDeploymentRequest request, Map<String, String> appInstanceEnv,
        Map<String, String> appProperties, Optional<Integer> appInstanceNumber) {
    Assert.notNull(request, "AppDeploymentRequest must be set");
    Assert.notNull(appProperties, "Args must be set");
    String[] commands = null;
    Map<String, String> appInstanceEnvToUse = new HashMap<>(appInstanceEnv);
    Map<String, String> appPropertiesToUse = new HashMap<>();
    handleAppPropertiesPassing(request, appProperties, appInstanceEnvToUse, appPropertiesToUse);
    if (request.getResource() instanceof DockerResource) {
        commands = this.dockerCommandBuilder.buildExecutionCommand(request, appInstanceEnvToUse,
                appPropertiesToUse, appInstanceNumber);
    } else {
        commands = this.javaCommandBuilder.buildExecutionCommand(request, appInstanceEnvToUse,
                appPropertiesToUse, appInstanceNumber);
    }

    // tweak escaping double quotes needed for windows
    if (LocalDeployerUtils.isWindows()) {
        for (int i = 0; i < commands.length; i++) {
            commands[i] = commands[i].replace("\"", "\\\"");
        }
    }

    ProcessBuilder builder = new ProcessBuilder(commands);
    if (!(request.getResource() instanceof DockerResource)) {
        builder.environment().putAll(appInstanceEnv);
    }
    retainEnvVars(builder.environment().keySet());
    return builder;
}
From source file:org.apache.hive.spark.client.SparkSubmitSparkClient.java
@Override
protected Future<Void> launchDriver(String isTesting, RpcServer rpcServer, String clientId) throws IOException {
    Callable<Void> runnable;
    String cmd = Joiner.on(" ").join(argv);
    LOG.info("Running client driver with argv: {}", cmd);
    ProcessBuilder pb = new ProcessBuilder("sh", "-c", cmd);

    // Prevent hive configurations from being visible in Spark.
    pb.environment().remove("HIVE_HOME");
    pb.environment().remove("HIVE_CONF_DIR");
    // Add credential provider password to the child process's environment
    // In case of Spark the credential provider location is provided in the jobConf when the job is submitted
    String password = getSparkJobCredentialProviderPassword();
    if (password != null) {
        pb.environment().put(Constants.HADOOP_CREDENTIAL_PASSWORD_ENVVAR, password);
    }
    if (isTesting != null) {
        pb.environment().put("SPARK_TESTING", isTesting);
    }

    final Process child = pb.start();
    String threadName = Thread.currentThread().getName();
    final List<String> childErrorLog = Collections.synchronizedList(new ArrayList<String>());
    final LogRedirector.LogSourceCallback callback = () -> isAlive;

    LogRedirector.redirect("spark-submit-stdout-redir-" + threadName,
            new LogRedirector(child.getInputStream(), LOG, callback));
    LogRedirector.redirect("spark-submit-stderr-redir-" + threadName,
            new LogRedirector(child.getErrorStream(), LOG, childErrorLog, callback));

    runnable = () -> {
        try {
            int exitCode = child.waitFor();
            if (exitCode != 0) {
                List<String> errorMessages = new ArrayList<>();
                synchronized (childErrorLog) {
                    for (String line : childErrorLog) {
                        if (StringUtils.containsIgnoreCase(line, "Error")) {
                            errorMessages.add("\"" + line + "\"");
                        }
                    }
                }

                String errStr = errorMessages.isEmpty() ? "?" : Joiner.on(',').join(errorMessages);

                rpcServer.cancelClient(clientId, new RuntimeException("spark-submit process failed "
                        + "with exit code " + exitCode + " and error " + errStr));
            }
        } catch (InterruptedException ie) {
            LOG.warn("Thread waiting on the child process (spark-submit) is interrupted, killing the child process.");
            rpcServer.cancelClient(clientId,
                    "Thread waiting on the child process (spark-submit) is interrupted");
            Thread.interrupted();
            child.destroy();
        } catch (Exception e) {
            String errMsg = "Exception while waiting for child process (spark-submit)";
            LOG.warn(errMsg, e);
            rpcServer.cancelClient(clientId, errMsg);
        }
        return null;
    };

    FutureTask<Void> futureTask = new FutureTask<>(runnable);

    Thread driverThread = new Thread(futureTask);
    driverThread.setDaemon(true);
    driverThread.setName("SparkSubmitMonitor");
    driverThread.start();

    return futureTask;
}
From source file:com.thoughtworks.gauge.maven.GaugeExecutionMojo.java
private ProcessBuilder createProcessBuilder() {
    ProcessBuilder builder = new ProcessBuilder();
    builder.command(createGaugeCommand());
    String customClasspath = createCustomClasspath();
    debug("Setting Custom classpath => %s", customClasspath);
    builder.environment().put(GAUGE_CUSTOM_CLASSPATH_ENV, customClasspath);
    return builder;
}
From source file:com.tw.go.plugin.task.GoPluginImpl.java
private int executeCommand(String workingDirectory, Map<String, String> environmentVariables, String... command)
        throws IOException, InterruptedException {
    ProcessBuilder processBuilder = new ProcessBuilder(command);
    processBuilder.directory(new File(workingDirectory));
    if (environmentVariables != null && !environmentVariables.isEmpty()) {
        processBuilder.environment().putAll(environmentVariables);
    }
    Process process = processBuilder.start();

    JobConsoleLogger.getConsoleLogger().readOutputOf(process.getInputStream());
    JobConsoleLogger.getConsoleLogger().readErrorOf(process.getErrorStream());

    return process.waitFor();
}
From source file:org.sonar.api.utils.command.CommandExecutor.java
/**
 * @throws org.sonar.api.utils.command.TimeoutException on timeout, since 4.4
 * @throws CommandException on any other error
 * @param timeoutMilliseconds any negative value means no timeout.
 * @since 3.0
 */
public int execute(Command command, StreamConsumer stdOut, StreamConsumer stdErr, long timeoutMilliseconds) {
    ExecutorService executorService = null;
    Process process = null;
    StreamGobbler outputGobbler = null;
    StreamGobbler errorGobbler = null;
    try {
        ProcessBuilder builder = new ProcessBuilder(command.toStrings(false));
        if (command.getDirectory() != null) {
            builder.directory(command.getDirectory());
        }
        builder.environment().putAll(command.getEnvironmentVariables());
        process = builder.start();

        outputGobbler = new StreamGobbler(process.getInputStream(), stdOut);
        errorGobbler = new StreamGobbler(process.getErrorStream(), stdErr);
        outputGobbler.start();
        errorGobbler.start();

        executorService = Executors.newSingleThreadExecutor();
        Future<Integer> ft = executorService.submit((Callable<Integer>) process::waitFor);
        int exitCode;
        if (timeoutMilliseconds < 0) {
            exitCode = ft.get();
        } else {
            exitCode = ft.get(timeoutMilliseconds, TimeUnit.MILLISECONDS);
        }
        waitUntilFinish(outputGobbler);
        waitUntilFinish(errorGobbler);
        verifyGobbler(command, outputGobbler, "stdOut");
        verifyGobbler(command, errorGobbler, "stdErr");
        return exitCode;

    } catch (java.util.concurrent.TimeoutException te) {
        throw new TimeoutException(command, "Timeout exceeded: " + timeoutMilliseconds + " ms", te);
    } catch (CommandException e) {
        throw e;
    } catch (Exception e) {
        throw new CommandException(command, e);
    } finally {
        if (process != null) {
            process.destroy();
        }
        waitUntilFinish(outputGobbler);
        waitUntilFinish(errorGobbler);
        closeStreams(process);
        if (executorService != null) {
            executorService.shutdown();
        }
    }
}