Example usage for java.lang.Process.destroy()

A list of usage examples for java.lang.Process.destroy()

Introduction

On this page you can find example usage for java.lang.Process.destroy(), drawn from real open-source projects.

Prototype

public abstract void destroy();

Document

Kills the subprocess. Whether the subprocess represented by this Process object is forcibly terminated or not is implementation dependent.
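
Before the per-project examples, here is a minimal, self-contained sketch of the typical destroy() pattern. It assumes a Unix-like system with a sleep binary on the PATH, and it uses waitFor(long, TimeUnit) and destroyForcibly(), both available since Java 8.

import java.util.concurrent.TimeUnit;

public class DestroyExample {
    public static void main(String[] args) throws Exception {
        // Start a long-running child process.
        Process p = new ProcessBuilder("sleep", "60").start();

        // Give it a moment, then ask it to terminate if it is still running.
        if (!p.waitFor(2, TimeUnit.SECONDS)) {
            p.destroy(); // graceful or forcible, depending on the platform
            // Escalate if the process ignores the request.
            if (!p.waitFor(2, TimeUnit.SECONDS)) {
                p.destroyForcibly();
            }
        }
        System.out.println("exit code: " + p.waitFor());
    }
}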

Usage

From source file:com.ibm.bi.dml.test.integration.AutomatedTestBase.java

/**
 * Runs an R script in the old or the new way.
 */
protected void runRScript(boolean newWay) {

    String executionFile = sourceDirectory + selectedTest + ".R";

    // *** HACK ALERT *** HACK ALERT *** HACK ALERT ***
    // Some of the R scripts will fail if the "expected" directory doesn't exist.
    // Make sure the directory exists.
    File expectedDir = new File(baseDirectory, "expected" + "/" + cacheDir);
    expectedDir.mkdirs();
    // *** END HACK ***

    String cmd;
    if (!newWay) {
        executionFile = executionFile + "t";
        cmd = "R -f " + executionFile;
    } else {
        // *** HACK ALERT *** HACK ALERT *** HACK ALERT ***
        // Rscript does *not* load the "methods" package by default
        // to save on start time. The "Matrix" package used in the
        // tests requires the "methods" package and should still
        // load and attach it, but in R 3.2 with the latest version
        // of the "Matrix" package, "methods" is loaded *but not
        // attached* when run with Rscript.  Therefore, we need to
        // explicitly load it with Rscript.
        cmd = rCmd.replaceFirst("Rscript",
                "Rscript --default-packages=methods,datasets,graphics,grDevices,stats,utils");
        // *** END HACK ***
    }

    if (System.getProperty("os.name").contains("Windows")) {
        cmd = cmd.replace('/', '\\');
        executionFile = executionFile.replace('/', '\\');
    }
    if (DEBUG) {
        if (!newWay) { // not sure why this condition exists
            TestUtils.printRScript(executionFile);
        }
    }
    if (!newWay) {
        ParameterBuilder.setVariablesInScript(sourceDirectory, selectedTest + ".R", testVariables);
    }

    if (cacheDir.length() > 0) {
        File expectedFile = null;
        String[] outputFiles = null;
        TestConfiguration testConfig = getTestConfiguration(selectedTest);
        if (testConfig != null) {
            outputFiles = testConfig.getOutputFiles();
        }

        if (outputFiles != null && outputFiles.length > 0) {
            expectedFile = new File(expectedDir.getPath() + "/" + outputFiles[0]);
            if (expectedFile.canRead()) {
                System.out.println("Skipping R script cmd: " + cmd);
                return;
            }
        }
    }

    try {
        long t0 = System.nanoTime();
        System.out.println("starting R script");
        System.out.println("cmd: " + cmd);
        Process child = Runtime.getRuntime().exec(cmd);

        String outputR = IOUtils.toString(child.getInputStream());
        System.out.println("Standard Output from R:" + outputR);
        String errorString = IOUtils.toString(child.getErrorStream());
        System.err.println("Standard Error from R:" + errorString);

        //
        // Wait for the child so that each stream has enough time to deliver
        // all of its data; otherwise the test can fail before everything has
        // been printed.
        //
        child.waitFor();
        //      Thread.sleep(30000);

        try {
            if (child.exitValue() != 0) {
                throw new Exception(
                        "ERROR: R has ended irregularly\n" + outputR + "\nscript file: " + executionFile);
            }
        } catch (IllegalThreadStateException ie) {
            //
            // On UNIX the JVM does not always seem to reap the child
            // correctly. R has already processed the script, though, so it
            // is safe to terminate the process here.
            //
            child.destroy();
        }

        long t1 = System.nanoTime();
        System.out.println("R is finished (in " + ((double) t1 - t0) / 1000000000 + " sec)");

    } catch (Exception e) {
        e.printStackTrace();
        StringBuilder errorMessage = new StringBuilder();
        errorMessage.append("failed to run script " + executionFile);
        errorMessage.append("\nexception: " + e.toString());
        errorMessage.append("\nmessage: " + e.getMessage());
        errorMessage.append("\nstack trace:");
        for (StackTraceElement ste : e.getStackTrace()) {
            errorMessage.append("\n>" + ste);
        }
        fail(errorMessage.toString());
    }
}
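
One caveat in the example above: IOUtils.toString(child.getInputStream()) reads stdout to EOF before stderr is touched, so a child that fills its stderr pipe first can deadlock. The hedged sketch below (the R script path is hypothetical) drains stderr on its own thread and keeps the same exitValue()/destroy() fallback:

import org.apache.commons.io.IOUtils;

public class RunRScriptSketch {
    public static void main(String[] args) throws Exception {
        Process child = Runtime.getRuntime().exec(new String[] { "Rscript", "script.R" });

        // Drain stderr concurrently so the child cannot block on a full pipe
        // while stdout is being read to EOF.
        StringBuilder err = new StringBuilder();
        Thread errDrainer = new Thread(() -> {
            try {
                err.append(IOUtils.toString(child.getErrorStream(), "UTF-8"));
            } catch (Exception ignored) {
            }
        });
        errDrainer.start();

        String out = IOUtils.toString(child.getInputStream(), "UTF-8");
        errDrainer.join();
        child.waitFor();

        try {
            if (child.exitValue() != 0) {
                throw new RuntimeException("R ended irregularly:\n" + out + "\n" + err);
            }
        } catch (IllegalThreadStateException ie) {
            // exitValue() throws if the child is somehow still running; make sure it dies.
            child.destroy();
        }
    }
}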

From source file:com.jpmorgan.cakeshop.bean.QuorumConfigBean.java

public void createKeys(final String keyName, final String destination)
        throws IOException, InterruptedException {
    constellationConfig = destination;
    File dir = new File(destination);
    Boolean createKeys = true;

    if (!dir.exists()) {
        dir.mkdirs();
    } else {
        String[] fileNames = dir.list();
        if (fileNames.length >= 4) {
            for (String fileName : fileNames) {
                if (fileName.endsWith(".key") || fileName.endsWith(".pub")) {
                    createKeys = false;
                    break;
                }
            }
        }
    }

    if (createKeys) {
        //create keys
        ProcessBuilder pb = new ProcessBuilder(getKeyGen(), destination.concat(keyName));
        Process process = pb.start();
        try (Scanner scanner = new Scanner(process.getInputStream())) {
            boolean flag = scanner.hasNext();
            try (BufferedWriter writer = new BufferedWriter(
                    new OutputStreamWriter(process.getOutputStream()))) {
                while (flag) {
                    String line = scanner.next();
                    if (line.isEmpty()) {
                        continue;
                    }
                    if (line.contains("[none]:")) {
                        writer.newLine();
                        writer.flush();
                        writer.newLine();
                        writer.flush();
                        flag = false;
                    }
                }
            }
        }

        int ret = process.waitFor();
        if (ret != 0) {
            LOG.error(
                    "Failed to generate keys. Please make sure that Berkeley DB (version 6.2.23) is installed properly.");
        } else {
            //create archive keys
            pb = new ProcessBuilder(getKeyGen(), destination.concat(keyName.concat("a")));
            process = pb.start();
            try (Scanner scanner = new Scanner(process.getInputStream())) {
                boolean flag = scanner.hasNext();
                try (BufferedWriter writer = new BufferedWriter(
                        new OutputStreamWriter(process.getOutputStream()))) {
                    while (flag) {
                        String line = scanner.next();
                        if (line.isEmpty()) {
                            continue;
                        }
                        if (line.contains("[none]:")) {
                            writer.write(" ");
                            writer.flush();
                            writer.newLine();
                            writer.flush();
                            flag = false;
                        }
                    }
                }
            }

            ret = process.waitFor();
            if (ret != 0) {
                LOG.error(
                        "Failed to generate keys. Please make sure that Berkeley DB (version 6.2.23) is installed properly.");
            }
        }

        if (process.isAlive()) {
            process.destroy();
        }
    }
}
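
The isAlive()/destroy() pair at the end (both available since Java 8) makes the cleanup a no-op when waitFor() has already reaped the child. The same guard in isolation, with a hypothetical key-generation command standing in for getKeyGen():

import java.io.IOException;

public class KeyGenCleanup {
    public static void main(String[] args) throws IOException, InterruptedException {
        // "some-keygen" is a hypothetical stand-in for getKeyGen() above.
        Process process = new ProcessBuilder("some-keygen", "/tmp/node.key").start();
        System.out.println("keygen exited with " + process.waitFor());

        // waitFor() has already reaped the child here, so this is pure insurance:
        if (process.isAlive()) {
            process.destroy();
        }
    }
}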

From source file:UnmanagedAMLauncher.java

public void launchAM(ApplicationAttemptId attemptId) throws IOException, YarnException {
    Credentials credentials = new Credentials();
    Token<AMRMTokenIdentifier> token = rmClient.getAMRMToken(attemptId.getApplicationId());
    // Service will be empty but that's okay; we are just passing the
    // AMRMToken down to the real AM, which eventually sets the correct
    // service-address.
    credentials.addToken(token.getService(), token);
    File tokenFile = File.createTempFile("unmanagedAMRMToken", "", new File(System.getProperty("user.dir")));
    try {
        FileUtil.chmod(tokenFile.getAbsolutePath(), "600");
    } catch (InterruptedException ex) {
        throw new RuntimeException(ex);
    }
    tokenFile.deleteOnExit();
    DataOutputStream os = new DataOutputStream(new FileOutputStream(tokenFile, true));
    credentials.writeTokenStorageToStream(os);
    os.close();

    Map<String, String> env = System.getenv();
    ArrayList<String> envAMList = new ArrayList<String>();
    boolean setClasspath = false;
    for (Map.Entry<String, String> entry : env.entrySet()) {
        String key = entry.getKey();
        String value = entry.getValue();
        if (key.equals("CLASSPATH")) {
            setClasspath = true;
            if (classpath != null) {
                value = value + File.pathSeparator + classpath;
            }
        }
        envAMList.add(key + "=" + value);
    }

    if (!setClasspath && classpath != null) {
        envAMList.add("CLASSPATH=" + classpath);
    }
    ContainerId containerId = ContainerId.newContainerId(attemptId, 0);

    String hostname = InetAddress.getLocalHost().getHostName();
    envAMList.add(Environment.CONTAINER_ID.name() + "=" + containerId);
    envAMList.add(Environment.NM_HOST.name() + "=" + hostname);
    envAMList.add(Environment.NM_HTTP_PORT.name() + "=0");
    envAMList.add(Environment.NM_PORT.name() + "=0");
    envAMList.add(Environment.LOCAL_DIRS.name() + "= /tmp");
    envAMList.add(ApplicationConstants.APP_SUBMIT_TIME_ENV + "=" + System.currentTimeMillis());

    envAMList.add(ApplicationConstants.CONTAINER_TOKEN_FILE_ENV_NAME + "=" + tokenFile.getAbsolutePath());

    String[] envAM = new String[envAMList.size()];
    Process amProc = Runtime.getRuntime().exec(amCmd, envAMList.toArray(envAM));

    final BufferedReader errReader = new BufferedReader(new InputStreamReader(amProc.getErrorStream()));
    final BufferedReader inReader = new BufferedReader(new InputStreamReader(amProc.getInputStream()));

    // read the error and output streams so their buffers do not fill up
    // and block the child; first, a thread to drain the error stream
    Thread errThread = new Thread() {
        @Override
        public void run() {
            try {
                String line = errReader.readLine();
                while ((line != null) && !isInterrupted()) {
                    System.err.println(line);
                    line = errReader.readLine();
                }
            } catch (IOException ioe) {
                LOG.warn("Error reading the error stream", ioe);
            }
        }
    };
    Thread outThread = new Thread() {
        @Override
        public void run() {
            try {
                String line = inReader.readLine();
                while ((line != null) && !isInterrupted()) {
                    System.out.println(line);
                    line = inReader.readLine();
                }
            } catch (IOException ioe) {
                LOG.warn("Error reading the out stream", ioe);
            }
        }
    };
    try {
        errThread.start();
        outThread.start();
    } catch (IllegalStateException ise) {
    }

    // wait for the process to finish and check the exit code
    try {
        int exitCode = amProc.waitFor();
        LOG.info("AM process exited with value: " + exitCode);
    } catch (InterruptedException e) {
        e.printStackTrace();
    } finally {
        amCompleted = true;
    }

    try {
        // make sure that the reader threads exit; on Windows these threads
        // sometimes get stuck and hang the execution, so time out and join
        // later after destroying the process
        errThread.join();
        outThread.join();
        errReader.close();
        inReader.close();
    } catch (InterruptedException ie) {
        LOG.info("ShellExecutor: Interrupted while reading the error/out stream", ie);
    } catch (IOException ioe) {
        LOG.warn("Error while closing the error/out stream", ioe);
    }
    amProc.destroy();
}
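
Note that amProc.destroy() runs unconditionally at the end: calling destroy() on a process that has already exited is harmless, so it only matters on the interrupted paths. A condensed, hedged sketch of the gobbler-then-destroy shape ("some-am-command" is purely illustrative):

import java.io.BufferedReader;
import java.io.InputStreamReader;

public class GobblerAndDestroy {
    public static void main(String[] args) throws Exception {
        Process proc = Runtime.getRuntime().exec(new String[] { "some-am-command" });

        // Drain stderr so the child cannot block on a full pipe.
        Thread errThread = new Thread(() -> {
            try (BufferedReader r = new BufferedReader(new InputStreamReader(proc.getErrorStream()))) {
                for (String line; (line = r.readLine()) != null;) {
                    System.err.println(line);
                }
            } catch (Exception e) {
                // reader failed; fall through so the process still gets destroyed
            }
        });
        errThread.start();

        try {
            System.out.println("AM process exited with value: " + proc.waitFor());
        } finally {
            errThread.join(5000); // bounded join in case the reader gets stuck
            proc.destroy();       // no-op if the process already exited
        }
    }
}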

From source file:origin.hadoop.yarn.unmanagedamlauncher.UnmanagedAMLauncher.java

public void launchAM(ApplicationAttemptId attemptId) throws IOException, YarnException {
    Credentials credentials = new Credentials();
    Token<AMRMTokenIdentifier> token = rmClient.getAMRMToken(attemptId.getApplicationId());
    // Service will be empty but that's okay; we are just passing the
    // AMRMToken down to the real AM, which eventually sets the correct
    // service-address.
    credentials.addToken(token.getService(), token);
    File tokenFile = File.createTempFile("unmanagedAMRMToken", "", new File(System.getProperty("user.dir")));
    try {
        FileUtil.chmod(tokenFile.getAbsolutePath(), "600");
    } catch (InterruptedException ex) {
        throw new RuntimeException(ex);
    }
    tokenFile.deleteOnExit();
    DataOutputStream os = new DataOutputStream(new FileOutputStream(tokenFile, true));
    credentials.writeTokenStorageToStream(os);
    os.close();

    Map<String, String> env = System.getenv();
    ArrayList<String> envAMList = new ArrayList<String>();
    boolean setClasspath = false;
    for (Map.Entry<String, String> entry : env.entrySet()) {
        String key = entry.getKey();
        String value = entry.getValue();
        if (key.equals("CLASSPATH")) {
            setClasspath = true;
            if (classpath != null) {
                value = value + File.pathSeparator + classpath;
            }
        }
        envAMList.add(key + "=" + value);
    }

    if (!setClasspath && classpath != null) {
        envAMList.add("CLASSPATH=" + classpath);
    }
    ContainerId containerId = ContainerId.newInstance(attemptId, 0);

    String hostname = InetAddress.getLocalHost().getHostName();
    envAMList.add(Environment.CONTAINER_ID.name() + "=" + containerId);
    envAMList.add(Environment.NM_HOST.name() + "=" + hostname);
    envAMList.add(Environment.NM_HTTP_PORT.name() + "=0");
    envAMList.add(Environment.NM_PORT.name() + "=0");
    envAMList.add(Environment.LOCAL_DIRS.name() + "= /tmp");
    envAMList.add(ApplicationConstants.APP_SUBMIT_TIME_ENV + "=" + System.currentTimeMillis());

    envAMList.add(ApplicationConstants.CONTAINER_TOKEN_FILE_ENV_NAME + "=" + tokenFile.getAbsolutePath());

    String[] envAM = new String[envAMList.size()];
    Process amProc = Runtime.getRuntime().exec(amCmd, envAMList.toArray(envAM));

    final BufferedReader errReader = new BufferedReader(new InputStreamReader(amProc.getErrorStream()));
    final BufferedReader inReader = new BufferedReader(new InputStreamReader(amProc.getInputStream()));

    // read the error and output streams so their buffers do not fill up
    // and block the child; first, a thread to drain the error stream
    Thread errThread = new Thread() {
        @Override
        public void run() {
            try {
                String line = errReader.readLine();
                while ((line != null) && !isInterrupted()) {
                    System.err.println(line);
                    line = errReader.readLine();
                }
            } catch (IOException ioe) {
                LOG.warn("Error reading the error stream", ioe);
            }
        }
    };
    Thread outThread = new Thread() {
        @Override
        public void run() {
            try {
                String line = inReader.readLine();
                while ((line != null) && !isInterrupted()) {
                    System.out.println(line);
                    line = inReader.readLine();
                }
            } catch (IOException ioe) {
                LOG.warn("Error reading the out stream", ioe);
            }
        }
    };
    try {
        errThread.start();
        outThread.start();
    } catch (IllegalStateException ise) {
    }

    // wait for the process to finish and check the exit code
    try {
        int exitCode = amProc.waitFor();
        LOG.info("AM process exited with value: " + exitCode);
    } catch (InterruptedException e) {
        e.printStackTrace();
    } finally {
        amCompleted = true;
    }

    try {
        // make sure that the reader threads exit; on Windows these threads
        // sometimes get stuck and hang the execution, so time out and join
        // later after destroying the process
        errThread.join();
        outThread.join();
        errReader.close();
        inReader.close();
    } catch (InterruptedException ie) {
        LOG.info("ShellExecutor: Interrupted while reading the error/out stream", ie);
    } catch (IOException ioe) {
        LOG.warn("Error while closing the error/out stream", ioe);
    }
    amProc.destroy();
}

From source file:org.apache.hive.spark.client.SparkClientImpl.java

private Thread startDriver(final RpcServer rpcServer, final String clientId, final String secret)
        throws IOException {
    Runnable runnable;
    final String serverAddress = rpcServer.getAddress();
    final String serverPort = String.valueOf(rpcServer.getPort());

    if (conf.containsKey(SparkClientFactory.CONF_KEY_IN_PROCESS)) {
        // Mostly for testing things quickly. Do not do this in production.
        // When invoked in-process, it inherits the environment variables of the parent.
        LOG.warn("!!!! Running remote driver in-process. !!!!");
        runnable = new Runnable() {
            @Override
            public void run() {
                List<String> args = Lists.newArrayList();
                args.add("--remote-host");
                args.add(serverAddress);
                args.add("--remote-port");
                args.add(serverPort);
                args.add("--client-id");
                args.add(clientId);
                args.add("--secret");
                args.add(secret);

                for (Map.Entry<String, String> e : conf.entrySet()) {
                    args.add("--conf");
                    args.add(String.format("%s=%s", e.getKey(), conf.get(e.getKey())));
                }
                try {
                    RemoteDriver.main(args.toArray(new String[args.size()]));
                } catch (Exception e) {
                    LOG.error("Error running driver.", e);
                }
            }
        };
    } else {
        // If a Spark installation is provided, use the spark-submit script. Otherwise, call the
        // SparkSubmit class directly, which has some caveats (like having to provide a proper
        // version of Guava on the classpath depending on the deploy mode).
        String sparkHome = Strings.emptyToNull(conf.get(SPARK_HOME_KEY));
        if (sparkHome == null) {
            sparkHome = Strings.emptyToNull(System.getenv(SPARK_HOME_ENV));
        }
        if (sparkHome == null) {
            sparkHome = Strings.emptyToNull(System.getProperty(SPARK_HOME_KEY));
        }
        String sparkLogDir = conf.get("hive.spark.log.dir");
        if (sparkLogDir == null) {
            if (sparkHome == null) {
                sparkLogDir = "./target/";
            } else {
                sparkLogDir = sparkHome + "/logs/";
            }
        }

        String osxTestOpts = "";
        if (Strings.nullToEmpty(System.getProperty("os.name")).toLowerCase().contains("mac")) {
            osxTestOpts = Strings.nullToEmpty(System.getenv(OSX_TEST_OPTS));
        }

        String driverJavaOpts = Joiner.on(" ").skipNulls().join("-Dhive.spark.log.dir=" + sparkLogDir,
                osxTestOpts, conf.get(DRIVER_OPTS_KEY));
        String executorJavaOpts = Joiner.on(" ").skipNulls().join("-Dhive.spark.log.dir=" + sparkLogDir,
                osxTestOpts, conf.get(EXECUTOR_OPTS_KEY));

        // Create a file with all the job properties to be read by spark-submit. Change the
        // file's permissions so that only the owner can read it. This avoids having the
        // connection secret show up in the child process's command line.
        File properties = File.createTempFile("spark-submit.", ".properties");
        if (!properties.setReadable(false) || !properties.setReadable(true, true)) {
            throw new IOException("Cannot change permissions of job properties file.");
        }
        properties.deleteOnExit();

        Properties allProps = new Properties();
        // first load the defaults from spark-defaults.conf if available
        try {
            URL sparkDefaultsUrl = Thread.currentThread().getContextClassLoader()
                    .getResource("spark-defaults.conf");
            if (sparkDefaultsUrl != null) {
                LOG.info("Loading spark defaults: " + sparkDefaultsUrl);
                allProps.load(new ByteArrayInputStream(Resources.toByteArray(sparkDefaultsUrl)));
            }
        } catch (Exception e) {
            String msg = "Exception trying to load spark-defaults.conf: " + e;
            throw new IOException(msg, e);
        }
        // then load the SparkClientImpl config
        for (Map.Entry<String, String> e : conf.entrySet()) {
            allProps.put(e.getKey(), conf.get(e.getKey()));
        }
        allProps.put(SparkClientFactory.CONF_CLIENT_ID, clientId);
        allProps.put(SparkClientFactory.CONF_KEY_SECRET, secret);
        allProps.put(DRIVER_OPTS_KEY, driverJavaOpts);
        allProps.put(EXECUTOR_OPTS_KEY, executorJavaOpts);

        String isTesting = conf.get("spark.testing");
        if (isTesting != null && isTesting.equalsIgnoreCase("true")) {
            String hiveHadoopTestClasspath = Strings.nullToEmpty(System.getenv("HIVE_HADOOP_TEST_CLASSPATH"));
            if (!hiveHadoopTestClasspath.isEmpty()) {
                String extraDriverClasspath = Strings
                        .nullToEmpty((String) allProps.get(DRIVER_EXTRA_CLASSPATH));
                if (extraDriverClasspath.isEmpty()) {
                    allProps.put(DRIVER_EXTRA_CLASSPATH, hiveHadoopTestClasspath);
                } else {
                    extraDriverClasspath = extraDriverClasspath.endsWith(File.pathSeparator)
                            ? extraDriverClasspath
                            : extraDriverClasspath + File.pathSeparator;
                    allProps.put(DRIVER_EXTRA_CLASSPATH, extraDriverClasspath + hiveHadoopTestClasspath);
                }

                String extraExecutorClasspath = Strings
                        .nullToEmpty((String) allProps.get(EXECUTOR_EXTRA_CLASSPATH));
                if (extraExecutorClasspath.isEmpty()) {
                    allProps.put(EXECUTOR_EXTRA_CLASSPATH, hiveHadoopTestClasspath);
                } else {
                    extraExecutorClasspath = extraExecutorClasspath.endsWith(File.pathSeparator)
                            ? extraExecutorClasspath
                            : extraExecutorClasspath + File.pathSeparator;
                    allProps.put(EXECUTOR_EXTRA_CLASSPATH, extraExecutorClasspath + hiveHadoopTestClasspath);
                }
            }
        }

        Writer writer = new OutputStreamWriter(new FileOutputStream(properties), Charsets.UTF_8);
        try {
            allProps.store(writer, "Spark Context configuration");
        } finally {
            writer.close();
        }

        // Define how to pass options to the child process. If launching in client (or local)
        // mode, the driver options need to be passed directly on the command line. Otherwise,
        // SparkSubmit will take care of that for us.
        String master = conf.get("spark.master");
        Preconditions.checkArgument(master != null, "spark.master is not defined.");
        String deployMode = conf.get("spark.submit.deployMode");

        List<String> argv = Lists.newLinkedList();

        if (sparkHome != null) {
            argv.add(new File(sparkHome, "bin/spark-submit").getAbsolutePath());
        } else {
            LOG.info("No spark.home provided, calling SparkSubmit directly.");
            argv.add(new File(System.getProperty("java.home"), "bin/java").getAbsolutePath());

            if (master.startsWith("local") || master.startsWith("mesos")
                    || SparkClientUtilities.isYarnClientMode(master, deployMode)
                    || master.startsWith("spark")) {
                String mem = conf.get("spark.driver.memory");
                if (mem != null) {
                    argv.add("-Xms" + mem);
                    argv.add("-Xmx" + mem);
                }

                String cp = conf.get("spark.driver.extraClassPath");
                if (cp != null) {
                    argv.add("-classpath");
                    argv.add(cp);
                }

                String libPath = conf.get("spark.driver.extraLibPath");
                if (libPath != null) {
                    argv.add("-Djava.library.path=" + libPath);
                }

                String extra = conf.get(DRIVER_OPTS_KEY);
                if (extra != null) {
                    for (String opt : extra.split("[ ]")) {
                        if (!opt.trim().isEmpty()) {
                            argv.add(opt.trim());
                        }
                    }
                }
            }

            argv.add("org.apache.spark.deploy.SparkSubmit");
        }

        if (SparkClientUtilities.isYarnClusterMode(master, deployMode)) {
            String executorCores = conf.get("spark.executor.cores");
            if (executorCores != null) {
                argv.add("--executor-cores");
                argv.add(executorCores);
            }

            String executorMemory = conf.get("spark.executor.memory");
            if (executorMemory != null) {
                argv.add("--executor-memory");
                argv.add(executorMemory);
            }

            String numOfExecutors = conf.get("spark.executor.instances");
            if (numOfExecutors != null) {
                argv.add("--num-executors");
                argv.add(numOfExecutors);
            }
        }
        // The options --principal/--keytab do not work with --proxy-user in spark-submit.sh
        // (see HIVE-15485, SPARK-5493, SPARK-19143), so Hive can only support doAs or
        // delegation token renewal, but not both. Since doAs is the more common case, if both
        // are needed we choose to favor doAs. So when doAs is enabled we use the kinit command;
        // otherwise we pass the principal/keytab to Spark to support token renewal for
        // long-running applications.
        if ("kerberos".equals(hiveConf.get(HADOOP_SECURITY_AUTHENTICATION))) {
            String principal = SecurityUtil
                    .getServerPrincipal(hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL), "0.0.0.0");
            String keyTabFile = hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB);
            if (StringUtils.isNotBlank(principal) && StringUtils.isNotBlank(keyTabFile)) {
                if (hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS)) {
                    List<String> kinitArgv = Lists.newLinkedList();
                    kinitArgv.add("kinit");
                    kinitArgv.add(principal);
                    kinitArgv.add("-k");
                    kinitArgv.add("-t");
                    kinitArgv.add(keyTabFile + ";");
                    kinitArgv.addAll(argv);
                    argv = kinitArgv;
                } else {
                    // if doAs is not enabled, we pass the principal/keytab to spark-submit in
                    // order to support possible delegation token renewal in Spark
                    argv.add("--principal");
                    argv.add(principal);
                    argv.add("--keytab");
                    argv.add(keyTabFile);
                }
            }
        }
        if (hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS)) {
            try {
                String currentUser = Utils.getUGI().getShortUserName();
                // do not do impersonation in CLI mode
                if (!currentUser.equals(System.getProperty("user.name"))) {
                    LOG.info("Attempting impersonation of " + currentUser);
                    argv.add("--proxy-user");
                    argv.add(currentUser);
                }
            } catch (Exception e) {
                String msg = "Cannot obtain username: " + e;
                throw new IllegalStateException(msg, e);
            }
        }

        argv.add("--properties-file");
        argv.add(properties.getAbsolutePath());
        argv.add("--class");
        argv.add(RemoteDriver.class.getName());

        String jar = "spark-internal";
        if (SparkContext.jarOfClass(this.getClass()).isDefined()) {
            jar = SparkContext.jarOfClass(this.getClass()).get();
        }
        argv.add(jar);

        argv.add("--remote-host");
        argv.add(serverAddress);
        argv.add("--remote-port");
        argv.add(serverPort);

        //hive.spark.* keys are passed down to the RemoteDriver via --conf,
        //as --properties-file contains the spark.* keys that are meant for SparkConf object.
        for (String hiveSparkConfKey : RpcConfiguration.HIVE_SPARK_RSC_CONFIGS) {
            String value = RpcConfiguration.getValue(hiveConf, hiveSparkConfKey);
            argv.add("--conf");
            argv.add(String.format("%s=%s", hiveSparkConfKey, value));
        }

        String cmd = Joiner.on(" ").join(argv);
        LOG.info("Running client driver with argv: {}", cmd);
        ProcessBuilder pb = new ProcessBuilder("sh", "-c", cmd);

        // Prevent hive configurations from being visible in Spark.
        pb.environment().remove("HIVE_HOME");
        pb.environment().remove("HIVE_CONF_DIR");
        // Add credential provider password to the child process's environment
        // In case of Spark the credential provider location is provided in the jobConf when the job is submitted
        String password = getSparkJobCredentialProviderPassword();
        if (password != null) {
            pb.environment().put(Constants.HADOOP_CREDENTIAL_PASSWORD_ENVVAR, password);
        }
        if (isTesting != null) {
            pb.environment().put("SPARK_TESTING", isTesting);
        }

        final Process child = pb.start();
        String threadName = Thread.currentThread().getName();
        final List<String> childErrorLog = Collections.synchronizedList(new ArrayList<String>());
        redirect("RemoteDriver-stdout-redir-" + threadName, new Redirector(child.getInputStream()));
        redirect("RemoteDriver-stderr-redir-" + threadName,
                new Redirector(child.getErrorStream(), childErrorLog));

        runnable = new Runnable() {
            @Override
            public void run() {
                try {
                    int exitCode = child.waitFor();
                    if (exitCode != 0) {
                        StringBuilder errStr = new StringBuilder();
                        synchronized (childErrorLog) {
                            Iterator iter = childErrorLog.iterator();
                            while (iter.hasNext()) {
                                errStr.append(iter.next());
                                errStr.append('\n');
                            }
                        }

                        LOG.warn("Child process exited with code {}", exitCode);
                        rpcServer.cancelClient(clientId,
                                "Child process (spark-submit) exited before connecting back with error log "
                                        + errStr.toString());
                    }
                } catch (InterruptedException ie) {
                    LOG.warn(
                            "Thread waiting on the child process (spark-submit) is interrupted, killing the child process.");
                    rpcServer.cancelClient(clientId,
                            "Thread waiting on the child porcess (spark-submit) is interrupted");
                    Thread.interrupted();
                    child.destroy();
                } catch (Exception e) {
                    String errMsg = "Exception while waiting for child process (spark-submit)";
                    LOG.warn(errMsg, e);
                    rpcServer.cancelClient(clientId, errMsg);
                }
            }
        };
    }

    Thread thread = new Thread(runnable);
    thread.setDaemon(true);
    thread.setName("Driver");
    thread.start();
    return thread;
}
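
Worth noting in the InterruptedException branch above: Thread.interrupted() clears the thread's interrupt status, whereas the more conventional Thread.currentThread().interrupt() preserves it for callers further up the stack. A hedged sketch of the interrupt-then-destroy idiom (the shell command is a placeholder for the real spark-submit invocation):

import java.io.IOException;

public class DestroyOnInterrupt {
    public static void main(String[] args) throws IOException {
        Process child = new ProcessBuilder("sh", "-c", "sleep 600").start();
        try {
            System.out.println("exit: " + child.waitFor());
        } catch (InterruptedException ie) {
            Thread.currentThread().interrupt(); // preserve the interrupt status
            child.destroy();                    // don't leave an orphaned child behind
        }
    }
}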

From source file:fr.inria.ucn.collectors.NetworkStateCollector.java

private JSONArray getIpAddr(Map<String, JSONObject> stats) throws JSONException {
    JSONArray ifaces = new JSONArray();

    // make sure the stats have been read
    networkStats();

    Process process = null;
    BufferedReader in = null;
    try {
        process = Runtime.getRuntime().exec("ip addr show");
        in = new BufferedReader(new InputStreamReader(process.getInputStream()));
        JSONObject iface = null;

        String line = null;
        while ((line = in.readLine()) != null) {
            line = line.trim();
            String[] tmp = line.split(" ");
            if (line.contains("mtu")) {
                if (iface != null)
                    ifaces.put(iface);
                iface = new JSONObject();

                String name = tmp[1].replace(":", "");
                iface.put("name", name);
                iface.put("stats", stats.get(name));

                String[] flags = tmp[2].replaceAll("[<>]", "").split(",");
                JSONArray fo = new JSONArray();
                for (String f : flags)
                    fo.put(f);
                iface.put("flags", fo);

                iface.put("mtu", Integer.parseInt(tmp[4]));
                iface.put("qdisc", tmp[6]);
                iface.put("state", tmp[8]);

            } else if (line.contains("ether")) {
                iface.put("mac", tmp[1]);
            } else if (line.startsWith("inet6")) {
                JSONObject ipv6 = new JSONObject();
                ipv6.put("ip", tmp[1].substring(0, tmp[1].indexOf('/')));
                ipv6.put("mask", tmp[1].substring(tmp[1].indexOf('/') + 1));
                ipv6.put("scope", tmp[3]);
                iface.put("ipv6", ipv6);
            } else if (line.startsWith("inet")) {
                JSONObject ipv4 = new JSONObject();
                ipv4.put("ip", tmp[1].substring(0, tmp[1].indexOf('/')));
                ipv4.put("mask", tmp[1].substring(tmp[1].indexOf('/') + 1));
                int i = 2;
                while (i < tmp.length - 1) {
                    ipv4.put(tmp[i], tmp[i + 1]);
                    i = i + 2;
                }
                iface.put("ipv4", ipv4);
            }
        }

        // last object
        if (iface != null)
            ifaces.put(iface);

    } catch (IOException e) {
        Log.d(Constants.LOGTAG, "failed to execute \"ip addr show\"", e);
    } finally {
        if (process != null)
            process.destroy();
    }

    return ifaces;
}
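
Putting destroy() in a finally block, as above, guarantees the child is terminated even when parsing its output throws. A stripped-down sketch of the same shape (the command assumes a system with the ip utility installed):

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;

public class DestroyInFinally {
    public static void main(String[] args) {
        Process process = null;
        try {
            process = Runtime.getRuntime().exec(new String[] { "ip", "addr", "show" });
            try (BufferedReader in = new BufferedReader(new InputStreamReader(process.getInputStream()))) {
                for (String line; (line = in.readLine()) != null;) {
                    System.out.println(line.trim());
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            // Runs whether parsing succeeded or threw: the child never outlives the caller.
            if (process != null) {
                process.destroy();
            }
        }
    }
}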

From source file:org.wildfly.swarm.proc.Monitor.java

/**
 * Main test execution. Spawns an external process.
 * @param iteration
 * @param file
 * @param httpCheck
 * @param collector
 */
private void runTest(int iteration, File file, String httpCheck, final Collector collector) {

    System.out.println("Testing " + file.getAbsolutePath() + ", iteration " + iteration);
    String id = file.getAbsolutePath();

    String uid = UUID.randomUUID().toString();
    Process process = null;
    int attempts = 0;

    try {
        Path workDir = Files.createDirectories(
                this.workDir.toPath().resolve(Paths.get(file.getName(), "iteration-" + iteration)));
        Path tmp = Files.createDirectory(workDir.resolve("tmp"));

        ProcessBuilder pb = new ProcessBuilder("java", "-Duid=" + uid,
                "-Djava.io.tmpdir=" + tmp.toAbsolutePath().toString(), "-jar", file.getAbsolutePath())
                        .redirectOutput(workDir.resolve("stdout.txt").toFile())
                        .redirectError(workDir.resolve("stderr.txt").toFile());

        final long s0 = System.currentTimeMillis();
        process = pb.start();

        final CloseableHttpClient httpClient = HttpClients.createDefault();

        while (true) {
            if (attempts >= NUM_CONNECTION_ATTEMPTS) {
                System.out.println("Max attempts reached, escaping sequence");
                break;
            }

            CloseableHttpResponse response = null;
            try {
                HttpGet request = new HttpGet(httpCheck);
                response = httpClient.execute(request);
                int statusCode = response.getStatusLine().getStatusCode();

                if (statusCode == 200) {
                    collector.onMeasurement(id, Measure.STARTUP_TIME,
                            (double) (System.currentTimeMillis() - s0));
                    warmup(httpClient, httpCheck);
                    measureMemory(id, uid, collector);
                    measureJarSize(id, file, collector);
                    measureTmpDirSize(id, tmp, collector);
                    break;
                } else if (statusCode == 404) {
                    // this can happen during server boot, when the HTTP endpoint is already exposed
                    // but the application is not yet deployed
                } else {
                    System.err.println("Failed to execute HTTP check: " + statusCode);
                    break;
                }
            } catch (HttpHostConnectException e) {
                // server not running yet
            } finally {
                if (response != null) {
                    response.close();
                }
            }

            attempts++;
            Thread.sleep(MS_BETWEEN_ATTEMPTS);
        }

        httpClient.close();

        final long s1 = System.currentTimeMillis();
        process.destroy();
        boolean finished = process.waitFor(2, TimeUnit.SECONDS);
        if (finished) {
            collector.onMeasurement(id, Measure.SHUTDOWN_TIME, (double) (System.currentTimeMillis() - s1));
        }
    } catch (Throwable t) {
        t.printStackTrace();
    } finally {
        if (process != null && process.isAlive()) {
            process.destroyForcibly();
            try {
                process.waitFor(2, TimeUnit.SECONDS);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
    }

}
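
This example shows the full escalation ladder: destroy() asks the process to shut down (SIGTERM on most Unixes), waitFor(2, TimeUnit.SECONDS) bounds how long to wait for a graceful exit, and destroyForcibly() is the SIGKILL-style backstop. In isolation (with sleep standing in for the server jar):

import java.util.concurrent.TimeUnit;

public class GracefulThenForcible {
    public static void main(String[] args) throws Exception {
        Process process = new ProcessBuilder("sleep", "600").start(); // stand-in for the real process

        process.destroy(); // polite shutdown request
        if (!process.waitFor(2, TimeUnit.SECONDS)) {
            process.destroyForcibly(); // backstop for a hung shutdown
            process.waitFor(2, TimeUnit.SECONDS);
        }
        System.out.println("still alive? " + process.isAlive());
    }
}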

From source file:org.apache.hadoop.yarn.applications.unmanagedamlauncher.UnmanagedAMLauncher.java

public void launchAM(ApplicationAttemptId attemptId) throws IOException, YarnException {
    Credentials credentials = new Credentials();
    Token<AMRMTokenIdentifier> token = rmClient.getAMRMToken(attemptId.getApplicationId());
    // Service will be empty but that's okay; we are just passing the
    // AMRMToken down to the real AM, which eventually sets the correct
    // service-address.
    credentials.addToken(token.getService(), token);
    File tokenFile = File.createTempFile("unmanagedAMRMToken", "", new File(System.getProperty("user.dir")));
    try {
        FileUtil.chmod(tokenFile.getAbsolutePath(), "600");
    } catch (InterruptedException ex) {
        throw new RuntimeException(ex);
    }
    tokenFile.deleteOnExit();
    DataOutputStream os = new DataOutputStream(new FileOutputStream(tokenFile, true));
    credentials.writeTokenStorageToStream(os);
    os.close();

    Map<String, String> env = System.getenv();
    ArrayList<String> envAMList = new ArrayList<String>();
    boolean setClasspath = false;
    for (Map.Entry<String, String> entry : env.entrySet()) {
        String key = entry.getKey();
        String value = entry.getValue();
        if (key.equals("CLASSPATH")) {
            setClasspath = true;
            if (classpath != null) {
                value = value + File.pathSeparator + classpath;
            }
        }
        envAMList.add(key + "=" + value);
    }

    if (!setClasspath && classpath != null) {
        envAMList.add("CLASSPATH=" + classpath);
    }
    ContainerId containerId = ContainerId.newContainerId(attemptId, 0);

    String hostname = InetAddress.getLocalHost().getHostName();
    envAMList.add(Environment.CONTAINER_ID.name() + "=" + containerId);
    envAMList.add(Environment.NM_HOST.name() + "=" + hostname);
    envAMList.add(Environment.NM_HTTP_PORT.name() + "=0");
    envAMList.add(Environment.NM_PORT.name() + "=0");
    envAMList.add(Environment.LOCAL_DIRS.name() + "= /tmp");
    envAMList.add(ApplicationConstants.APP_SUBMIT_TIME_ENV + "=" + System.currentTimeMillis());

    envAMList.add(ApplicationConstants.CONTAINER_TOKEN_FILE_ENV_NAME + "=" + tokenFile.getAbsolutePath());

    String[] envAM = new String[envAMList.size()];
    Process amProc = Runtime.getRuntime().exec(amCmd, envAMList.toArray(envAM));

    final BufferedReader errReader = new BufferedReader(
            new InputStreamReader(amProc.getErrorStream(), Charset.forName("UTF-8")));
    final BufferedReader inReader = new BufferedReader(
            new InputStreamReader(amProc.getInputStream(), Charset.forName("UTF-8")));

    // read the error and output streams so their buffers do not fill up
    // and block the child; first, a thread to drain the error stream
    Thread errThread = new Thread() {
        @Override
        public void run() {
            try {
                String line = errReader.readLine();
                while ((line != null) && !isInterrupted()) {
                    System.err.println(line);
                    line = errReader.readLine();
                }
            } catch (IOException ioe) {
                LOG.warn("Error reading the error stream", ioe);
            }
        }
    };
    Thread outThread = new Thread() {
        @Override
        public void run() {
            try {
                String line = inReader.readLine();
                while ((line != null) && !isInterrupted()) {
                    System.out.println(line);
                    line = inReader.readLine();
                }
            } catch (IOException ioe) {
                LOG.warn("Error reading the out stream", ioe);
            }
        }
    };
    try {
        errThread.start();
        outThread.start();
    } catch (IllegalStateException ise) {
    }

    // wait for the process to finish and check the exit code
    try {
        int exitCode = amProc.waitFor();
        LOG.info("AM process exited with value: " + exitCode);
    } catch (InterruptedException e) {
        e.printStackTrace();
    } finally {
        amCompleted = true;
    }

    try {
        // make sure that the reader threads exit; on Windows these threads
        // sometimes get stuck and hang the execution, so time out and join
        // later after destroying the process
        errThread.join();
        outThread.join();
        errReader.close();
        inReader.close();
    } catch (InterruptedException ie) {
        LOG.info("ShellExecutor: Interrupted while reading the error/out stream", ie);
    } catch (IOException ioe) {
        LOG.warn("Error while closing the error/out stream", ioe);
    }
    amProc.destroy();
}

From source file:com.netxforge.oss2.core.utilsII.ExecRunner.java

/**
 * The <code>exec(String, PrintWriter, PrintWriter)</code> method runs a
 * process inside of a watched thread. It returns the command's exit code
 * and feeds its STDOUT and STDERR to the passed-in streams.
 *
 * @return The command's return code
 * @param command
 *            The program or command to run
 * @param stdoutWriter
 *            java.io.PrintWriter
 * @param stderrWriter
 *            java.io.PrintWriter
 * @throws java.io.IOException
 *             thrown if a problem occurs
 * @throws java.lang.InterruptedException
 *             thrown if a problem occurs
 */
public int exec(final String command, final PrintWriter stdoutWriter, final PrintWriter stderrWriter)
        throws IOException, InterruptedException {

    // Default exit value is non-zero to indicate a problem.
    int exitVal = 1;

    // //////////////////////////////////////////////////////////////
    final Runtime rt = Runtime.getRuntime();
    Process proc;
    String[] cmd = null;

    // First get the start time & calculate comparison numbers
    final Date startTime = new Date();
    final long startTimeMs = startTime.getTime();
    final long maxTimeMs = startTimeMs + (maxRunTimeSecs * 1000);

    // //////////////////////////////////////////////////////////////
    // First determine the OS to build the right command string
    final String osName = System.getProperty("os.name");
    if (osName.equals("Windows 95") || osName.equals("Windows 98") || osName.equals("Windows ME")) {
        cmd = new String[3];
        cmd[0] = WINDOWS_9X_ME_COMMAND_1;
        cmd[1] = WINDOWS_9X_ME_COMMAND_2;
        cmd[2] = command;
    } else if (osName.contains("Windows")) { // "Windows NT", "Windows 2000", etc.
        cmd = new String[3];
        cmd[0] = WINDOWS_NT_2000_COMMAND_1;
        cmd[1] = WINDOWS_NT_2000_COMMAND_2;
        cmd[2] = command;
    } else {
        // Linux (and probably other *nixes) prefers to be called
        // with each argument supplied separately, so we first
        // tokenize the command using spaces as the boundary.
        final StringTokenizer st = new StringTokenizer(command, " ");
        cmd = new String[st.countTokens()];
        int token = 0;
        while (st.hasMoreTokens()) {
            String tokenString = st.nextToken();
            cmd[token++] = tokenString;
        }
    }

    // Execute the command and start the two output gobblers
    if (cmd != null && cmd.length > 0) {
        proc = rt.exec(cmd);
    } else {
        throw new IOException("Insufficient commands!");
    }

    final StreamGobbler outputGobbler = new StreamGobbler(proc.getInputStream(), stdoutWriter);
    final StreamGobbler errorGobbler = new StreamGobbler(proc.getErrorStream(), stderrWriter);
    outputGobbler.start();
    errorGobbler.start();

    // Wait for the program to finish running and return the
    // exit value obtained from the executable
    while (true) {

        try {
            exitVal = proc.exitValue();
            break;
        } catch (final IllegalThreadStateException e) {

            // If we get this exception, then the process isn't
            // done executing and we determine if our time is up.
            if (maxRunTimeSecs > 0) {

                final Date endTime = new Date();
                final long endTimeMs = endTime.getTime();
                if (endTimeMs > maxTimeMs) {
                    // Time's up - kill the process and the gobblers and
                    // return
                    proc.destroy();
                    maxRunTimeExceeded = true;
                    stderrWriter.println(MAX_RUN_TIME_EXCEEDED_STRING);
                    outputGobbler.quit();
                    errorGobbler.quit();
                    return exitVal;

                } else {
                    // Time is not up yet so wait 100 ms before testing
                    // again
                    Thread.sleep(POLL_DELAY_MS);
                }

            }

        }

    }

    // //////////////////////////////////////////////////////////////
    // Wait for output gobblers to finish forwarding the output
    while (outputGobbler.isAlive() || errorGobbler.isAlive()) {
    }

    // //////////////////////////////////////////////////////////////
    // All done, flush the streams and return the exit value
    stdoutWriter.flush();
    stderrWriter.flush();
    return exitVal;

}
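
Two things date this example: the exitValue()/IllegalThreadStateException polling loop predates Java 8, where waitFor(timeout, unit) expresses the same timeout directly, and the final while (outputGobbler.isAlive() || ...) loop busy-waits where join() would block without burning CPU. A hedged sketch of the modern equivalent (the command is hypothetical):

import java.util.concurrent.TimeUnit;

public class TimeoutExec {
    public static void main(String[] args) throws Exception {
        long maxRunTimeSecs = 30;
        Process proc = new ProcessBuilder("some-command").start(); // hypothetical command

        // waitFor(timeout, unit) replaces the exitValue()/sleep polling loop:
        if (!proc.waitFor(maxRunTimeSecs, TimeUnit.SECONDS)) {
            proc.destroy(); // time's up: kill the process instead of polling further
            System.err.println("MAX RUN TIME EXCEEDED");
        } else {
            System.out.println("exit value: " + proc.exitValue());
        }
    }
}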

From source file:com.att.android.arodatacollector.main.AROCollectorService.java

/**
 * Issues the kill -9 command if ffmpeg couldn't be stopped with kill -15.
 */
private void kill9Ffmpeg() {

    Process sh = null;
    DataOutputStream os = null;

    int pid = 0, exitValue = -1;
    try {
        // wait 1 second, since it takes some time for kill -15 to end ffmpeg
        Thread.sleep(1000);
        pid = mAroUtils.getProcessID("ffmpeg");

        if (pid != 0) {
            //ffmpeg still running
            if (DEBUG) {
                Log.i(TAG, "ffmpeg still running after kill -15. Will issue kill -9 " + pid);
            }

            sh = Runtime.getRuntime().exec("su");
            os = new DataOutputStream(sh.getOutputStream());
            String Command = "kill -9 " + pid + "\n";
            os.writeBytes(Command);

            Command = "exit\n";
            os.writeBytes(Command);
            os.flush();

            // clear the streams so that they don't block the process;
            // sh.getInputStream() is actually the output from the process
            StreamClearer stdoutClearer = new StreamClearer(sh.getInputStream(), "stdout", false);
            new Thread(stdoutClearer).start();
            StreamClearer stderrClearer = new StreamClearer(sh.getErrorStream(), "stderr", true);
            new Thread(stderrClearer).start();

            exitValue = sh.waitFor();
            if (exitValue == 0) {
                mVideoRecording = false;
            } else {
                Log.e(TAG, "could not kill ffmpeg in kill9Ffmpeg, exitValue=" + exitValue);
            }

        } else {
            mVideoRecording = false;
            if (DEBUG) {
                Log.i(TAG, "ffmpeg had been ended successfully by kill -15");
            }
        }
    } catch (Exception e1) {
        Log.e(TAG, "exception in kill9Ffmpeg", e1);
    } finally {
        try {
            if (os != null) {
                os.close();
            }

            if (sh != null) {
                sh.destroy();
            }
        } catch (Exception e) {
            Log.e(TAG, "exception in kill9Ffmpeg DataOutputStream close", e);
        }
    }
}
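
In this last example destroy() tears down the helper su shell, not ffmpeg itself; ffmpeg is killed by the kill -9 written to the shell's stdin. A reduced sketch of that shape (requires a rooted device or a su binary; the PID is a placeholder):

import java.io.DataOutputStream;

public class ShellHelperCleanup {
    public static void main(String[] args) throws Exception {
        Process sh = Runtime.getRuntime().exec("su");
        try (DataOutputStream os = new DataOutputStream(sh.getOutputStream())) {
            os.writeBytes("kill -9 12345\n"); // 12345 is a placeholder PID
            os.writeBytes("exit\n");
            os.flush();
            System.out.println("shell exit: " + sh.waitFor());
        } finally {
            sh.destroy(); // destroy() targets the helper shell, not the killed process
        }
    }
}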