Example usage for java.lang.Process.exitValue()

Introduction

On this page you can find usage examples for java.lang.Process.exitValue().

Prototype

public abstract int exitValue();

Document

Returns the exit value for the process.
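
A minimal, self-contained sketch of that contract: exitValue() returns immediately, and throws IllegalThreadStateException if the subprocess has not yet terminated (the sketch assumes a POSIX sleep command on the PATH):

import java.io.IOException;

public class ExitValueDemo {
    public static void main(String[] args) throws IOException {
        Process p = new ProcessBuilder("sleep", "5").start();
        try {
            // exitValue() does not block; it throws if the process is still alive
            int code = p.exitValue();
            System.out.println("exit code: " + code);
        } catch (IllegalThreadStateException e) {
            System.out.println("process is still running");
        }
    }
}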

Usage

From source file:org.cloudifysource.shell.installer.LocalhostGridAgentBootstrapper.java

private void runCommand(final String[] command, final String[] args, final String securityProfile,
        final String securityFilePath, final String keystoreFilePath, final String keystorePassword)
        throws CLIException, InterruptedException {

    final File directory = new File(Environment.getHomeDirectory(), "/bin").getAbsoluteFile();

    // gs-agent.sh/bat need full path
    command[command.length - 1] = new File(directory, command[command.length - 1]).getAbsolutePath();

    final List<String> commandLine = new ArrayList<String>();
    commandLine.addAll(Arrays.asList(command));
    commandLine.addAll(Arrays.asList(args));

    final String commandString = StringUtils.collectionToDelimitedString(commandLine, " ");
    final File filename = createScript(commandString);
    final ProcessBuilder pb = new ProcessBuilder().command(filename.getAbsolutePath()).directory(directory);

    String localCloudOptions = "-Xmx" + CloudifyConstants.DEFAULT_LOCALCLOUD_GSA_GSM_ESM_LUS_MEMORY_IN_MB + "m"
            + " -D" + CloudifyConstants.LUS_PORT_CONTEXT_PROPERTY + "=" + lusPort + " -D"
            + GSM_EXCLUDE_GSC_ON_FAILED_INSTANCE + "=" + GSM_EXCLUDE_GSC_ON_FAILED_INSTACE_BOOL + " "
            + GSM_PENDING_REQUESTS_DELAY + " -D" + ZONES_PROPERTY + "=" + LOCALCLOUD_GSA_ZONES + " -D"
            + CloudifyConstants.SYSTEM_PROPERTY_ESM_DISCOVERY_POLLING_INTERVAL_SECONDS + "=1";

    final Map<String, String> environment = pb.environment();
    if (lookupGroups != null) {
        environment.put("LOOKUPGROUPS", lookupGroups);
    }

    if (lookupLocators != null) {
        final String disableMulticast = "-Dcom.gs.multicast.enabled=false";
        environment.put("LOOKUPLOCATORS", lookupLocators);
        localCloudOptions += " " + disableMulticast;
    }

    if (isLocalCloud) {
        logger.fine("Setting env vars COMPONENT_JAVA_OPTIONS: " + localCloudOptions);
        environment.put("COMPONENT_JAVA_OPTIONS", localCloudOptions);
        environment.put(CloudifyConstants.GIGASPACES_CLOUD_HARDWARE_ID, "localcloud");
        environment.put(CloudifyConstants.GIGASPACES_CLOUD_IMAGE_ID, "localcloud");
        environment.put(CloudifyConstants.GIGASPACES_CLOUD_TEMPLATE_NAME, "localcloud");
        environment.put(CloudifyConstants.GIGASPACES_CLOUD_MACHINE_ID, "localcloud");
        final String springProfiles = createSpringProfilesList(securityProfile);
        environment.put(CloudifyConstants.SPRING_ACTIVE_PROFILE_ENV_VAR, springProfiles);
        if (ShellUtils.isSecureConnection(securityProfile)) {
            environment.put(CloudifyConstants.KEYSTORE_FILE_ENV_VAR, keystoreFilePath);
            environment.put(CloudifyConstants.KEYSTORE_PASSWORD_ENV_VAR, keystorePassword);
        }
        environment.put(CloudifyConstants.SPRING_SECURITY_CONFIG_FILE_ENV_VAR, securityFilePath);
        if (nicAddress != null) {
            environment.put(CloudifyConstants.GIGASPACES_AGENT_ENV_PRIVATE_IP, nicAddress);
            environment.put(CloudifyConstants.GIGASPACES_AGENT_ENV_PUBLIC_IP, nicAddress);

            environment.put(CloudifyConstants.CLOUDIFY_AGENT_ENV_PRIVATE_IP, nicAddress);
            environment.put(CloudifyConstants.CLOUDIFY_AGENT_ENV_PUBLIC_IP, nicAddress);
            environment.put("NIC_ADDR", nicAddress);

        }
    }

    // start process
    // there is no need to redirect output, since the process suppresses
    // output
    try {
        logger.fine("Executing command: " + commandString);
        final Process proc = pb.start();
        Thread.sleep(MIN_PROC_ERROR_TIME);
        try {
            // The assumption is that if the script contains errors,
            // the process will have terminated by the end of the above
            // sleep period.
            if (proc.exitValue() != 0) {
                String errorMessage = "Error while starting agent. "
                        + "Please make sure that another agent is not already running. ";
                if (verbose) {
                    errorMessage = errorMessage.concat("Command executed: " + commandString);
                }
                throw new CLIException(errorMessage);
            }
            // The process is still running. We assume the agent script
            // is running fine.
        } catch (final IllegalThreadStateException e) {
            logger.fine("agent is starting...");
        }
    } catch (final IOException e) {
        throw new CLIException("Error while starting agent", e);
    }
}
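
The example above sleeps for a fixed grace period and then probes exitValue(), treating IllegalThreadStateException as "still starting". On Java 8+ the same early-failure check can be written with Process.waitFor(timeout, unit), which makes the subsequent exitValue() call unambiguous. A minimal sketch reusing pb, logger and MIN_PROC_ERROR_TIME from the example (assuming the constant is in milliseconds); it requires java.util.concurrent.TimeUnit:

final Process proc = pb.start();
if (proc.waitFor(MIN_PROC_ERROR_TIME, TimeUnit.MILLISECONDS)) {
    // the script already terminated -- exitValue() cannot throw here
    if (proc.exitValue() != 0) {
        throw new CLIException("Error while starting agent. "
                + "Please make sure that another agent is not already running.");
    }
} else {
    // still running after the grace period; assume the agent is starting fine
    logger.fine("agent is starting...");
}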

From source file:net.sf.jasperreports.customvisualization.export.CVElementPhantomJSImageDataProvider.java

/**
 * Executes a command within the given timeout.
 *
 * @param args             the command and its arguments, one per element
 * @param currentDirectory the working directory for the process
 * @param timeout          the timeout after which the process is killed
 */
private static void runCommand(String[] args, File currentDirectory, final int timeout) {
    Thread loggingThread = null;
    Thread interruptingThread = null;

    try {
        String cmd = "";
        for (String arg : args) {
            cmd += " " + arg;
        }

        if (log.isDebugEnabled()) {
            log.debug("Executing external command: " + cmd);
        }
        //System.out.println(cmd);

        ProcessBuilder pb = new ProcessBuilder(Arrays.asList(args));
        pb.directory(currentDirectory);

        final Process externalProcess = pb.start();
        final StringBuilder processOutput = new StringBuilder();

        final boolean[] success = new boolean[1];
        success[0] = false;

        loggingThread = new Thread(new Runnable() {
            @Override
            public void run() {
                BufferedReader br = null;
                try {
                    br = new BufferedReader(new InputStreamReader(externalProcess.getInputStream()));
                    String line;
                    while ((line = br.readLine()) != null) {
                        processOutput.append(line).append("\n");

                        if (line.indexOf("SCRIPT_SUCCESS") >= 0) {
                            success[0] = true;
                            killProcess(externalProcess, 100);
                        } else if (line.indexOf("SCRIPT_ERROR") >= 0) {
                            success[0] = false;
                            killProcess(externalProcess, 100);
                        }
                    }

                    if (log.isDebugEnabled()) {
                        log.debug("External process output:\n" + processOutput.toString());
                    }
                } catch (IOException e) {
                    if (log.isDebugEnabled()) {
                        log.debug(e.getMessage());
                    }
                } finally {
                    if (br != null) {
                        try {
                            br.close();
                        } catch (IOException e) {
                            if (log.isWarnEnabled()) {
                                log.warn("Failed to close phantomjs process' inputstream", e);
                            }
                        }
                    }
                }
            }
        });

        interruptingThread = new Thread(new Runnable() {
            @Override
            public void run() {
                if (killProcess(externalProcess, timeout)) {
                    success[0] = false;
                }
            }

        });
        loggingThread.start();
        interruptingThread.start();
        externalProcess.waitFor();

        // An abnormal exit does not matter if the phantomjs process has already succeeded in producing the desired output.
        if (externalProcess.exitValue() != 0 && !success[0]) {
            // FIXME we should do loggingThread.join(millis) because the
            // process might end before its output is fully processed

            throw new JRRuntimeException("External process did not end properly; exit value: "
                    + externalProcess.exitValue()
                    + (processOutput.length() > 0 ? "; process output:\n" + processOutput + "\n" : "."));
        }

    } catch (IOException e) {
        throw new JRRuntimeException(e);
    } catch (InterruptedException e) {
        throw new JRRuntimeException(e);
    } finally {

        if (interruptingThread != null && interruptingThread.isAlive()) {
            try {
                interruptingThread.interrupt();
            } catch (Exception ex) {
            }
        }
        if (loggingThread != null && loggingThread.isAlive()) {
            try {
                loggingThread.interrupt();
            } catch (Exception ex) {
            }
        }
    }
}
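
killProcess is referenced in the example but not shown. A hypothetical sketch of what such a helper might look like on Java 8+ (the actual JasperReports implementation may differ): wait up to the given timeout, then destroy the process if it is still alive, returning true only when a kill was necessary. It requires java.util.concurrent.TimeUnit:

// Hypothetical helper -- not the actual JasperReports code.
private static boolean killProcess(Process process, int timeoutMillis) {
    try {
        if (process.waitFor(timeoutMillis, TimeUnit.MILLISECONDS)) {
            return false;                       // exited on its own
        }
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();     // preserve the interrupt flag
    }
    process.destroy();                          // still alive: ask it to terminate
    return true;
}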

From source file:com.thinkbiganalytics.nifi.v2.spark.ExecuteSparkJob.java

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    final ComponentLog logger = getLog();
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    String PROVENANCE_JOB_STATUS_KEY = "Job Status";
    String PROVENANCE_SPARK_EXIT_CODE_KEY = "Spark Exit Code";

    try {

        PROVENANCE_JOB_STATUS_KEY = context.getName() + " Job Status";
        PROVENANCE_SPARK_EXIT_CODE_KEY = context.getName() + " Spark Exit Code";

        /* Configuration parameters for spark launcher */
        String appJar = context.getProperty(APPLICATION_JAR).evaluateAttributeExpressions(flowFile).getValue()
                .trim();
        String extraJars = context.getProperty(EXTRA_JARS).evaluateAttributeExpressions(flowFile).getValue();
        String yarnQueue = context.getProperty(YARN_QUEUE).evaluateAttributeExpressions(flowFile).getValue();
        String mainClass = context.getProperty(MAIN_CLASS).evaluateAttributeExpressions(flowFile).getValue()
                .trim();
        String sparkMaster = context.getProperty(SPARK_MASTER).evaluateAttributeExpressions(flowFile).getValue()
                .trim();
        String appArgs = context.getProperty(MAIN_ARGS).evaluateAttributeExpressions(flowFile).getValue()
                .trim();
        String driverMemory = context.getProperty(DRIVER_MEMORY).evaluateAttributeExpressions(flowFile)
                .getValue();
        String executorMemory = context.getProperty(EXECUTOR_MEMORY).evaluateAttributeExpressions(flowFile)
                .getValue();
        String numberOfExecutors = context.getProperty(NUMBER_EXECUTORS).evaluateAttributeExpressions(flowFile)
                .getValue();
        String sparkApplicationName = context.getProperty(SPARK_APPLICATION_NAME)
                .evaluateAttributeExpressions(flowFile).getValue();
        String executorCores = context.getProperty(EXECUTOR_CORES).evaluateAttributeExpressions(flowFile)
                .getValue();
        String networkTimeout = context.getProperty(NETWORK_TIMEOUT).evaluateAttributeExpressions(flowFile)
                .getValue();
        String principal = context.getProperty(kerberosPrincipal).getValue();
        String keyTab = context.getProperty(kerberosKeyTab).getValue();
        String hadoopConfigurationResources = context.getProperty(HADOOP_CONFIGURATION_RESOURCES).getValue();
        String sparkConfs = context.getProperty(SPARK_CONFS).evaluateAttributeExpressions(flowFile).getValue();
        String extraFiles = context.getProperty(EXTRA_SPARK_FILES).evaluateAttributeExpressions(flowFile)
                .getValue();
        Integer sparkProcessTimeout = context.getProperty(PROCESS_TIMEOUT)
                .evaluateAttributeExpressions(flowFile).asTimePeriod(TimeUnit.SECONDS).intValue();
        String datasourceIds = context.getProperty(DATASOURCES).evaluateAttributeExpressions(flowFile)
                .getValue();
        MetadataProviderService metadataService = context.getProperty(METADATA_SERVICE)
                .asControllerService(MetadataProviderService.class);

        String[] confs = null;
        if (!StringUtils.isEmpty(sparkConfs)) {
            confs = sparkConfs.split("\\|");
        }

        String[] args = null;
        if (!StringUtils.isEmpty(appArgs)) {
            args = appArgs.split(",");
        }

        final List<String> extraJarPaths = new ArrayList<>();
        if (!StringUtils.isEmpty(extraJars)) {
            extraJarPaths.addAll(Arrays.asList(extraJars.split(",")));
        } else {
            getLog().info("No extra jars to be added to class path");
        }

        // If all three fields are filled in, assume Kerberos is enabled and the user should be authenticated
        boolean authenticateUser = false;
        if (!StringUtils.isEmpty(principal) && !StringUtils.isEmpty(keyTab)
                && !StringUtils.isEmpty(hadoopConfigurationResources)) {
            authenticateUser = true;
        }

        if (authenticateUser) {
            ApplySecurityPolicy applySecurityObject = new ApplySecurityPolicy();
            Configuration configuration;
            try {
                getLog().info("Getting Hadoop configuration from " + hadoopConfigurationResources);
                configuration = ApplySecurityPolicy.getConfigurationFromResources(hadoopConfigurationResources);

                if (SecurityUtil.isSecurityEnabled(configuration)) {
                    getLog().info("Security is enabled");

                    if (principal.equals("") && keyTab.equals("")) {
                        getLog().error(
                                "Kerberos Principal and Kerberos KeyTab information missing in Kerboeros enabled cluster. {} ",
                                new Object[] { flowFile });
                        session.transfer(flowFile, REL_FAILURE);
                        return;
                    }

                    try {
                        getLog().info("User authentication initiated");

                        boolean authenticationStatus = applySecurityObject.validateUserWithKerberos(logger,
                                hadoopConfigurationResources, principal, keyTab);
                        if (authenticationStatus) {
                            getLog().info("User authenticated successfully.");
                        } else {
                            getLog().error("User authentication failed.  {} ", new Object[] { flowFile });
                            session.transfer(flowFile, REL_FAILURE);
                            return;
                        }

                    } catch (Exception unknownException) {
                        getLog().error("Unknown exception occurred while validating user : {}.  {} ",
                                new Object[] { unknownException.getMessage(), flowFile });
                        session.transfer(flowFile, REL_FAILURE);
                        return;
                    }

                }
            } catch (IOException e1) {
                getLog().error("Unknown exception occurred while authenticating user : {} and flow file: {}",
                        new Object[] { e1.getMessage(), flowFile });
                session.transfer(flowFile, REL_FAILURE);
                return;
            }
        }

        String sparkHome = context.getProperty(SPARK_HOME).evaluateAttributeExpressions(flowFile).getValue();

        // Build environment
        final Map<String, String> env = new HashMap<>();

        if (StringUtils.isNotBlank(datasourceIds)) {
            final StringBuilder datasources = new StringBuilder(10240);
            final ObjectMapper objectMapper = new ObjectMapper();
            final MetadataProvider provider = metadataService.getProvider();

            for (final String id : datasourceIds.split(",")) {
                datasources.append((datasources.length() == 0) ? '[' : ',');

                final Optional<Datasource> datasource = provider.getDatasource(id);
                if (datasource.isPresent()) {
                    if (datasource.get() instanceof JdbcDatasource && StringUtils
                            .isNotBlank(((JdbcDatasource) datasource.get()).getDatabaseDriverLocation())) {
                        final String[] databaseDriverLocations = ((JdbcDatasource) datasource.get())
                                .getDatabaseDriverLocation().split(",");
                        extraJarPaths.addAll(Arrays.asList(databaseDriverLocations));
                    }
                    datasources.append(objectMapper.writeValueAsString(datasource.get()));
                } else {
                    logger.error("Required datasource {} is missing for Spark job: {}",
                            new Object[] { id, flowFile });
                    flowFile = session.putAttribute(flowFile, PROVENANCE_JOB_STATUS_KEY,
                            "Invalid data source: " + id);
                    session.transfer(flowFile, REL_FAILURE);
                    return;
                }
            }

            datasources.append(']');
            env.put("DATASOURCES", datasources.toString());
        }

        /* Launch the spark job as a child process */
        SparkLauncher launcher = new SparkLauncher(env).setAppResource(appJar).setMainClass(mainClass)
                .setMaster(sparkMaster).setConf(SparkLauncher.DRIVER_MEMORY, driverMemory)
                .setConf(SPARK_NUM_EXECUTORS, numberOfExecutors)
                .setConf(SparkLauncher.EXECUTOR_MEMORY, executorMemory)
                .setConf(SparkLauncher.EXECUTOR_CORES, executorCores)
                .setConf(SPARK_NETWORK_TIMEOUT_CONFIG_NAME, networkTimeout).setSparkHome(sparkHome)
                .setAppName(sparkApplicationName);

        if (authenticateUser) {
            launcher.setConf(SPARK_YARN_KEYTAB, keyTab);
            launcher.setConf(SPARK_YARN_PRINCIPAL, principal);
        }
        if (args != null) {
            launcher.addAppArgs(args);
        }

        if (confs != null) {
            for (String conf : confs) {
                getLog().info("Adding sparkconf '" + conf + "'");
                launcher.addSparkArg(SPARK_CONFIG_NAME, conf);
            }
        }

        if (!extraJarPaths.isEmpty()) {
            for (String path : extraJarPaths) {
                getLog().info("Adding to class path '" + path + "'");
                launcher.addJar(path);
            }
        }
        if (StringUtils.isNotEmpty(yarnQueue)) {
            launcher.setConf(SPARK_YARN_QUEUE, yarnQueue);
        }
        if (StringUtils.isNotEmpty(extraFiles)) {
            launcher.addSparkArg(SPARK_EXTRA_FILES_CONFIG_NAME, extraFiles);
        }

        Process spark = launcher.launch();

        /* Read/clear the process input stream */
        InputStreamReaderRunnable inputStreamReaderRunnable = new InputStreamReaderRunnable(LogLevel.INFO,
                logger, spark.getInputStream());
        Thread inputThread = new Thread(inputStreamReaderRunnable, "stream input");
        inputThread.start();

        /* Read/clear the process error stream */
        InputStreamReaderRunnable errorStreamReaderRunnable = new InputStreamReaderRunnable(LogLevel.INFO,
                logger, spark.getErrorStream());
        Thread errorThread = new Thread(errorStreamReaderRunnable, "stream error");
        errorThread.start();

        logger.info("Waiting for Spark job to complete");

        /* Wait for job completion */
        boolean completed = spark.waitFor(sparkProcessTimeout, TimeUnit.SECONDS);
        if (!completed) {
            spark.destroyForcibly();
            getLog().error("Spark process timed out after {} seconds using flow file: {}  ",
                    new Object[] { sparkProcessTimeout, flowFile });
            session.transfer(flowFile, REL_FAILURE);
            return;
        }

        int exitCode = spark.exitValue();

        flowFile = session.putAttribute(flowFile, PROVENANCE_SPARK_EXIT_CODE_KEY, exitCode + "");
        if (exitCode != 0) {
            logger.error("ExecuteSparkJob for {} and flowfile: {} completed with failed status {} ",
                    new Object[] { context.getName(), flowFile, exitCode });
            flowFile = session.putAttribute(flowFile, PROVENANCE_JOB_STATUS_KEY, "Failed");
            session.transfer(flowFile, REL_FAILURE);
        } else {
            logger.info("ExecuteSparkJob for {} and flowfile: {} completed with success status {} ",
                    new Object[] { context.getName(), flowFile, exitCode });
            flowFile = session.putAttribute(flowFile, PROVENANCE_JOB_STATUS_KEY, "Success");
            session.transfer(flowFile, REL_SUCCESS);
        }
    } catch (final Exception e) {
        logger.error("Unable to execute Spark job {},{}", new Object[] { flowFile, e.getMessage() }, e);
        flowFile = session.putAttribute(flowFile, PROVENANCE_JOB_STATUS_KEY, "Failed With Exception");
        flowFile = session.putAttribute(flowFile, "Spark Exception:", e.getMessage());
        session.transfer(flowFile, REL_FAILURE);
    }
}
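
The example already follows the safe Java 8+ pattern: exitValue() is only called after waitFor(timeout, unit) returned true. On Java 9+ the same sequence can also be phrased with Process.onExit(), whose CompletableFuture completes only after termination, so exitValue() is always legal inside the callback. A sketch reusing launcher, sparkProcessTimeout and logger from the example:

Process spark = launcher.launch();
spark.onExit()
        .orTimeout(sparkProcessTimeout, TimeUnit.SECONDS) // completes exceptionally on timeout
        .thenAccept(p -> logger.info("Spark exited with code " + p.exitValue()));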

From source file:com.flexive.shared.FxSharedUtils.java

/**
 * Execute a command on the operating system
 *
 * @param command   name of the command
 * @param arguments arguments to pass to the command (one argument per String!)
 * @return result
 */

public static ProcessResult executeCommand(String command, String... arguments) {
    Runtime r = Runtime.getRuntime();
    String[] cmd = new String[arguments.length + (WINDOWS ? 3 : 1)];
    if (WINDOWS) {
        //have to run a shell on windows
        cmd[0] = "cmd";
        cmd[1] = "/c";
    }

    cmd[WINDOWS ? 2 : 0] = command;
    System.arraycopy(arguments, 0, cmd, (WINDOWS ? 3 : 1), arguments.length);
    StringBuilder cmdline = new StringBuilder(200);
    cmdline.append(command);
    for (String argument : arguments)
        cmdline.append(" ").append(argument);
    Process p = null;
    AsyncStreamBuffer out = null;
    AsyncStreamBuffer err = null;
    try {
        p = r.exec(cmd);
        //            p = r.exec(cmdline);
        out = new AsyncStreamBuffer(p.getInputStream());
        err = new AsyncStreamBuffer(p.getErrorStream());
        out.start();
        err.start();
        p.waitFor();
        while (out.isAlive())
            Thread.sleep(10);
        while (err.isAlive())
            Thread.sleep(10);
    } catch (Exception e) {
        String error = e.getMessage();
        if (err != null && err.getResult() != null && err.getResult().trim().length() > 0)
            error = error + "(" + err.getResult() + ")";
        return new ProcessResult(cmdline.toString(), (p == null ? -1 : p.exitValue()),
                (out == null ? "" : out.getResult()), error);
    } finally {
        if (p != null) {
            try {
                p.getInputStream().close();
            } catch (Exception e1) {
                //bad luck
            }
            try {
                p.getErrorStream().close();
            } catch (Exception e1) {
                //bad luck
            }
            try {
                p.getOutputStream().close();
            } catch (Exception e1) {
                //bad luck
            }
        }
    }
    return new ProcessResult(cmdline.toString(), p.exitValue(), out.getResult(), err.getResult());
}
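
The AsyncStreamBuffer threads in this example exist to keep the child's pipes drained: if stdout or stderr fills its OS buffer while nobody reads it, the child blocks and waitFor() never returns, so the exit value is never reached. A minimal sketch of such a drain thread, assuming a started Process p inside a method that may throw InterruptedException (imports: java.io.BufferedReader, java.io.InputStreamReader, java.io.IOException):

Thread drain = new Thread(() -> {
    try (BufferedReader r = new BufferedReader(
            new InputStreamReader(p.getInputStream()))) {
        while (r.readLine() != null) {
            // discard (or collect) output so the pipe cannot fill up
        }
    } catch (IOException ignored) {
        // the stream closes when the process ends
    }
});
drain.start();
int code = p.waitFor(); // waitFor() also returns the exit value directly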

From source file:org.pentaho.di.core.Const.java

private static final boolean procDone(Process p) {
    try {
        p.exitValue();
        return true;
    } catch (IllegalThreadStateException e) {
        return false;
    }
}
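
This exception-based probe is the classic pre-Java-8 idiom for asking whether a process has finished. Since Java 8 the same test can be written without using an exception for control flow; a sketch:

private static final boolean procDone(Process p) {
    return !p.isAlive(); // true exactly when exitValue() would succeed
}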

From source file:net.pms.PMS.java

/**
 * Executes a new process and spawns a thread that waits for its result.
 * TODO Extend explanation on where this is being used.
 * @param name Symbolic name for the process to be launched, only used in the trace log
 * @param error (boolean) Set to true if you want PMS to add error messages to the trace pane
 * @param workDir (File) optional working directory to run the process in
 * @param params (array of Strings) array containing the command to call and its arguments
 * @return Returns true if the command exited as expected
 * @throws Exception TODO: Check which exceptions to use
 */
private boolean checkProcessExistence(String name, boolean error, File workDir, String... params)
        throws Exception {
    logger.debug("launching: " + params[0]);

    try {
        ProcessBuilder pb = new ProcessBuilder(params);
        if (workDir != null) {
            pb.directory(workDir);
        }
        final Process process = pb.start();

        OutputTextConsumer stderrConsumer = new OutputTextConsumer(process.getErrorStream(), false);
        stderrConsumer.start();

        OutputTextConsumer outConsumer = new OutputTextConsumer(process.getInputStream(), false);
        outConsumer.start();

        Runnable r = new Runnable() {
            public void run() {
                ProcessUtil.waitFor(process);
            }
        };

        Thread checkThread = new Thread(r, "PMS Checker");
        checkThread.start();
        checkThread.join(60000);
        checkThread.interrupt();
        checkThread = null;

        // XXX no longer used
        if (params[0].equals("vlc") && stderrConsumer.getResults().get(0).startsWith("VLC")) {
            return true;
        }

        // XXX no longer used
        if (params[0].equals("ffmpeg") && stderrConsumer.getResults().get(0).startsWith("FF")) {
            return true;
        }

        int exit = process.exitValue();
        if (exit != 0) {
            if (error) {
                logger.info("[" + exit + "] Cannot launch " + name + " / Check the presence of " + params[0]
                        + " ...");
            }
            return false;
        }
        return true;
    } catch (Exception e) {
        if (error) {
            logger.error("Cannot launch " + name + " / Check the presence of " + params[0] + " ...", e);
        }
        return false;
    }
}
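
One caveat in the example above: after the 60-second join on the checker thread, exitValue() can still throw IllegalThreadStateException if the child outlived the watchdog. On Java 8+ the bounded wait needs no helper thread, and it also makes the exitValue() call provably safe. A sketch reusing process from the example:

if (!process.waitFor(60, TimeUnit.SECONDS)) {
    process.destroy();              // hung process: treat the check as failed
    return false;
}
int exit = process.exitValue();     // safe: waitFor(...) returned true
return exit == 0;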

From source file:org.freewheelschedule.freewheel.remoteworker.CommandLineExecution.java

@Override
public void run() {

    String message;
    PrintWriter stdoutOutput = null;
    PrintWriter stderrOutput = null;
    JobResponseMessage responseMessage = new JobResponseMessage();

    ObjectMapper mapper = new ObjectMapper();

    String hostname;
    try {
        hostname = (InetAddress.getLocalHost()).getCanonicalHostName();
    } catch (UnknownHostException e1) {
        log.error("Unable to determine hostname", e1);
        return;
    }

    log.info("Running command " + command);

    try {
        Socket remoteWorker = new Socket(hostname, remotePort);

        PrintWriter speak = new PrintWriter(remoteWorker.getOutputStream(), true);
        BufferedReader result = new BufferedReader(new InputStreamReader(remoteWorker.getInputStream()));

        String response = result.readLine();
        if (response.equals(HELO)) {
            speak.print(HELO + " " + hostname + "\r\n");
            speak.flush();
            responseMessage.setUid(command.getUid());
            responseMessage.setStatus(Status.STARTED);
            responseMessage.setMessage(STARTED + " " + command.getUid());
            speak.print(mapper.writeValueAsString(responseMessage) + "\r\n");
            speak.flush();
        } else {
            log.error("Unexpected response from ControlServer");
            return;
        }

        result.close();
        speak.close();
        remoteWorker.close();

        Process process = Runtime.getRuntime().exec(command.getCommand());

        if (command.getStdout() != null) {
            stdoutOutput = new PrintWriter(
                    new FileOutputStream(command.getStdout(), command.getAppendStdout()));
        }
        if (command.getStderr() != null) {
            stderrOutput = new PrintWriter(
                    new FileOutputStream(command.getStderr(), command.getAppendStderr()));
        }
        // getInputStream() returns the stdout of the process that ran
        BufferedReader stdOut = new BufferedReader(new InputStreamReader(process.getInputStream()));
        BufferedReader stdErr = new BufferedReader(new InputStreamReader(process.getErrorStream()));
        while ((message = stdOut.readLine()) != null) {
            log.info("stdout: " + message);
            if (stdoutOutput != null) {
                stdoutOutput.write(message + "\n");
            }
        }
        while ((message = stdErr.readLine()) != null) {
            log.info("stderr: " + message);
            if (stderrOutput != null) {
                stderrOutput.write(message + "\n");
            }
        }
        if (stderrOutput != null) {
            stderrOutput.close();
            stderrOutput = null;
        }
        if (stdoutOutput != null) {
            stdoutOutput.close();
            stdoutOutput = null;
        }

        remoteWorker = new Socket(hostname, remotePort);

        speak = new PrintWriter(remoteWorker.getOutputStream(), true);
        result = new BufferedReader(new InputStreamReader(remoteWorker.getInputStream()));

        response = result.readLine();
        if (response.equals(HELO)) {
            speak.print(HELO + " " + hostname + "\r\n");
            speak.flush();
            responseMessage.setUid(command.getUid());
            responseMessage.setStatus(process.exitValue() == 0 ? Status.SUCCESS : Status.FAILURE);
            responseMessage.setExitValue(process.exitValue());
            responseMessage.setMessage(COMPLETE + " " + command.getUid());
            speak.print(mapper.writeValueAsString(responseMessage) + "\r\n");
            speak.flush();
        } else {
            log.error("Unexpected response from ControlServer");
            return;
        }

        result.close();
        speak.close();
        remoteWorker.close();

    } catch (IOException e) {
        log.error("Execution failed", e);
    } finally {
        if (stderrOutput != null) {
            stderrOutput.close();
        }
        if (stdoutOutput != null) {
            stdoutOutput.close();
        }
    }

}
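
A caveat in this example: stdout is read to completion before stderr is touched, so a child that fills its stderr pipe first can deadlock before exitValue() is ever reached. One common workaround is to merge the two streams before launching; a sketch assuming a POSIX shell (ProcessBuilder, unlike Runtime.exec(String), does not tokenize the command line itself):

ProcessBuilder pb = new ProcessBuilder("/bin/sh", "-c", command.getCommand());
pb.redirectErrorStream(true);       // stderr is folded into stdout
Process process = pb.start();
BufferedReader merged = new BufferedReader(
        new InputStreamReader(process.getInputStream()));
String message;
while ((message = merged.readLine()) != null) {
    log.info("output: " + message); // one loop drains both streams
}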

From source file:ingestor.SingleFileProcessor.java

private boolean UnzipArchive(File inSourceFile) throws IOException, InterruptedException {
    logger.begin("UnzipArchive(" + inSourceFile + ")");
    String line = "";
    String cmd = "";
    Runtime run = Runtime.getRuntime();
    Process pr = null;
    String srcPath = "", tgtPath = "";
    line = GetFileHeader(inSourceFile);
    srcPath = inSourceFile.getAbsolutePath();
    tgtPath = inSourceFile.getParentFile().getAbsolutePath();

    logger.force("\tsrcPath=" + srcPath);
    logger.force("\ttgtPath=" + tgtPath);

    logger.force("\tline=" + line);
    if (line.contains("gzip compressed data")) {
        cmd = "/bin/tar -C " + tgtPath + " -xvzf " + srcPath;
    } else if (line.contains("xz compressed data")) {
        cmd = "/bin/tar  -C " + tgtPath + " -xvJf " + srcPath;
    } else if (line.contains("Zip archive data")) {
        cmd = "/usr/bin/unzip " + srcPath + " -d " + tgtPath;
    } else {
        logger.force("\t\tUnable to determine compression type, exiting");
        return false;
    }
    logger.force("\t\tcmd=" + cmd);

    //logger.fine("\nexecuting: "+cmd+"\n");
    pr = run.exec(cmd);

    BufferedReader buf = new BufferedReader(new InputStreamReader(pr.getInputStream()));
    BufferedReader buferr = new BufferedReader(new InputStreamReader(pr.getErrorStream()));

    // read everything and output to outputStream as you go
    String s = null;
    ScreenLog.out("===== stdout =====");
    while ((s = buf.readLine()) != null) {
        ScreenLog.out("line=" + s);
    }

    ScreenLog.out("===== stderr =====");
    while ((s = buferr.readLine()) != null) {
        ScreenLog.out("line=" + s);
    }
    pr.waitFor();

    logger.end("UnzipArchive(" + inSourceFile + ")==exit(" + pr.exitValue() + ")");
    return pr.exitValue() == 0;
}
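
Since waitFor() blocks until termination and itself returns the exit value, the tail of this method could be collapsed into a single call; a sketch:

int exit = pr.waitFor(); // same value a later exitValue() would return
logger.end("UnzipArchive(" + inSourceFile + ")==exit(" + exit + ")");
return exit == 0;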

From source file:org.kalypso.optimize.SceJob.java

private void startSCEOptimization(final SceIOHandler sceIO, final ISimulationMonitor monitor)
        throws SimulationException {
    InputStreamReader inStream = null;
    InputStreamReader errStream = null;

    // FIXME: too much copy/paste from ProcessHelper; we can probably use process helper instead!
    ProcessControlThread procCtrlThread = null;
    try {
        final String[] commands = new String[] { m_sceExe.getAbsolutePath() };

        final Process process = Runtime.getRuntime().exec(commands, null, m_sceDir);
        final long lTimeOut = 1000l * 60l * 15l;// 15 minutes
        procCtrlThread = new ProcessControlThread(process, lTimeOut);
        procCtrlThread.start();

        final StringBuffer outBuffer = new StringBuffer();
        final StringBuffer errBuffer = new StringBuffer();

        final Writer inputWriter = new PrintWriter(process.getOutputStream(), false);

        inStream = new InputStreamReader(process.getInputStream());
        errStream = new InputStreamReader(process.getErrorStream());

        final int stepMax = m_autoCalibration.getOptParameter().getMaxN();

        while (true) {
            final int step = sceIO.getStep();
            monitor.setProgress(100 * step / (stepMax + 1));
            if (step > stepMax) {
                final String monitorMsg = String.format(
                        "Optimierungsrechnung abgeschlossen, Ergebnisauswertung", step + 1, stepMax + 1);
                monitor.setMessage(monitorMsg);
            } else {
                final String monitorMsg = String.format("Optimierungsrechnung %d von %d", step + 1,
                        stepMax + 1);
                monitor.setMessage(monitorMsg);
            }

            if (inStream.ready()) {
                final char buffer[] = new char[100];
                final int bufferC = inStream.read(buffer);
                outBuffer.append(buffer, 0, bufferC);
            }
            if (errStream.ready()) {
                final char buffer[] = new char[100];
                final int bufferC = errStream.read(buffer);
                errBuffer.append(buffer, 0, bufferC);
            }
            if (monitor.isCanceled()) {
                process.destroy();
                procCtrlThread.endProcessControl();
                return;
            }
            try {
                process.exitValue();
                break;
            } catch (final IllegalThreadStateException e) {
                final OptimizeMonitor subMonitor = new OptimizeMonitor(monitor);
                sceIO.handleStreams(outBuffer, errBuffer, inputWriter, subMonitor);
            }
            Thread.sleep(100);
        }

        procCtrlThread.endProcessControl();
    } catch (final IOException e) {
        e.printStackTrace();
        throw new SimulationException("Fehler beim Ausfuehren", e);
    } catch (final InterruptedException e) {
        e.printStackTrace();
        throw new SimulationException("beim Ausfuehren unterbrochen", e);
    } finally {
        IOUtils.closeQuietly(inStream);
        IOUtils.closeQuietly(errStream);
        if (procCtrlThread != null && procCtrlThread.procDestroyed()) {
            throw new SimulationException("beim Ausfuehren unterbrochen",
                    new ProcessTimeoutException("Timeout bei der Abarbeitung der Optimierung"));
        }
    }
}
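
The try/catch around exitValue() in the loop above is a poll: the exception path means "still running". On Java 8+ the loop condition can use Process.isAlive() instead, keeping exitValue() for the final result only; a sketch reusing the example's variables:

while (process.isAlive()) {
    sceIO.handleStreams(outBuffer, errBuffer, inputWriter, new OptimizeMonitor(monitor));
    Thread.sleep(100);
}
final int exitCode = process.exitValue(); // the process has terminated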

From source file:com.ibm.bi.dml.test.integration.AutomatedTestBase.java

/**
 * Runs an R script in the old or the new way
 */
protected void runRScript(boolean newWay) {

    String executionFile = sourceDirectory + selectedTest + ".R";

    // *** HACK ALERT *** HACK ALERT *** HACK ALERT ***
    // Some of the R scripts will fail if the "expected" directory doesn't exist.
    // Make sure the directory exists.
    File expectedDir = new File(baseDirectory, "expected" + "/" + cacheDir);
    expectedDir.mkdirs();
    // *** END HACK ***

    String cmd;
    if (!newWay) {
        executionFile = executionFile + "t";
        cmd = "R -f " + executionFile;
    } else {
        // *** HACK ALERT *** HACK ALERT *** HACK ALERT ***
        // Rscript does *not* load the "methods" package by default
        // to save on start time. The "Matrix" package used in the
        // tests requires the "methods" package and should still
        // load and attach it, but in R 3.2 with the latest version
        // of the "Matrix" package, "methods" is loaded *but not
        // attached* when run with Rscript.  Therefore, we need to
        // explicitly load it with Rscript.
        cmd = rCmd.replaceFirst("Rscript",
                "Rscript --default-packages=methods,datasets,graphics,grDevices,stats,utils");
        // *** END HACK ***
    }

    if (System.getProperty("os.name").contains("Windows")) {
        cmd = cmd.replace('/', '\\');
        executionFile = executionFile.replace('/', '\\');
    }
    if (DEBUG) {
        if (!newWay) { // not sure why we have this condition
            TestUtils.printRScript(executionFile);
        }
    }
    if (!newWay) {
        ParameterBuilder.setVariablesInScript(sourceDirectory, selectedTest + ".R", testVariables);
    }

    if (cacheDir.length() > 0) {
        File expectedFile = null;
        String[] outputFiles = null;
        TestConfiguration testConfig = getTestConfiguration(selectedTest);
        if (testConfig != null) {
            outputFiles = testConfig.getOutputFiles();
        }

        if (outputFiles != null && outputFiles.length > 0) {
            expectedFile = new File(expectedDir.getPath() + "/" + outputFiles[0]);
            if (expectedFile.canRead()) {
                System.out.println("Skipping R script cmd: " + cmd);
                return;
            }
        }
    }

    try {
        long t0 = System.nanoTime();
        System.out.println("starting R script");
        System.out.println("cmd: " + cmd);
        Process child = Runtime.getRuntime().exec(cmd);

        String outputR = IOUtils.toString(child.getInputStream());
        System.out.println("Standard Output from R:" + outputR);
        String errorString = IOUtils.toString(child.getErrorStream());
        System.err.println("Standard Error from R:" + errorString);

        //
        // Give the streams enough time to print all data; otherwise there
        // are situations where the test case fails before everything
        // has been printed.
        //
        child.waitFor();
        //      Thread.sleep(30000);

        try {
            if (child.exitValue() != 0) {
                throw new Exception(
                        "ERROR: R has ended irregularly\n" + outputR + "\nscript file: " + executionFile);
            }
        } catch (IllegalThreadStateException ie) {
            //
            // On UNIX the JVM does not always seem to close threads
            // correctly. Give it a try anyway: since R has already processed
            // the script, we can terminate the process.
            //
            child.destroy();
        }

        long t1 = System.nanoTime();
        System.out.println("R is finished (in " + ((double) t1 - t0) / 1000000000 + " sec)");

    } catch (Exception e) {
        e.printStackTrace();
        StringBuilder errorMessage = new StringBuilder();
        errorMessage.append("failed to run script " + executionFile);
        errorMessage.append("\nexception: " + e.toString());
        errorMessage.append("\nmessage: " + e.getMessage());
        errorMessage.append("\nstack trace:");
        for (StackTraceElement ste : e.getStackTrace()) {
            errorMessage.append("\n>" + ste);
        }
        fail(errorMessage.toString());
    }
}
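
Because child.waitFor() returns only once the process has terminated, the IllegalThreadStateException branch after it in this example is effectively unreachable; the exit check can rely on waitFor()'s own return value. A sketch:

int rc = child.waitFor(); // blocks until R terminates, then yields the exit value
if (rc != 0) {
    throw new Exception(
            "ERROR: R has ended irregularly\n" + outputR + "\nscript file: " + executionFile);
}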