Example usage for java.lang Process getErrorStream

List of usage examples for java.lang Process getErrorStream

Introduction

On this page you can find example usages of java.lang Process getErrorStream.

Prototype

public abstract InputStream getErrorStream();

Document

Returns the input stream connected to the error output of the process.
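
A minimal, self-contained sketch of typical usage (the class name ErrorStreamExample and the ls command are illustrative only, not part of any example below): the error stream is drained on a separate thread so the child process cannot block when its stderr buffer fills.

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;

public class ErrorStreamExample {
    public static void main(String[] args) throws IOException, InterruptedException {
        // Launch an external command; "ls" with a bad argument is used purely for illustration.
        Process process = new ProcessBuilder("ls", "no-such-directory").start();

        // Drain stderr on its own thread so the child process cannot block on a full buffer.
        Thread stderrReader = new Thread(() -> {
            try (BufferedReader reader = new BufferedReader(
                    new InputStreamReader(process.getErrorStream()))) {
                String line;
                while ((line = reader.readLine()) != null) {
                    System.err.println("stderr: " + line);
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        });
        stderrReader.start();

        int exitCode = process.waitFor();
        stderrReader.join();
        System.out.println("Process exited with code " + exitCode);
    }
}

Most of the examples below follow this pattern, either with a dedicated reader thread or by closing the stream when its contents are not needed.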

Usage

From source file:com.thinkbiganalytics.nifi.pyspark.core.ExecutePySpark.java

@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    final ComponentLog logger = getLog();
    FlowFile flowFile = session.get();

    if (flowFile == null) {
        flowFile = session.create();
        logger.info("Created a flow file having uuid: {}",
                new Object[] { flowFile.getAttribute(CoreAttributes.UUID.key()) });
    } else {
        logger.info("Using an existing flow file having uuid: {}",
                new Object[] { flowFile.getAttribute(CoreAttributes.UUID.key()) });
    }
    try {
        final String kerberosPrincipal = context.getProperty(KERBEROS_PRINCIPAL).getValue();
        final String kerberosKeyTab = context.getProperty(KERBEROS_KEYTAB).getValue();
        final String hadoopConfigurationResources = context.getProperty(HADOOP_CONFIGURATION_RESOURCES)
                .getValue();
        final String pySparkAppFile = context.getProperty(PYSPARK_APP_FILE)
                .evaluateAttributeExpressions(flowFile).getValue();
        final String pySparkAppArgs = context.getProperty(PYSPARK_APP_ARGS)
                .evaluateAttributeExpressions(flowFile).getValue();
        final String pySparkAppName = context.getProperty(PYSPARK_APP_NAME)
                .evaluateAttributeExpressions(flowFile).getValue();
        final String pySparkAdditionalFiles = context.getProperty(PYSPARK_ADDITIONAL_FILES)
                .evaluateAttributeExpressions(flowFile).getValue();
        final String sparkMaster = context.getProperty(SPARK_MASTER).evaluateAttributeExpressions(flowFile)
                .getValue().trim().toLowerCase();
        final String sparkYarnDeployMode = context.getProperty(SPARK_YARN_DEPLOY_MODE)
                .evaluateAttributeExpressions(flowFile).getValue();
        final String yarnQueue = context.getProperty(YARN_QUEUE).evaluateAttributeExpressions(flowFile)
                .getValue();
        final String sparkHome = context.getProperty(SPARK_HOME).evaluateAttributeExpressions(flowFile)
                .getValue();
        final String driverMemory = context.getProperty(DRIVER_MEMORY).evaluateAttributeExpressions(flowFile)
                .getValue();
        final String executorMemory = context.getProperty(EXECUTOR_MEMORY)
                .evaluateAttributeExpressions(flowFile).getValue();
        final String executorInstances = context.getProperty(EXECUTOR_INSTANCES)
                .evaluateAttributeExpressions(flowFile).getValue();
        final String executorCores = context.getProperty(EXECUTOR_CORES).evaluateAttributeExpressions(flowFile)
                .getValue();
        final String networkTimeout = context.getProperty(NETWORK_TIMEOUT)
                .evaluateAttributeExpressions(flowFile).getValue();
        final String additionalSparkConfigOptions = context.getProperty(ADDITIONAL_SPARK_CONFIG_OPTIONS)
                .evaluateAttributeExpressions(flowFile).getValue();

        PySparkUtils pySparkUtils = new PySparkUtils();

        /* Get app arguments */
        String[] pySparkAppArgsArray = null;
        if (!StringUtils.isEmpty(pySparkAppArgs)) {
            pySparkAppArgsArray = pySparkUtils.getCsvValuesAsArray(pySparkAppArgs);
            logger.info("Provided application arguments: {}",
                    new Object[] { pySparkUtils.getCsvStringFromArray(pySparkAppArgsArray) });
        }

        /* Get additional python files */
        String[] pySparkAdditionalFilesArray = null;
        if (!StringUtils.isEmpty(pySparkAdditionalFiles)) {
            pySparkAdditionalFilesArray = pySparkUtils.getCsvValuesAsArray(pySparkAdditionalFiles);
            logger.info("Provided python files: {}",
                    new Object[] { pySparkUtils.getCsvStringFromArray(pySparkAdditionalFilesArray) });
        }

        /* Get additional config key-value pairs */
        String[] additionalSparkConfigOptionsArray = null;
        if (!StringUtils.isEmpty(additionalSparkConfigOptions)) {
            additionalSparkConfigOptionsArray = pySparkUtils.getCsvValuesAsArray(additionalSparkConfigOptions);
            logger.info("Provided spark config options: {}",
                    new Object[] { pySparkUtils.getCsvStringFromArray(additionalSparkConfigOptionsArray) });
        }

        /* Determine if Kerberos is enabled */
        boolean kerberosEnabled = false;
        if (!StringUtils.isEmpty(kerberosPrincipal) && !StringUtils.isEmpty(kerberosKeyTab)
                && !StringUtils.isEmpty(hadoopConfigurationResources)) {
            kerberosEnabled = true;
            logger.info("Kerberos is enabled");
        }

        /* For Kerberized cluster, attempt user authentication */
        if (kerberosEnabled) {
            logger.info("Attempting user authentication for Kerberos");
            ApplySecurityPolicy applySecurityObject = new ApplySecurityPolicy();
            Configuration configuration;
            try {
                logger.info("Getting Hadoop configuration from " + hadoopConfigurationResources);
                configuration = ApplySecurityPolicy.getConfigurationFromResources(hadoopConfigurationResources);

                if (SecurityUtil.isSecurityEnabled(configuration)) {
                    logger.info("Security is enabled");

                    if (kerberosPrincipal.equals("") && kerberosKeyTab.equals("")) {
                        logger.error(
                                "Kerberos Principal and Keytab provided with empty values for a Kerberized cluster.");
                        session.transfer(flowFile, REL_FAILURE);
                        return;
                    }

                    try {
                        logger.info("User authentication initiated");

                        boolean authenticationStatus = applySecurityObject.validateUserWithKerberos(logger,
                                hadoopConfigurationResources, kerberosPrincipal, kerberosKeyTab);
                        if (authenticationStatus) {
                            logger.info("User authenticated successfully.");
                        } else {
                            logger.error("User authentication failed.");
                            session.transfer(flowFile, REL_FAILURE);
                            return;
                        }

                    } catch (Exception unknownException) {
                        logger.error("Unknown exception occurred while validating user :"
                                + unknownException.getMessage());
                        session.transfer(flowFile, REL_FAILURE);
                        return;
                    }
                }
            } catch (IOException e1) {
                logger.error("Unknown exception occurred while authenticating user :" + e1.getMessage());
                session.transfer(flowFile, REL_FAILURE);
                return;
            }
        }

        /* Build and launch PySpark Job */
        logger.info("Configuring PySpark job for execution");
        SparkLauncher pySparkLauncher = new SparkLauncher().setAppResource(pySparkAppFile);
        logger.info("PySpark app file set to: {}", new Object[] { pySparkAppFile });

        if (pySparkAppArgsArray != null && pySparkAppArgsArray.length > 0) {
            pySparkLauncher = pySparkLauncher.addAppArgs(pySparkAppArgsArray);
            logger.info("App arguments set to: {}",
                    new Object[] { pySparkUtils.getCsvStringFromArray(pySparkAppArgsArray) });
        }

        pySparkLauncher = pySparkLauncher.setAppName(pySparkAppName).setMaster(sparkMaster);

        logger.info("App name set to: {}", new Object[] { pySparkAppName });
        logger.info("Spark master set to: {}", new Object[] { sparkMaster });

        if (pySparkAdditionalFilesArray != null && pySparkAdditionalFilesArray.length > 0) {
            for (String pySparkAdditionalFile : pySparkAdditionalFilesArray) {
                pySparkLauncher = pySparkLauncher.addPyFile(pySparkAdditionalFile);
                logger.info("Additional python file set to: {}", new Object[] { pySparkAdditionalFile });
            }
        }

        if (sparkMaster.equals("yarn")) {
            pySparkLauncher = pySparkLauncher.setDeployMode(sparkYarnDeployMode);
            logger.info("YARN deploy mode set to: {}", new Object[] { sparkYarnDeployMode });
        }

        pySparkLauncher = pySparkLauncher.setSparkHome(sparkHome)
                .setConf(SparkLauncher.DRIVER_MEMORY, driverMemory)
                .setConf(SparkLauncher.EXECUTOR_MEMORY, executorMemory)
                .setConf(CONFIG_PROP_SPARK_EXECUTOR_INSTANCES, executorInstances)
                .setConf(SparkLauncher.EXECUTOR_CORES, executorCores)
                .setConf(CONFIG_PROP_SPARK_NETWORK_TIMEOUT, networkTimeout);

        logger.info("Spark home set to: {} ", new Object[] { sparkHome });
        logger.info("Driver memory set to: {} ", new Object[] { driverMemory });
        logger.info("Executor memory set to: {} ", new Object[] { executorMemory });
        logger.info("Executor instances set to: {} ", new Object[] { executorInstances });
        logger.info("Executor cores set to: {} ", new Object[] { executorCores });
        logger.info("Network timeout set to: {} ", new Object[] { networkTimeout });

        if (kerberosEnabled) {
            pySparkLauncher = pySparkLauncher.setConf(CONFIG_PROP_SPARK_YARN_PRINCIPAL, kerberosPrincipal);
            pySparkLauncher = pySparkLauncher.setConf(CONFIG_PROP_SPARK_YARN_KEYTAB, kerberosKeyTab);
            logger.info("Kerberos principal set to: {} ", new Object[] { kerberosPrincipal });
            logger.info("Kerberos keytab set to: {} ", new Object[] { kerberosKeyTab });
        }

        if (!StringUtils.isEmpty(yarnQueue)) {
            pySparkLauncher = pySparkLauncher.setConf(CONFIG_PROP_SPARK_YARN_QUEUE, yarnQueue);
            logger.info("YARN queue set to: {} ", new Object[] { yarnQueue });
        }

        if (additionalSparkConfigOptionsArray != null && additionalSparkConfigOptionsArray.length > 0) {
            for (String additionalSparkConfigOption : additionalSparkConfigOptionsArray) {
                String[] confKeyValue = additionalSparkConfigOption.split("=");
                if (confKeyValue.length == 2) {
                    pySparkLauncher = pySparkLauncher.setConf(confKeyValue[0], confKeyValue[1]);
                    logger.info("Spark additional config option set to: {}={}",
                            new Object[] { confKeyValue[0], confKeyValue[1] });
                }
            }
        }

        logger.info("Starting execution of PySpark job");
        Process pySparkProcess = pySparkLauncher.launch();

        InputStreamReaderRunnable inputStreamReaderRunnable = new InputStreamReaderRunnable(LogLevel.INFO,
                logger, pySparkProcess.getInputStream());
        Thread inputThread = new Thread(inputStreamReaderRunnable, "stream input");
        inputThread.start();

        InputStreamReaderRunnable errorStreamReaderRunnable = new InputStreamReaderRunnable(LogLevel.INFO,
                logger, pySparkProcess.getErrorStream());
        Thread errorThread = new Thread(errorStreamReaderRunnable, "stream error");
        errorThread.start();

        logger.info("Waiting for PySpark job to complete");

        int exitCode = pySparkProcess.waitFor();
        if (exitCode != 0) {
            logger.info("Finished execution of PySpark job [FAILURE] [Status code: {}]",
                    new Object[] { exitCode });
            session.transfer(flowFile, REL_FAILURE);
        } else {
            logger.info("Finished execution of PySpark job [SUCCESS] [Status code: {}]",
                    new Object[] { exitCode });
            session.transfer(flowFile, REL_SUCCESS);
        }
    } catch (final Exception e) {
        logger.error("Unable to execute PySpark job [FAILURE]", new Object[] { flowFile, e });
        session.transfer(flowFile, REL_FAILURE);
    }
}

From source file:net.firejack.platform.model.service.reverse.ReverseEngineeringService.java

private File generate(String wsdl, String domain) throws IOException, InterruptedException {
    String name = SecurityHelper.generateRandomSequence(16);
    File temp = new File(FileUtils.getTempDirectory(), name);
    FileUtils.forceMkdir(temp);

    Process exec = Runtime.getRuntime().exec(new String[] { "wsimport", "-d", temp.getPath(), "-p",
            "wsdl." + domain, "-target", "2.1", "-extension", wsdl });
    exec.getInputStream().close();
    exec.getErrorStream().close();
    exec.getOutputStream().close();
    exec.waitFor();

    return temp;
}

From source file:com.l2jfree.gameserver.util.DatabaseBackupManager.java

public static void makeBackup() {
    File f = new File(Config.DATAPACK_ROOT, Config.DATABASE_BACKUP_SAVE_PATH);
    if (!f.mkdirs() && !f.exists()) {
        _log.warn("Could not create folder " + f.getAbsolutePath());
        return;
    }

    _log.info("DatabaseBackupManager: backing up `" + Config.DATABASE_BACKUP_DATABASE_NAME + "`...");

    Process run = null;
    try {
        run = Runtime.getRuntime().exec("mysqldump" + " --user=" + Config.DATABASE_LOGIN + " --password="
                + Config.DATABASE_PASSWORD
                + " --compact --complete-insert --default-character-set=utf8 --extended-insert --lock-tables --quick --skip-triggers "
                + Config.DATABASE_BACKUP_DATABASE_NAME, null, new File(Config.DATABASE_BACKUP_MYSQLDUMP_PATH));
    } catch (Exception e) {
    } finally {
        if (run == null) {
            _log.warn("Could not execute mysqldump!");
            return;
        }
    }

    try {
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss");
        Date time = new Date();

        File bf = new File(f, sdf.format(time) + (Config.DATABASE_BACKUP_COMPRESSION ? ".zip" : ".sql"));
        if (!bf.createNewFile())
            throw new IOException("Cannot create backup file: " + bf.getCanonicalPath());
        InputStream input = run.getInputStream();
        OutputStream out = new FileOutputStream(bf);
        if (Config.DATABASE_BACKUP_COMPRESSION) {
            ZipOutputStream dflt = new ZipOutputStream(out);
            dflt.setMethod(ZipOutputStream.DEFLATED);
            dflt.setLevel(Deflater.BEST_COMPRESSION);
            dflt.setComment("L2JFree Schema Backup Utility\r\n\r\nBackup date: "
                    + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss:SSS z").format(time));
            dflt.putNextEntry(new ZipEntry(Config.DATABASE_BACKUP_DATABASE_NAME + ".sql"));
            out = dflt;
        }

        byte[] buf = new byte[4096];
        int written = 0;
        for (int read; (read = input.read(buf)) != -1;) {
            out.write(buf, 0, read);

            written += read;
        }
        input.close();
        out.close();

        if (written == 0) {
            bf.delete();
            BufferedReader br = new BufferedReader(new InputStreamReader(run.getErrorStream()));
            String line;
            while ((line = br.readLine()) != null)
                _log.warn("DatabaseBackupManager: " + line);
            br.close();
        } else
            _log.info("DatabaseBackupManager: Schema `" + Config.DATABASE_BACKUP_DATABASE_NAME
                    + "` backed up successfully in " + (System.currentTimeMillis() - time.getTime()) / 1000
                    + " s.");

        run.waitFor();
    } catch (Exception e) {
        _log.warn("DatabaseBackupManager: Could not make backup: ", e);
    }
}

From source file:com.evolveum.midpoint.test.util.Lsof.java

private String execLsof(int pid) throws IOException, InterruptedException {
    Process process = null;
    String output = null;
    try {
        process = Runtime.getRuntime().exec(new String[] { "lsof", "-p", Integer.toString(pid) });
        InputStream inputStream = process.getInputStream();
        output = IOUtils.toString(inputStream, "UTF-8");
        int exitCode = process.waitFor();
        if (exitCode != 0) {
            throw new IllegalStateException("Lsof process ended with error (" + exitCode + ")");
        }
    } finally {
        if (process != null) {
            try {
                process.getInputStream().close();
            } catch (IOException e) {
                e.printStackTrace();
            }
            try {
                process.getOutputStream().close();
            } catch (IOException e) {
                e.printStackTrace();
            }
            try {
                process.getErrorStream().close();
            } catch (IOException e) {
                e.printStackTrace();
            }
            process.destroy();
        }
    }
    return output;
}

From source file:com.emergya.persistenceGeo.importer.shp.ShpImporterImpl.java

@Override
public boolean importShpToDb(String pathToShp, String tableName, boolean dropExistingTable) {
    boolean result = true;
    String srcProjection = "";
    Runtime rt = Runtime.getRuntime();
    if (pathToShp == null || !pathToShp.endsWith(".shp")) {
        throw new IllegalArgumentException("pathToShp does not end with .shp");
    }
    int lastIndexOfSlash = pathToShp.lastIndexOf(File.separatorChar);
    int lastIndexOfShp = pathToShp.lastIndexOf('.');
    String path = pathToShp.substring(0, lastIndexOfSlash);
    String file = pathToShp.substring(lastIndexOfSlash + 1, lastIndexOfShp);
    boolean exists = checkIfAllFilesExist(path, file);
    if (!exists) {
        throw new ShpImporterException("Not all mandatory shape file components could be found");
    }

    // If exists a .prj file try to guess the EPSG code. If not code found
    // defaults to EPSG:4326
    if (checkIfPrjFileExist(path, file)) {
        String prjPath = pathToShp.substring(0, pathToShp.lastIndexOf('.')) + ".prj";
        try {
            Process proc = rt.exec(new String[] { GUESS_PROJECTION_COMMAND, prjPath });
            BufferedReader standarStream = new BufferedReader(new InputStreamReader(proc.getInputStream()));

            int projStatus = proc.waitFor();
            String line;
            if (projStatus == 0) {
                while ((line = standarStream.readLine()) != null) {
                    if (!"EPSG:-1".equals(line)) {
                        srcProjection = line.split(":")[1] + ":";
                    }
                }
            } else {
                if (LOG.isWarnEnabled()) {
                    LOG.warn(GUESS_PROJECTION_COMMAND + " return code was " + projStatus);
                }
                if (LOG.isDebugEnabled()) {
                    BufferedReader errorStream = new BufferedReader(
                            new InputStreamReader(proc.getErrorStream()));
                    while ((line = errorStream.readLine()) != null) {
                        LOG.debug(GUESS_PROJECTION_COMMAND + ": " + line);
                    }
                }

            }

        } catch (IOException e) {
            if (LOG.isErrorEnabled()) {
                LOG.error("guessEPSG.py not found. Please put this file " + " in the path");
            }
        } catch (InterruptedException e) {
            LOG.error(GUESS_PROJECTION_COMMAND + " thread interrupted", e);
        }

    }

    File shp = new File(pathToShp);
    String dropTableParameter = "";
    if (dropExistingTable) {
        dropTableParameter = DROP_TABLE_OPTION;
    }

    String command = MessageFormat.format(SHP2PGSQL_COMMAND, dropTableParameter, srcProjection,
            dbConfig.getDestSrid(), shp.getAbsolutePath(), dbConfig.getSchema(), tableName,
            dbConfig.getPostgresHost(), dbConfig.getPostgresPort(), dbConfig.getDatabaseName(),
            dbConfig.getPostgresUser());
    if (LOG.isDebugEnabled()) {
        LOG.debug("SHPIMPORTER Command: " + command);
    }
    try {
        Process proc = rt.exec(new String[] { BASH_COMMAND, BASH_COMMAND_FIRST_ARGUMENT, command },
                new String[] { "PGPASSWORD=" + dbConfig.getPostgresPassword() });

        BufferedReader standarStream = new BufferedReader(new InputStreamReader(proc.getInputStream()));

        BufferedReader errorStream = new BufferedReader(new InputStreamReader(proc.getErrorStream()));

        String line;
        while ((line = standarStream.readLine()) != null) {
            if (line.contains("ROLLBACK")) {
                result = false;
            }
            if (LOG.isDebugEnabled()) {
                LOG.debug("SHPIMPORTER standar output: " + line);
            }
        }

        while ((line = errorStream.readLine()) != null) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("SHPIMPORTER error output: ");
            }
        }
        standarStream.close();
        errorStream.close();
        int exitStatus = proc.waitFor();

        // Am I checking the real subcommand parameter exit status or only
        // bash command exit status?
        if (exitStatus != 0) {
            throw new ShpImporterException("Exit status is not 0 after executing " + command);
        }
    } catch (IOException e) {
        throw new ShpImporterException("IOExcption thrown while executing SHP import", e);
    } catch (InterruptedException e) {
        throw new ShpImporterException(
                "Another thread interrumped this when it " + " was waiting for SHP import command ending.", e);
    }

    return result;

}

From source file:com.twosigma.beaker.core.rest.PluginServiceLocatorRest.java

@GET
@Path("getIPythonVersion")
@Produces(MediaType.TEXT_PLAIN)
public String getIPythonVersion(@QueryParam("pluginId") String pluginId, @QueryParam("command") String command)
        throws IOException {
    Process proc;
    List<String> cmd = pythonBaseCommand(pluginId, command);
    cmd.add("--version");
    proc = Runtime.getRuntime().exec(listToArray(cmd), buildEnv(pluginId, null));
    BufferedReader br = new BufferedReader(new InputStreamReader(proc.getInputStream()));
    new StreamGobbler(proc.getErrorStream(), "stderr", "ipython-version", null, null).start();
    String line = br.readLine();
    return line;
}

From source file:net.sourceforge.vulcan.shell.ShellBuildTool.java

private OutputStream startOutputProcessors(final Process process, final File logFile,
        final BuildDetailCallback buildDetailCallback) {
    final OutputStream logOutputStream;

    try {
        logOutputStream = new FileOutputStream(logFile);
    } catch (FileNotFoundException e) {
        throw new RuntimeException(e);
    }

    logWriter = new BufferedWriter(new OutputStreamWriter(logOutputStream));

    stdoutThread = new Thread() {
        @Override
        public void run() {
            try {
                new ShellOutputProcessor(logWriter, buildDetailCallback)
                        .processStream(process.getInputStream());
            } catch (IOException e) {
                LOG.error(e.getMessage(), e);
            }
        }
    };

    stdoutThread.start();

    stderrThread = new Thread() {
        @Override
        public void run() {
            try {
                new ShellOutputProcessor(logWriter, buildDetailCallback)
                        .processStream(process.getErrorStream());
            } catch (IOException e) {
                LOG.error(e.getMessage(), e);
            }
        }
    };

    stderrThread.start();

    return logOutputStream;
}

From source file:com.ficeto.esp.EspExceptionDecoder.java

private int listenOnProcess(String[] arguments) {
    try {
        final Process p = ProcessUtils.exec(arguments);
        Thread thread = new Thread() {
            public void run() {
                try {
                    InputStreamReader reader = new InputStreamReader(p.getInputStream());
                    int c;
                    String line = "";
                    while ((c = reader.read()) != -1) {
                        if ((char) c == '\r')
                            continue;
                        if ((char) c == '\n') {
                            printLine(line);
                            line = "";
                        } else {
                            line += (char) c;
                        }
                    }
                    printLine(line);
                    reader.close();

                    reader = new InputStreamReader(p.getErrorStream());
                    while ((c = reader.read()) != -1)
                        System.err.print((char) c);
                    reader.close();
                } catch (Exception e) {
                }
            }
        };
        thread.start();
        int res = p.waitFor();
        thread.join();
        return res;
    } catch (Exception e) {
    }
    return -1;
}

From source file:com.thinkbiganalytics.nifi.v2.spark.ExecuteSparkJob.java

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    final ComponentLog logger = getLog();
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    String PROVENANCE_JOB_STATUS_KEY = "Job Status";
    String PROVENANCE_SPARK_EXIT_CODE_KEY = "Spark Exit Code";

    try {

        PROVENANCE_JOB_STATUS_KEY = context.getName() + " Job Status";
        PROVENANCE_SPARK_EXIT_CODE_KEY = context.getName() + " Spark Exit Code";

        /* Configuration parameters for spark launcher */
        String appJar = context.getProperty(APPLICATION_JAR).evaluateAttributeExpressions(flowFile).getValue()
                .trim();
        String extraJars = context.getProperty(EXTRA_JARS).evaluateAttributeExpressions(flowFile).getValue();
        String yarnQueue = context.getProperty(YARN_QUEUE).evaluateAttributeExpressions(flowFile).getValue();
        String mainClass = context.getProperty(MAIN_CLASS).evaluateAttributeExpressions(flowFile).getValue()
                .trim();
        String sparkMaster = context.getProperty(SPARK_MASTER).evaluateAttributeExpressions(flowFile).getValue()
                .trim();
        String appArgs = context.getProperty(MAIN_ARGS).evaluateAttributeExpressions(flowFile).getValue()
                .trim();
        String driverMemory = context.getProperty(DRIVER_MEMORY).evaluateAttributeExpressions(flowFile)
                .getValue();
        String executorMemory = context.getProperty(EXECUTOR_MEMORY).evaluateAttributeExpressions(flowFile)
                .getValue();
        String numberOfExecutors = context.getProperty(NUMBER_EXECUTORS).evaluateAttributeExpressions(flowFile)
                .getValue();
        String sparkApplicationName = context.getProperty(SPARK_APPLICATION_NAME)
                .evaluateAttributeExpressions(flowFile).getValue();
        String executorCores = context.getProperty(EXECUTOR_CORES).evaluateAttributeExpressions(flowFile)
                .getValue();
        String networkTimeout = context.getProperty(NETWORK_TIMEOUT).evaluateAttributeExpressions(flowFile)
                .getValue();
        String principal = context.getProperty(kerberosPrincipal).getValue();
        String keyTab = context.getProperty(kerberosKeyTab).getValue();
        String hadoopConfigurationResources = context.getProperty(HADOOP_CONFIGURATION_RESOURCES).getValue();
        String sparkConfs = context.getProperty(SPARK_CONFS).evaluateAttributeExpressions(flowFile).getValue();
        String extraFiles = context.getProperty(EXTRA_SPARK_FILES).evaluateAttributeExpressions(flowFile)
                .getValue();
        Integer sparkProcessTimeout = context.getProperty(PROCESS_TIMEOUT)
                .evaluateAttributeExpressions(flowFile).asTimePeriod(TimeUnit.SECONDS).intValue();
        String datasourceIds = context.getProperty(DATASOURCES).evaluateAttributeExpressions(flowFile)
                .getValue();
        MetadataProviderService metadataService = context.getProperty(METADATA_SERVICE)
                .asControllerService(MetadataProviderService.class);

        String[] confs = null;
        if (!StringUtils.isEmpty(sparkConfs)) {
            confs = sparkConfs.split("\\|");
        }

        String[] args = null;
        if (!StringUtils.isEmpty(appArgs)) {
            args = appArgs.split(",");
        }

        final List<String> extraJarPaths = new ArrayList<>();
        if (!StringUtils.isEmpty(extraJars)) {
            extraJarPaths.addAll(Arrays.asList(extraJars.split(",")));
        } else {
            getLog().info("No extra jars to be added to class path");
        }

        // If all 3 fields are filled out then assume kerberos is enabled, and user should be authenticated
        boolean authenticateUser = false;
        if (!StringUtils.isEmpty(principal) && !StringUtils.isEmpty(keyTab)
                && !StringUtils.isEmpty(hadoopConfigurationResources)) {
            authenticateUser = true;
        }

        if (authenticateUser) {
            ApplySecurityPolicy applySecurityObject = new ApplySecurityPolicy();
            Configuration configuration;
            try {
                getLog().info("Getting Hadoop configuration from " + hadoopConfigurationResources);
                configuration = ApplySecurityPolicy.getConfigurationFromResources(hadoopConfigurationResources);

                if (SecurityUtil.isSecurityEnabled(configuration)) {
                    getLog().info("Security is enabled");

                    if (principal.equals("") && keyTab.equals("")) {
                        getLog().error(
                                "Kerberos Principal and Kerberos KeyTab information missing in Kerboeros enabled cluster. {} ",
                                new Object[] { flowFile });
                        session.transfer(flowFile, REL_FAILURE);
                        return;
                    }

                    try {
                        getLog().info("User authentication initiated");

                        boolean authenticationStatus = applySecurityObject.validateUserWithKerberos(logger,
                                hadoopConfigurationResources, principal, keyTab);
                        if (authenticationStatus) {
                            getLog().info("User authenticated successfully.");
                        } else {
                            getLog().error("User authentication failed.  {} ", new Object[] { flowFile });
                            session.transfer(flowFile, REL_FAILURE);
                            return;
                        }

                    } catch (Exception unknownException) {
                        getLog().error("Unknown exception occurred while validating user : {}.  {} ",
                                new Object[] { unknownException.getMessage(), flowFile });
                        session.transfer(flowFile, REL_FAILURE);
                        return;
                    }

                }
            } catch (IOException e1) {
                getLog().error("Unknown exception occurred while authenticating user : {} and flow file: {}",
                        new Object[] { e1.getMessage(), flowFile });
                session.transfer(flowFile, REL_FAILURE);
                return;
            }
        }

        String sparkHome = context.getProperty(SPARK_HOME).evaluateAttributeExpressions(flowFile).getValue();

        // Build environment
        final Map<String, String> env = new HashMap<>();

        if (StringUtils.isNotBlank(datasourceIds)) {
            final StringBuilder datasources = new StringBuilder(10240);
            final ObjectMapper objectMapper = new ObjectMapper();
            final MetadataProvider provider = metadataService.getProvider();

            for (final String id : datasourceIds.split(",")) {
                datasources.append((datasources.length() == 0) ? '[' : ',');

                final Optional<Datasource> datasource = provider.getDatasource(id);
                if (datasource.isPresent()) {
                    if (datasource.get() instanceof JdbcDatasource && StringUtils
                            .isNotBlank(((JdbcDatasource) datasource.get()).getDatabaseDriverLocation())) {
                        final String[] databaseDriverLocations = ((JdbcDatasource) datasource.get())
                                .getDatabaseDriverLocation().split(",");
                        extraJarPaths.addAll(Arrays.asList(databaseDriverLocations));
                    }
                    datasources.append(objectMapper.writeValueAsString(datasource.get()));
                } else {
                    logger.error("Required datasource {} is missing for Spark job: {}",
                            new Object[] { id, flowFile });
                    flowFile = session.putAttribute(flowFile, PROVENANCE_JOB_STATUS_KEY,
                            "Invalid data source: " + id);
                    session.transfer(flowFile, REL_FAILURE);
                    return;
                }
            }

            datasources.append(']');
            env.put("DATASOURCES", datasources.toString());
        }

        /* Launch the spark job as a child process */
        SparkLauncher launcher = new SparkLauncher(env).setAppResource(appJar).setMainClass(mainClass)
                .setMaster(sparkMaster).setConf(SparkLauncher.DRIVER_MEMORY, driverMemory)
                .setConf(SPARK_NUM_EXECUTORS, numberOfExecutors)
                .setConf(SparkLauncher.EXECUTOR_MEMORY, executorMemory)
                .setConf(SparkLauncher.EXECUTOR_CORES, executorCores)
                .setConf(SPARK_NETWORK_TIMEOUT_CONFIG_NAME, networkTimeout).setSparkHome(sparkHome)
                .setAppName(sparkApplicationName);

        if (authenticateUser) {
            launcher.setConf(SPARK_YARN_KEYTAB, keyTab);
            launcher.setConf(SPARK_YARN_PRINCIPAL, principal);
        }
        if (args != null) {
            launcher.addAppArgs(args);
        }

        if (confs != null) {
            for (String conf : confs) {
                getLog().info("Adding sparkconf '" + conf + "'");
                launcher.addSparkArg(SPARK_CONFIG_NAME, conf);
            }
        }

        if (!extraJarPaths.isEmpty()) {
            for (String path : extraJarPaths) {
                getLog().info("Adding to class path '" + path + "'");
                launcher.addJar(path);
            }
        }
        if (StringUtils.isNotEmpty(yarnQueue)) {
            launcher.setConf(SPARK_YARN_QUEUE, yarnQueue);
        }
        if (StringUtils.isNotEmpty(extraFiles)) {
            launcher.addSparkArg(SPARK_EXTRA_FILES_CONFIG_NAME, extraFiles);
        }

        Process spark = launcher.launch();

        /* Read/clear the process input stream */
        InputStreamReaderRunnable inputStreamReaderRunnable = new InputStreamReaderRunnable(LogLevel.INFO,
                logger, spark.getInputStream());
        Thread inputThread = new Thread(inputStreamReaderRunnable, "stream input");
        inputThread.start();

        /* Read/clear the process error stream */
        InputStreamReaderRunnable errorStreamReaderRunnable = new InputStreamReaderRunnable(LogLevel.INFO,
                logger, spark.getErrorStream());
        Thread errorThread = new Thread(errorStreamReaderRunnable, "stream error");
        errorThread.start();

        logger.info("Waiting for Spark job to complete");

        /* Wait for job completion */
        boolean completed = spark.waitFor(sparkProcessTimeout, TimeUnit.SECONDS);
        if (!completed) {
            spark.destroyForcibly();
            getLog().error("Spark process timed out after {} seconds using flow file: {}  ",
                    new Object[] { sparkProcessTimeout, flowFile });
            session.transfer(flowFile, REL_FAILURE);
            return;
        }

        int exitCode = spark.exitValue();

        flowFile = session.putAttribute(flowFile, PROVENANCE_SPARK_EXIT_CODE_KEY, exitCode + "");
        if (exitCode != 0) {
            logger.error("ExecuteSparkJob for {} and flowfile: {} completed with failed status {} ",
                    new Object[] { context.getName(), flowFile, exitCode });
            flowFile = session.putAttribute(flowFile, PROVENANCE_JOB_STATUS_KEY, "Failed");
            session.transfer(flowFile, REL_FAILURE);
        } else {
            logger.info("ExecuteSparkJob for {} and flowfile: {} completed with success status {} ",
                    new Object[] { context.getName(), flowFile, exitCode });
            flowFile = session.putAttribute(flowFile, PROVENANCE_JOB_STATUS_KEY, "Success");
            session.transfer(flowFile, REL_SUCCESS);
        }
    } catch (final Exception e) {
        logger.error("Unable to execute Spark job {},{}", new Object[] { flowFile, e.getMessage() }, e);
        flowFile = session.putAttribute(flowFile, PROVENANCE_JOB_STATUS_KEY, "Failed With Exception");
        flowFile = session.putAttribute(flowFile, "Spark Exception:", e.getMessage());
        session.transfer(flowFile, REL_FAILURE);
    }
}

From source file:com.azurenight.maven.TroposphereMojo.java

public void runJythonScriptOnInstall(File outputDirectory, List<String> args, File outputFile)
        throws MojoExecutionException {
    getLog().info("running " + args + " in " + outputDirectory);
    ProcessBuilder pb = new ProcessBuilder(args);
    pb.directory(outputDirectory);
    pb.environment().put("BASEDIR", project.getBasedir().getAbsolutePath());
    final Process p;
    ByteArrayOutputStream stdoutBaos = null;
    ByteArrayOutputStream stderrBaos = null;
    try {
        p = pb.start();
    } catch (IOException e) {
        throw new MojoExecutionException("Executing jython failed. tried to run: " + pb.command(), e);
    }
    if (outputFile == null) {
        stdoutBaos = new ByteArrayOutputStream();
        copyIO(p.getInputStream(), stdoutBaos);
    } else {
        try {
            copyIO(p.getInputStream(), new FileOutputStream(outputFile));
        } catch (FileNotFoundException e) {
            throw new MojoExecutionException("Failed to copy output to : " + outputFile.getAbsolutePath(), e);
        }
    }
    stderrBaos = new ByteArrayOutputStream();
    copyIO(p.getErrorStream(), stderrBaos);
    copyIO(System.in, p.getOutputStream());
    try {
        boolean error = false;
        if (p.waitFor() != 0) {
            error = true;
        }
        if (getLog().isDebugEnabled() && stdoutBaos != null) {
            getLog().debug(stdoutBaos.toString());
        }
        if (getLog().isErrorEnabled() && stderrBaos != null) {
            getLog().error(stderrBaos.toString());
        }
        if (error) {
            throw new MojoExecutionException("Jython failed with return code: " + p.exitValue());
        }
    } catch (InterruptedException e) {
        throw new MojoExecutionException("Python tests were interrupted", e);
    }

}