Example usage for org.apache.spark.launcher SparkLauncher startApplication

Introduction

On this page you can find example usage for org.apache.spark.launcher SparkLauncher startApplication.

Prototype

@Override
public SparkAppHandle startApplication(SparkAppHandle.Listener... listeners) throws IOException 

Document

Starts a Spark application.
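Before the full example, here is a minimal, self-contained sketch of how startApplication is typically invoked. The application jar path, main class, and cluster settings are placeholder assumptions, not values taken from the example further down.

import java.io.IOException;

import org.apache.spark.launcher.SparkAppHandle;
import org.apache.spark.launcher.SparkLauncher;

public class StartApplicationSketch {
    public static void main(String[] args) throws IOException {
        SparkAppHandle handle = new SparkLauncher()
                .setAppResource("/path/to/app.jar")      // placeholder jar
                .setMainClass("com.example.MySparkApp")  // placeholder main class
                .setMaster("yarn")
                .setDeployMode("cluster")
                .startApplication(new SparkAppHandle.Listener() {
                    @Override
                    public void stateChanged(SparkAppHandle h) {
                        System.out.println("State changed: " + h.getState());
                    }

                    @Override
                    public void infoChanged(SparkAppHandle h) {
                        System.out.println("App id: " + h.getAppId());
                    }
                });

        // The returned handle can be used to poll the state or to stop/kill the application.
        System.out.println("Submitted; current state: " + handle.getState());
    }
}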

Usage

From source file: com.streamsets.datacollector.pipeline.executor.spark.yarn.YarnAppLauncher.java

License: Apache License

@Override
public Optional<String> launchApp(Record record) throws ApplicationLaunchFailureException, ELEvalException {

    SparkLauncher launcher = getLauncher();

    // A main class is only required for JVM (Java/Scala) applications; Python
    // applications are driven directly by the app resource.
    if (yarnConfigs.language == Language.JVM) {
        launcher.setMainClass(yarnConfigs.mainClass);
    }

    launcher.setAppResource(yarnConfigs.appResource).setAppName(yarnConfigs.appName).setMaster(YARN)
            .setDeployMode(yarnConfigs.deployMode.getLabel().toLowerCase()).setVerbose(yarnConfigs.verbose);

    // Dynamic allocation requires the external shuffle service, so both settings are
    // enabled together; otherwise a fixed number of executors is requested.
    if (yarnConfigs.dynamicAllocation) {
        launcher.setConf("spark.dynamicAllocation.enabled", "true");
        launcher.setConf("spark.shuffle.service.enabled", "true");
        launcher.setConf("spark.dynamicAllocation.minExecutors", String.valueOf(yarnConfigs.minExecutors));
        launcher.setConf("spark.dynamicAllocation.maxExecutors", String.valueOf(yarnConfigs.maxExecutors));
    } else {
        launcher.setConf("spark.dynamicAllocation.enabled", "false");
        launcher.addSparkArg("--num-executors", String.valueOf(yarnConfigs.numExecutors));
    }

    launcher.addSparkArg("--executor-memory", yarnConfigs.executorMemory);
    launcher.addSparkArg("--driver-memory", yarnConfigs.driverMemory);

    // In cluster mode, spark.yarn.submit.waitAppCompletion keeps the submitting process
    // alive until the YARN application terminates.
    if (yarnConfigs.deployMode == DeployMode.CLUSTER && yarnConfigs.waitForCompletion) {
        launcher.setConf("spark.yarn.submit.waitAppCompletion", "true");
    }

    // Default is empty string, so pass only non-empty ones.
    yarnConfigs.noValueArgs.forEach((String arg) -> applyConfIfPresent(arg, launcher::addSparkArg));
    yarnConfigs.args.forEach((String k, String v) -> applyConfIfPresent(k, v, launcher::addSparkArg));

    // For files, no need of removing empty strings, since we verify the file exists in init itself.
    yarnConfigs.additionalFiles.forEach(launcher::addFile);
    yarnConfigs.additionalJars.forEach(launcher::addJar);
    yarnConfigs.pyFiles.forEach(launcher::addPyFile);

    launcher.addAppArgs(getNonEmptyArgs(yarnConfigs.evaluateArgsELs(record)));

    // Optional settings (JAVA_HOME, Kerberos principal/keytab, proxy user, SPARK_HOME)
    // are applied only when configured.
    applyConfIfPresent(configs.javaHome, launcher::setJavaHome);
    applyConfIfPresent("spark.yarn.principal", configs.credentialsConfigBean.principal, launcher::setConf);
    applyConfIfPresent("spark.yarn.keytab", configs.credentialsConfigBean.keytab, launcher::setConf);
    applyConfIfPresent("--proxy-user", yarnConfigs.proxyUser, launcher::addSparkArg);
    applyConfIfPresent(configs.sparkHome, launcher::setSparkHome);

    timeout = yarnConfigs.waitTimeout;

    try {
        final SparkAppHandle handle = launcher.startApplication(new AppListener());
        // The application id may not be available yet right after submission, hence Optional.ofNullable.
        return Optional.ofNullable(handle.getAppId());
    } catch (IOException ex) {
        latch.countDown();
        throw new ApplicationLaunchFailureException(ex);
    } catch (Throwable ex) { // NOSONAR
        latch.countDown();
        throw ex;
    }
}
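
The AppListener passed to startApplication above is defined elsewhere in YarnAppLauncher and is not shown on this page. A plausible sketch, assuming the listener's job is to release the same latch the catch blocks count down once the application reaches a terminal state:

import org.apache.spark.launcher.SparkAppHandle;

// Hypothetical inner-class sketch; assumes `latch` is a CountDownLatch field of the
// enclosing launcher, as suggested by the catch blocks above.
private class AppListener implements SparkAppHandle.Listener {

    @Override
    public void stateChanged(SparkAppHandle handle) {
        // Release waiters once the application reaches a terminal state
        // (FINISHED, FAILED, KILLED or LOST).
        if (handle.getState().isFinal()) {
            latch.countDown();
        }
    }

    @Override
    public void infoChanged(SparkAppHandle handle) {
        // Fired when application info such as the id changes; nothing to do here.
    }
}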