Example usage for org.apache.spark.launcher SparkLauncher setPropertiesFile

Introduction

This page collects real-world examples of org.apache.spark.launcher.SparkLauncher.setPropertiesFile, which sets a custom properties file with Spark configuration for the launched application.

Prototype

@Override
public SparkLauncher setPropertiesFile(String path)

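As a quick orientation before the collected usages, here is a minimal, hypothetical sketch of how setPropertiesFile combines with the other builder methods. All paths and class names below are placeholders, not taken from the sources on this page.

import org.apache.spark.launcher.SparkLauncher;

public class SetPropertiesFileExample {
    public static void main(String[] args) throws Exception {
        // All paths are placeholders; the properties file follows the
        // conf/spark-defaults.conf format (one "key value" or "key=value" pair per line).
        Process spark = new SparkLauncher()
                .setSparkHome("/opt/spark")                   // assumed Spark install location
                .setAppResource("/path/to/my-app.jar")        // placeholder application jar
                .setMainClass("com.example.MyApp")            // placeholder main class
                .setMaster("local[*]")
                .setPropertiesFile("/path/to/my-spark.conf")  // loaded instead of conf/spark-defaults.conf
                .launch();
        spark.waitFor();
    }
}
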
Usage

From source file: com.cloudera.livy.client.local.ContextLauncher.java

License: Apache License

private static ChildProcess startDriver(final RpcServer rpcServer, final LocalConf conf, final String clientId,
        final String secret, final String className) throws IOException {
    final String serverAddress = rpcServer.getAddress();
    final String serverPort = String.valueOf(rpcServer.getPort());
    if (conf.get(CLIENT_IN_PROCESS) != null) {
        // Mostly for testing things quickly. Do not do this in production.
        LOG.warn("!!!! Running remote driver in-process. !!!!");
        Runnable child = new Runnable() {
            @Override
            public void run() {
                List<String> args = new ArrayList<>();
                args.add("--remote-host");
                args.add(serverAddress);
                args.add("--remote-port");
                args.add(serverPort);
                args.add("--client-id");
                args.add(clientId);
                args.add("--secret");
                args.add(secret);

                for (Map.Entry<String, String> e : conf) {
                    args.add("--conf");
                    args.add(String.format("%s=%s", e.getKey(), e.getValue()));
                }
                try {
                    RemoteDriver.main(args.toArray(new String[args.size()]));
                } catch (Exception e) {
                    LOG.error("Error running driver.", e);
                }
            }
        };
        return new ChildProcess(conf, child);
    } else {
        // If a Spark installation is provided, use the spark-submit script. Otherwise, call the
        // SparkSubmit class directly, which has some caveats (like having to provide a proper
        // version of Guava on the classpath depending on the deploy mode).
        final SparkLauncher launcher = new SparkLauncher();
        String sparkHome = conf.get(SPARK_HOME_KEY);
        if (sparkHome == null) {
            sparkHome = System.getenv(SPARK_HOME_ENV);
        }
        if (sparkHome == null) {
            sparkHome = System.getProperty(SPARK_HOME_KEY);
        }
        launcher.setSparkHome(sparkHome);

        conf.set(CLIENT_ID, clientId);
        conf.set(CLIENT_SECRET, secret);

        launcher.setAppResource("spark-internal");

        String livyJars = conf.get(LIVY_JARS);
        if (livyJars == null) {
            String livyHome = System.getenv("LIVY_HOME");
            Preconditions.checkState(livyHome != null, "Need one of LIVY_HOME or %s set.", LIVY_JARS.key());
            File clientJars = new File(livyHome, "client-jars");
            Preconditions.checkState(clientJars.isDirectory(),
                    "Cannot find 'client-jars' directory under LIVY_HOME.");
            List<String> jars = new ArrayList<>();
            for (File f : clientJars.listFiles()) {
                jars.add(f.getAbsolutePath());
            }
            livyJars = Joiner.on(",").join(jars);
        }

        String userJars = conf.get(SPARK_JARS_KEY);
        if (userJars != null) {
            String allJars = Joiner.on(",").join(livyJars, userJars);
            conf.set(SPARK_JARS_KEY, allJars);
        } else {
            conf.set(SPARK_JARS_KEY, livyJars);
        }

        // Disable multiple attempts since the RPC server doesn't yet support multiple
        // connections for the same registered app.
        conf.set("spark.yarn.maxAppAttempts", "1");

        File confFile = writeConfToFile(conf);

        // Define how to pass options to the child process. If launching in client (or local)
        // mode, the driver options need to be passed directly on the command line. Otherwise,
        // SparkSubmit will take care of that for us.
        String master = conf.get("spark.master");
        Preconditions.checkArgument(master != null, "spark.master is not defined.");
        launcher.setMaster(master);
        launcher.setPropertiesFile(confFile.getAbsolutePath());
        launcher.setMainClass(className);
        if (conf.get(PROXY_USER) != null) {
            launcher.addSparkArg("--proxy-user", conf.get(PROXY_USER));
        }
        launcher.addAppArgs("--remote-host", serverAddress);
        launcher.addAppArgs("--remote-port", serverPort);
        return new ChildProcess(conf, launcher.launch());
    }
}
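
The helper writeConfToFile(conf) above is not part of this excerpt. A plausible sketch of such a helper, assuming the conf iterates over String key-value pairs and that the output only needs to be readable as a Java properties file (the real Livy implementation may differ):

import java.io.*;
import java.nio.charset.StandardCharsets;
import java.util.Map;
import java.util.Properties;

// Hypothetical reconstruction: dump the config to a temp file whose path
// is then handed to SparkLauncher.setPropertiesFile().
private static File writeConfToFile(Iterable<Map.Entry<String, String>> conf) throws IOException {
    Properties props = new Properties();
    for (Map.Entry<String, String> e : conf) {
        props.setProperty(e.getKey(), e.getValue());
    }
    File file = File.createTempFile("livyConf", ".properties");
    file.deleteOnExit();
    try (Writer out = new OutputStreamWriter(new FileOutputStream(file), StandardCharsets.UTF_8)) {
        props.store(out, "Livy driver configuration");
    }
    return file;
}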

From source file: com.cloudera.livy.rsc.ContextLauncher.java

License: Apache License

private static ChildProcess startDriver(final RSCConf conf, Promise<?> promise) throws IOException {
    String livyJars = conf.get(LIVY_JARS);
    if (livyJars == null) {
        String livyHome = System.getenv("LIVY_HOME");
        Utils.checkState(livyHome != null, "Need one of LIVY_HOME or %s set.", LIVY_JARS.key());
        File rscJars = new File(livyHome, "rsc-jars");
        if (!rscJars.isDirectory()) {
            rscJars = new File(livyHome, "rsc/target/jars");
        }
        Utils.checkState(rscJars.isDirectory(), "Cannot find the rsc jars directory under LIVY_HOME.");
        List<String> jars = new ArrayList<>();
        for (File f : rscJars.listFiles()) {
            jars.add(f.getAbsolutePath());
        }
        livyJars = Utils.join(jars, ",");
    }
    merge(conf, SPARK_JARS_KEY, livyJars, ",");

    String kind = conf.get(SESSION_KIND);
    if ("sparkr".equals(kind)) {
        merge(conf, SPARK_ARCHIVES_KEY, conf.get(RSCConf.Entry.SPARKR_PACKAGE), ",");
    } else if ("pyspark".equals(kind)) {
        merge(conf, "spark.submit.pyFiles", conf.get(RSCConf.Entry.PYSPARK_ARCHIVES), ",");
    }

    // Disable multiple attempts since the RPC server doesn't yet support multiple
    // connections for the same registered app.
    conf.set("spark.yarn.maxAppAttempts", "1");

    // Let the launcher go away when launcher in yarn cluster mode. This avoids keeping lots
    // of "small" Java processes lingering on the Livy server node.
    conf.set("spark.yarn.submit.waitAppCompletion", "false");

    // For testing; propagate jacoco settings so that we also do coverage analysis
    // on the launched driver. We replace the name of the main file ("main.exec")
    // so that we don't end up fighting with the main test launcher.
    String jacocoArgs = System.getProperty("jacoco.args");
    if (jacocoArgs != null) {
        jacocoArgs = jacocoArgs.replace("main.exec", "child.exec");
        merge(conf, SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS, jacocoArgs, " ");
    }

    final File confFile = writeConfToFile(conf);

    if (conf.getBoolean(CLIENT_IN_PROCESS)) {
        // Mostly for testing things quickly. Do not do this in production.
        LOG.warn("!!!! Running remote driver in-process. !!!!");
        Runnable child = new Runnable() {
            @Override
            public void run() {
                try {
                    RSCDriverBootstrapper.main(new String[] { confFile.getAbsolutePath() });
                } catch (Exception e) {
                    throw Utils.propagate(e);
                }
            }
        };
        return new ChildProcess(conf, promise, child, confFile);
    } else {
        final SparkLauncher launcher = new SparkLauncher();
        launcher.setSparkHome(System.getenv(SPARK_HOME_ENV));
        launcher.setAppResource("spark-internal");
        launcher.setPropertiesFile(confFile.getAbsolutePath());
        launcher.setMainClass(RSCDriverBootstrapper.class.getName());

        if (conf.get(PROXY_USER) != null) {
            launcher.addSparkArg("--proxy-user", conf.get(PROXY_USER));
        }

        return new ChildProcess(conf, promise, launcher.launch(), confFile);
    }
}
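
Both Livy variants hand the properties file to the launcher and then call launch(), which returns a plain java.lang.Process. When the caller needs lifecycle callbacks instead of a raw process, SparkLauncher (Spark 1.6+) also provides startApplication, which throws IOException and returns a SparkAppHandle. A minimal sketch; the listener body is illustrative, not from the Livy sources:

import org.apache.spark.launcher.SparkAppHandle;

SparkAppHandle handle = launcher.startApplication(new SparkAppHandle.Listener() {
    @Override
    public void stateChanged(SparkAppHandle h) {
        System.out.println("Spark app state: " + h.getState()); // e.g. SUBMITTED, RUNNING, FINISHED
    }

    @Override
    public void infoChanged(SparkAppHandle h) {
        // h.getAppId() becomes non-null once the application is registered
    }
});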

From source file: com.uber.hoodie.cli.utils.SparkUtil.java

License: Apache License

/**
 * TODO: Need to fix a bunch of hardcoded stuff here, e.g. history server, spark distro.
 */
public static SparkLauncher initLauncher(String propertiesFile) throws URISyntaxException {
    String currentJar = new File(
            SparkUtil.class.getProtectionDomain().getCodeSource().getLocation().toURI().getPath())
                    .getAbsolutePath();
    SparkLauncher sparkLauncher = new SparkLauncher().setAppResource(currentJar)
            .setMainClass(SparkMain.class.getName());

    if (StringUtils.isNotEmpty(propertiesFile)) {
        sparkLauncher.setPropertiesFile(propertiesFile);
    }

    File libDirectory = new File(new File(currentJar).getParent(), "lib");
    String[] libraries = libDirectory.list();
    if (libraries != null) { // File.list() returns null when the directory is missing
        for (String library : libraries) {
            sparkLauncher.addJar(new File(libDirectory, library).getAbsolutePath());
        }
    }
    return sparkLauncher;
}
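
A caller of initLauncher would typically add its own application arguments and then launch the child process. A sketch with placeholder values; the properties path and argument strings are not taken from the Hudi sources:

// Placeholder properties path; initLauncher is the method above.
SparkLauncher launcher = SparkUtil.initLauncher("/path/to/hoodie-spark.properties");
launcher.addAppArgs("compact", "my_table");   // hypothetical SparkMain arguments
Process process = launcher.launch();
int exitCode = process.waitFor();
if (exitCode != 0) {
    System.err.println("Spark job failed with exit code " + exitCode);
}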

From source file: org.apache.eagle.app.environment.impl.SparkExecutionRuntime.java

License: Apache License

private SparkLauncher prepareSparkConfig(Config config) {
    String master = config.hasPath(TOPOLOGY_MASTER) ? config.getString(TOPOLOGY_MASTER) : "local[*]";
    String sparkExecutorCores = config.getString(SPARK_EXECUTOR_CORES);
    String sparkExecutorMemory = config.getString(SPARK_EXECUTOR_MEMORY);
    String driverMemory = config.getString(DRIVER_MEMORY);
    String driverCore = config.getString(DRIVER_CORES);
    String deployMode = config.getString(DEPLOY_MODE);
    String enable = config.getString(TOPOLOGY_DYNAMICALLOCATION);
    boolean verbose = config.getBoolean(TOPOLOGY_VERBOSE);
    String mainClass = config.getString(TOPOLOGY_MAINCLASS);
    String sparkHome = config.getString(TOPOLOGY_SPARKHOME);
    String uiport = config.getString(TOPOLOGY_SPARKUIPORT);
    String appResource = config.getString(TOPOLOGY_APPRESOURCE);
    String yarnqueue = config.getString(TOPOLOGY_YARNQUEUE);

    SparkLauncher sparkLauncher = new SparkLauncher();
    sparkLauncher.setMaster(master);
    sparkLauncher.setMainClass(mainClass);
    sparkLauncher.setSparkHome(sparkHome);
    //sparkLauncher.setJavaHome(TOPOLOGY_JAVAHOME);
    sparkLauncher.setDeployMode(deployMode);
    sparkLauncher.setVerbose(verbose);
    sparkLauncher.setAppResource(appResource);
    sparkLauncher.setAppName(config.getString(TOPOLOGY_NAME));
    sparkLauncher.setConf("spark.yarn.queue", yarnqueue);
    sparkLauncher.setConf("spark.executor.cores", sparkExecutorCores);
    sparkLauncher.setConf("spark.executor.memory", sparkExecutorMemory);
    sparkLauncher.setConf("spark.driver.memory", driverMemory);
    sparkLauncher.setConf("spark.driver.cores", driverCore);
    sparkLauncher.setConf("spark.streaming.dynamicAllocation.enable", enable);
    sparkLauncher.setConf("spark.ui.port", uiport);
    String path = config.getString(TOPOLOGY_SPARKCONFFILEPATH);
    if (StringUtil.isNotBlank(path)) {
        sparkLauncher.setPropertiesFile(path);
    }

    String batchDuration = config.getString(BATCH_DURATION);
    String routerTasknum = config.getString(ROUTER_TASK_NUM);
    String alertTasknum = config.getString(ALERT_TASK_NUM);
    String publishTasknum = config.getString(PUBLISH_TASK_NUM);
    String slideDurationsecond = config.getString(SLIDE_DURATION_SECOND);
    String windowDurationssecond = config.getString(WINDOW_DURATIONS_SECOND);
    String checkpointPath = config.getString(CHECKPOINT_PATH);
    String topologyGroupid = config.getString(TOPOLOGY_GROUPID);
    String autoOffsetReset = config.getString(AUTO_OFFSET_RESET);
    String restApihost = config.getString(EAGLE_CORRELATION_SERVICE_HOST);
    String restApiport = config.getString(EAGLE_CORRELATION_SERVICE_PORT);
    String restApicontext = config.getString(EAGLE_CORRELATION_CONTEXT);
    String useMultiKafka = config.getString(TOPOLOGY_MULTIKAFKA);
    String kafkaBrokerZkQuorum = config.getString(SPOUT_KAFKABROKERZKQUORUM);
    String zkConfigzkQuorum = config.getString(ZKCONFIG_ZKQUORUM);

    sparkLauncher.addAppArgs(batchDuration, routerTasknum, alertTasknum, publishTasknum, slideDurationsecond,
            windowDurationssecond, checkpointPath, topologyGroupid, autoOffsetReset, restApicontext,
            restApiport, restApihost, useMultiKafka, kafkaBrokerZkQuorum, zkConfigzkQuorum);
    return sparkLauncher;
}
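
In all of these examples, the file handed to setPropertiesFile uses the conf/spark-defaults.conf syntax: one configuration key per line, separated from its value by whitespace or '='. A hypothetical file matching the Eagle launcher above might look like:

spark.master               yarn
spark.submit.deployMode    cluster
spark.executor.memory      2g
spark.executor.cores       2
spark.yarn.queue           default

Note that values set programmatically via setConf are passed to spark-submit as --conf arguments, which take precedence over entries in the properties file.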

From source file: org.cripac.isee.vpe.ctrl.SystemPropertyCenter.java

License: Open Source License

SparkLauncher GetSparkLauncher(String appName) throws IOException, NoAppSpecifiedException {
    SparkLauncher launcher = new SparkLauncher().setAppResource(jarPath)
            .setMainClass(AppManager.getMainClassName(appName)).setMaster(sparkMaster).setAppName(appName)
            .setVerbose(verbose).addFile(ConfManager.getConcatCfgFilePathList(","))
            .setConf(SparkLauncher.DRIVER_MEMORY, driverMem).setConf(SparkLauncher.EXECUTOR_MEMORY, executorMem)
            .setConf(SparkLauncher.CHILD_PROCESS_LOGGER_NAME, appName)
            .setConf(SparkLauncher.EXECUTOR_CORES, "" + executorCores)
            .setConf("spark.driver.extraJavaOptions", "-Dlog4j.configuration=log4j.properties")
            .setConf("spark.executor.extraJavaOptions", "-Dlog4j.configuration=log4j.properties")
            .setConf("spark.yarn.am.nodeLabelExpression", yarnAmNodeLabelExpression)
            .addSparkArg("--driver-cores", "" + driverCores).addSparkArg("--num-executors", "" + numExecutors)
            .addSparkArg("--total-executor-cores", "" + totalExecutorCores).addSparkArg("--queue", hadoopQueue)
            .addAppArgs(getArgs());
    if (sparkConfFilePath != null) {
        if (new File(sparkConfFilePath).exists()) {
            launcher = launcher.setPropertiesFile(sparkConfFilePath);
        } else {
            logger.warn("Spark configuration file " + sparkConfFilePath + " does not exist!");
        }
    }
    if (log4jPropFilePath != null) {
        if (new File(log4jPropFilePath).exists()) {
            launcher = launcher.addFile(log4jPropFilePath);
        } else {
            logger.warn("Loj4j configuration file " + log4jPropFilePath + " does not exist!");
        }
    }
    if (sysPropFilePath != null) {
        if (new File(sysPropFilePath).exists()) {
            launcher = launcher.addFile(sysPropFilePath);
        } else {
            logger.warn("System configuration file " + sysPropFilePath + " does not exist!");
        }
    }
    if (appPropFilePath != null) {
        if (new File(appPropFilePath).exists()) {
            launcher = launcher.addFile(appPropFilePath);
        } else {
            logger.warn("App configuration file " + appPropFilePath + " does not exist!");
        }
    }
    return launcher;
}