Example usage for java.io.File.pathSeparator

Introduction

This page lists usage examples for java.io.File.pathSeparator.

Prototype

public static final String pathSeparator

Document

The system-dependent path-separator character, represented as a string for convenience.

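Before the examples, here is a minimal, self-contained sketch (not taken from any of the sources below) contrasting File.pathSeparator, which separates entries in a path list such as the classpath, with File.separator, which separates directory names within a single path:

import java.io.File;
import java.util.Arrays;

public class PathSeparatorDemo {
    public static void main(String[] args) {
        // ":" on Unix-like systems, ";" on Windows
        System.out.println("pathSeparator = " + File.pathSeparator);
        // "/" on Unix-like systems, "\" on Windows
        System.out.println("separator     = " + File.separator);

        // Split the running JVM's classpath into its individual entries.
        String[] entries = System.getProperty("java.class.path").split(File.pathSeparator);
        System.out.println("classpath entries: " + Arrays.toString(entries));
    }
}
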
Usage

From source file: io.fabric8.kubernetes.client.ConfigTest.java

@Test
public void testWithMultipleKubeConfigAndOverrideContext() {
    System.setProperty(Config.KUBERNETES_KUBECONFIG_FILE,
            TEST_KUBECONFIG_FILE + File.pathSeparator + "some-other-file");

    Config config = Config.autoConfigure("production/172-28-128-4:8443/root");
    assertNotNull(config);

    assertEquals("https://172.28.128.4:8443/", config.getMasterUrl());
    assertEquals("production", config.getNamespace());
    assertEquals("supertoken", config.getOauthToken());
    assertTrue(config.getCaCertFile().endsWith("testns/ca.pem".replace("/", File.separator)));
    assertTrue(new File(config.getCaCertFile()).isAbsolute());
}
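
The test joins two kubeconfig paths with File.pathSeparator, mirroring how the KUBECONFIG environment variable lists multiple files. A consumer can recover the individual paths with a plain split; a minimal sketch, assuming the property was set as in the test above:

String value = System.getProperty(Config.KUBERNETES_KUBECONFIG_FILE);
// e.g. ["/path/to/test-kubeconfig", "some-other-file"] on Unix-like systems
String[] kubeconfigFiles = value.split(File.pathSeparator);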

From source file: net.cliseau.composer.javacor.MissingToolException.java

/**
 * Compiles the unit startup file from Java source to Java bytecode.
 *
 * This compiles the startup file. During this process, multiple class files
 * may be generated even though only a single input file to compile is
 * specified. The reason for this is that classes other than the main class
 * may be defined in the single file and are written to distinct class
 * files. All created files are collected and returned by the method.
 *
 * @param startupFile The file to be compiled.
 * @param startupDependencies Names of classpath entries to use for compilation.
 * @return List of names of created (class) files during compilation.
 * @exception UnitGenerationException Thrown when finding a Java compiler failed.
 */
private LinkedList<String> compile(final File startupFile, final Collection<String> startupDependencies)
        throws MissingToolException, InvalidConfigurationException {
    // code inspired by the examples at
    //   http://docs.oracle.com/javase/6/docs/api/javax/tools/JavaCompiler.html
    final LinkedList<String> createdFiles = new LinkedList<String>();

    // set the file manager, which records the written files
    JavaCompiler compiler = ToolProvider.getSystemJavaCompiler();
    if (compiler == null) {
        throw new MissingToolException("Could not find system Java compiler.");
    }
    StandardJavaFileManager stdFileManager = compiler.getStandardFileManager(null, null, null);
    JavaFileManager fileManager = new ForwardingJavaFileManager<StandardJavaFileManager>(stdFileManager) {
        /**
         * Collect the list of all output (class) files.
         *
         * Besides its side-effect on the createdFiles list of the containing
         * method, this method is functionally equivalent to its superclass
         * version.
         */
        public JavaFileObject getJavaFileForOutput(JavaFileManager.Location location, String className,
                JavaFileObject.Kind kind, FileObject sibling) throws IOException {
            JavaFileObject fileForOutput = super.getJavaFileForOutput(location, className, kind, sibling);
            createdFiles.addLast(fileForOutput.getName());
            return fileForOutput;
        }
    };

    // set the files to compile
    Iterable<? extends JavaFileObject> compilationUnits = stdFileManager
            .getJavaFileObjectsFromFiles(Arrays.asList(startupFile));

    // do the actual compilation
    ArrayList<String> compileParams = new ArrayList<String>(2);

    boolean verbose = org.apache.log4j.Level.DEBUG.isGreaterOrEqual(config.getInstantiationLogLevel());
    if (verbose)
        compileParams.add("-verbose");

    compileParams
            .addAll(Arrays.asList("-classpath", StringUtils.join(startupDependencies, File.pathSeparator)));
    if (!compiler.getTask(null, fileManager, null, compileParams, null, compilationUnits).call()) {
        // could not compile all files without error
        //TODO: throw an exception ... see where to get the required information from
    }
    return createdFiles;
}

From source file: org.codehaus.mojo.axistools.Java2WSDLMojo.java

/**
 * Computes the runtime classpath.
 * 
 * @return A representation of the computed runtime classpath.
 * @throws MojoExecutionException in case of dependency resolution failure
 */
private String getCompileClasspath() throws MojoExecutionException {
    try {
        // get the union of compile- and runtime classpath elements
        Set dependencySet = new LinkedHashSet();
        dependencySet.addAll(project.getCompileClasspathElements());
        dependencySet.add(classesDirectory.getAbsolutePath());
        String compileClasspath = StringUtils.join(dependencySet, File.pathSeparator);

        return compileClasspath;
    } catch (DependencyResolutionRequiredException e) {
        throw new MojoExecutionException(e.getMessage(), e);
    }
}
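
On Java 8 and later the same join needs no Commons Lang. A minimal equivalent sketch, assuming the dependency set is typed as Set<String>:

Set<String> dependencySet = new LinkedHashSet<>(project.getCompileClasspathElements());
dependencySet.add(classesDirectory.getAbsolutePath());
String compileClasspath = String.join(File.pathSeparator, dependencySet);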

From source file: com.bluemarsh.jswat.console.Main.java

/**
 * Process the given command line arguments.
 *
 * @param  args  command line arguments.
 * @throws  ParseException  if argument parsing fails.
 */
private static void processArguments(String[] args) throws ParseException {
    Options options = new Options();
    // Option: h/help
    OptionBuilder.withDescription(NbBundle.getMessage(Main.class, "MSG_Main_Option_help"));
    OptionBuilder.withLongOpt("help");
    options.addOption(OptionBuilder.create("h"));

    // Option: attach <port>
    OptionBuilder.hasArg();
    OptionBuilder.withArgName("port");
    OptionBuilder.withDescription(NbBundle.getMessage(Main.class, "MSG_Main_Option_attach"));
    options.addOption(OptionBuilder.create("attach"));

    // Option: sourcepath <path>
    OptionBuilder.hasArg();
    OptionBuilder.withArgName("path");
    OptionBuilder.withDescription(NbBundle.getMessage(Main.class, "MSG_Main_Option_sourcepath"));
    options.addOption(OptionBuilder.create("sourcepath"));

    // Option: e/emacs
    OptionBuilder.withDescription(NbBundle.getMessage(Main.class, "MSG_Main_Option_jdb"));
    options.addOption(OptionBuilder.create("jdb"));

    // Parse the command line arguments.
    CommandLineParser parser = new GnuParser();
    CommandLine line = parser.parse(options, args);

    // Interrogate the command line options.
    jdbEmulationMode = line.hasOption("jdb");
    if (line.hasOption("help")) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("java com.bluemarsh.jswat.console.Main", options);
        System.exit(0);
    }
    if (line.hasOption("sourcepath")) {
        Session session = SessionProvider.getCurrentSession();
        PathManager pm = PathProvider.getPathManager(session);
        String path = line.getOptionValue("sourcepath");
        List<String> roots = Strings.stringToList(path, File.pathSeparator);
        pm.setSourcePath(roots);
    }
    if (line.hasOption("attach")) {
        final Session session = SessionProvider.getCurrentSession();
        String port = line.getOptionValue("attach");
        ConnectionFactory factory = ConnectionProvider.getConnectionFactory();
        final JvmConnection connection;
        try {
            connection = factory.createSocket("localhost", port);
            // The actual connection may be made some time from now,
            // so set up a listener to be notified at that time.
            connection.addConnectionListener(new ConnectionListener() {

                @Override
                public void connected(ConnectionEvent event) {
                    if (session.isConnected()) {
                        // The user already connected to something else.
                        JvmConnection c = event.getConnection();
                        c.getVM().dispose();
                        c.disconnect();
                    } else {
                        session.connect(connection);
                    }
                }
            });
            connection.connect();
        } catch (Exception e) {
            logger.log(Level.SEVERE, null, e);
        }
    }
}
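
Strings.stringToList above is a JSwat utility. With only the JDK, the sourcepath handling would look roughly like this (a sketch, assuming the utility simply splits on the given separator):

String path = line.getOptionValue("sourcepath");
List<String> roots = Arrays.asList(path.split(File.pathSeparator));
pm.setSourcePath(roots);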

From source file: org.zywx.wbpalmstar.widgetone.WidgetOneApplication.java

private void initClassLoader() {
    try {
        pluginJars = getAssets().list(dexJar);

        if (pluginJars != null && pluginJars.length > 0) {

            // create the dexPath

            int PluginCount = pluginJars.length;
            String dexPath = cachePath + File.separator + dexJar;
            StringBuilder sb = new StringBuilder();
            for (int i = 0; i < PluginCount; i++) {
                sb.append(dexPath).append(File.separator).append(pluginJars[i]).append(File.pathSeparator);
            }
            dexPath = sb.toString();

            // create the optPath

            String optPath = cachePath + File.separator + optFile;
            File dirFile = new File(optPath);
            if (!dirFile.exists()) {
                dirFile.mkdirs();
            }
            String libPath = cachePath + File.separator + dexLib;

            // create the dexclassloader
            DexClassLoader dexCl = new DexClassLoader(dexPath, optPath, libPath, getClassLoader());

            // use reflection tech replace the current classloader

            Context mBase = new Smith<Context>(this, "mBase").get();

            Object mPackageInfo = new Smith<Object>(mBase, "mPackageInfo").get();

            Smith<ClassLoader> sClassLoader = new Smith<ClassLoader>(mPackageInfo, "mClassLoader");
            sClassLoader.set(dexCl);

        }

        /*
         * Field mMainThread =
         * Activity.class.getDeclaredField("mMainThread");
         * mMainThread.setAccessible(true); Object mainThread =
         * mMainThread.get((EBrowserActivity) context); Class threadClass =
         * mainThread.getClass(); Field mPackages =
         * threadClass.getDeclaredField("mPackages");
         * mPackages.setAccessible(true); WeakReference<?> ref; Map<String,
         * ?> map = (Map<String, ?>) mPackages.get(mainThread); ref =
         * (WeakReference<?>) map.get(context.getPackageName()); Object apk
         * = ref.get(); Class apkClass = apk.getClass();
         * 
         * Field mClassLoader = apkClass.getDeclaredField("mClassLoader");
         * mClassLoader.setAccessible(true); mClassLoader.set(apk, dexCl);
         */

    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file: org.alfresco.util.exec.RuntimeExec.java

/**
 * Set additional runtime properties (environment properties) that will be used
 * by the executing process.
 * <p>
 * Any keys or values that start and end with <b>${...}</b> will be removed on the assumption
 * that these are unset properties.  <tt>null</tt> values are translated to empty strings.
 * All keys and values are trimmed of leading and trailing whitespace.
 * 
 * @param processProperties     Runtime process properties
 * 
 * @see Runtime#exec(String, String[], java.io.File)
 */
public void setProcessProperties(Map<String, String> processProperties) {
    ArrayList<String> processPropList = new ArrayList<String>(processProperties.size());
    boolean hasPath = false;
    String systemPath = System.getenv("PATH");
    for (Map.Entry<String, String> entry : processProperties.entrySet()) {
        String key = entry.getKey();
        String value = entry.getValue();
        if (key == null) {
            continue;
        }
        if (value == null) {
            value = "";
        }
        key = key.trim();
        value = value.trim();
        if (key.startsWith(VAR_OPEN) && key.endsWith(VAR_CLOSE)) {
            continue;
        }
        if (value.startsWith(VAR_OPEN) && value.endsWith(VAR_CLOSE)) {
            continue;
        }
        // If a path is specified, prepend it to the existing path
        if (key.equals("PATH")) {
            if (systemPath != null && systemPath.length() > 0) {
                processPropList.add(key + "=" + value + File.pathSeparator + systemPath);
            } else {
                processPropList.add(key + "=" + value);
            }
            hasPath = true;
        } else {
            processPropList.add(key + "=" + value);
        }
    }
    // If a path was not specified, inherit the current one
    if (!hasPath && systemPath != null && systemPath.length() > 0) {
        processPropList.add("PATH=" + systemPath);
    }
    this.processProperties = processPropList.toArray(new String[processPropList.size()]);
}
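
The PATH handling above prepends the caller-supplied value to the inherited PATH before the array is handed to Runtime.exec. A minimal sketch of the same idea using ProcessBuilder, whose environment map is mutable (the command and directory below are illustrative, not from RuntimeExec):

ProcessBuilder pb = new ProcessBuilder("mytool");   // hypothetical command
String extraBin = "/opt/mytool/bin";                // hypothetical PATH entry to prepend
String inherited = pb.environment().get("PATH");
pb.environment().put("PATH", (inherited == null || inherited.isEmpty())
        ? extraBin
        : extraBin + File.pathSeparator + inherited);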

From source file: it.govpay.core.utils.PspUtils.java

private static byte[] getLogo(String subFolder, String codPsp) {
    try {
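        // Note: File.pathSeparator is used here to build a directory path; File.separator is
        // almost certainly what was intended, so on most platforms the lookup fails and the
        // catch block falls back to the default logoPsp.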
        Collection<File> logos = FileUtils.listFiles(
                new File(GovpayConfig.getInstance().getLogoDir() + File.pathSeparator + subFolder),
                new PrefixFileFilter(codPsp), null);
        File logo = logos.iterator().next();
        return IOUtils.toByteArray(new FileInputStream(logo));
    } catch (Throwable t) {
        return logoPsp;
    }
}

From source file: org.apache.hive.spark.client.SparkClientImpl.java

private Thread startDriver(final RpcServer rpcServer, final String clientId, final String secret)
        throws IOException {
    Runnable runnable;
    final String serverAddress = rpcServer.getAddress();
    final String serverPort = String.valueOf(rpcServer.getPort());

    if (conf.containsKey(SparkClientFactory.CONF_KEY_IN_PROCESS)) {
        // Mostly for testing things quickly. Do not do this in production.
        // when invoked in-process it inherits the environment variables of the parent
        LOG.warn("!!!! Running remote driver in-process. !!!!");
        runnable = new Runnable() {
            @Override
            public void run() {
                List<String> args = Lists.newArrayList();
                args.add("--remote-host");
                args.add(serverAddress);
                args.add("--remote-port");
                args.add(serverPort);
                args.add("--client-id");
                args.add(clientId);
                args.add("--secret");
                args.add(secret);

                for (Map.Entry<String, String> e : conf.entrySet()) {
                    args.add("--conf");
                    args.add(String.format("%s=%s", e.getKey(), conf.get(e.getKey())));
                }
                try {
                    RemoteDriver.main(args.toArray(new String[args.size()]));
                } catch (Exception e) {
                    LOG.error("Error running driver.", e);
                }
            }
        };
    } else {
        // If a Spark installation is provided, use the spark-submit script. Otherwise, call the
        // SparkSubmit class directly, which has some caveats (like having to provide a proper
        // version of Guava on the classpath depending on the deploy mode).
        String sparkHome = Strings.emptyToNull(conf.get(SPARK_HOME_KEY));
        if (sparkHome == null) {
            sparkHome = Strings.emptyToNull(System.getenv(SPARK_HOME_ENV));
        }
        if (sparkHome == null) {
            sparkHome = Strings.emptyToNull(System.getProperty(SPARK_HOME_KEY));
        }
        String sparkLogDir = conf.get("hive.spark.log.dir");
        if (sparkLogDir == null) {
            if (sparkHome == null) {
                sparkLogDir = "./target/";
            } else {
                sparkLogDir = sparkHome + "/logs/";
            }
        }

        String osxTestOpts = "";
        if (Strings.nullToEmpty(System.getProperty("os.name")).toLowerCase().contains("mac")) {
            osxTestOpts = Strings.nullToEmpty(System.getenv(OSX_TEST_OPTS));
        }

        String driverJavaOpts = Joiner.on(" ").skipNulls().join("-Dhive.spark.log.dir=" + sparkLogDir,
                osxTestOpts, conf.get(DRIVER_OPTS_KEY));
        String executorJavaOpts = Joiner.on(" ").skipNulls().join("-Dhive.spark.log.dir=" + sparkLogDir,
                osxTestOpts, conf.get(EXECUTOR_OPTS_KEY));

        // Create a file with all the job properties to be read by spark-submit. Change the
        // file's permissions so that only the owner can read it. This avoids having the
        // connection secret show up in the child process's command line.
        File properties = File.createTempFile("spark-submit.", ".properties");
        if (!properties.setReadable(false) || !properties.setReadable(true, true)) {
            throw new IOException("Cannot change permissions of job properties file.");
        }
        properties.deleteOnExit();

        Properties allProps = new Properties();
        // first load the defaults from spark-defaults.conf if available
        try {
            URL sparkDefaultsUrl = Thread.currentThread().getContextClassLoader()
                    .getResource("spark-defaults.conf");
            if (sparkDefaultsUrl != null) {
                LOG.info("Loading spark defaults: " + sparkDefaultsUrl);
                allProps.load(new ByteArrayInputStream(Resources.toByteArray(sparkDefaultsUrl)));
            }
        } catch (Exception e) {
            String msg = "Exception trying to load spark-defaults.conf: " + e;
            throw new IOException(msg, e);
        }
        // then load the SparkClientImpl config
        for (Map.Entry<String, String> e : conf.entrySet()) {
            allProps.put(e.getKey(), conf.get(e.getKey()));
        }
        allProps.put(SparkClientFactory.CONF_CLIENT_ID, clientId);
        allProps.put(SparkClientFactory.CONF_KEY_SECRET, secret);
        allProps.put(DRIVER_OPTS_KEY, driverJavaOpts);
        allProps.put(EXECUTOR_OPTS_KEY, executorJavaOpts);

        String isTesting = conf.get("spark.testing");
        if (isTesting != null && isTesting.equalsIgnoreCase("true")) {
            String hiveHadoopTestClasspath = Strings.nullToEmpty(System.getenv("HIVE_HADOOP_TEST_CLASSPATH"));
            if (!hiveHadoopTestClasspath.isEmpty()) {
                String extraDriverClasspath = Strings
                        .nullToEmpty((String) allProps.get(DRIVER_EXTRA_CLASSPATH));
                if (extraDriverClasspath.isEmpty()) {
                    allProps.put(DRIVER_EXTRA_CLASSPATH, hiveHadoopTestClasspath);
                } else {
                    extraDriverClasspath = extraDriverClasspath.endsWith(File.pathSeparator)
                            ? extraDriverClasspath
                            : extraDriverClasspath + File.pathSeparator;
                    allProps.put(DRIVER_EXTRA_CLASSPATH, extraDriverClasspath + hiveHadoopTestClasspath);
                }

                String extraExecutorClasspath = Strings
                        .nullToEmpty((String) allProps.get(EXECUTOR_EXTRA_CLASSPATH));
                if (extraExecutorClasspath.isEmpty()) {
                    allProps.put(EXECUTOR_EXTRA_CLASSPATH, hiveHadoopTestClasspath);
                } else {
                    extraExecutorClasspath = extraExecutorClasspath.endsWith(File.pathSeparator)
                            ? extraExecutorClasspath
                            : extraExecutorClasspath + File.pathSeparator;
                    allProps.put(EXECUTOR_EXTRA_CLASSPATH, extraExecutorClasspath + hiveHadoopTestClasspath);
                }
            }
        }

        Writer writer = new OutputStreamWriter(new FileOutputStream(properties), Charsets.UTF_8);
        try {
            allProps.store(writer, "Spark Context configuration");
        } finally {
            writer.close();
        }

        // Define how to pass options to the child process. If launching in client (or local)
        // mode, the driver options need to be passed directly on the command line. Otherwise,
        // SparkSubmit will take care of that for us.
        String master = conf.get("spark.master");
        Preconditions.checkArgument(master != null, "spark.master is not defined.");
        String deployMode = conf.get("spark.submit.deployMode");

        List<String> argv = Lists.newLinkedList();

        if (sparkHome != null) {
            argv.add(new File(sparkHome, "bin/spark-submit").getAbsolutePath());
        } else {
            LOG.info("No spark.home provided, calling SparkSubmit directly.");
            argv.add(new File(System.getProperty("java.home"), "bin/java").getAbsolutePath());

            if (master.startsWith("local") || master.startsWith("mesos")
                    || SparkClientUtilities.isYarnClientMode(master, deployMode)
                    || master.startsWith("spark")) {
                String mem = conf.get("spark.driver.memory");
                if (mem != null) {
                    argv.add("-Xms" + mem);
                    argv.add("-Xmx" + mem);
                }

                String cp = conf.get("spark.driver.extraClassPath");
                if (cp != null) {
                    argv.add("-classpath");
                    argv.add(cp);
                }

                String libPath = conf.get("spark.driver.extraLibPath");
                if (libPath != null) {
                    argv.add("-Djava.library.path=" + libPath);
                }

                String extra = conf.get(DRIVER_OPTS_KEY);
                if (extra != null) {
                    for (String opt : extra.split("[ ]")) {
                        if (!opt.trim().isEmpty()) {
                            argv.add(opt.trim());
                        }
                    }
                }
            }

            argv.add("org.apache.spark.deploy.SparkSubmit");
        }

        if (SparkClientUtilities.isYarnClusterMode(master, deployMode)) {
            String executorCores = conf.get("spark.executor.cores");
            if (executorCores != null) {
                argv.add("--executor-cores");
                argv.add(executorCores);
            }

            String executorMemory = conf.get("spark.executor.memory");
            if (executorMemory != null) {
                argv.add("--executor-memory");
                argv.add(executorMemory);
            }

            String numOfExecutors = conf.get("spark.executor.instances");
            if (numOfExecutors != null) {
                argv.add("--num-executors");
                argv.add(numOfExecutors);
            }
        }
        // The options --principal/--keytab do not work with --proxy-user in spark-submit.sh
        // (see HIVE-15485, SPARK-5493, SPARK-19143), so Hive can only support doAs or
        // delegation token renewal, but not both. Since doAs is the more common case, if both
        // are needed we favor doAs: when doAs is enabled we use the kinit command;
        // otherwise we pass the principal/keytab to Spark to support token renewal for
        // long-running applications.
        if ("kerberos".equals(hiveConf.get(HADOOP_SECURITY_AUTHENTICATION))) {
            String principal = SecurityUtil
                    .getServerPrincipal(hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL), "0.0.0.0");
            String keyTabFile = hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB);
            if (StringUtils.isNotBlank(principal) && StringUtils.isNotBlank(keyTabFile)) {
                if (hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS)) {
                    List<String> kinitArgv = Lists.newLinkedList();
                    kinitArgv.add("kinit");
                    kinitArgv.add(principal);
                    kinitArgv.add("-k");
                    kinitArgv.add("-t");
                    kinitArgv.add(keyTabFile + ";");
                    kinitArgv.addAll(argv);
                    argv = kinitArgv;
                } else {
                    // if doAs is not enabled, we pass the principal/keytab to spark-submit in order to
                    // support the possible delegation token renewal in Spark
                    argv.add("--principal");
                    argv.add(principal);
                    argv.add("--keytab");
                    argv.add(keyTabFile);
                }
            }
        }
        if (hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS)) {
            try {
                String currentUser = Utils.getUGI().getShortUserName();
                // do not do impersonation in CLI mode
                if (!currentUser.equals(System.getProperty("user.name"))) {
                    LOG.info("Attempting impersonation of " + currentUser);
                    argv.add("--proxy-user");
                    argv.add(currentUser);
                }
            } catch (Exception e) {
                String msg = "Cannot obtain username: " + e;
                throw new IllegalStateException(msg, e);
            }
        }

        argv.add("--properties-file");
        argv.add(properties.getAbsolutePath());
        argv.add("--class");
        argv.add(RemoteDriver.class.getName());

        String jar = "spark-internal";
        if (SparkContext.jarOfClass(this.getClass()).isDefined()) {
            jar = SparkContext.jarOfClass(this.getClass()).get();
        }
        argv.add(jar);

        argv.add("--remote-host");
        argv.add(serverAddress);
        argv.add("--remote-port");
        argv.add(serverPort);

        //hive.spark.* keys are passed down to the RemoteDriver via --conf,
        //as --properties-file contains the spark.* keys that are meant for SparkConf object.
        for (String hiveSparkConfKey : RpcConfiguration.HIVE_SPARK_RSC_CONFIGS) {
            String value = RpcConfiguration.getValue(hiveConf, hiveSparkConfKey);
            argv.add("--conf");
            argv.add(String.format("%s=%s", hiveSparkConfKey, value));
        }

        String cmd = Joiner.on(" ").join(argv);
        LOG.info("Running client driver with argv: {}", cmd);
        ProcessBuilder pb = new ProcessBuilder("sh", "-c", cmd);

        // Prevent hive configurations from being visible in Spark.
        pb.environment().remove("HIVE_HOME");
        pb.environment().remove("HIVE_CONF_DIR");
        // Add credential provider password to the child process's environment
        // In case of Spark the credential provider location is provided in the jobConf when the job is submitted
        String password = getSparkJobCredentialProviderPassword();
        if (password != null) {
            pb.environment().put(Constants.HADOOP_CREDENTIAL_PASSWORD_ENVVAR, password);
        }
        if (isTesting != null) {
            pb.environment().put("SPARK_TESTING", isTesting);
        }

        final Process child = pb.start();
        String threadName = Thread.currentThread().getName();
        final List<String> childErrorLog = Collections.synchronizedList(new ArrayList<String>());
        redirect("RemoteDriver-stdout-redir-" + threadName, new Redirector(child.getInputStream()));
        redirect("RemoteDriver-stderr-redir-" + threadName,
                new Redirector(child.getErrorStream(), childErrorLog));

        runnable = new Runnable() {
            @Override
            public void run() {
                try {
                    int exitCode = child.waitFor();
                    if (exitCode != 0) {
                        StringBuilder errStr = new StringBuilder();
                        synchronized (childErrorLog) {
                            Iterator<String> iter = childErrorLog.iterator();
                            while (iter.hasNext()) {
                                errStr.append(iter.next());
                                errStr.append('\n');
                            }
                        }

                        LOG.warn("Child process exited with code {}", exitCode);
                        rpcServer.cancelClient(clientId,
                                "Child process (spark-submit) exited before connecting back with error log "
                                        + errStr.toString());
                    }
                } catch (InterruptedException ie) {
                    LOG.warn(
                            "Thread waiting on the child process (spark-submit) is interrupted, killing the child process.");
                    rpcServer.cancelClient(clientId,
                            "Thread waiting on the child process (spark-submit) is interrupted");
                    Thread.interrupted();
                    child.destroy();
                } catch (Exception e) {
                    String errMsg = "Exception while waiting for child process (spark-submit)";
                    LOG.warn(errMsg, e);
                    rpcServer.cancelClient(clientId, errMsg);
                }
            }
        };
    }

    Thread thread = new Thread(runnable);
    thread.setDaemon(true);
    thread.setName("Driver");
    thread.start();
    return thread;
}

From source file: org.codehaus.mojo.jspc.CompilationMojoSupport.java

/**
 * Figure out where the tools.jar file lives.
 */
private URL findToolsJar() throws MojoExecutionException {
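    // Note: new File(File.pathSeparator) is an odd base directory; the call appears to rely on
    // FileUtils.resolveFile ignoring the base when the second argument (java.home) is absolute.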
    final File javaHome = FileUtils.resolveFile(new File(File.pathSeparator), System.getProperty("java.home"));

    final List<File> toolsPaths = new ArrayList<File>();

    File file = null;
    if (SystemUtils.IS_OS_MAC_OSX) {
        file = FileUtils.resolveFile(javaHome, "../Classes/classes.jar");
        toolsPaths.add(file);
    }
    if (file == null || !file.exists()) {
        file = FileUtils.resolveFile(javaHome, "../lib/tools.jar");
        toolsPaths.add(file);
    }

    if (!file.exists()) {
        throw new MojoExecutionException(
                "Could not find tools.jar at " + toolsPaths + " under java.home: " + javaHome);
    }
    getLog().debug("Using tools.jar: " + file);

    final URI fileUri = file.toURI();
    try {
        return fileUri.toURL();
    } catch (MalformedURLException e) {
        throw new MojoExecutionException("Could not generate URL from URI: " + fileUri, e);
    }
}

From source file: gov.nih.nci.sdk.example.generator.WebServiceGenerator.java

private void compileWebServiceInterface() {
    java.util.Set<String> processedFocusDomainSet = (java.util.Set<String>) getScriptContext().getMemory()
            .get("processedFocusDomainSet");

    if (processedFocusDomainSet == null) {
        processedFocusDomainSet = new java.util.HashSet<String>();
        getScriptContext().getMemory().put("processedFocusDomainSet", processedFocusDomainSet);
    }

    processedFocusDomainSet.add(getScriptContext().getFocusDomain());

    if (processedFocusDomainSet.containsAll(getScriptContext().retrieveDomainSet())) { // All domains have been processed, so we can now compile and generate the WSDL

        StandardJavaFileManager fileManager = null;

        try {
            String jaxbPojoPath = GeneratorUtil.getJaxbPojoPath(getScriptContext());
            String servicePath = GeneratorUtil.getServicePath(getScriptContext());
            String serviceImplPath = GeneratorUtil.getServiceImplPath(getScriptContext());
            String projectRoot = getScriptContext().getProperties().getProperty("PROJECT_ROOT");

            List<String> compilerFiles = GeneratorUtil.getFiles(jaxbPojoPath, new String[] { "java" });
            compilerFiles.addAll(GeneratorUtil.getFiles(servicePath, new String[] { "java" }));
            compilerFiles.addAll(GeneratorUtil.getFiles(serviceImplPath, new String[] { "java" }));

            getScriptContext().logInfo("Compiling files: " + compilerFiles);
            // Check if output directory exist, create it
            GeneratorUtil.createOutputDir(projectRoot + File.separator + "classes");

            List<String> options = new ArrayList<String>();
            options.add("-classpath");
            String classPathStr = GeneratorUtil
                    .getFiles(new java.io.File(getScriptContext().getGeneratorBase()).getAbsolutePath()
                            + File.separator + "lib", new String[] { "jar" }, File.pathSeparator)
                    + File.pathSeparator
                    + new java.io.File(projectRoot + File.separatorChar + "classes").getAbsolutePath();

            getScriptContext().logInfo("compiler classpath is: " + classPathStr);

            options.add(classPathStr);

            options.add("-d");
            options.add(projectRoot + File.separator + "classes");

            options.add("-s");
            options.add(projectRoot + File.separator + "src/generated");

            JavaCompiler compiler = ToolProvider.getSystemJavaCompiler();
            DiagnosticCollector<JavaFileObject> diagnostics = new DiagnosticCollector<JavaFileObject>();
            fileManager = compiler.getStandardFileManager(diagnostics, null, null);
            Iterable<? extends JavaFileObject> compilationUnits = fileManager
                    .getJavaFileObjectsFromStrings(compilerFiles);
            JavaCompiler.CompilationTask task = compiler.getTask(null, fileManager, diagnostics, options, null,
                    compilationUnits);
            boolean success = task.call();

            for (Diagnostic diagnostic : diagnostics.getDiagnostics()) {
                getScriptContext().logInfo(diagnostic.getCode());
                getScriptContext().logInfo(diagnostic.getKind().toString());
                getScriptContext().logInfo(diagnostic.getPosition() + "");
                getScriptContext().logInfo(diagnostic.getStartPosition() + "");
                getScriptContext().logInfo(diagnostic.getEndPosition() + "");
                getScriptContext().logInfo(diagnostic.getSource().toString());
                getScriptContext().logInfo(diagnostic.getMessage(null));
            }
        } catch (Throwable t) {
            getScriptContext().logError(t);
        } finally {
            try {
                fileManager.close();
            } catch (Throwable t) {
            }
        }

        for (String focusDomain : getScriptContext().retrieveDomainSet()) {
            generateWebServiceArtifacts(focusDomain);
        }
    }
}