Example usage for java.io File pathSeparator

Introduction

On this page you can find usage examples for java.io File.pathSeparator.

Prototype

public static final String pathSeparator


Document

The system-dependent path-separator character, represented as a string for convenience.
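
Before the project examples below, here is a minimal, self-contained sketch (not taken from any of the quoted projects) showing the typical use of File.pathSeparator: splitting a path list such as java.class.path into entries, and building one. It also contrasts it with File.separator, which separates the components within a single path.

import java.io.File;

public class PathSeparatorDemo {
    public static void main(String[] args) {
        // File.pathSeparator separates entries in a path list: ":" on Unix, ";" on Windows.
        // File.separator separates components within one path: "/" on Unix, "\" on Windows.
        for (String entry : System.getProperty("java.class.path").split(File.pathSeparator)) {
            System.out.println(entry);
        }

        // Building a path list, e.g. for a -classpath argument or an environment variable:
        String classpath = "lib" + File.separator + "a.jar"
                + File.pathSeparator + "lib" + File.separator + "b.jar";
        System.out.println(classpath); // prints lib/a.jar:lib/b.jar on Unix
    }
}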

Usage

From source file:jeplus.RadianceWinTools.java

/**
 * Call a sequence of DaySim programs to run the simulation.
 * @param config Radiance configuration
 * @param WorkDir the working directory where the input files are stored and the output files are to be generated
 * @param model the header (model) file name
 * @param in the input (sensor) file name, or null if no input redirection is needed
 * @param out the file receiving standard output
 * @param err the file receiving standard error
 * @param process an optional wrapper that receives a handle to each spawned process
 * @return the result code representing the state of the execution steps; >= 0 means successful
 */
public static int runDaySim(RadianceConfig config, String WorkDir, String model, String in, String out,
        String err, ProcessWrapper process) {

    int ExitValue = -99;

    // Manipulate header file
    HashMap<String, String> props = new HashMap<>();
    // props.put("project_name", "");
    props.put("project_directory", "./");
    props.put("bin_directory", config.getResolvedDaySimBinDir());
    props.put("tmp_directory", "./");
    props.put("Template_File", config.getResolvedDaySimBinDir() + "../template/DefaultTemplate.htm");
    props.put("sensor_file", in);
    try {
        FileUtils.moveFile(new File(WorkDir + File.separator + model),
                new File(WorkDir + File.separator + model + ".ori"));
    } catch (IOException ex) {
        logger.error("Error renaming header file to " + WorkDir + File.separator + model + ".ori", ex);
    }
    DaySimModel.updateHeaderFile(WorkDir + File.separator + model + ".ori", WorkDir + File.separator + model,
            props);

    // Run gen_dc command
    try {
        StringBuilder buf = new StringBuilder(config.getResolvedDaySimBinDir());
        buf.append(File.separator).append("gen_dc");

        List<String> command = new ArrayList<>();
        command.add(buf.toString());
        command.add(model);
        ProcessBuilder builder = new ProcessBuilder(command);
        builder.directory(new File(WorkDir));
        builder.environment().put("RAYPATH", "." + File.pathSeparator + config.getResolvedDaySimLibDir());
        builder.redirectError(new File(WorkDir + File.separator + err));
        builder.redirectOutput(new File(WorkDir + File.separator + out));
        if (in != null) {
            builder.redirectInput(new File(WorkDir + File.separator + in));
        }
        Process proc = builder.start();
        if (process != null) {
            process.setWrappedProc(proc);
        }
        ExitValue = proc.waitFor();
    } catch (IOException | InterruptedException ex) {
        logger.error("Error occoured when executing gen_dc", ex);
    }

    // Run ds_illum command
    try {
        StringBuilder buf = new StringBuilder(config.getResolvedDaySimBinDir());
        buf.append(File.separator).append("ds_illum");

        List<String> command = new ArrayList<>();
        command.add(buf.toString());
        command.add(model);
        ProcessBuilder builder = new ProcessBuilder(command);
        builder.directory(new File(WorkDir));
        builder.environment().put("RAYPATH", "." + File.pathSeparator + config.getResolvedDaySimLibDir());
        builder.redirectError(ProcessBuilder.Redirect.appendTo(new File(WorkDir + File.separator + err)));
        builder.redirectOutput(ProcessBuilder.Redirect.appendTo(new File(WorkDir + File.separator + out)));
        if (in != null) {
            builder.redirectInput(new File(WorkDir + File.separator + in));
        }
        Process proc = builder.start();
        if (process != null) {
            process.setWrappedProc(proc);
        }
        ExitValue = proc.waitFor();
    } catch (IOException | InterruptedException ex) {
        logger.error("Error occoured when executing ds_illum", ex);
    }

    // Run ds_el_lighting command
    try {
        StringBuilder buf = new StringBuilder(config.getResolvedDaySimBinDir());
        buf.append(File.separator).append("ds_el_lighting");

        List<String> command = new ArrayList<>();
        command.add(buf.toString());
        command.add(model);
        ProcessBuilder builder = new ProcessBuilder(command);
        builder.directory(new File(WorkDir));
        builder.environment().put("RAYPATH", "." + File.pathSeparator + config.getResolvedDaySimLibDir());
        builder.redirectError(ProcessBuilder.Redirect.appendTo(new File(WorkDir + File.separator + err)));
        builder.redirectOutput(ProcessBuilder.Redirect.appendTo(new File(WorkDir + File.separator + out)));
        if (in != null) {
            builder.redirectInput(new File(WorkDir + File.separator + in));
        }
        Process proc = builder.start();
        ExitValue = proc.waitFor();
    } catch (IOException | InterruptedException ex) {
        logger.error("Error occoured when executing ds_el_lighting", ex);
    }

    // Return Radiance exit value
    return ExitValue;
}

From source file:org.apache.maven.plugin.javadoc.JavadocUtilTest.java

/**
 * Method to test unifyPathSeparator()
 *
 * @throws Exception if any
 */
public void testUnifyPathSeparator() throws Exception {
    assertEquals(null, JavadocUtil.unifyPathSeparator(null));

    final String ps = File.pathSeparator;

    // Windows
    String path1 = "C:\\maven-javadoc-plugin\\src\\main\\java";
    String path2 = "C:\\maven-javadoc-plugin\\src\\main\\javadoc";
    assertEquals(path1 + ps + path2, JavadocUtil.unifyPathSeparator(path1 + ";" + path2));
    assertEquals(path1 + ps + path2, JavadocUtil.unifyPathSeparator(path1 + ":" + path2));

    path1 = "C:/maven-javadoc-plugin/src/main/java";
    path2 = "C:/maven-javadoc-plugin/src/main/javadoc";
    assertEquals(path1 + ps + path2, JavadocUtil.unifyPathSeparator(path1 + ";" + path2));
    assertEquals(path1 + ps + path2, JavadocUtil.unifyPathSeparator(path1 + ":" + path2));
    assertEquals(path1 + ps + path2 + ps + path1 + ps + path2,
            JavadocUtil.unifyPathSeparator(path1 + ";" + path2 + ";" + path1 + ":" + path2));

    // Unix
    path1 = "/tmp/maven-javadoc-plugin/src/main/java";
    path2 = "/tmp/maven-javadoc-plugin/src/main/javadoc";
    assertEquals(path1 + ps + path2, JavadocUtil.unifyPathSeparator(path1 + ";" + path2));
    assertEquals(path1 + ps + path2, JavadocUtil.unifyPathSeparator(path1 + ":" + path2));
    assertEquals(path1 + ps + path2 + ps + path1 + ps + path2,
            JavadocUtil.unifyPathSeparator(path1 + ";" + path2 + ":" + path1 + ":" + path2));
}

From source file:org.codehaus.mojo.cassandra.AbstractCassandraMojo.java

/**
 * Create a {@link CommandLine} to launch Java.
 *
 * @return a {@link CommandLine} to launch Java.
 */
protected CommandLine newJavaCommandLine() {
    String exec = null;
    Toolchain tc = getToolchain();

    // if the file doesn't exist & toolchain is null, java is probably in the PATH...
    // we should probably also test for isFile and canExecute, but the second one is only
    // available in SDK 6.
    if (tc != null) {
        getLog().info("Toolchain in cassandra-maven-plugin: " + tc);
        exec = tc.findTool("java");
    } else {
        if (OS.isFamilyWindows()) {
            String ex = "java.exe";
            // now try to figure the path from PATH, PATHEXT env vars
            // if bat file, wrap in cmd /c
            String path = System.getenv("PATH");
            if (path != null) {
                for (String elem : StringUtils.split(path, File.pathSeparator)) {
                    File f = new File(new File(elem), ex);
                    if (f.exists()) {
                        exec = ex;
                        break;
                    }
                }
            }
        }
    }

    if (exec == null) {
        exec = "java";
    }

    return new CommandLine(exec);
}

From source file:org.apache.qpid.test.utils.QpidBrokerTestCase.java

public void startBroker(int port, TestBrokerConfiguration testConfiguration, XMLConfiguration virtualHosts, boolean managementMode) throws Exception
{
    port = getPort(port);
    String testConfig = saveTestConfiguration(port, testConfiguration);
    String virtualHostsConfig = saveTestVirtualhosts(port, virtualHosts);

    if(_brokers.get(port) != null)
    {
        throw new IllegalStateException("There is already an existing broker running on port " + port);
    }

    Set<Integer> portsUsedByBroker = guessAllPortsUsedByBroker(port);

    if (_brokerType.equals(BrokerType.INTERNAL) && !existingInternalBroker())
    {
        _logger.info("Set test.virtualhosts property to: " + virtualHostsConfig);
        setSystemProperty(TEST_VIRTUALHOSTS, virtualHostsConfig);
        setSystemProperty(BrokerProperties.PROPERTY_USE_CUSTOM_RMI_SOCKET_FACTORY, "false");
        BrokerOptions options = new BrokerOptions();

        options.setConfigurationStoreType(_brokerStoreType);
        options.setConfigurationStoreLocation(testConfig);
        options.setManagementMode(managementMode);

        //Set the log config file, relying on the log4j.configuration system property
        //set on the JVM by the JUnit runner task in module.xml.
        options.setLogConfigFile(_logConfigFile.getAbsolutePath());

        Broker broker = new Broker();
        _logger.info("Starting internal broker (same JVM)");
        broker.startup(options);

        _brokers.put(port, new InternalBrokerHolder(broker, System.getProperty("QPID_WORK"), portsUsedByBroker));
    }
    else if (!_brokerType.equals(BrokerType.EXTERNAL))
    {
        // Add the port to QPID_WORK to ensure unique working dirs for multi broker tests
        final String qpidWork = getQpidWork(_brokerType, port);

        String[] cmd = _brokerCommandHelper.getBrokerCommand(port, testConfig, _brokerStoreType, _logConfigFile);
        if (managementMode)
        {
            String[] newCmd = new String[cmd.length + 1];
            System.arraycopy(cmd, 0, newCmd, 0, cmd.length);
            newCmd[cmd.length] = "-mm";
            cmd = newCmd;
        }
        _logger.info("Starting spawn broker using command: " + StringUtils.join(cmd, ' '));
        ProcessBuilder pb = new ProcessBuilder(cmd);
        pb.redirectErrorStream(true);
        Map<String, String> processEnv = pb.environment();
        String qpidHome = System.getProperty(QPID_HOME);
        processEnv.put(QPID_HOME, qpidHome);
        //Augment Path with bin directory in QPID_HOME.
        processEnv.put("PATH", processEnv.get("PATH").concat(File.pathSeparator + qpidHome + "/bin"));

        //Add the test name to the broker run.
        // DON'T change PNAME, qpid.stop needs this value.
        processEnv.put("QPID_PNAME", "-DPNAME=QPBRKR -DTNAME=\"" + getTestName() + "\"");
        processEnv.put("QPID_WORK", qpidWork);

        // Use the environment variable to set amqj.logging.level for the broker
        // The value used is a 'server' value in the test configuration to
        // allow a differentiation between the client and broker logging levels.
        if (System.getProperty("amqj.server.logging.level") != null)
        {
            setBrokerEnvironment("AMQJ_LOGGING_LEVEL", System.getProperty("amqj.server.logging.level"));
        }

        // Add all the environment settings the test requested
        if (!_env.isEmpty())
        {
            for (Map.Entry<String, String> entry : _env.entrySet())
            {
                processEnv.put(entry.getKey(), entry.getValue());
            }
        }

        String qpidOpts = "";

        // a synchronized hack to avoid adding to QPID_OPTS the values
        // of JVM properties "test.virtualhosts" and "test.config" set by a concurrent startup process
        synchronized (_propertiesSetForBroker)
        {
            // Add default test logging levels that are used by the log4j-test
            // Use the convenience methods to push the current logging setting
            // in to the external broker's QPID_OPTS string.
            setSystemProperty("amqj.protocol.logging.level");
            setSystemProperty("root.logging.level");
            setSystemProperty(BrokerProperties.PROPERTY_BROKER_DEFAULT_AMQP_PROTOCOL_EXCLUDES);
            setSystemProperty(BrokerProperties.PROPERTY_BROKER_DEFAULT_AMQP_PROTOCOL_INCLUDES);
            setSystemProperty(TEST_VIRTUALHOSTS, virtualHostsConfig);

            // Add all the specified system properties to QPID_OPTS
            if (!_propertiesSetForBroker.isEmpty())
            {
                for (String key : _propertiesSetForBroker.keySet())
                {
                    qpidOpts += " -D" + key + "=" + _propertiesSetForBroker.get(key);
                }
            }
        }
        if (processEnv.containsKey("QPID_OPTS"))
        {
            qpidOpts = processEnv.get("QPID_OPTS") + qpidOpts;
        }
        processEnv.put("QPID_OPTS", qpidOpts);

        // cpp broker requires that the work directory is created
        createBrokerWork(qpidWork);

        Process process = pb.start();

        Piper p = new Piper(process.getInputStream(),
                            _testcaseOutputStream,
                            System.getProperty(BROKER_READY),
                            System.getProperty(BROKER_STOPPED),
                            _interleaveBrokerLog ? _brokerLogPrefix : null);

        p.start();

        SpawnedBrokerHolder holder = new SpawnedBrokerHolder(process, qpidWork, portsUsedByBroker);
        if (!p.await(30, TimeUnit.SECONDS))
        {
            _logger.info("broker failed to become ready (" + p.getReady() + "):" + p.getStopLine());
            String threadDump = holder.dumpThreads();
            if (!threadDump.isEmpty())
            {
                _logger.info("the result of a try to capture thread dump:" + threadDump);
            }
            //Ensure broker has stopped
            process.destroy();
            cleanBrokerWork(qpidWork);
            throw new RuntimeException("broker failed to become ready:"
                                       + p.getStopLine());
        }

        try
        {
            //test that the broker is still running and hasn't exited unexpectedly
            int exit = process.exitValue();
            _logger.info("broker aborted: " + exit);
            cleanBrokerWork(qpidWork);
            throw new RuntimeException("broker aborted: " + exit);
        }
        catch (IllegalThreadStateException e)
        {
            // this is expected if the broker started successfully
        }

        _brokers.put(port, holder);
    }
}

From source file:org.apache.hadoop.tools.HadoopArchiveLogs.java

@VisibleForTesting
void generateScript(File localScript, Path workingDir, Path remoteRootLogDir, String suffix)
        throws IOException {
    if (verbose) {
        LOG.info("Generating script at: " + localScript.getAbsolutePath());
    }
    String halrJarPath = HadoopArchiveLogsRunner.class.getProtectionDomain().getCodeSource().getLocation()
            .getPath();
    String harJarPath = HadoopArchives.class.getProtectionDomain().getCodeSource().getLocation().getPath();
    String classpath = halrJarPath + File.pathSeparator + harJarPath;
    FileWriterWithEncoding fw = null;
    try {
        fw = new FileWriterWithEncoding(localScript, "UTF-8");
        fw.write("#!/bin/bash\nset -e\nset -x\n");
        int containerCount = 1;
        for (AppInfo app : eligibleApplications) {
            fw.write("if [ \"$YARN_SHELL_ID\" == \"");
            fw.write(Integer.toString(containerCount));
            fw.write("\" ]; then\n\tappId=\"");
            fw.write(app.getAppId());
            fw.write("\"\n\tuser=\"");
            fw.write(app.getUser());
            fw.write("\"\nel");
            containerCount++;
        }
        fw.write("se\n\techo \"Unknown Mapping!\"\n\texit 1\nfi\n");
        fw.write("export HADOOP_CLIENT_OPTS=\"-Xmx");
        fw.write(Long.toString(memory));
        fw.write("m\"\n");
        fw.write("export HADOOP_CLASSPATH=");
        fw.write(classpath);
        fw.write("\n\"$HADOOP_PREFIX\"/bin/hadoop ");
        fw.write(HadoopArchiveLogsRunner.class.getName());
        fw.write(" -appId \"$appId\" -user \"$user\" -workingDir ");
        fw.write(workingDir.toString());
        fw.write(" -remoteRootLogDir ");
        fw.write(remoteRootLogDir.toString());
        fw.write(" -suffix ");
        fw.write(suffix);
        if (!proxy) {
            fw.write(" -noProxy\n");
        }
        fw.write("\n");
    } finally {
        if (fw != null) {
            fw.close();
        }
    }
}

From source file:org.apache.zeppelin.spark.OldSparkInterpreter.java

@Override
public void open() throws InterpreterException {
    this.enableSupportedVersionCheck = java.lang.Boolean
            .parseBoolean(getProperty("zeppelin.spark.enableSupportedVersionCheck", "true"));

    // set properties and do login before creating any spark stuff for secured cluster
    if (isYarnMode()) {
        System.setProperty("SPARK_YARN_MODE", "true");
    }
    if (getProperties().containsKey("spark.yarn.keytab")
            && getProperties().containsKey("spark.yarn.principal")) {
        try {
            String keytab = getProperties().getProperty("spark.yarn.keytab");
            String principal = getProperties().getProperty("spark.yarn.principal");
            UserGroupInformation.loginUserFromKeytab(principal, keytab);
        } catch (IOException e) {
            throw new RuntimeException("Can not pass kerberos authentication", e);
        }
    }

    conf = new SparkConf();
    URL[] urls = getClassloaderUrls();

    // A very nice discussion about how the scala compiler handles the classpath:
    // https://groups.google.com/forum/#!topic/scala-user/MlVwo2xCCI0

    /*
     * > val env = new nsc.Settings(errLogger) > env.usejavacp.value = true > val p = new
     * Interpreter(env) > p.setContextClassLoader > Alternatively you can set the class path through
     * nsc.Settings.classpath.
     *
     * >> val settings = new Settings() >> settings.usejavacp.value = true >>
     * settings.classpath.value += File.pathSeparator + >> System.getProperty("java.class.path") >>
     * val in = new Interpreter(settings) { >> override protected def parentClassLoader =
     * getClass.getClassLoader >> } >> in.setContextClassLoader()
     */
    Settings settings = new Settings();

    // process args
    String args = getProperty("args");
    if (args == null) {
        args = "";
    }

    String[] argsArray = args.split(" ");
    LinkedList<String> argList = new LinkedList<>();
    for (String arg : argsArray) {
        argList.add(arg);
    }

    DepInterpreter depInterpreter = getParentSparkInterpreter()
            .getInterpreterInTheSameSessionByClassName(DepInterpreter.class, false);
    String depInterpreterClasspath = "";
    if (depInterpreter != null) {
        SparkDependencyContext depc = depInterpreter.getDependencyContext();
        if (depc != null) {
            List<File> files = depc.getFiles();
            if (files != null) {
                for (File f : files) {
                    if (depInterpreterClasspath.length() > 0) {
                        depInterpreterClasspath += File.pathSeparator;
                    }
                    depInterpreterClasspath += f.getAbsolutePath();
                }
            }
        }
    }

    if (Utils.isScala2_10()) {
        scala.collection.immutable.List<String> list = JavaConversions.asScalaBuffer(argList).toList();

        Object sparkCommandLine = Utils.instantiateClass("org.apache.spark.repl.SparkCommandLine",
                new Class[] { scala.collection.immutable.List.class }, new Object[] { list });

        settings = (Settings) Utils.invokeMethod(sparkCommandLine, "settings");
    } else {
        String sparkReplClassDir = getProperty("spark.repl.classdir");
        if (sparkReplClassDir == null) {
            sparkReplClassDir = System.getProperty("spark.repl.classdir");
        }
        if (sparkReplClassDir == null) {
            sparkReplClassDir = System.getProperty("java.io.tmpdir");
        }

        synchronized (sharedInterpreterLock) {
            if (outputDir == null) {
                outputDir = createTempDir(sparkReplClassDir);
            }
        }
        argList.add("-Yrepl-class-based");
        argList.add("-Yrepl-outdir");
        argList.add(outputDir.getAbsolutePath());

        String classpath = "";
        if (conf.contains("spark.jars")) {
            classpath = StringUtils.join(conf.get("spark.jars").split(","), File.separator);
        }

        if (!depInterpreterClasspath.isEmpty()) {
            if (!classpath.isEmpty()) {
                classpath += File.separator;
            }
            classpath += depInterpreterClasspath;
        }

        if (!classpath.isEmpty()) {
            argList.add("-classpath");
            argList.add(classpath);
        }

        scala.collection.immutable.List<String> list = JavaConversions.asScalaBuffer(argList).toList();

        settings.processArguments(list, true);
    }

    // set classpath for scala compiler
    PathSetting pathSettings = settings.classpath();
    String classpath = "";

    List<File> paths = currentClassPath();
    for (File f : paths) {
        if (classpath.length() > 0) {
            classpath += File.pathSeparator;
        }
        classpath += f.getAbsolutePath();
    }

    if (urls != null) {
        for (URL u : urls) {
            if (classpath.length() > 0) {
                classpath += File.pathSeparator;
            }
            classpath += u.getFile();
        }
    }

    // add dependency from DepInterpreter
    if (classpath.length() > 0) {
        classpath += File.pathSeparator;
    }
    classpath += depInterpreterClasspath;

    // add dependency from local repo
    String localRepo = getProperty("zeppelin.interpreter.localRepo");
    if (localRepo != null) {
        File localRepoDir = new File(localRepo);
        if (localRepoDir.exists()) {
            File[] files = localRepoDir.listFiles();
            if (files != null) {
                for (File f : files) {
                    if (classpath.length() > 0) {
                        classpath += File.pathSeparator;
                    }
                    classpath += f.getAbsolutePath();
                }
            }
        }
    }

    pathSettings.v_$eq(classpath);
    settings.scala$tools$nsc$settings$ScalaSettings$_setter_$classpath_$eq(pathSettings);

    // set classloader for scala compiler
    settings.explicitParentLoader_$eq(new Some<>(Thread.currentThread().getContextClassLoader()));
    BooleanSetting b = (BooleanSetting) settings.usejavacp();
    b.v_$eq(true);
    settings.scala$tools$nsc$settings$StandardScalaSettings$_setter_$usejavacp_$eq(b);

    /* Required for scoped mode.
     * In scoped mode, multiple scala compilers (repls) generate classes in the same directory.
     * Class names are not randomly generated and look like '$line12.$read$$iw$$iw'.
     * Therefore a generated class can conflict with (overwrite) a class generated by another
     * repl instance.
     *
     * To prevent generated class name conflicts, change the prefix of the generated class
     * names for each scala compiler (repl) instance.
     *
     * In Spark 2.x, the REPL-generated wrapper class name should be compatible with the pattern
     * ^(\$line(?:\d+)\.\$read)(?:\$\$iw)+$
     *
     * As hashCode() can return a negative integer value and the minus character '-' is invalid
     * in a package name, we change it to the numeric character '0', which still conforms to
     * the regexp.
     */
    System.setProperty("scala.repl.name.line", ("$line" + this.hashCode()).replace('-', '0'));

    // To prevent the 'File name too long' error on some file systems.
    MutableSettings.IntSetting numClassFileSetting = settings.maxClassfileName();
    numClassFileSetting.v_$eq(128);
    settings.scala$tools$nsc$settings$ScalaSettings$_setter_$maxClassfileName_$eq(numClassFileSetting);

    synchronized (sharedInterpreterLock) {
        /* create scala repl */
        if (printREPLOutput()) {
            this.interpreter = new SparkILoop((java.io.BufferedReader) null, new PrintWriter(out));
        } else {
            this.interpreter = new SparkILoop((java.io.BufferedReader) null,
                    new PrintWriter(Console.out(), false));
        }

        interpreter.settings_$eq(settings);

        interpreter.createInterpreter();

        intp = Utils.invokeMethod(interpreter, "intp");
        Utils.invokeMethod(intp, "setContextClassLoader");
        Utils.invokeMethod(intp, "initializeSynchronous");

        if (Utils.isScala2_10()) {
            if (classOutputDir == null) {
                classOutputDir = settings.outputDirs().getSingleOutput().get();
            } else {
                // change SparkIMain class output dir
                settings.outputDirs().setSingleOutput(classOutputDir);
                ClassLoader cl = (ClassLoader) Utils.invokeMethod(intp, "classLoader");
                try {
                    Field rootField = cl.getClass().getSuperclass().getDeclaredField("root");
                    rootField.setAccessible(true);
                    rootField.set(cl, classOutputDir);
                } catch (NoSuchFieldException | IllegalAccessException e) {
                    logger.error(e.getMessage(), e);
                }
            }
        }

        if (Utils.findClass("org.apache.spark.repl.SparkJLineCompletion", true) != null) {
            completer = Utils.instantiateClass("org.apache.spark.repl.SparkJLineCompletion",
                    new Class[] { Utils.findClass("org.apache.spark.repl.SparkIMain") }, new Object[] { intp });
        } else if (Utils.findClass("scala.tools.nsc.interpreter.PresentationCompilerCompleter", true) != null) {
            completer = Utils.instantiateClass("scala.tools.nsc.interpreter.PresentationCompilerCompleter",
                    new Class[] { IMain.class }, new Object[] { intp });
        } else if (Utils.findClass("scala.tools.nsc.interpreter.JLineCompletion", true) != null) {
            completer = Utils.instantiateClass("scala.tools.nsc.interpreter.JLineCompletion",
                    new Class[] { IMain.class }, new Object[] { intp });
        }

        if (Utils.isSpark2()) {
            sparkSession = getSparkSession();
        }
        sc = getSparkContext();
        if (sc.getPoolForName("fair").isEmpty()) {
            Value schedulingMode = org.apache.spark.scheduler.SchedulingMode.FAIR();
            int minimumShare = 0;
            int weight = 1;
            Pool pool = new Pool("fair", schedulingMode, minimumShare, weight);
            sc.taskScheduler().rootPool().addSchedulable(pool);
        }

        sparkVersion = SparkVersion.fromVersionString(sc.version());
        sqlc = getSQLContext();
        dep = getDependencyResolver();
        hooks = getInterpreterGroup().getInterpreterHookRegistry();
        sparkUrl = getSparkUIUrl();
        sparkShims = SparkShims.getInstance(sc.version(), getProperties());
        sparkShims.setupSparkListener(sc.master(), sparkUrl, InterpreterContext.get());
        numReferenceOfSparkContext.incrementAndGet();

        z = new SparkZeppelinContext(sc, sparkShims, hooks,
                Integer.parseInt(getProperty("zeppelin.spark.maxResult")));

        interpret("@transient val _binder = new java.util.HashMap[String, Object]()");
        Map<String, Object> binder;
        if (Utils.isScala2_10()) {
            binder = (Map<String, Object>) getValue("_binder");
        } else {
            binder = (Map<String, Object>) getLastObject();
        }
        binder.put("sc", sc);
        binder.put("sqlc", sqlc);
        binder.put("z", z);

        if (Utils.isSpark2()) {
            binder.put("spark", sparkSession);
        }

        interpret("@transient val z = "
                + "_binder.get(\"z\").asInstanceOf[org.apache.zeppelin.spark.SparkZeppelinContext]");
        interpret("@transient val sc = " + "_binder.get(\"sc\").asInstanceOf[org.apache.spark.SparkContext]");
        interpret("@transient val sqlc = "
                + "_binder.get(\"sqlc\").asInstanceOf[org.apache.spark.sql.SQLContext]");
        interpret("@transient val sqlContext = "
                + "_binder.get(\"sqlc\").asInstanceOf[org.apache.spark.sql.SQLContext]");

        if (Utils.isSpark2()) {
            interpret("@transient val spark = "
                    + "_binder.get(\"spark\").asInstanceOf[org.apache.spark.sql.SparkSession]");
        }

        interpret("import org.apache.spark.SparkContext._");

        if (importImplicit()) {
            if (Utils.isSpark2()) {
                interpret("import spark.implicits._");
                interpret("import spark.sql");
                interpret("import org.apache.spark.sql.functions._");
            } else {
                interpret("import sqlContext.implicits._");
                interpret("import sqlContext.sql");
                interpret("import org.apache.spark.sql.functions._");
            }
        }
    }

    /* Temporarily disabling DisplayUtils. See https://issues.apache.org/jira/browse/ZEPPELIN-127
     *
    // Utility functions for display
    intp.interpret("import org.apache.zeppelin.spark.utils.DisplayUtils._");
            
    // Scala implicit value for spark.maxResult
    intp.interpret("import org.apache.zeppelin.spark.utils.SparkMaxResult");
    intp.interpret("implicit val sparkMaxResult = new SparkMaxResult(" +
    Integer.parseInt(getProperty("zeppelin.spark.maxResult")) + ")");
     */

    if (Utils.isScala2_10()) {
        try {
            Method loadFiles = this.interpreter.getClass()
                    .getMethod("org$apache$spark$repl$SparkILoop$$loadFiles", Settings.class);
            loadFiles.invoke(this.interpreter, settings);
        } catch (NoSuchMethodException | SecurityException | IllegalAccessException | IllegalArgumentException
                | InvocationTargetException e) {
            throw new InterpreterException(e);
        }
    }

    // add jar from DepInterpreter
    if (depInterpreter != null) {
        SparkDependencyContext depc = depInterpreter.getDependencyContext();
        if (depc != null) {
            List<File> files = depc.getFilesDist();
            if (files != null) {
                for (File f : files) {
                    if (f.getName().toLowerCase().endsWith(".jar")) {
                        sc.addJar(f.getAbsolutePath());
                        logger.info("sc.addJar(" + f.getAbsolutePath() + ")");
                    } else {
                        sc.addFile(f.getAbsolutePath());
                        logger.info("sc.addFile(" + f.getAbsolutePath() + ")");
                    }
                }
            }
        }
    }

    // add jar from local repo
    if (localRepo != null) {
        File localRepoDir = new File(localRepo);
        if (localRepoDir.exists()) {
            File[] files = localRepoDir.listFiles();
            if (files != null) {
                for (File f : files) {
                    if (f.getName().toLowerCase().endsWith(".jar")) {
                        sc.addJar(f.getAbsolutePath());
                        logger.info("sc.addJar(" + f.getAbsolutePath() + ")");
                    } else {
                        sc.addFile(f.getAbsolutePath());
                        logger.info("sc.addFile(" + f.getAbsolutePath() + ")");
                    }
                }
            }
        }
    }

}

From source file:net.grinder.engine.agent.AgentImplementationEx.java

/**
 * Get the classpath entries which should be placed at the head of the classpath.
 *
 * @param properties system properties
 * @param handler    language specific handler
 * @param logger     logger
 * @return foremost classpath
 */
private String getForeMostClassPath(Properties properties, AbstractLanguageHandler handler, Logger logger) {
    String systemClassPath = properties.getProperty("java.class.path");
    AbstractGrinderClassPathProcessor classPathProcessor = handler.getClassPathProcessor();
    return classPathProcessor.filterForeMostClassPath(systemClassPath, logger) + File.pathSeparator
            + classPathProcessor.filterPatchClassPath(systemClassPath, logger);
}

From source file:net.rim.ejde.internal.packaging.PackagingManager.java

private void runRapcCommand() throws CoreException {
    try {
        File workDir = _bbProject.getProject().getLocation().toFile();
        if (writeToFile) {
            File outputFile = null;
            String outputFileName = _bbProject.getProject().getName() + ".files";
            outputFile = new File(workDir, outputFileName);
            _rapcCommandsHead.add("@" + outputFileName);
            flushToFile(outputFile);
        } else {
            _rapcCommandsHead.addAll(_rapcCommands);
        }
        String command = getStringCommand(_rapcCommandsHead);
        _log.trace("Execute rapc command: " + command + "; Working Directory: " + workDir.getPath());
        ProcessBuilder rapcBuilder = new ProcessBuilder(_rapcCommandsHead);

        String javaHome = System.getenv("JAVA_HOME");
        if (javaHome != null) {
            Map<String, String> env = rapcBuilder.environment();
            String pathName = "Path";
            for (String s : env.keySet()) {
                if (s.equalsIgnoreCase("Path"))
                    pathName = s;
            }
            String path = env.get(pathName);
            path = path == null ? javaHome : (path + File.pathSeparator + javaHome);
            path = path + File.pathSeparator + javaHome + File.separator + "bin";
            env.put(pathName, path);
            _log.trace("PATH=" + path);
        }

        rapcBuilder.directory(workDir);
        rapcBuilder.redirectErrorStream(true);
        long startTime = System.currentTimeMillis();
        _consoleOutputStream.println(
                NLS.bind(Messages.PackagingManager_PACKAGING_PROJECT_MSG, _bbProject.getProject().getName()));
        _consoleOutputStream.println(command);
        Process process = rapcBuilder.start();
        InputStream inStream = process.getInputStream();
        InputStreamHandler inputHandler = new InputStreamHandler(_bbProject.getProject(), _consoleOutputStream,
                inStream);
        inputHandler.start();
        int result = process.waitFor();
        inputHandler.join();
        float spendTime = ((float) (System.currentTimeMillis() - startTime)) / 1000;
        if (result == 0) {

            _consoleOutputStream.println(NLS.bind(Messages.PackagingManager_PACKAGING_SUCCEED_MSG,
                    new String[] { _bbProject.getProject().getName(), String.valueOf(spendTime) }));
        } else {
            _consoleOutputStream.println(NLS.bind(Messages.PackagingManager_PACKAGING_FAILED_MSG,
                    new String[] { _bbProject.getProject().getName(), String.valueOf(spendTime) }));
        }
    } catch (IOException e) {
        throw new CoreException(StatusFactory.createErrorStatus(e.getMessage()));
    } catch (InterruptedException e) {
        throw new CoreException(StatusFactory.createErrorStatus(e.getMessage()));
    }
}

From source file:org.evosuite.executionmode.TestGeneration.java

private static void handleClassPath(List<String> cmdLine) {
    String classPath = ClassPathHandler.getInstance().getEvoSuiteClassPath();
    String projectCP = ClassPathHandler.getInstance().getTargetProjectClasspath();

    if (!classPath.isEmpty() && !projectCP.isEmpty()) {
        classPath += File.pathSeparator;
    }

    if (!projectCP.isEmpty()) {
        classPath += projectCP;
    }

    cmdLine.add("-cp");
    //cmdLine.add(classPath);
    String pathingJar = JarPathing.createJarPathing(classPath);
    cmdLine.add(pathingJar);

    if (projectCP.isEmpty()) {
        projectCP = classPath;
    }

    String projectCPFilePath = ClassPathHandler.writeClasspathToFile(projectCP);
    cmdLine.add("-DCP_file_path=" + projectCPFilePath);
}

From source file:com.ibm.bi.dml.yarn.DMLYarnClient.java

/**
 * Construct the environment map for the application master container.
 *
 * @param yconf the YARN configuration
 * @return the constructed environment map, including the classpath
 * @throws IOException
 */
private Map<String, String> constructEnvionmentMap(YarnConfiguration yconf) throws IOException {
    Map<String, String> eMap = new HashMap<String, String>();

    //setup default app master environment
    StringBuilder classpath = new StringBuilder();
    for (String value : yconf.getStrings(YarnConfiguration.YARN_APPLICATION_CLASSPATH,
            YarnConfiguration.DEFAULT_YARN_APPLICATION_CLASSPATH)) {
        if (classpath.length() > 0)
            classpath.append(File.pathSeparator);
        classpath.append(value.trim());
    }

    //setup mapreduce appmaster environment (for robustness if not included in default environment)
    //for example, by default HDP 2.2 did not include mapred client libraries in this configuration
    //note: we cannot use mapreduce.application.classpath because it refers to HDFS and $PWD paths that need to be set up
    Map<String, String> env = System.getenv();
    String mapred_home = null;
    //get mapred home via alternative environment variables
    if (env.containsKey(MAPRED_HOME_ENV_CONST)) {
        mapred_home = env.get(MAPRED_HOME_ENV_CONST);
    } else if (env.containsKey(HADOOP_HOME_ENV_CONST)) {
        String tmp = env.get(HADOOP_HOME_ENV_CONST);
        mapred_home = tmp + File.separator + ".." + File.separator + "hadoop-mapreduce";
    }
    //concatenate mapred home libs to classpath
    if (mapred_home != null) {
        if (classpath.length() > 0)
            classpath.append(File.pathSeparator);
        classpath.append(mapred_home + File.separator + "*");
        classpath.append(File.pathSeparator);
        classpath.append(mapred_home + File.separator + "lib" + File.separator + "*");
    }

    eMap.put(Environment.CLASSPATH.name(), classpath.toString());
    MRApps.setClasspath(eMap, yconf);

    LOG.debug("Constructed environment classpath: " + classpath.toString());

    return eMap;
}