Example usage for java.io File pathSeparator

List of usage examples for java.io File pathSeparator

Introduction

On this page you can find example usages of java.io File pathSeparator.

Prototype

public static final String pathSeparator

Document

The system-dependent path-separator character, represented as a string for convenience.
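
Before the project examples below, here is a minimal, self-contained sketch (the class name PathSeparatorDemo is made up for illustration and does not come from any of the projects on this page). It shows the two patterns that recur in the examples: splitting a classpath-style string such as java.class.path into its entries, and joining entries back into one string. It also contrasts pathSeparator with File.separator, which separates the components within a single path.

import java.io.File;
import java.util.regex.Pattern;

public class PathSeparatorDemo {

    public static void main(String[] args) {
        // ":" on Unix-like systems, ";" on Windows
        System.out.println("pathSeparator = " + File.pathSeparator);
        // "/" on Unix-like systems, "\" on Windows
        System.out.println("separator     = " + File.separator);

        // Splitting a path list: java.class.path uses the platform's path separator.
        // Pattern.quote() keeps the split safe even though ":" and ";" are not regex metacharacters.
        String classpath = System.getProperty("java.class.path");
        for (String entry : classpath.split(Pattern.quote(File.pathSeparator))) {
            System.out.println("  classpath entry: " + entry);
        }

        // Joining: build a path list (e.g. for a child process) from individual entries.
        String joined = String.join(File.pathSeparator, "lib/a.jar", "lib/b.jar", "conf");
        System.out.println("joined = " + joined);
    }
}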

Usage

From source file:cl.utfsm.cdbChecker.CDBChecker.java

/**
 * This method validates the XSD files.
 * 
 * @param xsdFilenames names with absolute path of the XSD file to validate.
 */
protected void validateSchemas(Vector<String> xsdFilenames) {

    System.out.println("*** Will verify XSD files in: " + this.XSDPath);

    // We share the resolver, to benefit more from its cache.
    CDBSchemasResolver resolver = new CDBSchemasResolver(this, schemaFolder + File.pathSeparator + XSDPath);
    ErrorHandler errorHandler = new CDBErrorHandler(this);

    for (String xsdFilename : xsdFilenames) {
        final File xsdFile = new File(xsdFilename);
        if (xsdFile.length() != 0) {
            if (verbose) {
                System.out.print("    " + xsdFilename);
            }
            try {
                validateFileEncoding(xsdFilename);
                SP.reset();
                resolver.setResolveOnlyHttp(true); // not sure why, but this is the legacy behavior
                SP.setEntityResolver(resolver);
                SP.setFeature("http://xml.org/sax/features/validation", true);
                SP.setFeature("http://apache.org/xml/features/validation/schema", true);
                SP.setFeature("http://xml.org/sax/features/namespace-prefixes", false);
                SP.setFeature("http://xml.org/sax/features/namespaces", true);
                SP.setErrorHandler(errorHandler);
                SP.setProperty("http://apache.org/xml/properties/schema/external-schemaLocation",
                        "http://www.w3.org/2001/XMLSchema http://www.w3.org/2001/XMLSchema.xsd");

                FileInputStream fis = new FileInputStream(xsdFile);
                InputSource inputSource = new InputSource(fis);
                inputSource.setSystemId("file:///" + xsdFile.getAbsolutePath());
                SP.parse(inputSource);
                fis.close();
                try {
                    // Now we know that the schema is technically valid (well, it does not seem to check xsd imports...)
                    // Still we have to check special requirements for CharacteristicComponent XSDs.
                    // This second check probably includes the first check's functionality, so that the
                    // first check could be removed once we have verified XSOM's xsd error reporting
                    // and hooked up the shared error handler in BaciSchemaChecker.
                    resolver.setResolveOnlyHttp(false); // here we want to resolve all schemas
                    BaciSchemaChecker baciChecker = new BaciSchemaChecker(xsdFile, resolver, errorHandler,
                            logger);
                    List<BaciPropertyLocator> badProps = baciChecker.findBaciPropsOutsideCharacteristicComp();
                    if (!badProps.isEmpty()) {
                        // Reduce the available error output to show only xsd element(s), not the individual baci properties
                        Set<String> badXsdElementNames = new HashSet<String>();
                        for (BaciPropertyLocator badProp : badProps) {
                            badXsdElementNames.add(badProp.elementName);
                        }
                        System.out.println(xsdFilename
                                + ": illegal use of baci properties in xsd elements that are not derived from baci:CharacteristicComponent. "
                                + "Offending element(s): " + StringUtils.join(badXsdElementNames, ' '));
                        errorFlag = true;
                        globalErrorFlag = true;
                    }
                } catch (SAXException ex) {
                    // ignore SAXException coming from BaciSchemaChecker, because we don't want to report errors
                    // twice when the xsd file is really messed up. 
                    // There are cases where xerces reports a normal error but XSOM reports a fatal error and throws this exception.
                }
                if (verbose && !errorFlag) {
                    System.out.println("[OK]");
                }
            } catch (SAXException e) {
                System.out.println("Caught a SAXException in validateSchemas: ");
                e.printStackTrace(System.out);
            } catch (IOException e) {
                System.out.println("[IOException] Probably " + xsdFilename + " doesn't exists.");
            }
        } else {
            System.out.print(xsdFilename + ": [Warning] file is empty.\n");
        }
    }
    resolver.setResolveOnlyHttp(true); // back to legacy mode again... yuck, our resolver still sticks to the "SP" field and will be used elsewhere!
}

From source file:org.ebayopensource.turmeric.plugins.maven.resources.ResourceLocator.java

private Location lookInClasspath(String pathref) throws MojoExecutionException {
    log.debug("Looking for resource in project classpath: " + pathref);

    if (log.isDebugEnabled()) {
        StringBuilder dbg = new StringBuilder();
        dbg.append("System.getProperty('java.class.path')=");

        String rawcp = System.getProperty("java.class.path");
        for (String cp : rawcp.split(File.pathSeparator)) {
            dbg.append("\n  ").append(cp);
        }

        log.debug(dbg.toString());

        ClassLoader cl = this.getClass().getClassLoader();
        if (cl instanceof URLClassLoader) {
            dbg = new StringBuilder();
            dbg.append("URLClassLoader(");
            dbg.append(cl.getClass().getName());
            dbg.append("):");

            URLClassLoader ucl = (URLClassLoader) cl;

            for (URL url : ucl.getURLs()) {
                dbg.append("\n  ").append(url.toExternalForm());
            }

            log.debug(dbg.toString());
        }
    }

    List<URL> resources = new ArrayList<URL>();
    try {
        Enumeration<URL> enurls = ClassLoader.getSystemResources(pathref);
        if (enurls != null) {
            while (enurls.hasMoreElements()) {
                URL url = enurls.nextElement();
                if (!resources.contains(url)) {
                    resources.add(url);
                }
            }
        }

        addFoundResource(resources, pathref, Thread.currentThread().getContextClassLoader());
        addFoundResource(resources, pathref, this.getClass().getClassLoader());
        if (resources.isEmpty()) {
            log.debug("NOT FOUND in project classpath");
            return null;
        }

        if (resources.size() > 1) {
            log.warn("Found more than 1 classpath entry for: " + pathref);
            for (URL url : resources) {
                log.warn(" + " + url.toExternalForm());
            }
        }

        URI uri = resources.get(0).toURI();
        log.debug("FOUND resource in project classpath: " + uri);
        return new Location(uri, project);
    } catch (IOException e) {
        throw new MojoExecutionException(
                "Unable to process resource lookup in project classpath: " + e.getMessage(), e);
    } catch (URISyntaxException e) {
        throw new MojoExecutionException(
                "Unable to process resource lookup in project classpath: " + e.getMessage(), e);
    }
}

From source file:org.apache.giraph.hive.HiveGiraphRunner.java

/**
* set hive configuration
*/
private void adjustConfigurationForHive() {
    // When output partitions are used, workers register them with the
    // metastore at the cleanup stage, and on HiveConf's initialization, it
    // looks for hive-site.xml.
    addToStringCollection(conf, "tmpfiles", conf.getClassLoader().getResource("hive-site.xml").toString());

    // Or, more effectively, we can provide all the jars the client needs
    // to the workers as well
    String[] hadoopJars = System.getenv("HADOOP_CLASSPATH").split(File.pathSeparator);
    List<String> hadoopJarURLs = Lists.newArrayList();
    for (String jarPath : hadoopJars) {
        File file = new File(jarPath);
        if (file.exists() && file.isFile()) {
            String jarURL = file.toURI().toString();
            hadoopJarURLs.add(jarURL);
        }
    }
    addToStringCollection(conf, "tmpjars", hadoopJarURLs);
}

From source file:io.hops.hopsworks.common.dao.jupyter.config.JupyterConfigFilesGenerator.java

private boolean createConfigFiles(String confDirPath, String hdfsUser, String realName, Project project,
        String nameNodeEndpoint, Integer port, JupyterSettings js) throws IOException, ServiceException {
    File jupyter_config_file = new File(confDirPath + JUPYTER_NOTEBOOK_CONFIG);
    File jupyter_kernel_file = new File(confDirPath + JUPYTER_CUSTOM_KERNEL);
    File sparkmagic_config_file = new File(confDirPath + SPARKMAGIC_CONFIG);
    File custom_js = new File(confDirPath + JUPYTER_CUSTOM_JS);
    boolean createdJupyter = false;
    boolean createdSparkmagic = false;
    boolean createdCustomJs = false;

    if (!jupyter_config_file.exists()) {

        String[] nn = nameNodeEndpoint.split(":");
        String nameNodeIp = nn[0];
        String nameNodePort = nn[1];

        String pythonKernel = "";

        if (settings.isPythonKernelEnabled() && !project.getPythonVersion().contains("X")) {
            pythonKernel = ", 'python-" + hdfsUser + "'";
            StringBuilder jupyter_kernel_config = ConfigFileGenerator.instantiateFromTemplate(
                    ConfigFileGenerator.JUPYTER_CUSTOM_KERNEL, "hdfs_user", hdfsUser, "hadoop_home",
                    settings.getHadoopSymbolicLinkDir(), "hadoop_version", settings.getHadoopVersion(),
                    "anaconda_home", settings.getAnacondaProjectDir(project.getName()), "secret_dir",
                    settings.getStagingDir() + Settings.PRIVATE_DIRS + js.getSecret());
            ConfigFileGenerator.createConfigFile(jupyter_kernel_file, jupyter_kernel_config.toString());
        }

        StringBuilder jupyter_notebook_config = ConfigFileGenerator.instantiateFromTemplate(
                ConfigFileGenerator.JUPYTER_NOTEBOOK_CONFIG_TEMPLATE, "project", project.getName(),
                "namenode_ip", nameNodeIp, "namenode_port", nameNodePort, "hopsworks_ip",
                settings.getHopsworksIp(), "base_dir", js.getBaseDir(), "hdfs_user", hdfsUser, "port",
                port.toString(), "python-kernel", pythonKernel, "umask", js.getUmask(), "hadoop_home",
                this.settings.getHadoopSymbolicLinkDir(), "hdfs_home", this.settings.getHadoopSymbolicLinkDir(),
                "secret_dir", this.settings.getStagingDir() + Settings.PRIVATE_DIRS + js.getSecret());
        createdJupyter = ConfigFileGenerator.createConfigFile(jupyter_config_file,
                jupyter_notebook_config.toString());
    }
    if (!sparkmagic_config_file.exists()) {
        StringBuilder sparkFiles = new StringBuilder();
        sparkFiles
                //Log4j.properties
                .append(settings.getSparkLog4JPath()).append(",")
                // Glassfish domain truststore
                .append(settings.getGlassfishTrustStoreHdfs()).append("#").append(Settings.DOMAIN_CA_TRUSTSTORE)
                .append(",")
                // Add HopsUtil
                .append(settings.getHopsUtilHdfsPath());

        if (!js.getFiles().equals("")) {
            //Split the comma-separated string and append it to sparkFiles
            for (String file : js.getFiles().split(",")) {
                sparkFiles.append(",").append(file);
            }
        }

        String extraClassPath = settings.getHopsLeaderElectionJarPath() + File.pathSeparator
                + settings.getHopsUtilFilename();

        if (!js.getJars().equals("")) {
            //Split the comma-separated string and append the names to the driver and executor classpath
            for (String jar : js.getJars().split(",")) {
                sparkFiles.append(",").append(jar);
                //Get jar name
                String name = jar.substring(jar.lastIndexOf("/") + 1);
                extraClassPath += File.pathSeparator + name;
            }
        }

        // If Hops RPC TLS is enabled, the password file will be injected by the
        // NodeManagers, so we don't need to add it as a LocalResource
        if (!settings.getHopsRpcTls()) {
            sparkFiles
                    // Keystore
                    .append(",hdfs://").append(settings.getHdfsTmpCertDir()).append(File.separator)
                    .append(hdfsUser).append(File.separator).append(hdfsUser).append("__kstore.jks#")
                    .append(Settings.K_CERTIFICATE).append(",")
                    // TrustStore
                    .append("hdfs://").append(settings.getHdfsTmpCertDir()).append(File.separator)
                    .append(hdfsUser).append(File.separator).append(hdfsUser).append("__tstore.jks#")
                    .append(Settings.T_CERTIFICATE).append(",")
                    // File with crypto material password
                    .append("hdfs://").append(settings.getHdfsTmpCertDir()).append(File.separator)
                    .append(hdfsUser).append(File.separator).append(hdfsUser).append("__cert.key#")
                    .append(Settings.CRYPTO_MATERIAL_PASSWORD);
        }

        //Prepare pyfiles
        StringBuilder pyFilesBuilder = new StringBuilder();
        if (!Strings.isNullOrEmpty(js.getPyFiles())) {
            pyFilesBuilder = new StringBuilder();
            for (String file : js.getPyFiles().split(",")) {
                file += "#" + file.substring(file.lastIndexOf("/") + 1);
                pyFilesBuilder.append(file).append(",");
            }
            //Remove last comma character
            pyFilesBuilder.deleteCharAt(pyFilesBuilder.length() - 1);
        }

        String sparkProps = js.getSparkParams();

        // Spark properties user has defined in the jupyter dashboard
        Map<String, String> userSparkProperties = HopsUtils.validateUserProperties(sparkProps,
                settings.getSparkDir());

        LOGGER.info("SparkProps are: " + System.lineSeparator() + sparkProps);

        boolean isExperiment = js.getMode().compareToIgnoreCase("experiment") == 0;
        boolean isParallelExperiment = js.getMode().compareToIgnoreCase("parallelexperiments") == 0;
        boolean isDistributedTraining = js.getMode().compareToIgnoreCase("distributedtraining") == 0;
        boolean isMirroredStrategy = js.getDistributionStrategy().compareToIgnoreCase("mirroredstrategy") == 0
                && isDistributedTraining;
        boolean isParameterServerStrategy = js.getDistributionStrategy()
                .compareToIgnoreCase("parameterserverstrategy") == 0 && isDistributedTraining;
        boolean isCollectiveAllReduceStrategy = js.getDistributionStrategy()
                .compareToIgnoreCase("collectiveallreducestrategy") == 0 && isDistributedTraining;
        boolean isSparkDynamic = js.getMode().compareToIgnoreCase("sparkdynamic") == 0;
        String extraJavaOptions = "-D" + Settings.LOGSTASH_JOB_INFO + "=" + project.getName().toLowerCase()
                + ",jupyter,notebook,?" + " -D" + Settings.HOPSWORKS_JOBTYPE_PROPERTY + "=" + JobType.SPARK
                + " -D" + Settings.KAFKA_BROKERADDR_PROPERTY + "=" + settings.getKafkaBrokersStr() + " -D"
                + Settings.HOPSWORKS_REST_ENDPOINT_PROPERTY + "=" + settings.getRestEndpoint() + " -D"
                + Settings.HOPSWORKS_ELASTIC_ENDPOINT_PROPERTY + "=" + settings.getElasticRESTEndpoint() + " -D"
                + Settings.HOPSWORKS_PROJECTID_PROPERTY + "=" + project.getId() + " -D"
                + Settings.HOPSWORKS_PROJECTNAME_PROPERTY + "=" + project.getName()
                + " -Dlog4j.configuration=./log4j.properties";

        // Get information about which version of TensorFlow the user is running
        TfLibMapping tfLibMapping = tfLibMappingFacade.findTfMappingForProject(project);
        if (tfLibMapping == null) {
            // We are not supporting this version.
            throw new ServiceException(RESTCodes.ServiceErrorCode.TENSORFLOW_VERSION_NOT_SUPPORTED, Level.INFO);
        }
        String tfLdLibraryPath = tfLibMappingUtil.buildTfLdLibraryPath(tfLibMapping);

        // Map of default/system Spark(Magic) properties <Property_Name, ConfigProperty>
        // Property_Name should be either the SparkMagic property name or Spark property name
        // The replacement pattern is defined in ConfigProperty
        Map<String, ConfigProperty> sparkMagicParams = new HashMap<>();
        sparkMagicParams.put("livy_ip", new ConfigProperty("livy_ip", HopsUtils.IGNORE, settings.getLivyIp()));
        sparkMagicParams.put("jupyter_home", new ConfigProperty("jupyter_home", HopsUtils.IGNORE, confDirPath));
        sparkMagicParams.put("driverCores",
                new ConfigProperty("driver_cores", HopsUtils.IGNORE,
                        (isExperiment || isDistributedTraining || isParallelExperiment) ? "1"
                                : Integer.toString(js.getAppmasterCores())));
        sparkMagicParams.put("driverMemory", new ConfigProperty("driver_memory", HopsUtils.IGNORE,
                Integer.toString(js.getAppmasterMemory()) + "m"));
        sparkMagicParams.put("numExecutors",
                new ConfigProperty("num_executors", HopsUtils.IGNORE, (isExperiment || isMirroredStrategy) ? "1"
                        : (isParameterServerStrategy) ? Integer.toString(js.getNumExecutors() + js.getNumTfPs())
                                : (isSparkDynamic) ? Integer.toString(js.getDynamicMinExecutors())
                                        : Integer.toString(js.getNumExecutors())));
        sparkMagicParams.put("executorCores",
                new ConfigProperty("executor_cores", HopsUtils.IGNORE,
                        (isExperiment || isDistributedTraining || isParallelExperiment) ? "1"
                                : Integer.toString(js.getNumExecutorCores())));
        sparkMagicParams.put("executorMemory", new ConfigProperty("executor_memory", HopsUtils.IGNORE,
                Integer.toString(js.getExecutorMemory()) + "m"));
        sparkMagicParams.put("proxyUser", new ConfigProperty("hdfs_user", HopsUtils.IGNORE, hdfsUser));
        sparkMagicParams.put("name", new ConfigProperty("spark_magic_name", HopsUtils.IGNORE,
                "remotesparkmagics-jupyter-" + js.getMode()));
        sparkMagicParams.put("queue", new ConfigProperty("yarn_queue", HopsUtils.IGNORE, "default"));

        // Export versions of software

        sparkMagicParams.put("spark.yarn.appMasterEnv.LIVY_VERSION",
                new ConfigProperty("livy_version", HopsUtils.IGNORE, this.settings.getLivyVersion()));

        sparkMagicParams.put("spark.yarn.appMasterEnv.SPARK_VERSION",
                new ConfigProperty("spark_version", HopsUtils.IGNORE, this.settings.getSparkVersion()));

        sparkMagicParams.put("spark.yarn.appMasterEnv.KAFKA_VERSION",
                new ConfigProperty("kafka_version", HopsUtils.IGNORE, this.settings.getKafkaVersion()));

        sparkMagicParams.put("spark.yarn.appMasterEnv.TENSORFLOW_VERSION",
                new ConfigProperty("tensorflow_version", HopsUtils.IGNORE, tfLibMapping.getTfVersion()));

        sparkMagicParams.put("spark.yarn.appMasterEnv.CUDA_VERSION",
                new ConfigProperty("cuda_version", HopsUtils.IGNORE, tfLibMapping.getCudaVersion()));

        sparkMagicParams.put("spark.yarn.appMasterEnv.HOPSWORKS_VERSION",
                new ConfigProperty("hopsworks_version", HopsUtils.IGNORE, this.settings.getHopsworksVersion()));

        sparkMagicParams.put("spark.yarn.appMasterEnv.HADOOP_VERSION",
                new ConfigProperty("hadoop_version", HopsUtils.IGNORE, this.settings.getHadoopVersion()));

        sparkMagicParams.put("spark.yarn.appMasterEnv.KAFKA_BROKERS",
                new ConfigProperty("kafka_brokers", HopsUtils.IGNORE, this.settings.getKafkaBrokersStr()));

        sparkMagicParams.put("spark.yarn.appMasterEnv.ELASTIC_ENDPOINT", new ConfigProperty("elastic_endpoint",
                HopsUtils.IGNORE, this.settings.getElasticRESTEndpoint()));

        sparkMagicParams.put("spark.yarn.appMasterEnv.HOPSWORKS_USER",
                new ConfigProperty("hopsworks_user", HopsUtils.IGNORE, realName));

        // Spark properties
        sparkMagicParams.put(Settings.SPARK_EXECUTORENV_PATH,
                new ConfigProperty("spark_executorEnv_PATH", HopsUtils.APPEND_PATH,
                        this.settings.getAnacondaProjectDir(project.getName())
                                + "/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"));

        sparkMagicParams.put("spark.yarn.appMasterEnv.PYSPARK_PYTHON", new ConfigProperty("pyspark_bin",
                HopsUtils.IGNORE, this.settings.getAnacondaProjectDir(project.getName()) + "/bin/python"));

        sparkMagicParams.put("spark.yarn.appMasterEnv.PYSPARK_DRIVER_PYTHON", new ConfigProperty("pyspark_bin",
                HopsUtils.IGNORE, this.settings.getAnacondaProjectDir(project.getName()) + "/bin/python"));

        sparkMagicParams.put("spark.yarn.appMasterEnv.PYSPARK3_PYTHON", new ConfigProperty("pyspark_bin",
                HopsUtils.IGNORE, this.settings.getAnacondaProjectDir(project.getName()) + "/bin/python"));

        sparkMagicParams.put(Settings.SPARK_YARN_APPMASTERENV_LD_LIBRARY_PATH,
                new ConfigProperty("spark_yarn_appMaster_LD_LIBRARY_PATH", HopsUtils.APPEND_PATH,
                        this.settings.getJavaHome() + "/jre/lib/amd64/server:" + tfLdLibraryPath
                                + this.settings.getHadoopSymbolicLinkDir() + "/lib/native"));

        sparkMagicParams.put("spark.yarn.appMasterEnv.HADOOP_HOME",
                new ConfigProperty("hadoop_home", HopsUtils.IGNORE, this.settings.getHadoopSymbolicLinkDir()));

        sparkMagicParams.put(Settings.SPARK_YARN_APPMASTERENV_LIBHDFS_OPTS,
                new ConfigProperty("spark_yarn_appMasterEnv_LIBHDFS_OPTS", HopsUtils.APPEND_SPACE,
                        "-Xmx96m -Dlog4j.configuration=" + this.settings.getHadoopSymbolicLinkDir()
                                + "/etc/hadoop/log4j.properties -Dhadoop.root.logger=ERROR,RFA"));

        sparkMagicParams.put("spark.yarn.appMasterEnv.HADOOP_HDFS_HOME",
                new ConfigProperty("hadoop_home", HopsUtils.IGNORE, this.settings.getHadoopSymbolicLinkDir()));

        sparkMagicParams.put("spark.yarn.appMasterEnv.HADOOP_USER_NAME",
                new ConfigProperty("hdfs_user", HopsUtils.IGNORE, hdfsUser));

        sparkMagicParams.put("spark.yarn.appMasterEnv.REST_ENDPOINT",
                new ConfigProperty("rest_endpoint", HopsUtils.IGNORE, settings.getRestEndpoint()));

        sparkMagicParams.put("spark.yarn.appMasterEnv.HDFS_BASE_DIR",
                new ConfigProperty("spark_yarn_appMasterEnv_HDFS_BASE_DIR", HopsUtils.IGNORE,
                        "hdfs://Projects/" + project.getName() + js.getBaseDir()));

        sparkMagicParams.put(Settings.SPARK_DRIVER_STAGINGDIR_ENV, new ConfigProperty("spark_yarn_stagingDir",
                HopsUtils.IGNORE, "hdfs:///Projects/" + project.getName() + "/Resources"));

        sparkMagicParams.put("spark.yarn.dist.files",
                new ConfigProperty("spark_yarn_dist_files", HopsUtils.IGNORE, sparkFiles.toString()));

        sparkMagicParams.put("spark.yarn.dist.archives",
                new ConfigProperty("spark_yarn_dist_archives", HopsUtils.IGNORE, js.getArchives()));

        sparkMagicParams.put("spark.yarn.dist.pyFiles",
                new ConfigProperty("spark_yarn_dist_pyFiles", HopsUtils.IGNORE, pyFilesBuilder.toString()));

        sparkMagicParams.put(Settings.SPARK_DRIVER_EXTRALIBRARYPATH,
                new ConfigProperty("spark_driver_extraLibraryPath", HopsUtils.APPEND_PATH, tfLdLibraryPath));

        sparkMagicParams.put(Settings.SPARK_DRIVER_EXTRAJAVAOPTIONS,
                new ConfigProperty("spark_driver_extraJavaOptions", HopsUtils.APPEND_SPACE, extraJavaOptions));

        sparkMagicParams.put(Settings.SPARK_DRIVER_EXTRACLASSPATH,
                new ConfigProperty("spark_driver_extraClassPath", HopsUtils.APPEND_PATH, extraClassPath));

        sparkMagicParams.put(Settings.SPARK_EXECUTOR_EXTRACLASSPATH,
                new ConfigProperty("spark_executor_extraClassPath", HopsUtils.APPEND_PATH, extraClassPath));

        sparkMagicParams.put("spark.executorEnv.REST_ENDPOINT",
                new ConfigProperty("rest_endpoint", HopsUtils.IGNORE, settings.getRestEndpoint()));

        sparkMagicParams.put(Settings.SPARK_EXECUTORENV_HADOOP_USER_NAME,
                new ConfigProperty("hdfs_user", HopsUtils.IGNORE, hdfsUser));

        sparkMagicParams.put("spark.executorEnv.HADOOP_HOME",
                new ConfigProperty("hadoop_home", HopsUtils.IGNORE, this.settings.getHadoopSymbolicLinkDir()));

        sparkMagicParams.put(Settings.SPARK_EXECUTORENV_LIBHDFS_OPTS,
                new ConfigProperty("spark_executorEnv_LIBHDFS_OPTS", HopsUtils.APPEND_SPACE,
                        "-Xmx96m -Dlog4j.configuration=" + this.settings.getHadoopSymbolicLinkDir()
                                + "/etc/hadoop/log4j.properties -Dhadoop.root.logger=ERROR,RFA"));

        sparkMagicParams.put("spark.executorEnv.PYSPARK_PYTHON", new ConfigProperty("pyspark_bin",
                HopsUtils.IGNORE, this.settings.getAnacondaProjectDir(project.getName()) + "/bin/python"));

        sparkMagicParams.put("spark.executorEnv.PYSPARK3_PYTHON", new ConfigProperty("pyspark_bin",
                HopsUtils.IGNORE, this.settings.getAnacondaProjectDir(project.getName()) + "/bin/python"));

        sparkMagicParams.put(Settings.SPARK_EXECUTORENV_LD_LIBRARY_PATH,
                new ConfigProperty("spark_executorEnv_LD_LIBRARY_PATH", HopsUtils.APPEND_PATH,
                        this.settings.getJavaHome() + "/jre/lib/amd64/server:" + tfLdLibraryPath
                                + this.settings.getHadoopSymbolicLinkDir() + "/lib/native"));

        sparkMagicParams.put("spark.executorEnv.HADOOP_HDFS_HOME",
                new ConfigProperty("hadoop_home", HopsUtils.IGNORE, this.settings.getHadoopSymbolicLinkDir()));

        // Export versions of software

        sparkMagicParams.put("spark.executorEnv.LIVY_VERSION",
                new ConfigProperty("livy_version", HopsUtils.IGNORE, this.settings.getLivyVersion()));

        sparkMagicParams.put("spark.executorEnv.SPARK_VERSION",
                new ConfigProperty("spark_version", HopsUtils.IGNORE, this.settings.getSparkVersion()));

        sparkMagicParams.put("spark.executorEnv.KAFKA_VERSION",
                new ConfigProperty("kafka_version", HopsUtils.IGNORE, this.settings.getKafkaVersion()));

        sparkMagicParams.put("spark.executorEnv.TENSORFLOW_VERSION",
                new ConfigProperty("tensorflow_version", HopsUtils.IGNORE, tfLibMapping.getTfVersion()));

        sparkMagicParams.put("spark.executorEnv.CUDA_VERSION",
                new ConfigProperty("cuda_version", HopsUtils.IGNORE, tfLibMapping.getCudaVersion()));

        sparkMagicParams.put("spark.executorEnv.HOPSWORKS_VERSION",
                new ConfigProperty("hopsworks_version", HopsUtils.IGNORE, this.settings.getHopsworksVersion()));

        sparkMagicParams.put("spark.executorEnv.HADOOP_VERSION",
                new ConfigProperty("hadoop_version", HopsUtils.IGNORE, this.settings.getHadoopVersion()));

        sparkMagicParams.put("spark.executorEnv.KAFKA_BROKERS",
                new ConfigProperty("kafka_brokers", HopsUtils.IGNORE, this.settings.getKafkaBrokersStr()));

        sparkMagicParams.put("spark.executorEnv.ELASTIC_ENDPOINT", new ConfigProperty("elastic_endpoint",
                HopsUtils.IGNORE, this.settings.getElasticRESTEndpoint()));

        sparkMagicParams.put("spark.executorEnv.HOPSWORKS_USER",
                new ConfigProperty("hopsworks_user", HopsUtils.IGNORE, realName));

        sparkMagicParams.put(Settings.SPARK_EXECUTOR_EXTRA_JAVA_OPTS, new ConfigProperty(
                "spark_executor_extraJavaOptions", HopsUtils.APPEND_SPACE, extraJavaOptions));

        sparkMagicParams.put("spark.executorEnv.HDFS_BASE_DIR",
                new ConfigProperty("spark_executorEnv_HDFS_BASE_DIR", HopsUtils.IGNORE,
                        "hdfs://Projects/" + project.getName() + js.getBaseDir()));

        sparkMagicParams.put("spark.pyspark.python", new ConfigProperty("pyspark_bin", HopsUtils.IGNORE,
                this.settings.getAnacondaProjectDir(project.getName()) + "/bin/python"));

        sparkMagicParams.put("spark.shuffle.service.enabled", new ConfigProperty("", HopsUtils.IGNORE, "true"));

        sparkMagicParams.put("spark.submit.deployMode", new ConfigProperty("", HopsUtils.IGNORE, "cluster"));

        sparkMagicParams.put("spark.tensorflow.application",
                new ConfigProperty("spark_tensorflow_application", HopsUtils.IGNORE,
                        Boolean.toString(isExperiment || isParallelExperiment || isDistributedTraining)));

        sparkMagicParams.put("spark.tensorflow.num.ps", new ConfigProperty("spark_tensorflow_num_ps",
                HopsUtils.IGNORE, (isParameterServerStrategy) ? Integer.toString(js.getNumTfPs()) : "0"));

        sparkMagicParams.put("spark.executor.gpus",
                new ConfigProperty("spark_executor_gpus", HopsUtils.IGNORE,
                        (isDistributedTraining || isParallelExperiment || isExperiment)
                                ? Integer.toString(js.getNumExecutorGpus())
                                : "0"));

        sparkMagicParams.put("spark.dynamicAllocation.enabled",
                new ConfigProperty("spark_dynamicAllocation_enabled", HopsUtils.OVERWRITE, Boolean.toString(
                        isSparkDynamic || isExperiment || isParallelExperiment || isDistributedTraining)));

        sparkMagicParams.put("spark.dynamicAllocation.initialExecutors", new ConfigProperty(
                "spark_dynamicAllocation_initialExecutors", HopsUtils.OVERWRITE,
                (isExperiment || isParallelExperiment || isMirroredStrategy) ? "0"
                        : (isParameterServerStrategy) ? Integer.toString(js.getNumExecutors() + js.getNumTfPs())
                                : (isCollectiveAllReduceStrategy) ? Integer.toString(js.getNumExecutors())
                                        : Integer.toString(js.getDynamicMinExecutors())));

        sparkMagicParams.put("spark.dynamicAllocation.minExecutors",
                new ConfigProperty("spark_dynamicAllocation_minExecutors", HopsUtils.OVERWRITE,
                        (isExperiment || isParallelExperiment || isDistributedTraining) ? "0"
                                : Integer.toString(js.getDynamicMinExecutors())));

        sparkMagicParams.put("spark.dynamicAllocation.maxExecutors",
                new ConfigProperty("spark_dynamicAllocation_maxExecutors", HopsUtils.OVERWRITE,
                        (isExperiment || isMirroredStrategy) ? "1"
                                : (isParallelExperiment) ? Integer.toString(js.getNumExecutors())
                                        : (isParameterServerStrategy)
                                                ? Integer.toString(js.getNumExecutors() + js.getNumTfPs())
                                                : (isCollectiveAllReduceStrategy)
                                                        ? Integer.toString(js.getNumExecutors())
                                                        : Integer.toString(js.getDynamicMaxExecutors())));

        sparkMagicParams.put("spark.dynamicAllocation.executorIdleTimeout",
                new ConfigProperty("spark_dynamicAllocation_executorIdleTimeout", HopsUtils.OVERWRITE,
                        (isParameterServerStrategy)
                                ? Integer.toString(((js.getNumExecutors() + js.getNumTfPs()) * 15) + 60) + "s"
                                : "60s"));

        // Blacklisting behaviour for TensorFlow on Spark (e.g. Hyperparameter search) to make it robust
        // Allow many failures on a particular node before blacklisting the node
        // Blacklist executor instantly

        sparkMagicParams.put("spark.blacklist.enabled",
                new ConfigProperty("spark_blacklist_enabled", HopsUtils.OVERWRITE,
                        ((isExperiment || isParallelExperiment) && js.getFaultTolerant()) ? "true" : "false"));

        // If any task fails on an executor - kill it instantly (need fresh working directory for each task)
        sparkMagicParams.put("spark.blacklist.task.maxTaskAttemptsPerExecutor",
                new ConfigProperty("spark_max_task_attempts_per_executor", HopsUtils.OVERWRITE, "1"));

        // Blacklist a node after 2 tasks fail on it
        sparkMagicParams.put("spark.blacklist.task.maxTaskAttemptsPerNode",
                new ConfigProperty("spark_max_task_attempts_per_node", HopsUtils.OVERWRITE, "2"));

        // If any task fails on an executor within a stage - blacklist it
        sparkMagicParams.put("spark.blacklist.stage.maxFailedTasksPerExecutor",
                new ConfigProperty("spark_stage_max_failed_tasks_per_executor", HopsUtils.OVERWRITE, "1"));

        // Blacklist a node after 2 tasks within a stage fail on it
        sparkMagicParams.put("spark.blacklist.stage.maxFailedExecutorsPerNode",
                new ConfigProperty("spark_stage_max_failed_executors_per_node", HopsUtils.OVERWRITE, "2"));

        // If any task fails on an executor within an application - blacklist it
        sparkMagicParams.put("spark.blacklist.application.maxFailedTasksPerExecutor", new ConfigProperty(
                "spark_application_max_failed_tasks_per_executor", HopsUtils.OVERWRITE, "1"));

        // If 2 tasks fail on a node within an application - blacklist it
        sparkMagicParams.put("spark.blacklist.application.maxFailedExecutorsPerNode", new ConfigProperty(
                "spark_application_max_failed_executors_per_node", HopsUtils.OVERWRITE, "2"));

        sparkMagicParams.put("spark.task.maxFailures",
                new ConfigProperty("spark_task_max_failures", HopsUtils.OVERWRITE,
                        (isParallelExperiment || isExperiment) && js.getFaultTolerant() ? "3"
                                : (isParallelExperiment || isExperiment || isDistributedTraining) ? "1" : "4"));

        // Always kill the blacklisted executors (further failures could be caused by local files left over from the failed task)
        sparkMagicParams.put("spark.blacklist.killBlacklistedExecutors",
                new ConfigProperty("spark_kill_blacklisted_executors", HopsUtils.OVERWRITE,
                        (isExperiment || isParallelExperiment) ? "true" : "false"));

        // Merge system and user defined properties
        Map<String, String> sparkParamsAfterMerge = HopsUtils.mergeHopsworksAndUserParams(sparkMagicParams,
                userSparkProperties, false);

        StringBuilder sparkmagic_sb = ConfigFileGenerator
                .instantiateFromTemplate(ConfigFileGenerator.SPARKMAGIC_CONFIG_TEMPLATE, sparkParamsAfterMerge);
        createdSparkmagic = ConfigFileGenerator.createConfigFile(sparkmagic_config_file,
                sparkmagic_sb.toString());
    }
    if (!custom_js.exists()) {

        StringBuilder custom_js_sb = ConfigFileGenerator.instantiateFromTemplate(
                ConfigFileGenerator.JUPYTER_CUSTOM_TEMPLATE, "hadoop_home",
                this.settings.getHadoopSymbolicLinkDir());
        createdCustomJs = ConfigFileGenerator.createConfigFile(custom_js, custom_js_sb.toString());
    }

    // Add this local file to 'spark: file' to copy it to hdfs and localize it.
    return createdJupyter || createdSparkmagic || createdCustomJs;
}

From source file:org.apache.carbondata.processing.loading.DataLoadProcessBuilder.java

public static CarbonDataLoadConfiguration createConfiguration(CarbonLoadModel loadModel,
        String[] storeLocation) {
    CarbonDataProcessorUtil.createLocations(storeLocation);

    String databaseName = loadModel.getDatabaseName();
    String tableName = loadModel.getTableName();
    String tempLocationKey = CarbonDataProcessorUtil.getTempStoreLocationKey(databaseName, tableName,
            loadModel.getSegmentId(), loadModel.getTaskNo(), false, false);
    CarbonProperties.getInstance().addProperty(tempLocationKey,
            StringUtils.join(storeLocation, File.pathSeparator));

    return createConfiguration(loadModel);
}

From source file:org.apache.cocoon.components.language.programming.java.JavaLanguage.java

/**
 * Expand a directory path or list of directory paths (File.pathSeparator
 * delimited) into a list of file paths of all the jar files in those
 * directories.
 *
 * @param dirPaths The string containing the directory path or list of
 *       directory paths.
 * @return The file paths of the jar files in the directories. This is an
 *      empty string if no files were found, and is terminated by an
 *      additional pathSeparator in all other cases.
 */
private String expandDirs(String dirPaths) {
    StringTokenizer st = new StringTokenizer(dirPaths, File.pathSeparator);
    StringBuffer buffer = new StringBuffer();
    while (st.hasMoreTokens()) {
        String d = st.nextToken();
        File dir = new File(d);
        if (!dir.isDirectory()) {
            // The absence of a listed directory may not be an error.
            if (getLogger().isWarnEnabled()) {
                getLogger().warn("Attempted to retrieve directory listing of non-directory " + dir.toString());
            }
        } else {
            File[] files = dir.listFiles(new JavaArchiveFilter());
            for (int i = 0; i < files.length; i++) {
                buffer.append(files[i]).append(File.pathSeparator);
            }
        }
    }
    return buffer.toString();
}

From source file:eu.udig.omsbox.OmsBoxPlugin.java

/**
 * Adds custom libs from the plugins.
 * 
 * FIXME this should hopefully get better at some point. 
 * 
 * @throws IOException 
 */
private void addCustomLibs(StringBuilder sb) throws IOException {
    // add some extra jars that are locked inside some eclipse plugins
    Bundle log4jBundle = Platform.getBundle("org.apache.log4j");
    String log4jFolderPath = getPath(log4jBundle, "/");
    if (log4jFolderPath != null) {
        sb.append(File.pathSeparator);
        addPath(log4jFolderPath + File.separator + "*", sb);
    }
    Bundle itextBundle = Platform.getBundle("com.lowagie.text");

    String itextPath = getPath(itextBundle, "/");
    if (itextPath != null) {
        itextPath = itextPath.replaceAll("!", "");
        sb.append(File.pathSeparator);
        addPath(itextPath, sb);
    }

    Location installLocation = Platform.getInstallLocation();
    File installFolder = DataUtilities.urlToFile(installLocation.getURL());
    if (installFolder != null && installFolder.exists()) {
        File pluginsFolder = new File(installFolder, "plugins");
        if (pluginsFolder.exists()) {

            File[] files = pluginsFolder.listFiles(new FilenameFilter() {
                public boolean accept(File dir, String name) {
                    boolean isCommonsLog = name.startsWith("org.apache.commons.logging_")
                            && name.endsWith(".jar");
                    return isCommonsLog;
                }
            });
            if (files.length > 1) {
                sb.append(File.pathSeparator);
                addPath(files[0].getAbsolutePath(), sb);
            }
            files = pluginsFolder.listFiles(new FilenameFilter() {
                public boolean accept(File dir, String name) {
                    boolean isJunit = name.startsWith("junit") && name.endsWith(".jar");
                    return isJunit;
                }
            });
            if (files.length > 1) {
                sb.append(File.pathSeparator);
                addPath(files[0].getAbsolutePath(), sb);
            }
        }
    }
}

From source file:org.opennms.smoketest.OpenNMSSeleniumTestCase.java

private File findPhantomJS() {
    final String os = System.getProperty("os.name").toLowerCase();
    final String extension = (os.indexOf("win") >= 0) ? ".exe" : "";

    final String path = System.getenv("PATH");
    if (path == null) {
        LOG.debug("findPhantomJS(): Unable to get PATH.");
        final File phantomFile = new File("/usr/local/bin/phantomjs" + extension);
        LOG.debug("findPhantomJS(): trying {}", phantomFile);
        if (phantomFile.exists() && phantomFile.canExecute()) {
            return phantomFile;
        }
    } else {
        final List<String> paths = new ArrayList<String>(Arrays.asList(path.split(File.pathSeparator)));
        paths.add("/usr/local/bin");
        paths.add("/usr/local/sbin");
        LOG.debug("findPhantomJS(): paths = {}", paths);
        for (final String directory : paths) {
            final File phantomFile = new File(directory + File.separator + "phantomjs" + extension);
            LOG.debug("findPhantomJS(): trying {}", phantomFile);
            if (phantomFile.exists() && phantomFile.canExecute()) {
                return phantomFile;
            }
        }
    }
    return null;
}

From source file:org.cloudifysource.usm.launcher.DefaultProcessLauncher.java

private void initGroovyCommandLine(final File workingDir) throws FileNotFoundException, USMException {
    if (this.groovyCommandLinePrefixParams != null) {
        return;
    }
    final String home = Environment.getHomeDirectory();

    final File homeDir = new File(home);
    final String groovyPath = createGroovyPath(homeDir);
    final StringBuilder sb = new StringBuilder();

    final List<File> jars = getJarFilesForGroovyClasspath(homeDir, workingDir);

    if (jars != null) {

        for (final File jar : jars) {
            sb.append(jar.getAbsolutePath()).append(File.pathSeparator);
        }

    }

    final ArrayList<String> groovyCommandParams = new ArrayList<String>();
    groovyCommandParams.add(groovyPath);

    // pass values as system props to the jvm, as required by XAP
    final List<String> envVarsList = new ArrayList<String>();
    envVarsList.add("LOOKUP_LOCATORS_PROP");
    envVarsList.add("LOOKUP_GROUPS_PROP");
    envVarsList.add("RMI_OPTIONS");

    // The GS logging configuration uses a custom JDK logger
    // JDK logging expects loggers to be available in the system classloader, but groovy loads
    // classpath entries into the GroovyClassLoader whose parent is the System class loader.
    // See more details at CLOUDIFY-1694
    // envVarsList.add("GS_LOGGING_CONFIG_FILE_PROP");

    groovyCommandParams.addAll(convertEnvVarsToSysPropsList(envVarsList));

    if (ServiceUtils.isWindows()) {
        modifyWindowsCommandLine(groovyCommandParams, workingDir);
    }

    String classPathEnv = System.getenv("CLASSPATH");
    if (classPathEnv == null) {
        classPathEnv = "";
    }
    classPathEnv = classPathEnv + File.pathSeparator + sb.toString();

    // We use the classpath env variable, as the full classpath may be too
    // long for the command line
    // limit imposed by the operating system.
    logger.info("Setting ClassPath environment variable for child processes to: " + classPathEnv);
    this.groovyEnvironmentClassPath = classPathEnv;
    // if ((jars != null) && (jars.size() > 0)) {
    // groovyCommandParams.add("-cp");
    // groovyCommandParams.add(sb.toString());
    // }

    logger.info("Setting groovyCommandParams to: " + groovyCommandParams);
    this.groovyCommandLinePrefixParams = groovyCommandParams;
}