Example usage for java.lang StringBuilder deleteCharAt

Introduction

On this page you can find example usage for java.lang.StringBuilder.deleteCharAt. The method removes the character at the given index, shifting any following characters left, and throws StringIndexOutOfBoundsException when the index is negative or not less than length().

Prototype

@Override
public StringBuilder deleteCharAt(int index) 
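
Nearly every example below uses deleteCharAt for the same job: trimming the trailing separator left behind by a build loop. A minimal, runnable sketch of that idiom (the names are illustrative):

StringBuilder query = new StringBuilder("?");
for (String key : new String[] { "a", "b", "c" }) {
    query.append(key).append('=').append('1').append('&');
}
// Drop the trailing '&' left by the loop, guarding against an empty builder
if (query.length() > 0 && query.charAt(query.length() - 1) == '&') {
    query.deleteCharAt(query.length() - 1);
}
System.out.println(query); // ?a=1&b=1&c=1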

Usage

From source file:com.vinaysshenoy.easyoauth.factory.OAuthFactory.java

public InputStream executeRequestForInputStream(HttpRequestTypes requestType, String requestUrl, Bundle params)
        throws OAuthFactoryException, OAuthMessageSignerException, OAuthExpectationFailedException,
        OAuthCommunicationException, IllegalStateException, IOException {

    if (accessToken == null) {

        throw new OAuthFactoryException(OAuthFactoryException.OAuthExceptionMessages.OAUTH_NOT_AUTHORIZED);
    }

    StringBuilder requestParamsBuilder;

    switch (requestType) {

    case GET:

        HttpGet get = null;
        requestParamsBuilder = new StringBuilder("?"); // "?" must be a String: a char argument selects the int-capacity constructor
        if (params != null && params.size() > 0) {

            Set<String> keySet = params.keySet();
            Iterator<String> keyIterator = keySet.iterator();
            String curKey;

            while (keyIterator.hasNext()) {

                curKey = keyIterator.next();

                requestParamsBuilder.append(curKey).append('=').append(params.get(curKey));

                requestParamsBuilder.append('&');

            }
        }

        switch (oAuthConfig.oAuthType) {

        case OAUTH_1_0_A:
            if (requestParamsBuilder.lastIndexOf("&") != -1)
                requestParamsBuilder.deleteCharAt(requestParamsBuilder.length() - 1);
            get = new HttpGet(requestUrl + requestParamsBuilder.toString());
            signHttpRequest(get);
            break;

        case OAUTH_2_0:

            requestParamsBuilder.append(oAuthConfig.oAuthToken).append('=').append(accessToken.getToken());

            get = new HttpGet(requestUrl + requestParamsBuilder.toString());
            break;
        }
        return httpManager.executeHttpRequestForStreamResponse(get);

    case POST:

        HttpPost post = null;

        List<NameValuePair> postParams = null;

        if (params != null && params.size() > 0) {

            Set<String> keySet = params.keySet();
            Iterator<String> keyIterator = keySet.iterator();
            String curKey;
            postParams = new ArrayList<NameValuePair>(params.size());

            while (keyIterator.hasNext()) {

                curKey = keyIterator.next();
                postParams.add(new BasicNameValuePair(curKey, params.getString(curKey)));
            }
        }

        switch (oAuthConfig.oAuthType) {

        case OAUTH_1_0_A:

            post = new HttpPost(requestUrl);

            signHttpRequest(post);
            break;

        case OAUTH_2_0:

            requestParamsBuilder = new StringBuilder("?"); // "?" as a String, not a char
            requestParamsBuilder.append(oAuthConfig.oAuthToken).append('=').append(accessToken.getToken());

            post = new HttpPost(requestUrl + requestParamsBuilder.toString());

            break;
        }

        if (postParams != null) { // params may be absent, leaving postParams null
            post.setEntity(new UrlEncodedFormEntity(postParams));
        }
        return httpManager.executeHttpRequestForStreamResponse(post);

    default:
        throw new OAuthFactoryException(OAuthFactoryException.OAuthExceptionMessages.UNSUPPORTED_METHOD);
    }

}
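
A pitfall worth flagging for snippets like the one above: StringBuilder has no char constructor, so new StringBuilder('?') compiles but widens the char to an int and selects the capacity constructor, yielding an empty builder with an initial capacity of 63. A quick demonstration:

StringBuilder byChar = new StringBuilder('?');   // '?' widens to int 63: a capacity hint, no content
StringBuilder byString = new StringBuilder("?"); // contains "?"
System.out.println(byChar.length());   // 0
System.out.println(byString.length()); // 1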

From source file:com.vinaysshenoy.easyoauth.factory.OAuthFactory.java

public String executeRequestForString(HttpRequestTypes requestType, String requestUrl, Bundle params)
        throws OAuthFactoryException, OAuthMessageSignerException, OAuthExpectationFailedException,
        OAuthCommunicationException, IllegalStateException, IOException {

    if (accessToken == null) {

        throw new OAuthFactoryException(OAuthFactoryException.OAuthExceptionMessages.OAUTH_NOT_AUTHORIZED);
    }

    StringBuilder requestParamsBuilder;

    switch (requestType) {

    case GET:

        HttpGet get = null;
        requestParamsBuilder = new StringBuilder("?"); // "?" must be a String: a char argument selects the int-capacity constructor
        if (params != null && params.size() > 0) {

            Set<String> keySet = params.keySet();
            Iterator<String> keyIterator = keySet.iterator();
            String curKey;

            while (keyIterator.hasNext()) {

                curKey = keyIterator.next();

                requestParamsBuilder.append(curKey).append('=').append(params.get(curKey));

                requestParamsBuilder.append('&');

            }
        }

        switch (oAuthConfig.oAuthType) {

        case OAUTH_1_0_A:
            if (requestParamsBuilder.lastIndexOf("&") != -1)
                requestParamsBuilder.deleteCharAt(requestParamsBuilder.length() - 1);
            get = new HttpGet(requestUrl + requestParamsBuilder.toString());
            signHttpRequest(get);
            break;

        case OAUTH_2_0:

            requestParamsBuilder.append(oAuthConfig.oAuthToken).append('=').append(accessToken.getToken());

            get = new HttpGet(requestUrl + requestParamsBuilder.toString());
            break;
        }
        return httpManager.executeHttpRequestForStringResponse(get);

    case POST:

        HttpPost post = null;

        List<NameValuePair> postParams = null;

        if (params != null && params.size() > 0) {

            Set<String> keySet = params.keySet();
            Iterator<String> keyIterator = keySet.iterator();
            String curKey;
            postParams = new ArrayList<NameValuePair>(params.size());

            while (keyIterator.hasNext()) {

                curKey = keyIterator.next();
                postParams.add(new BasicNameValuePair(curKey, params.getString(curKey)));
            }
        }

        switch (oAuthConfig.oAuthType) {

        case OAUTH_1_0_A:

            post = new HttpPost(requestUrl);

            signHttpRequest(post);
            break;

        case OAUTH_2_0:

            requestParamsBuilder = new StringBuilder("?"); // "?" as a String, not a char
            requestParamsBuilder.append(oAuthConfig.oAuthToken).append('=').append(accessToken.getToken());

            post = new HttpPost(requestUrl + requestParamsBuilder.toString());

            break;
        }

        if (postParams != null) { // params may be absent, leaving postParams null
            post.setEntity(new UrlEncodedFormEntity(postParams));
        }
        return httpManager.executeHttpRequestForStringResponse(post);

    default:
        throw new OAuthFactoryException(OAuthFactoryException.OAuthExceptionMessages.UNSUPPORTED_METHOD);
    }

}

From source file:com.prowidesoftware.swift.io.parser.SwiftParser.java

/**
 * Reads from the reader until a block start character or EOF is reached.
 *
 * @throws IOException if thrown during read
 */
protected String findBlockStart() throws IOException {
    final StringBuilder textUntilBlock = new StringBuilder();
    int c;
    do {
        c = getChar();
        textUntilBlock.append((char) c);
    } while (c != -1 && !isBlockStart((char) c));
    if (textUntilBlock.length() > 0) {
        // the last char is either EOF or '{'; drop it either way
        textUntilBlock.deleteCharAt(textUntilBlock.length() - 1);
    }

    // debug code
    //if (textUntilBlock.length()>0) log.fine("textUntilBlock: "+textUntilBlock.toString());

    return textUntilBlock.length() > 0 ? textUntilBlock.toString() : StringUtils.EMPTY;
}
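
For trimming exactly one trailing character, as findBlockStart does, setLength is an equivalent alternative to deleteCharAt; a minimal sketch using the same variable name:

if (textUntilBlock.length() > 0) {
    // same effect as textUntilBlock.deleteCharAt(textUntilBlock.length() - 1)
    textUntilBlock.setLength(textUntilBlock.length() - 1);
}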

From source file:io.hops.hopsworks.common.dao.jupyter.config.JupyterConfigFilesGenerator.java

private boolean createConfigFiles(String confDirPath, String hdfsUser, String realName, Project project,
        String nameNodeEndpoint, Integer port, JupyterSettings js) throws IOException, ServiceException {
    File jupyter_config_file = new File(confDirPath + JUPYTER_NOTEBOOK_CONFIG);
    File jupyter_kernel_file = new File(confDirPath + JUPYTER_CUSTOM_KERNEL);
    File sparkmagic_config_file = new File(confDirPath + SPARKMAGIC_CONFIG);
    File custom_js = new File(confDirPath + JUPYTER_CUSTOM_JS);
    boolean createdJupyter = false;
    boolean createdSparkmagic = false;
    boolean createdCustomJs = false;

    if (!jupyter_config_file.exists()) {

        String[] nn = nameNodeEndpoint.split(":");
        String nameNodeIp = nn[0];
        String nameNodePort = nn[1];

        String pythonKernel = "";

        if (settings.isPythonKernelEnabled() && !project.getPythonVersion().contains("X")) {
            pythonKernel = ", 'python-" + hdfsUser + "'";
            StringBuilder jupyter_kernel_config = ConfigFileGenerator.instantiateFromTemplate(
                    ConfigFileGenerator.JUPYTER_CUSTOM_KERNEL, "hdfs_user", hdfsUser, "hadoop_home",
                    settings.getHadoopSymbolicLinkDir(), "hadoop_version", settings.getHadoopVersion(),
                    "anaconda_home", settings.getAnacondaProjectDir(project.getName()), "secret_dir",
                    settings.getStagingDir() + Settings.PRIVATE_DIRS + js.getSecret());
            ConfigFileGenerator.createConfigFile(jupyter_kernel_file, jupyter_kernel_config.toString());
        }

        StringBuilder jupyter_notebook_config = ConfigFileGenerator.instantiateFromTemplate(
                ConfigFileGenerator.JUPYTER_NOTEBOOK_CONFIG_TEMPLATE, "project", project.getName(),
                "namenode_ip", nameNodeIp, "namenode_port", nameNodePort, "hopsworks_ip",
                settings.getHopsworksIp(), "base_dir", js.getBaseDir(), "hdfs_user", hdfsUser, "port",
                port.toString(), "python-kernel", pythonKernel, "umask", js.getUmask(), "hadoop_home",
                this.settings.getHadoopSymbolicLinkDir(), "hdfs_home", this.settings.getHadoopSymbolicLinkDir(),
                "secret_dir", this.settings.getStagingDir() + Settings.PRIVATE_DIRS + js.getSecret());
        createdJupyter = ConfigFileGenerator.createConfigFile(jupyter_config_file,
                jupyter_notebook_config.toString());
    }
    if (!sparkmagic_config_file.exists()) {
        StringBuilder sparkFiles = new StringBuilder();
        sparkFiles
                //Log4j.properties
                .append(settings.getSparkLog4JPath()).append(",")
                // Glassfish domain truststore
                .append(settings.getGlassfishTrustStoreHdfs()).append("#").append(Settings.DOMAIN_CA_TRUSTSTORE)
                .append(",")
                // Add HopsUtil
                .append(settings.getHopsUtilHdfsPath());

        if (!js.getFiles().equals("")) {
            //Split the comma-separated string and append it to sparkFiles
            for (String file : js.getFiles().split(",")) {
                sparkFiles.append(",").append(file);
            }
        }

        String extraClassPath = settings.getHopsLeaderElectionJarPath() + File.pathSeparator
                + settings.getHopsUtilFilename();

        if (!js.getJars().equals("")) {
            //Split the comma-separated string and append the names to the driver and executor classpath
            for (String jar : js.getJars().split(",")) {
                sparkFiles.append(",").append(jar);
                //Get jar name
                String name = jar.substring(jar.lastIndexOf("/") + 1);
                extraClassPath += File.pathSeparator + name;
            }
        }

        // If Hops RPC TLS is enabled, the password file will be injected by the
        // NodeManagers, so we don't need to add it as a LocalResource
        if (!settings.getHopsRpcTls()) {
            sparkFiles
                    // Keystore
                    .append(",hdfs://").append(settings.getHdfsTmpCertDir()).append(File.separator)
                    .append(hdfsUser).append(File.separator).append(hdfsUser).append("__kstore.jks#")
                    .append(Settings.K_CERTIFICATE).append(",")
                    // TrustStore
                    .append("hdfs://").append(settings.getHdfsTmpCertDir()).append(File.separator)
                    .append(hdfsUser).append(File.separator).append(hdfsUser).append("__tstore.jks#")
                    .append(Settings.T_CERTIFICATE).append(",")
                    // File with crypto material password
                    .append("hdfs://").append(settings.getHdfsTmpCertDir()).append(File.separator)
                    .append(hdfsUser).append(File.separator).append(hdfsUser).append("__cert.key#")
                    .append(Settings.CRYPTO_MATERIAL_PASSWORD);
        }

        //Prepare pyfiles
        StringBuilder pyFilesBuilder = new StringBuilder();
        if (!Strings.isNullOrEmpty(js.getPyFiles())) {
            for (String file : js.getPyFiles().split(",")) {
                file += "#" + file.substring(file.lastIndexOf("/") + 1);
                pyFilesBuilder.append(file).append(",");
            }
            //Remove last comma character
            pyFilesBuilder.deleteCharAt(pyFilesBuilder.length() - 1);
        }

        String sparkProps = js.getSparkParams();

        // Spark properties user has defined in the jupyter dashboard
        Map<String, String> userSparkProperties = HopsUtils.validateUserProperties(sparkProps,
                settings.getSparkDir());

        LOGGER.info("SparkProps are: " + System.lineSeparator() + sparkProps);

        boolean isExperiment = js.getMode().compareToIgnoreCase("experiment") == 0;
        boolean isParallelExperiment = js.getMode().compareToIgnoreCase("parallelexperiments") == 0;
        boolean isDistributedTraining = js.getMode().compareToIgnoreCase("distributedtraining") == 0;
        boolean isMirroredStrategy = js.getDistributionStrategy().compareToIgnoreCase("mirroredstrategy") == 0
                && isDistributedTraining;
        boolean isParameterServerStrategy = js.getDistributionStrategy()
                .compareToIgnoreCase("parameterserverstrategy") == 0 && isDistributedTraining;
        boolean isCollectiveAllReduceStrategy = js.getDistributionStrategy()
                .compareToIgnoreCase("collectiveallreducestrategy") == 0 && isDistributedTraining;
        boolean isSparkDynamic = js.getMode().compareToIgnoreCase("sparkdynamic") == 0;
        String extraJavaOptions = "-D" + Settings.LOGSTASH_JOB_INFO + "=" + project.getName().toLowerCase()
                + ",jupyter,notebook,?" + " -D" + Settings.HOPSWORKS_JOBTYPE_PROPERTY + "=" + JobType.SPARK
                + " -D" + Settings.KAFKA_BROKERADDR_PROPERTY + "=" + settings.getKafkaBrokersStr() + " -D"
                + Settings.HOPSWORKS_REST_ENDPOINT_PROPERTY + "=" + settings.getRestEndpoint() + " -D"
                + Settings.HOPSWORKS_ELASTIC_ENDPOINT_PROPERTY + "=" + settings.getElasticRESTEndpoint() + " -D"
                + Settings.HOPSWORKS_PROJECTID_PROPERTY + "=" + project.getId() + " -D"
                + Settings.HOPSWORKS_PROJECTNAME_PROPERTY + "=" + project.getName()
                + " -Dlog4j.configuration=./log4j.properties";

        // Get information about which version of TensorFlow the user is running
        TfLibMapping tfLibMapping = tfLibMappingFacade.findTfMappingForProject(project);
        if (tfLibMapping == null) {
            // We are not supporting this version.
            throw new ServiceException(RESTCodes.ServiceErrorCode.TENSORFLOW_VERSION_NOT_SUPPORTED, Level.INFO);
        }
        String tfLdLibraryPath = tfLibMappingUtil.buildTfLdLibraryPath(tfLibMapping);

        // Map of default/system Spark(Magic) properties <Property_Name, ConfigProperty>
        // Property_Name should be either the SparkMagic property name or Spark property name
        // The replacement pattern is defined in ConfigProperty
        Map<String, ConfigProperty> sparkMagicParams = new HashMap<>();
        sparkMagicParams.put("livy_ip", new ConfigProperty("livy_ip", HopsUtils.IGNORE, settings.getLivyIp()));
        sparkMagicParams.put("jupyter_home", new ConfigProperty("jupyter_home", HopsUtils.IGNORE, confDirPath));
        sparkMagicParams.put("driverCores",
                new ConfigProperty("driver_cores", HopsUtils.IGNORE,
                        (isExperiment || isDistributedTraining || isParallelExperiment) ? "1"
                                : Integer.toString(js.getAppmasterCores())));
        sparkMagicParams.put("driverMemory", new ConfigProperty("driver_memory", HopsUtils.IGNORE,
                Integer.toString(js.getAppmasterMemory()) + "m"));
        sparkMagicParams.put("numExecutors",
                new ConfigProperty("num_executors", HopsUtils.IGNORE, (isExperiment || isMirroredStrategy) ? "1"
                        : (isParameterServerStrategy) ? Integer.toString(js.getNumExecutors() + js.getNumTfPs())
                                : (isSparkDynamic) ? Integer.toString(js.getDynamicMinExecutors())
                                        : Integer.toString(js.getNumExecutors())));
        sparkMagicParams.put("executorCores",
                new ConfigProperty("executor_cores", HopsUtils.IGNORE,
                        (isExperiment || isDistributedTraining || isParallelExperiment) ? "1"
                                : Integer.toString(js.getNumExecutorCores())));
        sparkMagicParams.put("executorMemory", new ConfigProperty("executor_memory", HopsUtils.IGNORE,
                Integer.toString(js.getExecutorMemory()) + "m"));
        sparkMagicParams.put("proxyUser", new ConfigProperty("hdfs_user", HopsUtils.IGNORE, hdfsUser));
        sparkMagicParams.put("name", new ConfigProperty("spark_magic_name", HopsUtils.IGNORE,
                "remotesparkmagics-jupyter-" + js.getMode()));
        sparkMagicParams.put("queue", new ConfigProperty("yarn_queue", HopsUtils.IGNORE, "default"));

        // Export versions of software

        sparkMagicParams.put("spark.yarn.appMasterEnv.LIVY_VERSION",
                new ConfigProperty("livy_version", HopsUtils.IGNORE, this.settings.getLivyVersion()));

        sparkMagicParams.put("spark.yarn.appMasterEnv.SPARK_VERSION",
                new ConfigProperty("spark_version", HopsUtils.IGNORE, this.settings.getSparkVersion()));

        sparkMagicParams.put("spark.yarn.appMasterEnv.KAFKA_VERSION",
                new ConfigProperty("kafka_version", HopsUtils.IGNORE, this.settings.getKafkaVersion()));

        sparkMagicParams.put("spark.yarn.appMasterEnv.TENSORFLOW_VERSION",
                new ConfigProperty("tensorflow_version", HopsUtils.IGNORE, tfLibMapping.getTfVersion()));

        sparkMagicParams.put("spark.yarn.appMasterEnv.CUDA_VERSION",
                new ConfigProperty("cuda_version", HopsUtils.IGNORE, tfLibMapping.getCudaVersion()));

        sparkMagicParams.put("spark.yarn.appMasterEnv.HOPSWORKS_VERSION",
                new ConfigProperty("hopsworks_version", HopsUtils.IGNORE, this.settings.getHopsworksVersion()));

        sparkMagicParams.put("spark.yarn.appMasterEnv.HADOOP_VERSION",
                new ConfigProperty("hadoop_version", HopsUtils.IGNORE, this.settings.getHadoopVersion()));

        sparkMagicParams.put("spark.yarn.appMasterEnv.KAFKA_BROKERS",
                new ConfigProperty("kafka_brokers", HopsUtils.IGNORE, this.settings.getKafkaBrokersStr()));

        sparkMagicParams.put("spark.yarn.appMasterEnv.ELASTIC_ENDPOINT", new ConfigProperty("elastic_endpoint",
                HopsUtils.IGNORE, this.settings.getElasticRESTEndpoint()));

        sparkMagicParams.put("spark.yarn.appMasterEnv.HOPSWORKS_USER",
                new ConfigProperty("hopsworks_user", HopsUtils.IGNORE, realName));

        // Spark properties
        sparkMagicParams.put(Settings.SPARK_EXECUTORENV_PATH,
                new ConfigProperty("spark_executorEnv_PATH", HopsUtils.APPEND_PATH,
                        this.settings.getAnacondaProjectDir(project.getName())
                                + "/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"));

        sparkMagicParams.put("spark.yarn.appMasterEnv.PYSPARK_PYTHON", new ConfigProperty("pyspark_bin",
                HopsUtils.IGNORE, this.settings.getAnacondaProjectDir(project.getName()) + "/bin/python"));

        sparkMagicParams.put("spark.yarn.appMasterEnv.PYSPARK_DRIVER_PYTHON", new ConfigProperty("pyspark_bin",
                HopsUtils.IGNORE, this.settings.getAnacondaProjectDir(project.getName()) + "/bin/python"));

        sparkMagicParams.put("spark.yarn.appMasterEnv.PYSPARK3_PYTHON", new ConfigProperty("pyspark_bin",
                HopsUtils.IGNORE, this.settings.getAnacondaProjectDir(project.getName()) + "/bin/python"));

        sparkMagicParams.put(Settings.SPARK_YARN_APPMASTERENV_LD_LIBRARY_PATH,
                new ConfigProperty("spark_yarn_appMaster_LD_LIBRARY_PATH", HopsUtils.APPEND_PATH,
                        this.settings.getJavaHome() + "/jre/lib/amd64/server:" + tfLdLibraryPath
                                + this.settings.getHadoopSymbolicLinkDir() + "/lib/native"));

        sparkMagicParams.put("spark.yarn.appMasterEnv.HADOOP_HOME",
                new ConfigProperty("hadoop_home", HopsUtils.IGNORE, this.settings.getHadoopSymbolicLinkDir()));

        sparkMagicParams.put(Settings.SPARK_YARN_APPMASTERENV_LIBHDFS_OPTS,
                new ConfigProperty("spark_yarn_appMasterEnv_LIBHDFS_OPTS", HopsUtils.APPEND_SPACE,
                        "-Xmx96m -Dlog4j.configuration=" + this.settings.getHadoopSymbolicLinkDir()
                                + "/etc/hadoop/log4j.properties -Dhadoop.root.logger=ERROR,RFA"));

        sparkMagicParams.put("spark.yarn.appMasterEnv.HADOOP_HDFS_HOME",
                new ConfigProperty("hadoop_home", HopsUtils.IGNORE, this.settings.getHadoopSymbolicLinkDir()));

        sparkMagicParams.put("spark.yarn.appMasterEnv.HADOOP_USER_NAME",
                new ConfigProperty("hdfs_user", HopsUtils.IGNORE, hdfsUser));

        sparkMagicParams.put("spark.yarn.appMasterEnv.REST_ENDPOINT",
                new ConfigProperty("rest_endpoint", HopsUtils.IGNORE, settings.getRestEndpoint()));

        sparkMagicParams.put("spark.yarn.appMasterEnv.HDFS_BASE_DIR",
                new ConfigProperty("spark_yarn_appMasterEnv_HDFS_BASE_DIR", HopsUtils.IGNORE,
                        "hdfs://Projects/" + project.getName() + js.getBaseDir()));

        sparkMagicParams.put(Settings.SPARK_DRIVER_STAGINGDIR_ENV, new ConfigProperty("spark_yarn_stagingDir",
                HopsUtils.IGNORE, "hdfs:///Projects/" + project.getName() + "/Resources"));

        sparkMagicParams.put("spark.yarn.dist.files",
                new ConfigProperty("spark_yarn_dist_files", HopsUtils.IGNORE, sparkFiles.toString()));

        sparkMagicParams.put("spark.yarn.dist.archives",
                new ConfigProperty("spark_yarn_dist_archives", HopsUtils.IGNORE, js.getArchives()));

        sparkMagicParams.put("spark.yarn.dist.pyFiles",
                new ConfigProperty("spark_yarn_dist_pyFiles", HopsUtils.IGNORE, pyFilesBuilder.toString()));

        sparkMagicParams.put(Settings.SPARK_DRIVER_EXTRALIBRARYPATH,
                new ConfigProperty("spark_driver_extraLibraryPath", HopsUtils.APPEND_PATH, tfLdLibraryPath));

        sparkMagicParams.put(Settings.SPARK_DRIVER_EXTRAJAVAOPTIONS,
                new ConfigProperty("spark_driver_extraJavaOptions", HopsUtils.APPEND_SPACE, extraJavaOptions));

        sparkMagicParams.put(Settings.SPARK_DRIVER_EXTRACLASSPATH,
                new ConfigProperty("spark_driver_extraClassPath", HopsUtils.APPEND_PATH, extraClassPath));

        sparkMagicParams.put(Settings.SPARK_EXECUTOR_EXTRACLASSPATH,
                new ConfigProperty("spark_executor_extraClassPath", HopsUtils.APPEND_PATH, extraClassPath));

        sparkMagicParams.put("spark.executorEnv.REST_ENDPOINT",
                new ConfigProperty("rest_endpoint", HopsUtils.IGNORE, settings.getRestEndpoint()));

        sparkMagicParams.put(Settings.SPARK_EXECUTORENV_HADOOP_USER_NAME,
                new ConfigProperty("hdfs_user", HopsUtils.IGNORE, hdfsUser));

        sparkMagicParams.put("spark.executorEnv.HADOOP_HOME",
                new ConfigProperty("hadoop_home", HopsUtils.IGNORE, this.settings.getHadoopSymbolicLinkDir()));

        sparkMagicParams.put(Settings.SPARK_EXECUTORENV_LIBHDFS_OPTS,
                new ConfigProperty("spark_executorEnv_LIBHDFS_OPTS", HopsUtils.APPEND_SPACE,
                        "-Xmx96m -Dlog4j.configuration=" + this.settings.getHadoopSymbolicLinkDir()
                                + "/etc/hadoop/log4j.properties -Dhadoop.root.logger=ERROR,RFA"));

        sparkMagicParams.put("spark.executorEnv.PYSPARK_PYTHON", new ConfigProperty("pyspark_bin",
                HopsUtils.IGNORE, this.settings.getAnacondaProjectDir(project.getName()) + "/bin/python"));

        sparkMagicParams.put("spark.executorEnv.PYSPARK3_PYTHON", new ConfigProperty("pyspark_bin",
                HopsUtils.IGNORE, this.settings.getAnacondaProjectDir(project.getName()) + "/bin/python"));

        sparkMagicParams.put(Settings.SPARK_EXECUTORENV_LD_LIBRARY_PATH,
                new ConfigProperty("spark_executorEnv_LD_LIBRARY_PATH", HopsUtils.APPEND_PATH,
                        this.settings.getJavaHome() + "/jre/lib/amd64/server:" + tfLdLibraryPath
                                + this.settings.getHadoopSymbolicLinkDir() + "/lib/native"));

        sparkMagicParams.put("spark.executorEnv.HADOOP_HDFS_HOME",
                new ConfigProperty("hadoop_home", HopsUtils.IGNORE, this.settings.getHadoopSymbolicLinkDir()));

        // Export versions of software

        sparkMagicParams.put("spark.executorEnv.LIVY_VERSION",
                new ConfigProperty("livy_version", HopsUtils.IGNORE, this.settings.getLivyVersion()));

        sparkMagicParams.put("spark.executorEnv.SPARK_VERSION",
                new ConfigProperty("spark_version", HopsUtils.IGNORE, this.settings.getSparkVersion()));

        sparkMagicParams.put("spark.executorEnv.KAFKA_VERSION",
                new ConfigProperty("kafka_version", HopsUtils.IGNORE, this.settings.getKafkaVersion()));

        sparkMagicParams.put("spark.executorEnv.TENSORFLOW_VERSION",
                new ConfigProperty("tensorflow_version", HopsUtils.IGNORE, tfLibMapping.getTfVersion()));

        sparkMagicParams.put("spark.executorEnv.CUDA_VERSION",
                new ConfigProperty("cuda_version", HopsUtils.IGNORE, tfLibMapping.getCudaVersion()));

        sparkMagicParams.put("spark.executorEnv.HOPSWORKS_VERSION",
                new ConfigProperty("hopsworks_version", HopsUtils.IGNORE, this.settings.getHopsworksVersion()));

        sparkMagicParams.put("spark.executorEnv.HADOOP_VERSION",
                new ConfigProperty("hadoop_version", HopsUtils.IGNORE, this.settings.getHadoopVersion()));

        sparkMagicParams.put("spark.executorEnv.KAFKA_BROKERS",
                new ConfigProperty("kafka_brokers", HopsUtils.IGNORE, this.settings.getKafkaBrokersStr()));

        sparkMagicParams.put("spark.executorEnv.ELASTIC_ENDPOINT", new ConfigProperty("elastic_endpoint",
                HopsUtils.IGNORE, this.settings.getElasticRESTEndpoint()));

        sparkMagicParams.put("spark.executorEnv.HOPSWORKS_USER",
                new ConfigProperty("hopsworks_user", HopsUtils.IGNORE, realName));

        sparkMagicParams.put(Settings.SPARK_EXECUTOR_EXTRA_JAVA_OPTS, new ConfigProperty(
                "spark_executor_extraJavaOptions", HopsUtils.APPEND_SPACE, extraJavaOptions));

        sparkMagicParams.put("spark.executorEnv.HDFS_BASE_DIR",
                new ConfigProperty("spark_executorEnv_HDFS_BASE_DIR", HopsUtils.IGNORE,
                        "hdfs://Projects/" + project.getName() + js.getBaseDir()));

        sparkMagicParams.put("spark.pyspark.python", new ConfigProperty("pyspark_bin", HopsUtils.IGNORE,
                this.settings.getAnacondaProjectDir(project.getName()) + "/bin/python"));

        sparkMagicParams.put("spark.shuffle.service.enabled", new ConfigProperty("", HopsUtils.IGNORE, "true"));

        sparkMagicParams.put("spark.submit.deployMode", new ConfigProperty("", HopsUtils.IGNORE, "cluster"));

        sparkMagicParams.put("spark.tensorflow.application",
                new ConfigProperty("spark_tensorflow_application", HopsUtils.IGNORE,
                        Boolean.toString(isExperiment || isParallelExperiment || isDistributedTraining)));

        sparkMagicParams.put("spark.tensorflow.num.ps", new ConfigProperty("spark_tensorflow_num_ps",
                HopsUtils.IGNORE, (isParameterServerStrategy) ? Integer.toString(js.getNumTfPs()) : "0"));

        sparkMagicParams.put("spark.executor.gpus",
                new ConfigProperty("spark_executor_gpus", HopsUtils.IGNORE,
                        (isDistributedTraining || isParallelExperiment || isExperiment)
                                ? Integer.toString(js.getNumExecutorGpus())
                                : "0"));

        sparkMagicParams.put("spark.dynamicAllocation.enabled",
                new ConfigProperty("spark_dynamicAllocation_enabled", HopsUtils.OVERWRITE, Boolean.toString(
                        isSparkDynamic || isExperiment || isParallelExperiment || isDistributedTraining)));

        sparkMagicParams.put("spark.dynamicAllocation.initialExecutors", new ConfigProperty(
                "spark_dynamicAllocation_initialExecutors", HopsUtils.OVERWRITE,
                (isExperiment || isParallelExperiment || isMirroredStrategy) ? "0"
                        : (isParameterServerStrategy) ? Integer.toString(js.getNumExecutors() + js.getNumTfPs())
                                : (isCollectiveAllReduceStrategy) ? Integer.toString(js.getNumExecutors())
                                        : Integer.toString(js.getDynamicMinExecutors())));

        sparkMagicParams.put("spark.dynamicAllocation.minExecutors",
                new ConfigProperty("spark_dynamicAllocation_minExecutors", HopsUtils.OVERWRITE,
                        (isExperiment || isParallelExperiment || isDistributedTraining) ? "0"
                                : Integer.toString(js.getDynamicMinExecutors())));

        sparkMagicParams.put("spark.dynamicAllocation.maxExecutors",
                new ConfigProperty("spark_dynamicAllocation_maxExecutors", HopsUtils.OVERWRITE,
                        (isExperiment || isMirroredStrategy) ? "1"
                                : (isParallelExperiment) ? Integer.toString(js.getNumExecutors())
                                        : (isParameterServerStrategy)
                                                ? Integer.toString(js.getNumExecutors() + js.getNumTfPs())
                                                : (isCollectiveAllReduceStrategy)
                                                        ? Integer.toString(js.getNumExecutors())
                                                        : Integer.toString(js.getDynamicMaxExecutors())));

        sparkMagicParams.put("spark.dynamicAllocation.executorIdleTimeout",
                new ConfigProperty("spark_dynamicAllocation_executorIdleTimeout", HopsUtils.OVERWRITE,
                        (isParameterServerStrategy)
                                ? Integer.toString(((js.getNumExecutors() + js.getNumTfPs()) * 15) + 60) + "s"
                                : "60s"));

        // Blacklisting behaviour for TensorFlow on Spark (e.g. Hyperparameter search) to make it robust
        // Allow many failures on a particular node before blacklisting the node
        // Blacklist executor instantly

        sparkMagicParams.put("spark.blacklist.enabled",
                new ConfigProperty("spark_blacklist_enabled", HopsUtils.OVERWRITE,
                        ((isExperiment || isParallelExperiment) && js.getFaultTolerant()) ? "true" : "false"));

        // If any task fails on an executor - kill it instantly (need fresh working directory for each task)
        sparkMagicParams.put("spark.blacklist.task.maxTaskAttemptsPerExecutor",
                new ConfigProperty("spark_max_task_attempts_per_executor", HopsUtils.OVERWRITE, "1"));

        // Blacklist node after 2 tasks fails on it
        sparkMagicParams.put("spark.blacklist.task.maxTaskAttemptsPerNode",
                new ConfigProperty("spark_max_task_attempts_per_node", HopsUtils.OVERWRITE, "2"));

        // If any task fails on an executor within a stage - blacklist it
        sparkMagicParams.put("spark.blacklist.stage.maxFailedTasksPerExecutor",
                new ConfigProperty("spark_stage_max_failed_tasks_per_executor", HopsUtils.OVERWRITE, "1"));

        // Blacklist node after 2 tasks within a stage fails on it
        sparkMagicParams.put("spark.blacklist.stage.maxFailedExecutorsPerNode",
                new ConfigProperty("spark_stage_max_failed_executors_per_node", HopsUtils.OVERWRITE, "2"));

        // If any task fails on an executor within an application - blacklist it
        sparkMagicParams.put("spark.blacklist.application.maxFailedTasksPerExecutor", new ConfigProperty(
                "spark_application_max_failed_tasks_per_executor", HopsUtils.OVERWRITE, "1"));

        // If 2 tasks fail on a node within an application - blacklist it
        sparkMagicParams.put("spark.blacklist.application.maxFailedExecutorsPerNode", new ConfigProperty(
                "spark_application_max_failed_executors_per_node", HopsUtils.OVERWRITE, "2"));

        sparkMagicParams.put("spark.task.maxFailures",
                new ConfigProperty("spark_task_max_failures", HopsUtils.OVERWRITE,
                        (isParallelExperiment || isExperiment) && js.getFaultTolerant() ? "3"
                                : (isParallelExperiment || isExperiment || isDistributedTraining) ? "1" : "4"));

        // Always kill the blacklisted executors (further failures could be results of local files from the failed task)
        sparkMagicParams.put("spark.blacklist.killBlacklistedExecutors",
                new ConfigProperty("spark_kill_blacklisted_executors", HopsUtils.OVERWRITE,
                        (isExperiment || isParallelExperiment) ? "true" : "false"));

        // Merge system and user defined properties
        Map<String, String> sparkParamsAfterMerge = HopsUtils.mergeHopsworksAndUserParams(sparkMagicParams,
                userSparkProperties, false);

        StringBuilder sparkmagic_sb = ConfigFileGenerator
                .instantiateFromTemplate(ConfigFileGenerator.SPARKMAGIC_CONFIG_TEMPLATE, sparkParamsAfterMerge);
        createdSparkmagic = ConfigFileGenerator.createConfigFile(sparkmagic_config_file,
                sparkmagic_sb.toString());
    }
    if (!custom_js.exists()) {

        StringBuilder custom_js_sb = ConfigFileGenerator.instantiateFromTemplate(
                ConfigFileGenerator.JUPYTER_CUSTOM_TEMPLATE, "hadoop_home",
                this.settings.getHadoopSymbolicLinkDir());
        createdCustomJs = ConfigFileGenerator.createConfigFile(custom_js, custom_js_sb.toString());
    }

    // Add this local file to 'spark: file' to copy it to hdfs and localize it.
    return createdJupyter || createdSparkmagic || createdCustomJs;
}

From source file:fr.ribesg.bukkit.api.chat.Chat.java

private static void appendItemTag(final StringBuilder builder, final ItemStack is) {
    boolean hasTag = false;
    final StringBuilder tagBuilder = new StringBuilder();

    // Enchantments
    final Map<Enchantment, Integer> enchantments = is.getEnchantments();
    if (enchantments != null && !enchantments.isEmpty()) {
        tagBuilder.append("ench:[");
        final Iterator<Entry<Enchantment, Integer>> it = enchantments.entrySet().iterator();
        while (it.hasNext()) {
            final Entry<Enchantment, Integer> entry = it.next();
            tagBuilder.append("{id:").append(entry.getKey().getId()).append(",lvl:").append(entry.getValue());
            if (it.hasNext()) {
                tagBuilder.append(',');
            }
        }
        tagBuilder.append("],");
        hasTag = true;
    }

    // Meta
    if (is.hasItemMeta()) {
        final ItemMeta meta = is.getItemMeta();
        if (meta.hasDisplayName() || meta.hasLore() || Chat.isLeatherArmor(is)) {
            Chat.appendItemDisplay(tagBuilder, meta);
        }
        if (is.getType() == Material.POTION) {
            Chat.appendItemPotion(tagBuilder, (PotionMeta) meta);
        }
        if (is.getType() == Material.WRITTEN_BOOK) {
            Chat.appendItemBook(tagBuilder, (BookMeta) meta);
        }
        if (is.getType() == Material.SKULL_ITEM) {
            Chat.appendItemSkull(tagBuilder, (SkullMeta) meta);
        }
        if (is.getType() == Material.FIREWORK) { // Firework Rocket
            Chat.appendItemFirework(tagBuilder, (FireworkMeta) meta);
        }
        if (is.getType() == Material.FIREWORK_CHARGE) { // Firework Star
            Chat.appendItemFireworkEffect(tagBuilder, (FireworkEffectMeta) meta);
        }
    }

    // Trim the trailing comma from tagBuilder, if any
    if (hasTag && tagBuilder.charAt(tagBuilder.length() - 1) == ',') {
        tagBuilder.deleteCharAt(tagBuilder.length() - 1);
    }

    // Append to main builder
    if (hasTag) {
        builder.append(',').append("tag:{").append(tagBuilder).append('}');
    }
}

From source file:com.dell.asm.asmcore.asmmanager.db.DeviceGroupDAO.java

public List<BriefServerInfo> getAccessibleServers(final long userId, final List<String> serverRefIds) {
    try {
        return _dao.doWithSession(new BaseDAO.CallableWithSession<List<BriefServerInfo>>() {
            private StringBuilder appendServerRefIds(StringBuilder sql, Map<String, Object> keyValues) {
                if (serverRefIds != null && serverRefIds.size() > 0) {
                    int i = 0;
                    sql.append(" AND i.ref_id IN (");
                    for (String serverRefId : serverRefIds) {
                        String name = "ref" + i++;
                        sql.append(':').append(name).append(", ");
                        keyValues.put(name, serverRefId);
                    }
                    sql.deleteCharAt(sql.length() - 1);
                    sql.deleteCharAt(sql.length() - 1);
                    sql.append(")");
                }
                return sql;
            }

            @Override
            public List<BriefServerInfo> run(Session session) {
                Map<String, Object> keyValues = new HashMap<>();

                StringBuilder sql = new StringBuilder(GLOBAL_POOL_QUERY);
                if (userId == DBInit.SYSTEM_USER_ID) {
                    appendServerRefIds(sql, keyValues).append(" UNION ")
                            .append(SPECIFIC_POOL_SYSTEM_USER_QUERY);
                } else {
                    appendServerRefIds(sql, keyValues).append(" UNION ").append(SPECIFIC_POOL_QUERY);
                }
                appendServerRefIds(sql, keyValues).append(POOL_GROUP_BY_CLAUSE).append(POOL_ORDER_BY_CLAUSE);
                SQLQuery query = session.createSQLQuery(sql.toString());

                if (userId != DBInit.SYSTEM_USER_ID)
                    query.setParameter("userId", userId);

                for (Map.Entry<String, Object> entry : keyValues.entrySet()) {
                    query.setParameter(entry.getKey(), entry.getValue());
                }

                return buildBriefServerInfoList(query.list());
            }

            @Override
            public List<BriefServerInfo> failed(SQLException e) throws SQLException {
                logger.error("Failed to look up servers " + serverRefIds + " for user " + userId, e);
                throw e;
            }
        });
    } catch (SQLException e) {
        throw new AsmManagerRuntimeException(e);
    }
}

From source file:com.safasoft.treeweb.controller.DataControllerKeypro.java

/**
 * Generates the table content for the given table's column list.
 *
 * @param tableName
 * @param whereClause
 * @param pageNo
 * @return table content object
 */
@RequestMapping(value = "/content", method = RequestMethod.POST)
public @ResponseBody TableContentKeypro getListColumn(@RequestParam("tableName") String tableName,
        @RequestParam("whereClause") String whereClause, @RequestParam("pageNo") Integer pageNo) {
    logger.debug("Received request to get table content");
    //generate the column select statement and the order-by statement (order-by limited to the first 5 columns)
    //transform each column type into character type before execution
    StringBuilder sbColumnSelect = new StringBuilder();
    int orderByLimit = 5;
    StringBuilder sbOrderBy = new StringBuilder();
    SupportService suppServ = new SessionUtil<SupportService>().getAppContext("supportService");
    List<ColumnProp> listColumn = suppServ.getListColumn(tableName);
    for (int idx = 0; idx < Math.min(orderByLimit, listColumn.size()); idx++)
        sbOrderBy.append(listColumn.get(idx).getColumnName()).append(COLUMN_DELIMITER);
    for (int idx = 0; idx < listColumn.size(); idx++) {
        String columnName;
        if (listColumn.get(idx).getDataType().equals("DATE"))
            columnName = "TO_CHAR(" + listColumn.get(idx).getColumnName() + ",'DD-MON-YYYY')";
        else if (listColumn.get(idx).getDataType().equals("NUMBER"))
            columnName = "TO_CHAR(" + listColumn.get(idx).getColumnName() + ")";
        else
            columnName = listColumn.get(idx).getColumnName();
        sbColumnSelect.append(columnName).append(" col").append(idx + 1).append(COLUMN_DELIMITER);
    }
    for (int idx = listColumn.size(); idx < MAX_COLUMN_DATA; idx++)
        sbColumnSelect.append("NULL col").append(idx + 1).append(COLUMN_DELIMITER);
    sbColumnSelect.deleteCharAt(sbColumnSelect.lastIndexOf(COLUMN_DELIMITER));
    sbOrderBy.deleteCharAt(sbOrderBy.lastIndexOf(COLUMN_DELIMITER));
    //set table content
    TableContentKeypro tc = new TableContentKeypro();
    tc.setColumns(listColumn);
    tc.setData(suppServ.getListTableValue(tableName, sbColumnSelect.toString(), whereClause,
            sbOrderBy.toString(), pageNo));
    tc.setRecordCount(suppServ.getRecordCount(tableName, whereClause));
    tc.setOrderBy(sbOrderBy.toString());
    return tc;
}

From source file:gsn.storage.StorageManager.java

/**
 * Creates an SQL statement that can be used for inserting the specified
 * stream element into the specified table.
 *
 * @param tableName The table the generated SQL will point to.
 * @param fields    The stream element for which the SQL statement is generated.
 * @return An SQL statement that can be used for inserting the provided
 *         stream element into the specified table.
 */
public StringBuilder getStatementInsert(CharSequence tableName, DataField fields[]) {
    StringBuilder toReturn = new StringBuilder("insert into ").append(tableName).append(" ( ");
    int numberOfQuestionMarks = 1; //Timed is always there.
    for (DataField dataField : fields) {
        if (dataField.getName().equalsIgnoreCase("timed"))
            continue;
        numberOfQuestionMarks++;
        toReturn.append(dataField.getName()).append(" ,");
    }
    toReturn.append(" timed ").append(" ) values (");
    for (int i = 1; i <= numberOfQuestionMarks; i++)
        toReturn.append("?,");
    toReturn.deleteCharAt(toReturn.length() - 1);
    toReturn.append(")");
    return toReturn;
}
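
Tracing getStatementInsert for a table named readings with two non-timed fields, temperature and humidity, shows where the deleteCharAt call earns its keep (the names here are illustrative; the whitespace is exactly what the appends produce):

insert into readings ( temperature ,humidity , timed  ) values (?,?,?)

Without the deleteCharAt(toReturn.length() - 1), the values list would end in ",)" and the statement would not parse.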

From source file:com.compomics.colims.core.io.mztab.MzTabExporter.java

/**
 * Gets the ambiguity members for the given protein group ID.
 *
 * @param proteinGroupId
 * @return the ambiguity members as a comma-separated string
 */
private String getAmbiguityMembers(Long proteinGroupId) {
    List<ProteinGroupHasProtein> proteinGroupHasProteins = proteinGroupService
            .getAmbiguityMembers(proteinGroupId);

    StringBuilder ambiguityMembers = new StringBuilder("");
    proteinGroupHasProteins.stream().forEach((proteinGroupHasProtein) -> {
        ambiguityMembers.append(proteinGroupHasProtein.getProteinAccession()).append(",");
    });
    if (ambiguityMembers.toString().equals("")) {
        return ambiguityMembers.toString();
    } else {
        return ambiguityMembers.deleteCharAt(ambiguityMembers.length() - 1).toString();
    }
}
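
The append-then-deleteCharAt cleanup above can also be written with a joining collector, which handles the empty list without a special case; a sketch assuming the same proteinGroupHasProteins list (requires java.util.stream.Collectors):

return proteinGroupHasProteins.stream()
        .map(ProteinGroupHasProtein::getProteinAccession)
        .collect(Collectors.joining(","));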

From source file:com.snaplogic.snaps.lunex.BaseService.java

private StringBuilder prepareJson(List<Pair<String, ExpressionProperty>> requestContent, Document document) {
    StringBuilder json = new StringBuilder();
    StringBuilder subJson = new StringBuilder();
    boolean isSubJsonRequired = false, isEmptyJson = true;
    if (requestContent != null) {
        if (resourceType.equals(CResource.NewOrder.toString())
                || resourceType.equals(CResource.PreOrder.toString())) {
            subJson.append(QUOTE).append(ADDRESS).append(QUOTE).append(COLON).append(OPENTAG);
            isSubJsonRequired = true;
        }
        for (Pair<String, ExpressionProperty> paramPair : requestContent) {
            if (isSubJsonRequired && ADDRESS_JSONOBJ.contains(paramPair.getKey())) {
                subJson.append(getJsonSlice(paramPair, document));
                isEmptyJson = false;
            } else {
                json.append(getJsonSlice(paramPair, document));
            }
        }
        if (!isEmptyJson) {
            subJson.append(CLOSETAG).append(COMMA);
        }
        // Guard: requestContent may be an empty list, leaving json empty
        if (json.length() > 0) {
            json.deleteCharAt(json.length() - 1); // trim the trailing comma from the last slice
        }
        return new StringBuilder().append(OPENTAG).append(subJson).append(json).append(CLOSETAG);
    }
    return new StringBuilder("");
}