Example usage for java.util Properties keySet

Introduction

On this page you will find usage examples for java.util.Properties.keySet(), drawn from open source projects.

Prototype

@Override
public Set<Object> keySet()
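
Before the project examples, here is a minimal self-contained sketch of the typical pattern. The property names below are made up for illustration; the point is that keySet() returns Set<Object>, so keys are usually converted with toString() before lookup.

import java.util.Properties;

public class KeySetDemo {
    public static void main(String[] args) {
        Properties props = new Properties();
        // hypothetical entries, for illustration only
        props.setProperty("app.name", "demo");
        props.setProperty("app.version", "1.0");

        // keySet() returns Set<Object>, so each key is typically
        // converted with toString() before it is looked up
        for (Object key : props.keySet()) {
            String name = key.toString();
            System.out.println(name + " = " + props.getProperty(name));
        }

        // Unlike keySet(), stringPropertyNames() returns Set<String>
        // and also includes keys from any default Properties
        for (String name : props.stringPropertyNames()) {
            System.out.println(name + " -> " + props.getProperty(name));
        }
    }
}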

Usage

From source file:org.apache.openaz.xacml.std.pap.StdPDPGroup.java
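
This example uses keySet() to scan for property keys prefixed with the group id, picking out the .name, .description, and .pdps entries to initialize the group.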

private void initialize(Properties properties, Path directory) throws PAPException {
    if (this.id == null || this.id.length() == 0) {
        logger.warn("Cannot initialize with a null or zero length id");
        return;
    }
    //
    // Pull the group's properties
    //
    for (Object key : properties.keySet()) {
        if (key.toString().startsWith(this.id + ".")) {
            if (key.toString().endsWith(".name")) {
                this.name = properties.getProperty(key.toString());
            } else if (key.toString().endsWith(".description")) {
                this.description = properties.getProperty(key.toString());
            } else if (key.toString().endsWith(".pdps")) {
                String pdpList = properties.getProperty(key.toString());
                if (pdpList != null && pdpList.length() > 0) {
                    for (String id : Splitter.on(',').omitEmptyStrings().trimResults().split(pdpList)) {
                        StdPDP pdp = new StdPDP(id, properties);
                        pdp.addItemSetChangeListener(this);
                        this.pdps.add(pdp);
                    }
                }
            }
        }
        // force the group to have a name
        if (this.name == null) {
            this.name = this.id;
        }
    }
    //
    // Validate our directory
    //
    if (Files.notExists(directory)) {
        logger.warn("Group directory does NOT exist: " + directory.toString());
        try {
            Files.createDirectory(directory);
            this.status.addLoadWarning("Group directory does NOT exist");
        } catch (IOException e) {
            logger.error(e);
            this.status.addLoadError("Group directory does NOT exist");
            this.status.setStatus(Status.LOAD_ERRORS);
        }
    }
    //
    // Parse policies
    //
    this.loadPolicies(Paths.get(directory.toString(), "xacml.policy.properties"));
    //
    // Parse pip config
    //
    this.loadPIPConfig(Paths.get(directory.toString(), "xacml.pip.properties"));
}

From source file:org.apache.sqoop.hive.TableDefWriter.java
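
Here keySet() iterates over a user-supplied column type mapping to verify that every explicitly mapped column is present in the result set before the CREATE TABLE statement is built.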

/**
 * @return the CREATE TABLE statement for the table to load into hive.
 */
public String getCreateTableStmt() throws IOException {
    Map<String, Integer> columnTypes;
    Properties userMapping = options.getMapColumnHive();

    if (externalColTypes != null) {
        // Use pre-defined column types.
        columnTypes = externalColTypes;
    } else {
        // Get these from the database.
        if (null != inputTableName) {
            columnTypes = connManager.getColumnTypes(inputTableName);
        } else {
            columnTypes = connManager.getColumnTypesForQuery(options.getSqlQuery());
        }
    }

    String[] colNames = getColumnNames();
    StringBuilder sb = new StringBuilder();
    if (options.doFailIfHiveTableExists()) {
        sb.append("CREATE TABLE `").append(outputTableName).append("` ( ");
    } else {
        sb.append("CREATE TABLE IF NOT EXISTS `");
        sb.append(outputTableName).append("` ( ");
    }

    // Check that all explicitly mapped columns are present in result set
    for (Object column : userMapping.keySet()) {
        boolean found = false;
        for (String c : colNames) {
            if (c.equals(column)) {
                found = true;
                break;
            }
        }

        if (!found) {
            throw new IllegalArgumentException(
                    "No column by the name " + column + " found while importing data");
        }
    }

    boolean first = true;
    String partitionKey = options.getHivePartitionKey();
    for (String col : colNames) {
        if (col.equals(partitionKey)) {
            throw new IllegalArgumentException("Partition key " + col + " cannot " + "be a column to import.");
        }

        if (!first) {
            sb.append(", ");
        }

        first = false;

        Integer colType = columnTypes.get(col);
        String hiveColType = userMapping.getProperty(col);
        if (hiveColType == null) {
            hiveColType = connManager.toHiveType(inputTableName, col, colType);
        }
        if (null == hiveColType) {
            throw new IOException("Hive does not support the SQL type for column " + col);
        }

        sb.append('`').append(col).append("` ").append(hiveColType);

        if (HiveTypes.isHiveTypeImprovised(colType)) {
            LOG.warn("Column " + col + " had to be cast to a less precise type in Hive");
        }
    }

    sb.append(") ");

    if (commentsEnabled) {
        DateFormat dateFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
        String curDateStr = dateFormat.format(new Date());
        sb.append("COMMENT 'Imported by sqoop on " + curDateStr + "' ");
    }

    if (partitionKey != null) {
        sb.append("PARTITIONED BY (").append(partitionKey).append(" STRING) ");
    }

    sb.append("ROW FORMAT DELIMITED FIELDS TERMINATED BY '");
    sb.append(getHiveOctalCharCode((int) options.getOutputFieldDelim()));
    sb.append("' LINES TERMINATED BY '");
    sb.append(getHiveOctalCharCode((int) options.getOutputRecordDelim()));
    String codec = options.getCompressionCodec();
    if (codec != null
            && (codec.equals(CodecMap.LZOP) || codec.equals(CodecMap.getCodecClassName(CodecMap.LZOP)))) {
        sb.append("' STORED AS INPUTFORMAT " + "'com.hadoop.mapred.DeprecatedLzoTextInputFormat'");
        sb.append(" OUTPUTFORMAT " + "'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'");
    } else {
        sb.append("' STORED AS TEXTFILE");
    }

    LOG.debug("Create statement: " + sb.toString());
    return sb.toString();
}

From source file:org.apache.zeppelin.spark.OldSparkInterpreter.java
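
When creating a Spark 2.x session, the interpreter properties are iterated via keySet() and any spark.* or zeppelin.spark.* entries are copied into the SparkConf.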

/**
 * Spark 2.x
 * Create SparkSession
 */
public Object createSparkSession() {
    // use local mode for embedded spark mode when spark.master is not found
    conf.setIfMissing("spark.master", "local");
    logger.info("------ Create new SparkSession {} -------", conf.get("spark.master"));
    String execUri = System.getenv("SPARK_EXECUTOR_URI");
    if (outputDir != null) {
        conf.set("spark.repl.class.outputDir", outputDir.getAbsolutePath());
    }

    if (execUri != null) {
        conf.set("spark.executor.uri", execUri);
    }
    conf.set("spark.scheduler.mode", "FAIR");

    Properties intpProperty = getProperties();
    for (Object k : intpProperty.keySet()) {
        String key = (String) k;
        String val = toString(intpProperty.get(key));
        if (!val.trim().isEmpty()) {
            if (key.startsWith("spark.")) {
                logger.debug(String.format("SparkConf: key = [%s], value = [%s]", key, val));
                conf.set(key, val);
            }
            if (key.startsWith("zeppelin.spark.")) {
                String sparkPropertyKey = key.substring("zeppelin.spark.".length());
                logger.debug(String.format("SparkConf: key = [%s], value = [%s]", sparkPropertyKey, val));
                conf.set(sparkPropertyKey, val);
            }
        }
    }

    Class SparkSession = Utils.findClass("org.apache.spark.sql.SparkSession");
    Object builder = Utils.invokeStaticMethod(SparkSession, "builder");
    Utils.invokeMethod(builder, "config", new Class[] { SparkConf.class }, new Object[] { conf });

    if (useHiveContext()) {
        if (hiveClassesArePresent()) {
            Utils.invokeMethod(builder, "enableHiveSupport");
            sparkSession = Utils.invokeMethod(builder, "getOrCreate");
            logger.info("Created Spark session with Hive support");
        } else {
            Utils.invokeMethod(builder, "config", new Class[] { String.class, String.class },
                    new Object[] { "spark.sql.catalogImplementation", "in-memory" });
            sparkSession = Utils.invokeMethod(builder, "getOrCreate");
            logger.info("Created Spark session with Hive support use in-memory catalogImplementation");
        }
    } else {
        sparkSession = Utils.invokeMethod(builder, "getOrCreate");
        logger.info("Created Spark session");
    }

    return sparkSession;
}

From source file:com.webpagebytes.cms.local.WPBLocalFileStorage.java
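
After the well-known metadata entries are removed, keySet() is used to copy the remaining custom properties onto the file info object.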

public WPBFileInfo getFileInfo(WPBFilePath file) {
    String metaPath = getLocalFullMetaPath(file);
    String dataPath = getLocalFullDataPath(file);
    Properties props = new Properties();
    try {
        props = getFileProperties(metaPath);

        String contentType = props.getProperty("contentType");
        int size = Integer.valueOf(props.getProperty("size"));
        String md5 = props.getProperty("md5");
        long crc32 = Long.valueOf(props.getProperty("crc32"));
        long creationTime = Long.valueOf(props.getProperty("creationTime"));

        boolean fileExists = checkIfFileExists(dataPath);
        WPBFileInfo fileInfo = new WPBDefaultCloudFileInfo(file, contentType, fileExists, size, md5, crc32,
                creationTime);
        props.remove("path");
        props.remove("contentType");
        props.remove("size");
        props.remove("md5");
        props.remove("crc32");
        props.remove("creationTime");
        props.remove("filePath");
        // add the custom properties of the file
        for (Object key : props.keySet()) {
            String strKey = (String) key;
            fileInfo.setProperty(strKey, props.getProperty(strKey));
        }
        return fileInfo;
    } catch (Exception e) {
        return null;
    }

}

From source file:org.apache.zeppelin.spark.OldSparkInterpreter.java
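
As in createSparkSession() above, keySet() drives the transfer of spark.* and zeppelin.spark.* interpreter properties into the SparkConf before the SparkContext is constructed.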

public SparkContext createSparkContext_1() {
    // use local mode for embedded spark mode when spark.master is not found
    if (!conf.contains("spark.master")) {
        conf.setMaster("local");
    }
    logger.info("------ Create new SparkContext {} -------", conf.get("spark.master"));

    String execUri = System.getenv("SPARK_EXECUTOR_URI");
    String[] jars = null;

    if (Utils.isScala2_10()) {
        jars = (String[]) Utils.invokeStaticMethod(SparkILoop.class, "getAddedJars");
    } else {
        jars = (String[]) Utils.invokeStaticMethod(Utils.findClass("org.apache.spark.repl.Main"),
                "getAddedJars");
    }

    String classServerUri = null;
    String replClassOutputDirectory = null;

    try { // in case of spark 1.1x, spark 1.2x
        Method classServer = intp.getClass().getMethod("classServer");
        Object httpServer = classServer.invoke(intp);
        classServerUri = (String) Utils.invokeMethod(httpServer, "uri");
    } catch (NoSuchMethodException | SecurityException | IllegalAccessException | IllegalArgumentException
            | InvocationTargetException e) {
        // continue
    }

    if (classServerUri == null) {
        try { // for spark 1.3x
            Method classServer = intp.getClass().getMethod("classServerUri");
            classServerUri = (String) classServer.invoke(intp);
        } catch (NoSuchMethodException | SecurityException | IllegalAccessException | IllegalArgumentException
                | InvocationTargetException e) {
            // continue instead of: throw new InterpreterException(e);
            // Newer Spark versions (like the patched CDH5.7.0 one) don't contain this method
            logger.warn(
                    String.format("Spark method classServerUri not available due to: [%s]", e.getMessage()));
        }
    }

    if (classServerUri == null) {
        try { // for RcpEnv
            Method getClassOutputDirectory = intp.getClass().getMethod("getClassOutputDirectory");
            File classOutputDirectory = (File) getClassOutputDirectory.invoke(intp);
            replClassOutputDirectory = classOutputDirectory.getAbsolutePath();
        } catch (NoSuchMethodException | SecurityException | IllegalAccessException | IllegalArgumentException
                | InvocationTargetException e) {
            // continue
        }
    }

    if (Utils.isScala2_11()) {
        classServer = createHttpServer(outputDir);
        Utils.invokeMethod(classServer, "start");
        classServerUri = (String) Utils.invokeMethod(classServer, "uri");
    }

    if (classServerUri != null) {
        conf.set("spark.repl.class.uri", classServerUri);
    }

    if (replClassOutputDirectory != null) {
        conf.set("spark.repl.class.outputDir", replClassOutputDirectory);
    }

    if (jars.length > 0) {
        conf.setJars(jars);
    }

    if (execUri != null) {
        conf.set("spark.executor.uri", execUri);
    }
    conf.set("spark.scheduler.mode", "FAIR");

    Properties intpProperty = getProperties();
    for (Object k : intpProperty.keySet()) {
        String key = (String) k;
        String val = toString(intpProperty.get(key));
        if (!val.trim().isEmpty()) {
            if (key.startsWith("spark.")) {
                logger.debug(String.format("SparkConf: key = [%s], value = [%s]", key, val));
                conf.set(key, val);
            }

            if (key.startsWith("zeppelin.spark.")) {
                String sparkPropertyKey = key.substring("zeppelin.spark.".length());
                logger.debug(String.format("SparkConf: key = [%s], value = [%s]", sparkPropertyKey, val));
                conf.set(sparkPropertyKey, val);
            }
        }
    }
    SparkContext sparkContext = new SparkContext(conf);
    return sparkContext;
}

From source file:com.dtolabs.rundeck.plugin.resources.ec2.InstanceToNodeMapper.java
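
The mapping properties are traversed with keySet() three times: once to apply tag.<name>.selector entries, once to apply <attr>.default values that have no corresponding selector, and once to apply <attr>.selector entries.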

/**
 * Convert an AWS EC2 Instance to a RunDeck INodeEntry based on the mapping input
 */
@SuppressWarnings("unchecked")
static INodeEntry instanceToNode(final Instance inst, final Properties mapping) throws GeneratorException {
    final NodeEntryImpl node = new NodeEntryImpl();

    //evaluate single settings.selector=tags/* mapping
    if ("tags/*".equals(mapping.getProperty("attributes.selector"))) {
        //iterate through instance tags and generate settings
        for (final Tag tag : inst.getTags()) {
            if (null == node.getAttributes()) {
                node.setAttributes(new HashMap<String, String>());
            }
            node.getAttributes().put(tag.getKey(), tag.getValue());
        }
    }
    if (null != mapping.getProperty("tags.selector")) {
        final String selector = mapping.getProperty("tags.selector");
        final String value = applySelector(inst, selector, mapping.getProperty("tags.default"), true);
        if (null != value) {
            final String[] values = value.split(",");
            final HashSet<String> tagset = new HashSet<String>();
            for (final String s : values) {
                tagset.add(s.trim());
            }
            if (null == node.getTags()) {
                node.setTags(tagset);
            } else {
                final HashSet orig = new HashSet(node.getTags());
                orig.addAll(tagset);
                node.setTags(orig);
            }
        }
    }
    if (null == node.getTags()) {
        node.setTags(new HashSet());
    }
    final HashSet orig = new HashSet(node.getTags());
    //apply specific tag selectors
    final Pattern tagPat = Pattern.compile("^tag\\.(.+?)\\.selector$");
    //evaluate tag selectors
    for (final Object o : mapping.keySet()) {
        final String key = (String) o;
        final String selector = mapping.getProperty(key);
        //split selector by = if present
        final String[] selparts = selector.split("=");
        final Matcher m = tagPat.matcher(key);
        if (m.matches()) {
            final String tagName = m.group(1);
            if (null == node.getAttributes()) {
                node.setAttributes(new HashMap<String, String>());
            }
            final String value = applySelector(inst, selparts[0], null);
            if (null != value) {
                if (selparts.length > 1 && !value.equals(selparts[1])) {
                    continue;
                }
                // add the tag if the value is not null
                orig.add(tagName);
            }
        }
    }
    node.setTags(orig);

    //apply default values which do not have corresponding selector
    final Pattern attribDefPat = Pattern.compile("^([^.]+?)\\.default$");
    //evaluate selectors
    for (final Object o : mapping.keySet()) {
        final String key = (String) o;
        final String value = mapping.getProperty(key);
        final Matcher m = attribDefPat.matcher(key);
        if (m.matches() && (!mapping.containsKey(key + ".selector")
                || "".equals(mapping.getProperty(key + ".selector")))) {
            final String attrName = m.group(1);
            if (null == node.getAttributes()) {
                node.setAttributes(new HashMap<String, String>());
            }
            if (null != value) {
                node.getAttributes().put(attrName, value);
            }
        }
    }

    final Pattern attribPat = Pattern.compile("^([^.]+?)\\.selector$");
    //evaluate selectors
    for (final Object o : mapping.keySet()) {
        final String key = (String) o;
        final String selector = mapping.getProperty(key);
        final Matcher m = attribPat.matcher(key);
        if (m.matches()) {
            final String attrName = m.group(1);
            if (attrName.equals("tags")) {
                //already handled
                continue;
            }
            if (null == node.getAttributes()) {
                node.setAttributes(new HashMap<String, String>());
            }
            final String value = applySelector(inst, selector, mapping.getProperty(attrName + ".default"));
            if (null != value) {
                //use nodename-settingname to make the setting unique to the node
                node.getAttributes().put(attrName, value);
            }
        }
    }
    //        String hostSel = mapping.getProperty("hostname.selector");
    //        String host = applySelector(inst, hostSel, mapping.getProperty("hostname.default"));
    //        if (null == node.getHostname()) {
    //            System.err.println("Unable to determine hostname for instance: " + inst.getInstanceId());
    //            return null;
    //        }
    String name = node.getNodename();
    if (null == name || "".equals(name)) {
        name = node.getHostname();
    }
    if (null == name || "".equals(name)) {
        name = inst.getInstanceId();
    }
    node.setNodename(name);

    // Set ssh port on hostname if not 22
    String sshport = node.getAttributes().get("sshport");
    if (sshport != null && !sshport.equals("") && !sshport.equals("22")) {
        node.setHostname(node.getHostname() + ":" + sshport);
    }

    return node;
}

From source file:com.mirth.connect.model.util.ImportConverter3_0_0.java
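
During migration, keySet() is used to copy the old JMS connection factory properties into entry elements of the new connector configuration.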

private static void migrateJmsReceiverProperties(DonkeyElement properties) throws MigrationException {
    logger.debug("Migrating JmsReceiverProperties");
    Properties oldProperties = readPropertiesElement(properties);
    properties.setAttribute("class", "com.mirth.connect.connectors.jms.JmsReceiverProperties");
    properties.removeChildren();

    buildResponseConnectorProperties(properties.addChildElement("responseConnectorProperties"));

    properties.addChildElement("useJndi").setTextContent(readBooleanProperty(oldProperties, "useJndi", false));
    properties.addChildElement("jndiProviderUrl")
            .setTextContent(oldProperties.getProperty("jndiProviderUrl", ""));
    properties.addChildElement("jndiInitialContextFactory")
            .setTextContent(oldProperties.getProperty("jndiInitialFactory", ""));
    properties.addChildElement("jndiConnectionFactoryName")
            .setTextContent(oldProperties.getProperty("connectionFactoryJndiName", ""));
    properties.addChildElement("connectionFactoryClass")
            .setTextContent(oldProperties.getProperty("connectionFactoryClass", ""));
    properties.addChildElement("username").setTextContent(oldProperties.getProperty("username", ""));
    properties.addChildElement("password").setTextContent(oldProperties.getProperty("password", ""));

    String destinationName = oldProperties.getProperty("host", "");
    boolean topic = readBooleanValue(oldProperties, "durable", false);
    boolean durableTopic = topic;

    if (StringUtils.startsWith(destinationName, "topic://")
            || StringUtils.startsWith(destinationName, "//topic:")) {
        destinationName = destinationName.substring(8);
        topic = true;
    } else if (StringUtils.startsWith(destinationName, "//queue:")
            || StringUtils.startsWith(destinationName, "queue://")) {
        destinationName = destinationName.substring(8);
        topic = false;
        durableTopic = false;
    }

    properties.addChildElement("destinationName").setTextContent(destinationName);
    properties.addChildElement("reconnectIntervalMillis").setTextContent("10000");
    properties.addChildElement("clientId").setTextContent(oldProperties.getProperty("clientId", ""));
    properties.addChildElement("topic").setTextContent(Boolean.toString(topic));
    properties.addChildElement("durableTopic").setTextContent(Boolean.toString(durableTopic));
    properties.addChildElement("selector").setTextContent(oldProperties.getProperty("selector", ""));

    DonkeyElement connectionProperties = properties.addChildElement("connectionProperties");
    connectionProperties.setAttribute("class", "linked-hash-map");

    try {
        Properties oldConnectionProperties = readPropertiesElement(
                new DonkeyElement(oldProperties.getProperty("connectionFactoryProperties")));

        for (Object key : oldConnectionProperties.keySet()) {
            String value = oldConnectionProperties.getProperty((String) key);

            DonkeyElement entry = connectionProperties.addChildElement("entry");
            entry.addChildElement("string", (String) key);
            entry.addChildElement("string", value);
        }
    } catch (Exception e) {
        throw new MigrationException(e);
    }
}

From source file:com.photon.phresco.framework.rest.api.FeatureService.java
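
Each feature configuration's property keys are iterated via keySet() to build a PropertyTemplate per key, skipping the special expandable flag.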

private FeatureConfigure getTemplateConfigFile(String appDirName, String customerId,
        ServiceManager serviceManager, String featureName, String rootModulePath, String subModuleName)
        throws PhrescoException {
    List<PropertyTemplate> propertyTemplates = new ArrayList<PropertyTemplate>();
    try {
        FeatureConfigure featureConfigure = new FeatureConfigure();
        FrameworkServiceUtil frameworkServiceUtil = new FrameworkServiceUtil();
        ProjectInfo projectInfo = Utility.getProjectInfo(rootModulePath, subModuleName);
        List<Configuration> featureConfigurations = frameworkServiceUtil
                .getApplicationProcessor(appDirName, customerId, serviceManager, rootModulePath, subModuleName)
                .preFeatureConfiguration(projectInfo.getAppInfos().get(0), featureName);
        Properties properties = null;
        boolean hasCustomProperty = false;
        if (CollectionUtils.isNotEmpty(featureConfigurations)) {
            for (Configuration featureConfiguration : featureConfigurations) {
                properties = featureConfiguration.getProperties();
                String expandableProp = properties.getProperty("expandable");
                if (StringUtils.isEmpty(expandableProp)) {
                    hasCustomProperty = true;
                } else {
                    hasCustomProperty = Boolean.valueOf(expandableProp);
                }
                if (properties.containsKey("expandable")) {
                    properties.remove("expandable");
                }
                Set<Object> keySet = properties.keySet();
                for (Object key : keySet) {
                    String keyStr = (String) key;
                    if (!"expandable".equalsIgnoreCase(keyStr)) {
                        String dispName = keyStr.replace(".", " ");
                        PropertyTemplate propertyTemplate = new PropertyTemplate();
                        propertyTemplate.setKey(keyStr);
                        propertyTemplate.setName(dispName);
                        propertyTemplates.add(propertyTemplate);
                    }
                }
            }
        }
        featureConfigure.setHasCustomProperty(hasCustomProperty);
        featureConfigure.setProperties(properties);
        featureConfigure.setPropertyTemplates(propertyTemplates);
        return featureConfigure;
    } catch (PhrescoException e) {
        throw new PhrescoException(e);
    }
}

From source file:org.obiba.mica.micaConfig.rest.MicaConfigResource.java
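
The translation property keys are streamed from keySet(), sorted, and written out as gettext msgid/msgstr pairs.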

@GET
@Path("/i18n/{locale}.po")
@Produces(MediaType.TEXT_PLAIN)
public Response getGettextTranslations(@PathParam("locale") String locale,
        @QueryParam("default") boolean _default) throws IOException {
    StreamingOutput stream = os -> {
        try (PrintWriter writer = new PrintWriter(new OutputStreamWriter(os, "UTF-8"))) {
            Properties properties = getGlobalTranslationsAsProperties(locale, _default);
            writer.println("# Translations extracted from Mica");
            writer.println("msgid \"\"");
            writer.println("msgstr \"\"");
            writer.println(String.format("\"Project-Id-Version: Mica %s\\n\"",
                    micaConfigService.getConfig().getMicaVersion()));
            writer.println(String.format("\"PO-Revision-Date: %s\\n\"", new Date()));
            writer.println("\"MIME-Version: 1.0\\n\"");
            writer.println("\"Content-Type: text/plain; charset=UTF-8\\n\"");
            writer.println("\"Content-Transfer-Encoding: 8bit\\n\"");
            writer.println(String.format("\"Language: %s\\n\"", locale));
            writer.println();
            properties.keySet().stream().sorted().forEach(key -> {
                writer.println(String.format("msgid \"%s\"", key));
                String value = properties.getProperty(key.toString());
                if (!Strings.isNullOrEmpty(value)) {
                    value = value.replaceAll("\\{\\{([\\w]+)\\}\\}", "@$1");
                }
                writer.println(String.format("msgstr \"%s\"", value));
                writer.println();
            });
            writer.flush();
        }
    };
    return Response.ok(stream).build();
}

From source file:org.apache.geode.internal.cache.CacheServerLauncher.java
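
Here keySet() is used to append each configuration property as a key=value argument to the subprocess command line.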

/**
 * Process information contained in the options map and add to the command line of the subprocess
 * as needed.
 */
protected void addToServerCommand(final List<String> commandLine, final Map<String, Object> options) {
    final ListWrapper<String> commandLineWrapper = new ListWrapper<String>(commandLine);

    if (Boolean.TRUE.equals(options.get(REBALANCE))) {
        commandLineWrapper.add("-rebalance");
    }

    commandLineWrapper.add((String) options.get(DISABLE_DEFAULT_SERVER));
    commandLineWrapper.add((String) options.get(SERVER_PORT));
    commandLineWrapper.add((String) options.get(SERVER_BIND_ADDRESS_NAME));

    String criticalHeapThreshold = (String) options.get(CRITICAL_HEAP_PERCENTAGE);
    if (criticalHeapThreshold != null) {
        commandLineWrapper.add(criticalHeapThreshold);
    }
    String evictionHeapThreshold = (String) options.get(EVICTION_HEAP_PERCENTAGE);
    if (evictionHeapThreshold != null) {
        commandLineWrapper.add(evictionHeapThreshold);
    }

    String criticalOffHeapThreshold = (String) options.get(CRITICAL_OFF_HEAP_PERCENTAGE);
    if (criticalOffHeapThreshold != null) {
        commandLineWrapper.add(criticalOffHeapThreshold);
    }
    String evictionOffHeapThreshold = (String) options.get(EVICTION_OFF_HEAP_PERCENTAGE);
    if (evictionOffHeapThreshold != null) {
        commandLineWrapper.add(evictionOffHeapThreshold);
    }

    final Properties props = (Properties) options.get(PROPERTIES);

    for (final Object key : props.keySet()) {
        commandLineWrapper.add(key + "=" + props.getProperty(key.toString()));
    }

    if (props.getProperty(LOG_FILE) == null && CacheServerLauncher.isLoggingToStdOut()) {
        // Do not allow the cache server to log to stdout; override the logger with
        // #defaultLogFileName
        commandLineWrapper.add(LOG_FILE + "=" + defaultLogFileName);
    }
}