Example usage for java.util Properties stringPropertyNames

List of usage examples for java.util Properties stringPropertyNames

Introduction

On this page you can find example usage for java.util Properties stringPropertyNames.

Prototype

public Set<String> stringPropertyNames() 

Document

Returns an unmodifiable set of keys from this property list where the key and its corresponding value are strings, including distinct keys in the default property list if a key of the same name has not already been found from the main properties list.
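
A minimal sketch of that behavior (the class name and property names below are made up for illustration): keys from the default property list show up in the returned set unless the main list already defines a key with the same name, and values still resolve through getProperty.

import java.util.Properties;
import java.util.Set;

public class StringPropertyNamesExample {
    public static void main(String[] args) {
        Properties defaults = new Properties();
        defaults.setProperty("host", "localhost"); // only present in the defaults
        defaults.setProperty("port", "8080");      // overridden by the main list below

        Properties props = new Properties(defaults);
        props.setProperty("port", "9090");
        props.setProperty("user", "admin");

        // Prints host, port and user; "port" resolves to "9090" from the main list.
        Set<String> names = props.stringPropertyNames();
        for (String name : names) {
            System.out.println(name + "=" + props.getProperty(name));
        }
    }
}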

Usage

From source file:org.apache.nifi.minifi.bootstrap.util.ConfigTransformerTest.java

@Test
public void testNifiPropertiesVariableRegistry()
        throws IOException, ConfigurationChangeException, SchemaLoaderException {
    Properties initialProperties = new Properties();
    try (InputStream pre216PropertiesStream = ConfigTransformerTest.class.getClassLoader()
            .getResourceAsStream("MINIFI-277/nifi.properties")) {
        initialProperties.load(pre216PropertiesStream);
    }
    initialProperties.setProperty(ConfigTransformer.NIFI_VERSION_KEY, ConfigTransformer.NIFI_VERSION);

    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    try (InputStream configStream = ConfigTransformerTest.class.getClassLoader()
            .getResourceAsStream("MINIFI-277/config.yml")) {
        ConfigSchema configSchema = SchemaLoader.loadConfigSchemaFromYaml(configStream);
        ConfigTransformer.writeNiFiProperties(configSchema, outputStream);
    }
    Properties properties = new Properties();
    properties.load(new ByteArrayInputStream(outputStream.toByteArray()));

    for (String name : initialProperties.stringPropertyNames()) {
        assertEquals("Property key " + name + " doesn't match.", initialProperties.getProperty(name),
                properties.getProperty(name));
    }
}

From source file:edu.dfci.cccb.mev.controllers.HeatmapController.java

@Override
public void afterPropertiesSet() throws Exception {
    new Thread() {
        public void run() {

            Properties definitions = new Properties() {
                private static final long serialVersionUID = 1L;

                {
                    try {
                        load(Heatmaps.class.getResourceAsStream("/configuration/heatmap.globals.properties"));
                    } catch (IOException e) {
                        log.warn("Unable to load global heatmaps", e);
                    }
                }
            };

            Properties annotations = new Properties() {
                private static final long serialVersionUID = 1L;

                {
                    try {
                        load(Heatmaps.class
                                .getResourceAsStream("/configuration/heatmap.globals.annotation.properties"));
                    } catch (NullPointerException | IOException e) {
                        log.warn("Unable to load global heatmap annotations");
                    }
                }
            };

            for (String key : definitions.stringPropertyNames()) {
                File data = new File(definitions.get(key).toString());
                log.debug("Loading " + data + " as " + key);
                if (!data.exists()) {
                    log.debug("File " + data + " not found");
                    continue;
                } else
                    try {
                        log.debug(data + " found");
                        Heatmap heatmap;
                        global.put(key,
                                heatmap = heatmapBuilder.build(new FileInputStream(data), data.length(), key));
                        log.debug("Loaded " + data);
                        String location;
                        try {
                            if ((location = annotations.getProperty(key + ".column")) != null
                                    && new File(location).exists())
                                heatmap.setColumnAnnotations(new FileInputStream(location));
                            log.debug("Loaded column annotations for " + key + " from " + location);
                        } catch (IOException e) {
                            log.warn("Unable to load column annotations for " + key, e);
                        }
                        try {
                            if ((location = annotations.getProperty(key + ".row")) != null
                                    && new File(location).exists())
                                heatmap.setRowAnnotations(new FileInputStream(location));
                            log.debug("Loaded row annotations for " + key + " from " + location);
                        } catch (IOException e) {
                            log.warn("Unable to load row annotations for " + key, e);
                        }
                    } catch (IOException e) {
                        log.warn("Unable to load global heatmap " + key + " data at " + data, e);
                    }
            }

            log.info("Finished loading global heatmaps");
        }
    }.start();
}

From source file:org.apache.zeppelin.interpreter.launcher.SparkInterpreterLauncher.java

@Override
protected Map<String, String> buildEnvFromProperties(InterpreterLaunchContext context) {
    Map<String, String> env = super.buildEnvFromProperties(context);
    Properties sparkProperties = new Properties();
    String sparkMaster = getSparkMaster(properties);
    for (String key : properties.stringPropertyNames()) {
        if (RemoteInterpreterUtils.isEnvString(key)) {
            env.put(key, properties.getProperty(key));
        }
        if (isSparkConf(key, properties.getProperty(key))) {
            sparkProperties.setProperty(key, toShellFormat(properties.getProperty(key)));
        }
    }

    setupPropertiesForPySpark(sparkProperties);
    setupPropertiesForSparkR(sparkProperties);
    if (isYarnMode() && getDeployMode().equals("cluster")) {
        env.put("ZEPPELIN_SPARK_YARN_CLUSTER", "true");
        sparkProperties.setProperty("spark.yarn.submit.waitAppCompletion", "false");
    }

    StringBuilder sparkConfBuilder = new StringBuilder();
    if (sparkMaster != null) {
        sparkConfBuilder.append(" --master " + sparkMaster);
    }
    if (isYarnMode() && getDeployMode().equals("cluster")) {
        if (sparkProperties.containsKey("spark.files")) {
            sparkProperties.put("spark.files", sparkProperties.getProperty("spark.files") + ","
                    + zConf.getConfDir() + "/log4j_yarn_cluster.properties");
        } else {
            sparkProperties.put("spark.files", zConf.getConfDir() + "/log4j_yarn_cluster.properties");
        }
    }
    for (String name : sparkProperties.stringPropertyNames()) {
        sparkConfBuilder.append(" --conf " + name + "=" + sparkProperties.getProperty(name));
    }
    String useProxyUserEnv = System.getenv("ZEPPELIN_IMPERSONATE_SPARK_PROXY_USER");
    if (context.getOption().isUserImpersonate()
            && (StringUtils.isBlank(useProxyUserEnv) || !useProxyUserEnv.equals("false"))) {
        sparkConfBuilder.append(" --proxy-user " + context.getUserName());
    }
    Path localRepoPath = Paths.get(zConf.getInterpreterLocalRepoPath(), context.getInterpreterSettingId());
    if (isYarnMode() && getDeployMode().equals("cluster") && Files.exists(localRepoPath)
            && Files.isDirectory(localRepoPath)) {
        try {
            StreamSupport
                    .stream(Files.newDirectoryStream(localRepoPath, entry -> Files.isRegularFile(entry))
                            .spliterator(), false)
                    .map(jar -> jar.toAbsolutePath().toString()).reduce((x, y) -> x.concat(",").concat(y))
                    .ifPresent(extraJars -> sparkConfBuilder.append(" --jars ").append(extraJars));
        } catch (IOException e) {
            LOGGER.error("Cannot make a list of additional jars from localRepo: {}", localRepoPath, e);
        }

    }

    env.put("ZEPPELIN_SPARK_CONF", sparkConfBuilder.toString());

    // set these env in the order of
    // 1. interpreter-setting
    // 2. zeppelin-env.sh
    // It is encouraged to set env in interpreter setting, but just for backward compatability,
    // we also fallback to zeppelin-env.sh if it is not specified in interpreter setting.
    for (String envName : new String[] { "SPARK_HOME", "SPARK_CONF_DIR", "HADOOP_CONF_DIR" }) {
        String envValue = getEnv(envName);
        if (envValue != null) {
            env.put(envName, envValue);
        }
    }

    String keytab = zConf.getString(ZeppelinConfiguration.ConfVars.ZEPPELIN_SERVER_KERBEROS_KEYTAB);
    String principal = zConf.getString(ZeppelinConfiguration.ConfVars.ZEPPELIN_SERVER_KERBEROS_PRINCIPAL);

    if (!StringUtils.isBlank(keytab) && !StringUtils.isBlank(principal)) {
        env.put("ZEPPELIN_SERVER_KERBEROS_KEYTAB", keytab);
        env.put("ZEPPELIN_SERVER_KERBEROS_PRINCIPAL", principal);
        LOGGER.info("Run Spark under secure mode with keytab: " + keytab + ", principal: " + principal);
    } else {
        LOGGER.info("Run Spark under non-secure mode as no keytab and principal is specified");
    }
    LOGGER.debug("buildEnvFromProperties: " + env);
    return env;

}

From source file:org.efaps.esjp.common.uiform.Field_Base.java

/**
 * Renders a field that contains the values from a
 * SystemConfigurationObjectAttribute.
 * Properties:
 * <table>
 *  <tr><th>Property</th><th>Value</th><th>Obligatory</th></tr>
 *  <tr><td>SystemConfigurationUUID</td><td>UUID of the SystemConfiguration the
 *  ObjectAttribute will be searched in.</td><td>true</td></tr>
 * </table>
 * @param _parameter    Parameter as passed from the eFaps API
 * @return html snippet
 * @throws EFapsException on error
 */
public Return systemConfigurationObjectFieldValue(final Parameter _parameter) throws EFapsException {
    final Return ret = new Return();
    final String configurationUUID = getProperty(_parameter, "SystemConfigurationUUID");
    if (configurationUUID != null) {
        final UUID uuid = UUID.fromString(configurationUUID);
        final SystemConfiguration config = SystemConfiguration.get(uuid);
        if (config != null) {
            final IUIValue uiValue = (IUIValue) _parameter.get(ParameterValues.UIOBJECT);
            final Properties confProps = config.getObjectAttributeValueAsProperties(_parameter.getInstance());
            final List<String> keys = new ArrayList<>(confProps.stringPropertyNames());
            Collections.sort(keys);
            if (Display.EDITABLE.equals(uiValue.getDisplay())) {
                final StringBuilder propStr = new StringBuilder();
                for (final String key : keys) {
                    propStr.append(key).append("=").append(confProps.getProperty(key)).append("\n");
                }
                ret.put(ReturnValues.VALUES, propStr.toString());
            } else {
                final StringBuilder html = new StringBuilder();
                html.append("<table>");
                for (final String key : keys) {
                    html.append("<tr>").append("<td>").append(key).append("</td>").append("<td>")
                            .append(confProps.getProperty(key)).append("</td>").append("</tr>");
                }
                html.append("</table>");
                ret.put(ReturnValues.SNIPLETT, html.toString());
            }
        }
    }
    return ret;
}

From source file:org.wso2.carbon.policy.mgt.core.dao.impl.PolicyDAOImpl.java

@Override
public boolean addPolicyCriteriaProperties(List<PolicyCriterion> policyCriteria)
        throws PolicyManagerDAOException {
    Connection conn;
    PreparedStatement stmt = null;
    try {
        conn = this.getConnection();
        String query = "INSERT INTO DM_POLICY_CRITERIA_PROPERTIES (POLICY_CRITERION_ID, PROP_KEY, PROP_VALUE, "
                + "CONTENT) VALUES (?, ?, ?, ?)";
        stmt = conn.prepareStatement(query);

        for (PolicyCriterion criterion : policyCriteria) {
            Properties prop = criterion.getProperties();
            for (String name : prop.stringPropertyNames()) {

                stmt.setInt(1, criterion.getId());
                stmt.setString(2, name);
                stmt.setString(3, prop.getProperty(name));
                stmt.setBytes(4, PolicyManagerUtil.getBytes(criterion.getObjectMap()));
                stmt.addBatch();
            }
            stmt.executeBatch();
        }
        //   stmt.executeUpdate();

    } catch (SQLException | IOException e) {
        throw new PolicyManagerDAOException(
                "Error occurred while inserting the criterion properties " + "to database", e);
    } finally {
        PolicyManagementDAOUtil.cleanupResources(stmt, null);
    }
    return false;
}

From source file:dinistiq.Dinistiq.java

/**
 * Create a dinistiq context from the given class resolver and optional external beans.
 * Add all the external named beans from the given map for later lookup to the context as well
 * and be sure that your class resolver takes the resources in the dinistiq/ path of your
 * class path into consideration.
 *
 * @param classResolver resolver to use when resolving all types of classes
 * @param externalBeans map of beans with their id (name) as the key
 * @throws java.lang.Exception thrown with a readable message if something goes wrong
 */
public Dinistiq(ClassResolver classResolver, Map<String, Object> externalBeans) throws Exception {
    // measure time for init process
    long start = System.currentTimeMillis();

    Map<String, Set<Object>> dependencies = new HashMap<>();

    // Use all externally provided beans
    if (externalBeans != null) {
        beans.putAll(externalBeans);
        for (String externalBeanName : externalBeans.keySet()) {
            dependencies.put(externalBeanName, new HashSet<>());
        } // for
    } // if

    // Add system properties to scope and split potential URL values
    for (Object keyObject : System.getProperties().keySet()) {
        String key = keyObject.toString();
        beans.put(key, System.getProperty(key));
        storeUrlParts(key, System.getProperty(key), beans);
    } // for
      // Add environment to scope and split potential URL values
    for (String key : environment.keySet()) {
        storeUrlParts(key, environment.get(key), beans);
    } // for
    LOG.debug("() initial beans {}", beans);

    // Read bean list from properties files mapping names to names of the classes to be instanciated
    Properties beanlist = new Properties();
    SortedSet<String> propertiesFilenames = classResolver.getProperties(PRODUCT_BASE_PATH + "/");
    LOG.debug("() checking {} files for properties", propertiesFilenames.size());
    for (String propertyResource : propertiesFilenames) {
        LOG.debug("() check {}", propertyResource);
        // ignore subfolders!
        if (propertyResource.indexOf('/', PRODUCT_BASE_PATH.length() + 1) < 0) {
            LOG.debug("() resource {}", propertyResource);
            beanlist.load(Thread.currentThread().getContextClassLoader().getResourceAsStream(propertyResource));
        } // if
    } // for
    List<Class<?>> classList = new ArrayList<>();
    List<String> nameList = new ArrayList<>();
    for (String key : beanlist.stringPropertyNames()) {
        String className = beanlist.getProperty(key);
        if (MAP_TYPE.equals(className)) {
            beans.put(key, new HashMap<>());
            dependencies.put(key, new HashSet<>());
        } else {
            // expect java.lang.Xyz("value")
            int idx = className.indexOf('(');
            if (className.startsWith(JAVALANG_PREFIX) && (idx > 0)) {
                String value = getReferenceValue(className.substring(idx + 2, className.length() - 2))
                        .toString();
                className = className.substring(0, idx);
                LOG.debug("() instanciating {} :{}", value, className);
                Class<? extends Object> c = Class.forName(className.substring(0, idx));
                Object instance = c.getConstructor(String.class).newInstance(value);
                LOG.info("() storing value {} :{} - {}", key, instance.getClass().getName(), instance);
                beans.put(key, instance);
                dependencies.put(key, new HashSet<>());
            } else {
                boolean setType = className.startsWith(SET_TYPE);
                if ((setType || className.startsWith(LIST_TYPE)) && (idx > 0)) {
                    String values[] = getReferenceValue(className.substring(idx + 1, className.length() - 1))
                            .toString().split(",");
                    Collection<String> instance = setType ? new HashSet<>(Arrays.asList(values))
                            : Arrays.asList(values);
                    LOG.debug("() collection {} (set {}): {}", key, setType, instance);
                    beans.put(key, instance);
                    dependencies.put(key, new HashSet<>());
                } else {
                    LOG.debug("() listing {}", className);
                    Class<? extends Object> c = Class.forName(className);
                    classList.add(c);
                    nameList.add(key);
                } // if
            } // if
        } // if
    } // for
    LOG.info("() beanlist {}", beanlist);

    // List annotated beans
    final Set<Class<Object>> classes = classResolver.getAnnotated(Singleton.class);
    LOG.info("() number of annotated beans {}", classes.size());
    for (Class<? extends Object> c : classes) {
        classList.add(c);
        nameList.add(null);
    } // for
    LOG.debug("() beans {}", beans.keySet());

    // Instanciate beans from the properties files and from annotations taking constructor injection dependencies into account
    int ripCord = 10;
    while ((ripCord > 0) && (!classList.isEmpty())) {
        LOG.debug("() trying {} beans: {}", nameList.size(), classList);
        ripCord--;
        List<Class<?>> restClassList = new ArrayList<>();
        List<String> restNameList = new ArrayList<>();
        for (int i = 0; i < classList.size(); i++) {
            try {
                createAndRegisterInstance(dependencies, classList.get(i), nameList.get(i));
            } catch (Exception e) {
                LOG.warn("() will retry {} later: {} - {}", classList.get(i), e.getClass().getName(),
                        e.getMessage());
                restClassList.add(classList.get(i));
                restNameList.add(nameList.get(i));
            } // try/catch
        } // for
        classList = restClassList;
        nameList = restNameList;
    } // while

    // Fill in injections and note needed dependencies
    for (String key : new HashSet<>(beans.keySet())) {
        injectDependencies(dependencies, key, beans.get(key));
    } // for

    // sort beans according to dependencies
    LOG.info("() sorting beans according to dependencies");
    ripCord = 10;
    while ((ripCord > 0) && (!dependencies.isEmpty())) {
        ripCord--;
        LOG.info("() {} beans left", dependencies.size());
        Set<String> deletions = new HashSet<>();
        for (String key : dependencies.keySet()) {
            LOG.debug("() checking if {} with {} dependencies can be safely put into the ordered list {}", key,
                    dependencies.get(key).size(), dependencies.get(key));
            boolean dependenciesMet = true;
            for (Object dep : dependencies.get(key)) {
                boolean isMet = orderedBeans.contains(dep);
                LOG.debug("() {} depends on {} :{} missing? {} collection= {}", key, dep,
                        dep.getClass().getName(), !isMet, (dep instanceof Collection));
                dependenciesMet = dependenciesMet && isMet;
            } // for
            if (dependenciesMet) {
                LOG.info("() adding {} to the list {}", key, orderedBeans);
                orderedBeans.add(beans.get(key));
                deletions.add(key);
            } // if
        } // for
        for (String key : deletions) {
            dependencies.remove(key);
        } // for
    } // while
    if (dependencies.size() > 0) {
        throw new Exception("Circular bean injection and initialization dependencies detected after "
                + (System.currentTimeMillis() - start) + "ms" + " " + dependencies);
    } // if

    // Call Post Construct
    LOG.info("() calling post construct on ordered beans {}", orderedBeans);
    for (Object bean : orderedBeans) {
        LOG.info("() bean {}", bean);
        callPostConstruct(bean);
    } // for
    LOG.info("() calling post construct for the rest of the beans");
    for (String key : beans.keySet()) {
        Object bean = beans.get(key);
        if (!orderedBeans.contains(bean) && !String.class.isAssignableFrom(bean.getClass())) {
            LOG.warn("() bean without dependencies to call post construct method on {} :{}", key,
                    bean.getClass().getSimpleName());
            callPostConstruct(bean);
        } // if
    } // for
    LOG.info("() setup completed after {}ms", (System.currentTimeMillis() - start));
}

From source file:adalid.commons.velocity.Writer.java

private void putStrings(VelocityContext context, Properties properties) {
    String string1;
    String string2;
    String velocityKey;
    Set<String> stringPropertyNames = properties.stringPropertyNames();
    for (String name : stringPropertyNames) {
        if (StringUtils.endsWithIgnoreCase(name, DOT_STRING)) {
            string1 = StringUtils.removeEndIgnoreCase(name, DOT_STRING);
            string2 = StringUtils.trimToEmpty(properties.getProperty(name));
            velocityKey = StrUtils.getCamelCase(string1, true);
            context.put(velocityKey, string2);
        }
    }
}

From source file:org.apache.nifi.minifi.bootstrap.util.ConfigTransformerTest.java

@Test
public void testNifiPropertiesOverrides()
        throws IOException, ConfigurationChangeException, SchemaLoaderException {
    Properties pre216Properties = new Properties();
    try (InputStream pre216PropertiesStream = ConfigTransformerTest.class.getClassLoader()
            .getResourceAsStream("MINIFI-216/nifi.properties.before")) {
        pre216Properties.load(pre216PropertiesStream);
    }
    pre216Properties.setProperty(ConfigTransformer.NIFI_VERSION_KEY, ConfigTransformer.NIFI_VERSION);

    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    try (InputStream configStream = ConfigTransformerTest.class.getClassLoader()
            .getResourceAsStream("MINIFI-216/configOverrides.yml")) {
        ConfigSchema configSchema = SchemaLoader.loadConfigSchemaFromYaml(configStream);
        assertTrue(configSchema.getNifiPropertiesOverrides().size() > 0);
        for (Map.Entry<String, String> entry : configSchema.getNifiPropertiesOverrides().entrySet()) {
            pre216Properties.setProperty(entry.getKey(), entry.getValue());
        }
        ConfigTransformer.writeNiFiProperties(configSchema, outputStream);
    }
    Properties properties = new Properties();
    properties.load(new ByteArrayInputStream(outputStream.toByteArray()));

    for (String name : pre216Properties.stringPropertyNames()) {
        assertEquals("Property key " + name + " doesn't match.", pre216Properties.getProperty(name),
                properties.getProperty(name));
    }
}

From source file:org.apache.oozie.client.OozieClient.java

public void writeToXml(Properties props, OutputStream out) throws IOException {
    try {
        Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder().newDocument();
        Element conf = doc.createElement("configuration");
        doc.appendChild(conf);
        conf.appendChild(doc.createTextNode("\n"));
        for (String name : props.stringPropertyNames()) { // Properties whose key or value is not of type String are omitted.
            String value = props.getProperty(name);
            Element propNode = doc.createElement("property");
            conf.appendChild(propNode);

            Element nameNode = doc.createElement("name");
            nameNode.appendChild(doc.createTextNode(name.trim()));
            propNode.appendChild(nameNode);

            Element valueNode = doc.createElement("value");
            valueNode.appendChild(doc.createTextNode(value.trim()));
            propNode.appendChild(valueNode);

            conf.appendChild(doc.createTextNode("\n"));
        }

        DOMSource source = new DOMSource(doc);
        StreamResult result = new StreamResult(out);
        TransformerFactory transFactory = TransformerFactory.newInstance();
        Transformer transformer = transFactory.newTransformer();
        transformer.transform(source, result);
        if (getDebugMode() > 0) {
            result = new StreamResult(System.out);
            transformer.transform(source, result);
            System.out.println();
        }
    } catch (Exception e) {
        throw new IOException(e);
    }
}