Example usage for java.util Properties entrySet

List of usage examples for java.util Properties entrySet

Introduction

On this page you can find example usages of java.util.Properties.entrySet().

Prototype

@Override
public Set<Map.Entry<Object, Object>> entrySet()

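A minimal, self-contained sketch of iterating a Properties object via entrySet(); the keys and values used here are made up for illustration:

import java.util.Map;
import java.util.Properties;

public class EntrySetExample {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.setProperty("host", "localhost"); // hypothetical entries
        props.setProperty("port", "8080");

        // entrySet() exposes the contents as Map.Entry<Object, Object> pairs;
        // for entries set via setProperty, both key and value are Strings.
        for (Map.Entry<Object, Object> entry : props.entrySet()) {
            System.out.println(entry.getKey() + " = " + entry.getValue());
        }
    }
}
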
Usage

From source file:org.apache.falcon.recipe.util.RecipeProcessBuilderUtils.java

private static void bindCustomProperties(final org.apache.falcon.entity.v0.process.Properties customProperties,
        final Properties recipeProperties) {
    List<Property> propertyList = new ArrayList<>();

    for (Map.Entry<Object, Object> recipeProperty : recipeProperties.entrySet()) {
        if (RecipeToolOptions.OPTIONSMAP.get(recipeProperty.getKey().toString()) == null) {
            addProperty(propertyList, (String) recipeProperty.getKey(), (String) recipeProperty.getValue());
        }
    }

    customProperties.getProperties().addAll(propertyList);
}

From source file:com.netflix.astyanax.thrift.ThriftUtils.java

/**
 * Convert a Properties object into a tree of nested maps, splitting keys on '.'.
 * @param props the properties to convert
 * @return the root map of the resulting tree
 */
public static Map<String, Object> propertiesToMap(Properties props) {
    Map<String, Object> root = Maps.newTreeMap();
    for (Entry<Object, Object> prop : props.entrySet()) {
        String[] parts = StringUtils.split((String) prop.getKey(), ".");
        Map<String, Object> node = root;
        for (int i = 0; i < parts.length - 1; i++) {
            if (!node.containsKey(parts[i])) {
                node.put(parts[i], new LinkedHashMap<String, Object>());
            }
            node = (Map<String, Object>) node.get(parts[i]);
        }
        node.put(parts[parts.length - 1], (String) prop.getValue());
    }
    return root;
}
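An illustrative call of the utility above, with made-up keys; dotted property names become nested maps:

Properties props = new Properties();
props.setProperty("cassandra.host", "127.0.0.1"); // hypothetical keys and values
props.setProperty("cassandra.port", "9160");
props.setProperty("pool.maxConns", "16");

Map<String, Object> tree = ThriftUtils.propertiesToMap(props);
// tree is roughly {cassandra={host=127.0.0.1, port=9160}, pool={maxConns=16}};
// the root TreeMap is sorted by key
System.out.println(tree);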

From source file:de.huberlin.german.korpling.laudatioteitool.App.java

private static Properties readConfig(String location) {
    Properties props = new Properties();
    FileInputStream inStream = null;
    try {
        inStream = new FileInputStream(location);
        props.load(inStream);

        // output the values
        for (Map.Entry<Object, Object> e : props.entrySet()) {
            log.info(messages.getString("SETTING OPTION FROM CONFIG"), e.getKey(), e.getValue());
        }

    } catch (FileNotFoundException ex) {
        log.warn(messages.getString("CONFIG FILE NOT FOUND"));
    } catch (IOException ex) {
        log.warn(messages.getString("CONFIG FILE NOT READABLE"));
    } finally {
        if (inStream != null) {
            try {
                inStream.close();
            } catch (IOException ex) {
                // ignore
            }
        }
    }
    return props;
}
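The same load-and-log step can be written more compactly with try-with-resources; this is only a sketch, not part of the original class, and the file name is hypothetical:

Properties props = new Properties();
try (FileInputStream inStream = new FileInputStream("tool.properties")) { // hypothetical path
    props.load(inStream);
    for (Map.Entry<Object, Object> e : props.entrySet()) {
        System.out.println("setting option " + e.getKey() + " = " + e.getValue());
    }
} catch (IOException ex) {
    // FileNotFoundException is an IOException, so one catch covers both cases
    System.err.println("config file not found or not readable");
}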

From source file:org.apache.falcon.extensions.util.ExtensionProcessBuilderUtils.java

private static void bindCustomProperties(final org.apache.falcon.entity.v0.process.Properties customProperties,
        final Properties extensionProperties) {
    List<Property> propertyList = new ArrayList<>();

    for (Map.Entry<Object, Object> extensionProperty : extensionProperties.entrySet()) {
        if (ExtensionProperties.getOptionsMap().get(extensionProperty.getKey().toString()) == null) {
            addProperty(propertyList, (String) extensionProperty.getKey(),
                    (String) extensionProperty.getValue());
        }
    }

    customProperties.getProperties().addAll(propertyList);
}

From source file:org.eclipse.gemini.blueprint.test.internal.util.PropertiesUtil.java

/**
 * Apply placeholder expansion to the given properties object.
 *
 * Returns a new properties object containing the expanded entries. Note that both keys and values are
 * expanded.
 *
 * @param props the properties to expand
 * @return a new Properties object with expanded keys and values
 */
public static Properties expandProperties(Properties props) {
    Assert.notNull(props);

    Set entrySet = props.entrySet();

    Properties newProps = (props instanceof OrderedProperties ? new OrderedProperties() : new Properties());

    for (Iterator iter = entrySet.iterator(); iter.hasNext();) {
        // first expand the keys
        Map.Entry entry = (Map.Entry) iter.next();
        String key = (String) entry.getKey();
        String value = (String) entry.getValue();

        String resultKey = expandProperty(key, props);
        String resultValue = expandProperty(value, props);

        // replace old entry

        newProps.put(resultKey, resultValue);
    }

    return newProps;
}
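A hedged usage sketch for the method above. The expandProperty helper is not shown here, so the exact placeholder syntax is an assumption; the sketch assumes ${...} references to other keys in the same Properties object:

Properties props = new Properties();
props.setProperty("base.dir", "/opt/app");          // hypothetical entries
props.setProperty("log.dir", "${base.dir}/logs");   // assumes ${...} placeholder syntax

Properties expanded = PropertiesUtil.expandProperties(props);
// Under that assumption, expanded would map log.dir to /opt/app/logs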

From source file:com.sun.socialsite.config.Config.java

private static Map<String, String> getPropertiesFromResource(String resourceName) throws IOException {
    Map<String, String> results = null;
    InputStream in = null;
    try {
        in = Config.class.getResourceAsStream(resourceName);
        if (in != null) {
            Properties props = new Properties();
            props.load(in);
            results = new HashMap<String, String>();
            for (Map.Entry entry : props.entrySet()) {
                results.put((String) (entry.getKey()), (String) (entry.getValue()));
            }
        }
    } finally {
        if (in != null)
            in.close();
    }
    return results;
}

From source file:com.sun.socialsite.config.Config.java

private static Map<String, String> getPropertiesFromFile(File propertiesFile) throws IOException {
    Map<String, String> results = null;
    if (propertiesFile != null && propertiesFile.exists()) {
        InputStream in = null;
        try {
            in = new FileInputStream(propertiesFile);
            Properties props = new Properties();
            props.load(in);
            results = new HashMap<String, String>();
            for (Map.Entry entry : props.entrySet()) {
                results.put((String) (entry.getKey()), (String) (entry.getValue()));
            }
        } finally {
            if (in != null)
                in.close();
        }
    }
    return results;
}
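Both Config.java snippets copy a loaded Properties object into a Map<String, String>. Since Java 8 the same copy can be expressed as a stream over entrySet(); this is a sketch, not part of the original class:

import java.util.Map;
import java.util.Properties;
import java.util.stream.Collectors;

final class PropsToMap {
    // Stream-based equivalent of the copy loops above.
    static Map<String, String> toMap(Properties props) {
        return props.entrySet().stream()
                .collect(Collectors.toMap(e -> (String) e.getKey(), e -> (String) e.getValue()));
    }
}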

From source file:org.paxml.util.PaxmlUtils.java

/**
 * Trim the property names and values and return them in a new Properties object.
 *
 * @param props
 *            the properties to trim
 * @return a new Properties object containing the trimmed names and values.
 */
public static Properties trimProperties(Properties props) {
    Properties result = new Properties();
    for (Map.Entry<Object, Object> entry : props.entrySet()) {
        String key = entry.getKey().toString();
        String value = entry.getValue().toString();
        result.put(key.trim(), value.trim());
    }
    return result;
}
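An illustrative call of trimProperties, with a made-up entry:

Properties raw = new Properties();
raw.setProperty("  name ", "  value with spaces  "); // hypothetical entry

Properties trimmed = PaxmlUtils.trimProperties(raw);
// trimmed maps "name" to "value with spaces"
System.out.println(trimmed.getProperty("name"));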

From source file:gobblin.util.test.RetentionTestHelper.java

/**
*
* Runs gobblin retention on test data. {@link DatasetCleaner}, which does retention in production, cannot be called directly because we need to resolve
* runtime properties like ${testNameTempPath}. This directory contains all the setup data created for a test by {@link RetentionTestDataGenerator#setup()}.
* It is unique for each test.
* The default {@link ConfigClient} used by {@link DatasetCleaner} connects to the config store. We need to provide a
* mock {@link ConfigClient} since the configs are on the classpath and not in the config store.
*
* @param retentionConfigClasspathResource this is the same jobProps/config files used while running a real retention job
* @param testNameTempPath temp path for this test where test data is generated
*/
public static void clean(FileSystem fs, Path retentionConfigClasspathResource,
        Optional<Path> additionalJobPropsClasspathResource, Path testNameTempPath) throws Exception {

    Properties additionalJobProps = new Properties();
    if (additionalJobPropsClasspathResource.isPresent()) {
        try (final InputStream stream = RetentionTestHelper.class.getClassLoader()
                .getResourceAsStream(additionalJobPropsClasspathResource.get().toString())) {
            additionalJobProps.load(stream);
        }
    }

    if (retentionConfigClasspathResource.getName().endsWith(".job")) {

        Properties jobProps = new Properties();
        try (final InputStream stream = RetentionTestHelper.class.getClassLoader()
                .getResourceAsStream(retentionConfigClasspathResource.toString())) {
            jobProps.load(stream);
            for (Entry<Object, Object> entry : jobProps.entrySet()) {
                jobProps.put(entry.getKey(), StringUtils.replace((String) entry.getValue(),
                        "${testNameTempPath}", testNameTempPath.toString()));
            }
        }

        MultiCleanableDatasetFinder finder = new MultiCleanableDatasetFinder(fs, jobProps);
        for (Dataset dataset : finder.findDatasets()) {
            ((CleanableDataset) dataset).clean();
        }
    } else {
        Config testConfig = ConfigFactory.parseResources(retentionConfigClasspathResource.toString())
                .withFallback(ConfigFactory.parseMap(ImmutableMap.of("testNameTempPath",
                        PathUtils.getPathWithoutSchemeAndAuthority(testNameTempPath).toString())))
                .resolve();

        ConfigClient client = mock(ConfigClient.class);
        when(client.getConfig(any(String.class))).thenReturn(testConfig);
        Properties jobProps = new Properties();
        jobProps.setProperty(CleanableDatasetBase.SKIP_TRASH_KEY, Boolean.toString(true));
        jobProps.setProperty(ConfigurationKeys.CONFIG_MANAGEMENT_STORE_URI, "dummy");

        jobProps.putAll(additionalJobProps);

        @SuppressWarnings("unchecked")
        DatasetsFinder<CleanableDataset> finder = (DatasetsFinder<CleanableDataset>) GobblinConstructorUtils
                .invokeFirstConstructor(
                        Class.forName(
                                testConfig.getString(MultiCleanableDatasetFinder.DATASET_FINDER_CLASS_KEY)),
                        ImmutableList.of(fs, jobProps, testConfig, client),
                        ImmutableList.of(fs, jobProps, client));

        for (CleanableDataset dataset : finder.findDatasets()) {
            dataset.clean();
        }
    }
}
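The ${testNameTempPath} substitution above works because overwriting the value of an existing key does not structurally modify the underlying Hashtable, so iterating entrySet() stays valid. A standalone sketch with hypothetical values, using plain String.replace instead of commons-lang StringUtils:

Properties jobProps = new Properties();
jobProps.setProperty("data.root", "${testNameTempPath}/input"); // hypothetical key
String testNameTempPath = "/tmp/retention-test";                // hypothetical path

for (Map.Entry<Object, Object> entry : jobProps.entrySet()) {
    // Replacing the value of an existing key is safe during iteration;
    // adding or removing keys here would make the iterator fail fast.
    jobProps.put(entry.getKey(),
            ((String) entry.getValue()).replace("${testNameTempPath}", testNameTempPath));
}
// jobProps now maps data.root to /tmp/retention-test/input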

From source file:org.apache.gobblin.util.test.RetentionTestHelper.java

/**
*
* Runs gobblin retention on test data. {@link DatasetCleaner}, which does retention in production, cannot be called directly because we need to resolve
* runtime properties like ${testNameTempPath}. This directory contains all the setup data created for a test by {@link RetentionTestDataGenerator#setup()}.
* It is unique for each test.
* The default {@link ConfigClient} used by {@link DatasetCleaner} connects to the config store. We need to provide a
* mock {@link ConfigClient} since the configs are on the classpath and not in the config store.
*
* @param retentionConfigClasspathResource this is the same jobProps/config files used while running a real retention job
* @param testNameTempPath temp path for this test where test data is generated
*/
public static void clean(FileSystem fs, Path retentionConfigClasspathResource,
        Optional<Path> additionalJobPropsClasspathResource, Path testNameTempPath) throws Exception {

    Properties additionalJobProps = new Properties();
    if (additionalJobPropsClasspathResource.isPresent()) {
        try (final InputStream stream = RetentionTestHelper.class.getClassLoader()
                .getResourceAsStream(additionalJobPropsClasspathResource.get().toString())) {
            additionalJobProps.load(stream);
        }
    }

    if (retentionConfigClasspathResource.getName().endsWith(".job")) {

        Properties jobProps = new Properties();
        try (final InputStream stream = RetentionTestHelper.class.getClassLoader()
                .getResourceAsStream(retentionConfigClasspathResource.toString())) {
            jobProps.load(stream);
            for (Entry<Object, Object> entry : jobProps.entrySet()) {
                jobProps.put(entry.getKey(), StringUtils.replace((String) entry.getValue(),
                        "${testNameTempPath}", testNameTempPath.toString()));
            }
        }

        MultiCleanableDatasetFinder finder = new MultiCleanableDatasetFinder(fs, jobProps);
        for (Dataset dataset : finder.findDatasets()) {
            ((CleanableDataset) dataset).clean();
        }
    } else {
        Config testConfig = ConfigFactory.parseResources(retentionConfigClasspathResource.toString())
                .withFallback(ConfigFactory.parseMap(ImmutableMap.of("testNameTempPath",
                        PathUtils.getPathWithoutSchemeAndAuthority(testNameTempPath).toString())))
                .resolve();

        ConfigClient client = mock(ConfigClient.class);
        when(client.getConfig(any(String.class))).thenReturn(testConfig);
        Properties jobProps = new Properties();
        jobProps.setProperty(CleanableDatasetBase.SKIP_TRASH_KEY, Boolean.toString(true));
        jobProps.setProperty(ConfigurationKeys.CONFIG_MANAGEMENT_STORE_URI, "dummy");
        jobProps.setProperty(ConfigurationKeys.CONFIG_MANAGEMENT_STORE_ENABLED, "true");

        jobProps.putAll(additionalJobProps);

        @SuppressWarnings("unchecked")
        DatasetsFinder<CleanableDataset> finder = (DatasetsFinder<CleanableDataset>) GobblinConstructorUtils
                .invokeFirstConstructor(
                        Class.forName(
                                testConfig.getString(MultiCleanableDatasetFinder.DATASET_FINDER_CLASS_KEY)),
                        ImmutableList.of(fs, jobProps, testConfig, client),
                        ImmutableList.of(fs, jobProps, client));

        for (CleanableDataset dataset : finder.findDatasets()) {
            dataset.clean();
        }
    }
}