List of usage examples for java.util.Properties.entrySet()
@Override
public Set<Map.Entry<Object, Object>> entrySet()
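All of the examples below share the same basic iteration pattern, so here is a minimal, self-contained sketch of it first. The property names and values are hypothetical and only for illustration: Properties stores Object keys and values, so entrySet() yields Map.Entry<Object, Object>, and each key and value is usually cast (or converted via toString()) back to String.

import java.util.Map;
import java.util.Properties;

public class PropertiesEntrySetDemo {
    public static void main(String[] args) {
        Properties props = new Properties();
        // hypothetical sample entries, for illustration only
        props.setProperty("db.host", "localhost");
        props.setProperty("db.port", "5432");

        // entrySet() exposes entries as Map.Entry<Object, Object>,
        // so keys and values are typically converted back to String
        for (Map.Entry<Object, Object> entry : props.entrySet()) {
            String key = (String) entry.getKey();
            String value = (String) entry.getValue();
            System.out.println(key + " = " + value);
        }
    }
}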
From source file: de.alpharogroup.lang.PropertiesUtils.java

/**
 * Finds redundant values from the given Properties object and saves it to a Map.
 *
 * @param properties
 *            The Properties to check.
 * @return A map that contains the redundant value as the key of the map and a List (as value of
 *         the map) of keys that have the redundant value.
 */
public static Map<String, List<String>> findRedundantValues(final Properties properties) {
    final Map<String, List<String>> reverseEntries = new LinkedHashMap<>();
    for (final Map.Entry<Object, Object> entry : properties.entrySet()) {
        final String key = (String) entry.getKey();
        final String value = (String) entry.getValue();
        if (!reverseEntries.containsKey(value)) {
            final List<String> keys = new ArrayList<>();
            keys.add(key);
            reverseEntries.put(value, keys);
        } else {
            final List<String> keys = reverseEntries.get(value);
            keys.add(key);
        }
    }
    final Map<String, List<String>> redundantValues = new LinkedHashMap<>();
    for (final Map.Entry<String, List<String>> entry : reverseEntries.entrySet()) {
        final String key = entry.getKey();
        final List<String> keys = entry.getValue();
        if (1 < keys.size()) {
            redundantValues.put(key, keys);
        }
    }
    return redundantValues;
}
From source file: org.apache.hama.zookeeper.QuorumPeer.java

/**
 * Parse ZooKeeper's zoo.cfg, injecting Hama Configuration variables in. This
 * method is used for testing so we can pass our own InputStream.
 *
 * @param conf Configuration to use for injecting variables.
 * @param inputStream InputStream to read from.
 * @return Properties parsed from config stream with variables substituted.
 * @throws IOException if anything goes wrong parsing config
 */
public static Properties parseZooCfg(Configuration conf, InputStream inputStream) throws IOException {
    Properties properties = new Properties();
    try {
        properties.load(inputStream);
    } catch (IOException e) {
        String msg = "fail to read properties from " + ZOOKEEPER_CONFIG_NAME;
        LOG.fatal(msg);
        throw new IOException(msg, e);
    }
    for (Entry<Object, Object> entry : properties.entrySet()) {
        String value = entry.getValue().toString().trim();
        String key = entry.getKey().toString().trim();
        StringBuilder newValue = new StringBuilder();
        int varStart = value.indexOf(VARIABLE_START);
        int varEnd = 0;
        while (varStart != -1) {
            varEnd = value.indexOf(VARIABLE_END, varStart);
            if (varEnd == -1) {
                String msg = "variable at " + varStart + " has no end marker";
                LOG.fatal(msg);
                throw new IOException(msg);
            }
            String variable = value.substring(varStart + VARIABLE_START_LENGTH, varEnd);
            String substituteValue = System.getProperty(variable);
            if (substituteValue == null) {
                substituteValue = conf.get(variable);
            }
            if (substituteValue == null) {
                String msg = "variable " + variable + " not set in system property " + "or hama configs";
                LOG.fatal(msg);
                throw new IOException(msg);
            }
            newValue.append(substituteValue);
            varEnd += VARIABLE_END_LENGTH;
            varStart = value.indexOf(VARIABLE_START, varEnd);
        }
        // Special case for 'hama.cluster.distributed' property being 'true'
        if (key.startsWith("server.")) {
            if (conf.get(CLUSTER_DISTRIBUTED).equals(CLUSTER_IS_DISTRIBUTED) && value.startsWith("localhost")) {
                String msg = "The server in zoo.cfg cannot be set to localhost "
                        + "in a fully-distributed setup because it won't be reachable. "
                        + "See \"Getting Started\" for more information.";
                LOG.fatal(msg);
                throw new IOException(msg);
            }
        }
        newValue.append(value.substring(varEnd));
        properties.setProperty(key, newValue.toString());
    }
    return properties;
}
From source file: org.apache.hive.hcatalog.common.HCatUtil.java

public static HiveConf getHiveConf(Configuration conf) throws IOException {
    HiveConf hiveConf = new HiveConf(conf, HCatUtil.class);

    // copy the hive conf into the job conf and restore it
    // in the backend context
    if (conf.get(HCatConstants.HCAT_KEY_HIVE_CONF) == null) {
        conf.set(HCatConstants.HCAT_KEY_HIVE_CONF, HCatUtil.serialize(hiveConf.getAllProperties()));
    } else {
        // Copy configuration properties into the hive conf
        Properties properties = (Properties) HCatUtil.deserialize(conf.get(HCatConstants.HCAT_KEY_HIVE_CONF));
        for (Map.Entry<Object, Object> prop : properties.entrySet()) {
            if (prop.getValue() instanceof String) {
                hiveConf.set((String) prop.getKey(), (String) prop.getValue());
            } else if (prop.getValue() instanceof Integer) {
                hiveConf.setInt((String) prop.getKey(), (Integer) prop.getValue());
            } else if (prop.getValue() instanceof Boolean) {
                hiveConf.setBoolean((String) prop.getKey(), (Boolean) prop.getValue());
            } else if (prop.getValue() instanceof Long) {
                hiveConf.setLong((String) prop.getKey(), (Long) prop.getValue());
            } else if (prop.getValue() instanceof Float) {
                hiveConf.setFloat((String) prop.getKey(), (Float) prop.getValue());
            }
        }
    }

    if (conf.get(HCatConstants.HCAT_KEY_TOKEN_SIGNATURE) != null) {
        hiveConf.setVar(HiveConf.ConfVars.METASTORE_TOKEN_SIGNATURE,
                conf.get(HCatConstants.HCAT_KEY_TOKEN_SIGNATURE));
    }

    return hiveConf;
}
From source file: ca.uhn.fhir.context.ModelScanner.java

static Set<Class<? extends IBase>> scanVersionPropertyFile(Set<Class<? extends IBase>> theDatatypes,
        Map<String, Class<? extends IBaseResource>> theResourceTypes, FhirVersionEnum theVersion,
        Map<Class<? extends IBase>, BaseRuntimeElementDefinition<?>> theExistingElementDefinitions) {
    Set<Class<? extends IBase>> retVal = new HashSet<Class<? extends IBase>>();

    InputStream str = theVersion.getVersionImplementation().getFhirVersionPropertiesFile();
    Properties prop = new Properties();
    try {
        prop.load(str);
        for (Entry<Object, Object> nextEntry : prop.entrySet()) {
            String nextKey = nextEntry.getKey().toString();
            String nextValue = nextEntry.getValue().toString();

            if (nextKey.startsWith("datatype.")) {
                if (theDatatypes != null) {
                    try {
                        // Datatypes
                        @SuppressWarnings("unchecked")
                        Class<? extends IBase> dtType = (Class<? extends IBase>) Class.forName(nextValue);
                        if (theExistingElementDefinitions.containsKey(dtType)) {
                            continue;
                        }
                        retVal.add(dtType);

                        if (IElement.class.isAssignableFrom(dtType)) {
                            @SuppressWarnings("unchecked")
                            Class<? extends IElement> nextClass = (Class<? extends IElement>) dtType;
                            theDatatypes.add(nextClass);
                        } else if (IBaseDatatype.class.isAssignableFrom(dtType)) {
                            @SuppressWarnings("unchecked")
                            Class<? extends IBaseDatatype> nextClass = (Class<? extends IBaseDatatype>) dtType;
                            theDatatypes.add(nextClass);
                        } else {
                            ourLog.warn("Class is not assignable from " + IElement.class.getSimpleName() + " or "
                                    + IBaseDatatype.class.getSimpleName() + ": " + nextValue);
                            continue;
                        }
                    } catch (ClassNotFoundException e) {
                        throw new ConfigurationException("Unknown class[" + nextValue
                                + "] for data type definition: " + nextKey.substring("datatype.".length()), e);
                    }
                }
            } else if (nextKey.startsWith("resource.")) {
                // Resources
                String resName = nextKey.substring("resource.".length()).toLowerCase();
                try {
                    @SuppressWarnings("unchecked")
                    Class<? extends IBaseResource> nextClass = (Class<? extends IBaseResource>) Class
                            .forName(nextValue);
                    if (theExistingElementDefinitions.containsKey(nextClass)) {
                        continue;
                    }
                    if (!IBaseResource.class.isAssignableFrom(nextClass)) {
                        throw new ConfigurationException("Class is not assignable from "
                                + IBaseResource.class.getSimpleName() + ": " + nextValue);
                    }

                    theResourceTypes.put(resName, nextClass);
                } catch (ClassNotFoundException e) {
                    throw new ConfigurationException("Unknown class[" + nextValue + "] for resource definition: "
                            + nextKey.substring("resource.".length()), e);
                }
            } else {
                throw new ConfigurationException(
                        "Unexpected property in version property file: " + nextKey + "=" + nextValue);
            }
        }
    } catch (IOException e) {
        throw new ConfigurationException("Failed to load model property file from classpath: "
                + "/ca/uhn/fhir/model/dstu/model.properties");
    } finally {
        IOUtils.closeQuietly(str);
    }

    return retVal;
}
From source file: org.apache.jmeter.save.SaveService.java

private static void initProps() {
    // Load the alias properties
    try {
        fileVersion = getChecksumForPropertiesFile();
    } catch (IOException | NoSuchAlgorithmException e) {
        log.fatalError("Can't compute checksum for saveservice properties file", e);
        throw new JMeterError("JMeter requires the checksum of saveservice properties file to continue", e);
    }
    try {
        Properties nameMap = loadProperties();
        // now create the aliases
        for (Map.Entry<Object, Object> me : nameMap.entrySet()) {
            String key = (String) me.getKey();
            String val = (String) me.getValue();
            if (!key.startsWith("_")) { // $NON-NLS-1$
                makeAlias(key, val);
            } else {
                // process special keys
                if (key.equalsIgnoreCase("_version")) { // $NON-NLS-1$
                    propertiesVersion = val;
                    log.info("Using SaveService properties version " + propertiesVersion);
                } else if (key.equalsIgnoreCase("_file_version")) { // $NON-NLS-1$
                    log.info("SaveService properties file version is now computed by a checksum,"
                            + "the property _file_version is not used anymore and can be removed.");
                } else if (key.equalsIgnoreCase("_file_encoding")) { // $NON-NLS-1$
                    fileEncoding = val;
                    log.info("Using SaveService properties file encoding " + fileEncoding);
                } else {
                    key = key.substring(1); // Remove the leading "_"
                    try {
                        final String trimmedValue = val.trim();
                        if (trimmedValue.equals("collection") // $NON-NLS-1$
                                || trimmedValue.equals("mapping")) { // $NON-NLS-1$
                            registerConverter(key, JMXSAVER, true);
                            registerConverter(key, JTLSAVER, true);
                        } else {
                            registerConverter(key, JMXSAVER, false);
                            registerConverter(key, JTLSAVER, false);
                        }
                    } catch (IllegalAccessException | InstantiationException | ClassNotFoundException
                            | IllegalArgumentException | SecurityException | InvocationTargetException
                            | NoSuchMethodException e1) {
                        log.warn("Can't register a converter: " + key, e1);
                    }
                }
            }
        }
    } catch (IOException e) {
        log.fatalError("Bad saveservice properties file", e);
        throw new JMeterError("JMeter requires the saveservice properties file to continue");
    }
}
From source file: org.apache.hama.zookeeper.QuorumPeer.java

/**
 * Return the ZK Quorum servers string given zk properties returned by
 * makeZKProps
 *
 * @param properties the ZK properties
 * @return Quorum servers String
 */
public static String getZKQuorumServersString(Properties properties) {
    String clientPort = null;
    List<String> servers = new ArrayList<String>();

    // The clientPort option may come after the server.X hosts, so we need to
    // grab everything and then create the final host:port comma separated list.
    boolean anyValid = false;
    for (Entry<Object, Object> property : properties.entrySet()) {
        String key = property.getKey().toString().trim();
        String value = property.getValue().toString().trim();
        if (key.equals("clientPort")) {
            clientPort = value;
        } else if (key.startsWith("server.")) {
            String host = value.substring(0, value.indexOf(':'));
            servers.add(host);
            try {
                // noinspection ResultOfMethodCallIgnored
                InetAddress.getByName(host);
                anyValid = true;
            } catch (UnknownHostException e) {
                LOG.warn(StringUtils.stringifyException(e));
            }
        }
    }

    if (!anyValid) {
        LOG.error("no valid quorum servers found in " + Constants.ZOOKEEPER_CONFIG_NAME);
        return null;
    }
    if (clientPort == null) {
        LOG.error("no clientPort found in " + Constants.ZOOKEEPER_CONFIG_NAME);
        return null;
    }
    if (servers.isEmpty()) {
        LOG.fatal("No server.X lines found in conf/zoo.cfg. Hama must have a "
                + "ZooKeeper cluster configured for its operation.");
        return null;
    }

    StringBuilder hostPortBuilder = new StringBuilder();
    for (int i = 0; i < servers.size(); ++i) {
        String host = servers.get(i);
        if (i > 0) {
            hostPortBuilder.append(',');
        }
        hostPortBuilder.append(host);
        hostPortBuilder.append(':');
        hostPortBuilder.append(clientPort);
    }

    return hostPortBuilder.toString();
}
From source file: com.asakusafw.lang.compiler.cli.BatchCompilerCli.java

private static Map<String, String> parseProperties(CommandLine cmd, Option option) {
    Properties properties = cmd.getOptionProperties(option.getLongOpt());
    Map<String, String> results = new TreeMap<>();
    for (Map.Entry<Object, Object> entry : properties.entrySet()) {
        results.put((String) entry.getKey(), (String) entry.getValue());
    }
    return results;
}
From source file: org.apache.hadoop.hive.llap.cli.LlapServiceDriver.java

static void populateConfWithLlapProperties(Configuration conf, Properties properties) {
    for (Entry<Object, Object> props : properties.entrySet()) {
        String key = (String) props.getKey();
        if (HiveConf.getLlapDaemonConfVars().contains(key)) {
            conf.set(key, (String) props.getValue());
        } else {
            if (key.startsWith(HiveConf.PREFIX_LLAP) || key.startsWith(HiveConf.PREFIX_HIVE_LLAP)) {
                // pass the key as the argument so the {} placeholder is actually filled in
                LOG.warn("Adding key [{}] even though it is not in the set of known llap-server keys", key);
                conf.set(key, (String) props.getValue());
            } else {
                LOG.warn("Ignoring unknown llap server parameter: [{}]", key);
            }
        }
    }
}
From source file: bear.core.BearMain.java

static Optional<ClassLoader> createDepsClassLoader(List<File> folders) {
    File bootstrapFile = new File(BEAR_DIR, "bootstrap.properties");

    if (!bootstrapFile.exists()) return Optional.absent();

    FileInputStream fis = null;

    try {
        Properties properties = new Properties();

        fis = new FileInputStream(bootstrapFile);
        properties.load(fis);

        Set<Map.Entry<String, String>> entries = (Set) properties.entrySet();

        for (Map.Entry<String, String> entry : entries) {
            String key = entry.getKey();

            if (!key.startsWith("logger.")) continue;

            key = StringUtils.substringAfter(key, "logger.");

            LoggingBooter.changeLogLevel(key, Level.toLevel(entry.getValue()));
        }

        String property = "bearMain.customFolders";
        String customFolders = properties.getProperty(property, null);

        if (customFolders != null) {
            List<String> temp = COMMA_SPLITTER.splitToList(customFolders);

            for (String s : temp) {
                File file = new File(s);

                if (!file.exists()) {
                    throw new NoSuchFileException("dir does not exist (:" + property + "): " + s);
                }

                folders.add(file);
            }
        }

        return new MavenBooter(properties).loadArtifacts(properties);
    } catch (Exception e) {
        throw Exceptions.runtime(e);
    } finally {
        IOUtils.closeQuietly(fis);
    }
}
From source file: edu.kit.dama.dataworkflow.util.DataWorkflowHelper.java

/**
 * Abort the staging process for all input objects of the provided task. The
 * associated download entities will be set to DOWNLOAD_REMOVED.
 *
 * @param pTask The task for which the staging should be aborted.
 */
public static void abortStaging(DataWorkflowTask pTask) {
    try {
        // update status
        Properties dataMap = pTask.getObjectTransferMapAsObject();
        Set<Entry<Object, Object>> entries = dataMap.entrySet();
        IAuthorizationContext ctx = DataWorkflowHelper.getTaskContext(pTask);
        for (Entry<Object, Object> entry : entries) {
            String objectId = (String) entry.getKey();
            Long transferId = Long.parseLong((String) entry.getValue());
            LOGGER.debug("Checking download status for object {} with download id {}", objectId, transferId);
            if (DownloadInformationServiceLocal.getSingleton().updateStatus(transferId,
                    DOWNLOAD_STATUS.DOWNLOAD_REMOVED.getId(), "Download aborted.", ctx) != 1) {
                LOGGER.warn("Download with id {} was not properly aborted.", transferId);
            } else {
                LOGGER.debug("Download with id {} aborted.", transferId);
            }
        }
    } catch (IOException ex) {
        LOGGER.error("Failed to obtain object-transfer map. Unable to abort staging.", ex);
    }
}