List of usage examples for java.util.Properties.entrySet()
@Override
public Set<Map.Entry<Object, Object>> entrySet()
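Before the project examples below, a minimal self-contained sketch of the basic pattern (the class name EntrySetDemo is illustrative, not from any of the projects). Properties inherits entrySet() from Hashtable<Object, Object>, so the entries are typed Object/Object even when every key and value is actually a String:

import java.util.Map;
import java.util.Properties;

public class EntrySetDemo {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.setProperty("host", "localhost");
        props.setProperty("port", "8080");

        // entrySet() comes from Hashtable<Object, Object>, so callers
        // typically convert keys and values to String themselves.
        for (Map.Entry<Object, Object> entry : props.entrySet()) {
            System.out.println(entry.getKey() + " = " + entry.getValue());
        }
    }
}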
From source file:org.apache.geode.cache.client.internal.PoolImpl.java
public RegionService createAuthenticatedCacheView(Properties properties) {
    if (!this.multiuserSecureModeEnabled) {
        throw new UnsupportedOperationException(
                "Operation not supported when multiuser-authentication is false.");
    }
    if (properties == null || properties.isEmpty()) {
        throw new IllegalArgumentException("Security properties cannot be empty.");
    }

    Cache cache = CacheFactory.getInstance(InternalDistributedSystem.getAnyInstance());
    Properties props = new Properties();
    for (Entry<Object, Object> entry : properties.entrySet()) {
        props.setProperty((String) entry.getKey(), (String) entry.getValue());
    }
    ProxyCache proxy = new ProxyCache(props, (InternalCache) cache, this);
    synchronized (this.proxyCacheList) {
        this.proxyCacheList.add(proxy);
    }
    return proxy;
}
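The casts in the loop above assume every key and value is a String, which holds when the source Properties was populated via setProperty. As an alternative sketch (not part of the Geode source), stringPropertyNames() avoids the casts and also picks up String-valued entries from the defaults table:

// Illustrative alternative: iterate only the String-typed entries,
// including any inherited defaults.
Properties props = new Properties();
for (String name : properties.stringPropertyNames()) {
    props.setProperty(name, properties.getProperty(name));
}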
From source file:com.cloudera.sqoop.metastore.hsqldb.HsqldbJobStorage.java
@Override
/** {@inheritDoc} */
public JobData read(String jobName) throws IOException {
    try {
        if (!jobExists(jobName)) {
            LOG.error("Cannot restore job: " + jobName);
            LOG.error("(No such job)");
            throw new IOException("Cannot restore missing job " + jobName);
        }

        LOG.debug("Restoring job: " + jobName);
        Properties schemaProps = getV0Properties(jobName, PROPERTY_CLASS_SCHEMA);
        Properties sqoopOptProps = getV0Properties(jobName, PROPERTY_CLASS_SQOOP_OPTIONS);
        Properties configProps = getV0Properties(jobName, PROPERTY_CLASS_CONFIG);

        // Check that we're not using a saved job from a previous
        // version whose functionality has been deprecated.
        String thisPropSetId = schemaProps.getProperty(PROPERTY_SET_KEY);
        LOG.debug("System property set: " + CUR_PROPERTY_SET_ID);
        LOG.debug("Stored property set: " + thisPropSetId);
        if (!CUR_PROPERTY_SET_ID.equals(thisPropSetId)) {
            LOG.warn("The property set present in this database was written by");
            LOG.warn("an incompatible version of Sqoop. This may result in an");
            LOG.warn("incomplete operation.");
            // TODO(aaron): Should this fail out-right?
        }

        String toolName = schemaProps.getProperty(SQOOP_TOOL_KEY);
        if (null == toolName) {
            // Don't know what tool to create.
            throw new IOException("Incomplete metadata; missing " + SQOOP_TOOL_KEY);
        }

        SqoopTool tool = SqoopTool.getTool(toolName);
        if (null == tool) {
            throw new IOException("Error in job metadata: invalid tool " + toolName);
        }

        Configuration conf = new Configuration();
        for (Map.Entry<Object, Object> entry : configProps.entrySet()) {
            conf.set(entry.getKey().toString(), entry.getValue().toString());
        }

        SqoopOptions opts = new SqoopOptions();
        opts.setConf(conf);
        opts.loadProperties(sqoopOptProps);

        // Set the job connection information for this job.
        opts.setJobName(jobName);
        opts.setStorageDescriptor(connectedDescriptor);

        return new JobData(opts, tool);
    } catch (SQLException sqlE) {
        throw new IOException("Error communicating with database", sqlE);
    }
}
From source file:ml.shifu.shifu.core.processor.TrainModelProcessor.java
/**
 * Update some fields of conf based on the current project.
 *
 * @throws IOException
 */
private void generateGlobalConf() throws IOException {
    if (HDFSUtils.getLocalFS().exists(new Path(globalDefaultConfFile.getName()))) {
        // local project already has a global conf, so we do not copy it again
        LOG.info("Project already has global conf");
        return;
    }

    Configuration globalConf = new Configuration(false);
    globalConf.addResource(globalDefaultConfFile);

    // set training data path
    globalConf.set("shifu.application.training-data-path", super.getPathFinder().getNormalizedDataPath());
    // set workers instance number based on training data files number
    int fileNumber = HDFSUtils.getFileNumber(HDFSUtils.getFS(),
            new Path(super.getPathFinder().getNormalizedDataPath()));
    globalConf.set("shifu.worker.instances", Integer.toString(fileNumber));
    // set backup workers as 1:10
    int backupWorkerNumber = (fileNumber / 10) > 0 ? fileNumber / 10 : 1;
    globalConf.set("shifu.worker.instances.backup", Integer.toString(backupWorkerNumber));
    // set model conf
    globalConf.set("shifu.application.model-conf", super.getPathFinder().getModelConfigPath(SourceType.LOCAL));
    // set column conf
    globalConf.set("shifu.application.column-conf", super.getPathFinder().getColumnConfigPath(SourceType.LOCAL));
    // set python script
    if (this.modelConfig.getNormalize().getNormType() == NormType.ZSCALE_INDEX) {
        // Running wide and deep
        globalConf.set("shifu.application.python-script-path",
                super.getPathFinder().getScriptPath("scripts/wnp_ssgd_not_embadding.py"));
        setSelectedColumnForWideDeep(globalConf);
    } else {
        // Running normal NN
        globalConf.set("shifu.application.python-script-path",
                super.getPathFinder().getScriptPath("scripts/ssgd_monitor.py"));
        // set selected column number; target column number; weight column number
        setSelectedTargetAndWeightColumnNumber(globalConf);
    }
    // set shell to launch python
    globalConf.set("shifu.application.python-shell-path",
            super.getPathFinder().getScriptPath("bin/dist_pytrain.sh"));
    // set application name
    globalConf.set("shifu.application.name", "Shifu Tensorflow: " + modelConfig.getBasic().getName());
    // set yarn queue
    globalConf.set("shifu.yarn.queue", Environment.getProperty(Environment.HADOOP_JOB_QUEUE, "default"));
    // set data total count
    globalConf.set("shifu.application.total-training-data-number",
            Long.toString(columnConfigList.get(0).getTotalCount()));
    globalConf.set("shifu.application.epochs", this.modelConfig.getTrain().getNumTrainEpochs() + "");
    // set hdfs tmp model path
    globalConf.set("shifu.application.tmp-model-path", super.getPathFinder().getTmpModelsPath(SourceType.HDFS));
    // set hdfs final model path
    globalConf.set("shifu.application.final-model-path", super.getPathFinder().getModelsPath(SourceType.HDFS));

    // add all shifu conf: this includes 'shifu train -Dk=v' <k,v> pairs,
    // which will override the default keys set above
    Properties shifuConfigMap = Environment.getProperties();
    for (Map.Entry<Object, Object> entry : shifuConfigMap.entrySet()) {
        globalConf.set(entry.getKey().toString(), entry.getValue().toString());
    }

    OutputStream os = null;
    try {
        // Write user's overridden conf to an xml to be localized.
        os = new FileOutputStream(globalDefaultConfFile.getName());
        globalConf.writeXml(os);
    } catch (IOException e) {
        throw new RuntimeException(
                "Failed to create " + globalDefaultConfFile.getName() + " conf file. Exiting.", e);
    } finally {
        if (os != null) {
            os.close();
        }
    }
}
From source file:com.udps.hive.jdbc.HiveConnection.java
public HiveConnection(String uri, Properties info) throws SQLException {
    setupLoginTimeout();
    jdbcURI = uri;
    // parse the connection uri
    Utils.JdbcConnectionParams connParams;
    try {
        connParams = Utils.parseURL(uri);
    } catch (IllegalArgumentException e) {
        throw new SQLException(e);
    }
    // extract parsed connection parameters:
    // JDBC URL:
    //   jdbc:hive2://<host>:<port>/dbName;sess_var_list?hive_conf_list#hive_var_list
    // each list: <key1>=<val1>;<key2>=<val2> and so on
    //   sess_var_list  -> sessConfMap
    //   hive_conf_list -> hiveConfMap
    //   hive_var_list  -> hiveVarMap
    host = connParams.getHost();
    port = connParams.getPort();
    sessConfMap = connParams.getSessionVars();
    hiveConfMap = connParams.getHiveConfs();
    hiveVarMap = connParams.getHiveVars();
    for (Map.Entry<Object, Object> kv : info.entrySet()) {
        if ((kv.getKey() instanceof String)) {
            String key = (String) kv.getKey();
            if (key.startsWith(HIVE_VAR_PREFIX)) {
                hiveVarMap.put(key.substring(HIVE_VAR_PREFIX.length()), info.getProperty(key));
            } else if (key.startsWith(HIVE_CONF_PREFIX)) {
                hiveConfMap.put(key.substring(HIVE_CONF_PREFIX.length()), info.getProperty(key));
            }
        }
    }

    isEmbeddedMode = connParams.isEmbeddedMode();
    if (isEmbeddedMode) {
        client = new EmbeddedThriftBinaryCLIService();
    } else {
        // extract user/password from JDBC connection properties if it's not
        // supplied in the connection URL
        if (info.containsKey(HIVE_AUTH_USER)) {
            sessConfMap.put(HIVE_AUTH_USER, info.getProperty(HIVE_AUTH_USER));
            if (info.containsKey(HIVE_AUTH_PASSWD)) {
                sessConfMap.put(HIVE_AUTH_PASSWD, info.getProperty(HIVE_AUTH_PASSWD));
            }
        }
        if (info.containsKey(HIVE_AUTH_TYPE)) {
            sessConfMap.put(HIVE_AUTH_TYPE, info.getProperty(HIVE_AUTH_TYPE));
        }
        // open the client transport
        openTransport();
        // set up the client
        client = new TCLIService.Client(new TBinaryProtocol(transport));
    }

    // add supported protocols
    supportedProtocols.add(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V1);
    supportedProtocols.add(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V2);
    supportedProtocols.add(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V3);
    supportedProtocols.add(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V4);
    supportedProtocols.add(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V5);
    supportedProtocols.add(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V6);
    supportedProtocols.add(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V7);

    // open client session
    openSession(connParams);
}
From source file:com.netflix.blitz4j.LoggingConfiguration.java
/**
 * Convert appenders specified by the property
 * <code>log4j.logger.asyncAppender</code> to the blitz4j asynchronous
 * appenders.
 *
 * @param props
 *            - The properties that need to be passed into log4j for
 *            configuration.
 * @throws ConfigurationException
 * @throws FileNotFoundException
 */
private void convertConfiguredAppendersToAsync(Properties props)
        throws ConfigurationException, FileNotFoundException {
    for (Map.Entry<String, String> originalAsyncAppenderMapEntry : originalAsyncAppenderNameMap.entrySet()) {
        String asyncAppenderName = originalAsyncAppenderMapEntry.getValue();
        props.setProperty(LOG4J_APPENDER_PREFIX + LOG4J_APPENDER_DELIMITER + asyncAppenderName,
                AsyncAppender.class.getName());
        // Set the original appender so that it can be fetched later after
        // configuration
        String originalAppenderName = originalAsyncAppenderMapEntry.getKey();
        props.setProperty(LOG4J_APPENDER_PREFIX + LOG4J_APPENDER_DELIMITER + asyncAppenderName
                + LOG4J_APPENDER_DELIMITER + PROP_LOG4J_ORIGINAL_APPENDER_NAME, originalAppenderName);
        // Set the batcher to reject the collector request instead of it
        // participating in processing
        ConfigurationManager.getConfigInstance().setProperty("batcher." + AsyncAppender.class.getName()
                + "." + originalAppenderName + "." + "rejectWhenFull", true);
        // Set the default value of the processing max threads to 1, if a
        // value is not specified
        int maxThreads = ConfigurationManager.getConfigInstance().getInt(
                "batcher." + AsyncAppender.class.getName() + "." + originalAppenderName + "." + "maxThreads", 0);
        if (maxThreads == 0) {
            ConfigurationManager.getConfigInstance().setProperty("batcher." + AsyncAppender.class.getName()
                    + "." + originalAppenderName + "." + "maxThreads", "1");
        }

        for (Map.Entry mapEntry : props.entrySet()) {
            String key = mapEntry.getKey().toString();
            if ((key.contains(LOG4J_PREFIX) || key.contains(ROOT_CATEGORY) || key.contains(ROOT_LOGGER))
                    && !key.contains(PROP_LOG4J_ASYNC_APPENDERS)
                    && !key.contains(PROP_LOG4J_ORIGINAL_APPENDER_NAME)) {
                Object value = mapEntry.getValue();
                if (value != null && !((String) value).contains(asyncAppenderName)) {
                    String convertedString = value.toString().replace(originalAppenderName, asyncAppenderName);
                    mapEntry.setValue(convertedString);
                }
            }
        }
    }
}
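The inner loop above works because entrySet() returns a live view of the backing table: Map.Entry.setValue writes through to the Properties object, rewriting the appender references in place. A minimal sketch of that behavior, with illustrative key and appender names:

Properties props = new Properties();
props.setProperty("log4j.rootLogger", "INFO, stdout");

// The entry set is backed by the Properties table, so setValue mutates
// the stored value without re-inserting the key.
for (Map.Entry<Object, Object> entry : props.entrySet()) {
    entry.setValue(entry.getValue().toString().replace("stdout", "ASYNC_stdout"));
}
System.out.println(props.getProperty("log4j.rootLogger")); // INFO, ASYNC_stdout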
From source file:org.alfresco.repo.content.metadata.AbstractMappingMetadataExtracter.java
/**
 * A utility method to convert global properties to the Map form for the given
 * propertyComponent.
 * <p>
 * Mappings can be specified using the same method defined for
 * normal mapping properties files but with a prefix of
 * <code>metadata.extracter</code>, the extracter bean name, and the propertyComponent.
 * For example:
 *
 *   metadata.extracter.TikaAuto.extract.namespace.prefix.my=http://DummyMappingMetadataExtracter
 *   metadata.extracter.TikaAuto.extract.namespace.prefix.cm=http://www.alfresco.org/model/content/1.0
 *   metadata.extracter.TikaAuto.extract.dc\:description=cm:description, my:customDescription
 */
private Map<Object, Object> getRelevantGlobalProperties(String propertyComponent) {
    if (applicationContext == null) {
        logger.info("ApplicationContext not set");
        return null;
    }
    Properties globalProperties = (Properties) applicationContext.getBean("global-properties");
    if (globalProperties == null) {
        logger.info("Could not get global-properties");
        return null;
    }
    Map<Object, Object> relevantGlobalPropertiesMap = new HashMap<Object, Object>();
    String propertyPrefix = PROPERTY_PREFIX_METADATA + beanName + propertyComponent;
    for (Entry<Object, Object> globalEntry : globalProperties.entrySet()) {
        if (((String) globalEntry.getKey()).startsWith(propertyPrefix)) {
            relevantGlobalPropertiesMap.put(((String) globalEntry.getKey()).replace(propertyPrefix, ""),
                    globalEntry.getValue());
        }
    }
    return relevantGlobalPropertiesMap;
}
From source file:edu.unc.lib.dl.util.IngestProperties.java
/**
 * @throws IOException
 * @throws FileNotFoundException
 * @throws ClassNotFoundException
 */
private void load() throws Exception {
    if (log.isDebugEnabled()) {
        log.debug("loading ingest properties from " + this.propFile.getAbsolutePath());
    }
    Properties props = new Properties();
    try (FileInputStream in = new FileInputStream(propFile)) {
        props.load(in);
    }
    this.submitter = props.getProperty("submitter");
    String submitterGroups = props.getProperty("submitterGroups");
    if (submitterGroups != null) {
        this.submitterGroups = submitterGroups;
    }
    String er = props.getProperty("email.recipients");
    if (er != null) {
        this.emailRecipients = er.split(",");
    }
    this.message = props.getProperty("message");
    String bytes = props.getProperty("managedBytes");
    if (bytes != null) {
        try {
            this.managedBytes = Long.parseLong(bytes);
        } catch (NumberFormatException e) {
            // ignore a malformed managedBytes value
        }
    }
    String subTime = props.getProperty("submissionTime");
    if (subTime != null) {
        try {
            this.submissionTime = Long.parseLong(subTime);
        } catch (NumberFormatException e) {
            throw new Error("Unexpected submissionTime exception", e);
        }
    }
    String finTime = props.getProperty("finishedTime");
    if (finTime != null) {
        try {
            this.finishedTime = Long.parseLong(finTime);
        } catch (NumberFormatException e) {
            throw new Error("Unexpected finishedTime exception", e);
        }
    }
    String stTime = props.getProperty("startTime");
    if (stTime != null) {
        try {
            this.startTime = Long.parseLong(stTime);
        } catch (NumberFormatException e) {
            throw new Error("Unexpected startTime exception", e);
        }
    }
    this.originalDepositId = props.getProperty("originalDepositId");
    this.containerPlacements = new HashMap<PID, ContainerPlacement>();
    for (Entry<Object, Object> e : props.entrySet()) {
        String key = (String) e.getKey();
        if (key.startsWith("placement")) {
            String s = (String) e.getValue();
            String[] vals = s.split(",");
            ContainerPlacement p = new ContainerPlacement();
            p.pid = new PID(vals[0]);
            p.parentPID = new PID(vals[1]);
            if (2 < vals.length && !"null".equals(vals[2])) {
                p.designatedOrder = Integer.parseInt(vals[2]);
            }
            if (3 < vals.length && !"null".equals(vals[3])) {
                p.sipOrder = Integer.parseInt(vals[3]);
            }
            if (4 < vals.length && !"null".equals(vals[4])) {
                p.label = vals[4];
            }
            this.containerPlacements.put(p.pid, p);
        }
    }
}
From source file:org.schemaspy.Config.java
/**
 * Return all of the configuration options as a List of Strings, with
 * each parameter and its value as a separate element.
 *
 * @return
 * @throws IOException
 */
public List<String> asList() throws IOException {
    List<String> params = new ArrayList<>();

    if (originalDbSpecificOptions != null) {
        for (String key : originalDbSpecificOptions.keySet()) {
            String value = originalDbSpecificOptions.get(key);
            if (!key.startsWith("-"))
                key = "-" + key;
            params.add(key);
            params.add(value);
        }
    }

    if (isEncodeCommentsEnabled())
        params.add("-ahic");
    if (isEvaluateAllEnabled())
        params.add("-all");
    if (!isHtmlGenerationEnabled())
        params.add("-nohtml");
    if (!isImpliedConstraintsEnabled())
        params.add("-noimplied");
    if (!isLogoEnabled())
        params.add("-nologo");
    if (isMeterEnabled())
        params.add("-meter");
    if (!isNumRowsEnabled())
        params.add("-norows");
    if (!isViewsEnabled())
        params.add("-noviews");
    if (isRankDirBugEnabled())
        params.add("-rankdirbug");
    if (isRailsEnabled())
        params.add("-rails");
    if (isSingleSignOn())
        params.add("-sso");
    if (isSchemaDisabled())
        params.add("-noschema");

    String value = getDriverPath();
    if (value != null) {
        params.add("-dp");
        params.add(value);
    }
    params.add("-css");
    params.add(getCss());
    params.add("-charset");
    params.add(getCharset());
    params.add("-font");
    params.add(getFont());
    params.add("-fontsize");
    params.add(String.valueOf(getFontSize()));
    params.add("-t");
    params.add(getDbType());
    isHighQuality(); // query to set renderer correctly
    isLowQuality();  // query to set renderer correctly
    params.add("-renderer"); // instead of -hq and/or -lq
    params.add(getRenderer());
    value = getDescription();
    if (value != null) {
        params.add("-desc");
        params.add(value);
    }
    value = getPassword();
    if (value != null && !isPromptForPasswordEnabled()) {
        // note that we don't pass -pfp since child processes
        // won't have a console
        params.add("-p");
        params.add(value);
    }
    value = getCatalog();
    if (value != null) {
        params.add("-cat");
        params.add(value);
    }
    value = getSchema();
    if (value != null) {
        params.add("-s");
        params.add(value);
    }
    value = getUser();
    if (value != null) {
        params.add("-u");
        params.add(value);
    }
    value = getConnectionPropertiesFile();
    if (value != null) {
        params.add("-connprops");
        params.add(value);
    } else {
        Properties props = getConnectionProperties();
        if (!props.isEmpty()) {
            params.add("-connprops");
            StringBuilder buf = new StringBuilder();
            for (Entry<Object, Object> entry : props.entrySet()) {
                buf.append(entry.getKey());
                buf.append(ESCAPED_EQUALS);
                buf.append(entry.getValue());
                buf.append(';');
            }
            params.add(buf.toString());
        }
    }
    value = getDb();
    if (value != null) {
        params.add("-db");
        params.add(value);
    }
    value = getHost();
    if (value != null) {
        params.add("-host");
        params.add(value);
    }
    if (getPort() != null) {
        params.add("-port");
        params.add(getPort().toString());
    }
    value = getServer();
    if (value != null) {
        params.add("-server");
        params.add(value);
    }
    value = getMeta();
    if (value != null) {
        params.add("-meta");
        params.add(value);
    }
    value = getTemplateDirectory();
    if (value != null) {
        params.add("-template");
        params.add(value);
    }
    if (getGraphvizDir() != null) {
        params.add("-gv");
        params.add(getGraphvizDir().toString());
    }
    params.add("-loglevel");
    params.add(getLogLevel().toString().toLowerCase());
    params.add("-sqlFormatter");
    params.add(getSqlFormatter().getClass().getName());
    params.add("-i");
    params.add(getTableInclusions().toString());
    params.add("-I");
    params.add(getTableExclusions().toString());
    params.add("-X");
    params.add(getColumnExclusions().toString());
    params.add("-x");
    params.add(getIndirectColumnExclusions().toString());
    params.add("-dbthreads");
    params.add(String.valueOf(getMaxDbThreads()));
    params.add("-maxdet");
    params.add(String.valueOf(getMaxDetailedTables()));
    params.add("-o");
    params.add(getOutputDir().toString());

    return params;
}
From source file:com.buaa.cfs.conf.Configuration.java
private void overlay(Properties to, Properties from) {
    for (Entry<Object, Object> entry : from.entrySet()) {
        to.put(entry.getKey(), entry.getValue());
    }
}
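One caveat with copying through put rather than setProperty: put accepts any Object (Properties extends Hashtable<Object, Object>), but getProperty returns null for values that are not Strings. A small illustrative sketch:

Properties p = new Properties();
p.put("count", 42);                         // legal, but stores a non-String value
System.out.println(p.get("count"));         // 42
System.out.println(p.getProperty("count")); // null -- the value is not a String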
From source file:com.buaa.cfs.conf.Configuration.java
@Override
public void write(DataOutput out) throws IOException {
    Properties props = getProps();
    WritableUtils.writeVInt(out, props.size());
    for (Entry<Object, Object> item : props.entrySet()) {
        com.buaa.cfs.io.Text.writeString(out, (String) item.getKey());
        com.buaa.cfs.io.Text.writeString(out, (String) item.getValue());
        WritableUtils.writeCompressedStringArray(out, updatingResource.get(item.getKey()));
    }
}