List of usage examples for java.util.Properties.entrySet()
@Override
public Set<Map.Entry<Object, Object>> entrySet()
From source file: com.adito.server.Main.java
private void displaySystemInfo() throws SocketException { ///*ww w . j av a2 s . co m*/ if (useDevConfig) { log.warn("Development environment enabled. Do not use this on a production server."); } if (log.isInfoEnabled()) { log.info("Version is " + ContextHolder.getContext().getVersion()); log.info("Java version is " + SystemProperties.get("java.version")); log.info("Server is installed on " + hostname + "/" + hostAddress); log.info("Configuration: " + CONF_DIR.getAbsolutePath()); } if (SystemProperties.get("java.vm.name", "").indexOf("GNU") > -1 || SystemProperties.get("java.vm.name", "").indexOf("libgcj") > -1) { System.out.println("********** WARNING **********"); System.out.println("The system has detected that the Java runtime is GNU/GCJ"); System.out.println("Adito does not work correctly with this Java runtime"); System.out.println("you should reconfigure with a different runtime"); System.out.println("*****************************"); log.warn("********** WARNING **********"); log.warn("The system has detected that the Java runtime is GNU/GCJ"); log.warn("Adito may not work correctly with this Java runtime"); log.warn("you should reconfigure with a different runtime"); log.warn("*****************************"); } Enumeration e = NetworkInterface.getNetworkInterfaces(); while (e.hasMoreElements()) { NetworkInterface netface = (NetworkInterface) e.nextElement(); if (log.isInfoEnabled()) log.info("Net interface: " + netface.getName()); Enumeration e2 = netface.getInetAddresses(); while (e2.hasMoreElements()) { InetAddress ip = (InetAddress) e2.nextElement(); if (log.isInfoEnabled()) log.info("IP address: " + ip.toString()); } } if (log.isInfoEnabled()) log.info("System properties follow:"); Properties sysProps = System.getProperties(); for (Iterator i = sysProps.entrySet().iterator(); i.hasNext();) { Map.Entry entry = (Map.Entry) i.next(); int idx = 0; String val = (String) entry.getValue(); while (true) { if (entry.getKey().equals("java.class.path")) { 
StringTokenizer t = new StringTokenizer(entry.getValue().toString(), SystemProperties.get("path.separator", ",")); while (t.hasMoreTokens()) { String s = t.nextToken(); if (log.isInfoEnabled()) log.info("java.class.path=" + s); } break; } else { if ((val.length() - idx) > 256) { if (log.isInfoEnabled()) log.info(" " + entry.getKey() + "=" + val.substring(idx, idx + 256)); idx += 256; } else { if (log.isInfoEnabled()) log.info(" " + entry.getKey() + "=" + val.substring(idx)); break; } } } } }
From source file: com.sslexplorer.server.Main.java
private void displaySystemInfo() throws SocketException { ///* ww w.j a v a2s . co m*/ if (useDevConfig) { log.warn("Development environment enabled. Do not use this on a production server."); } if (log.isInfoEnabled()) { log.info("Version is " + ContextHolder.getContext().getVersion()); log.info("Java version is " + SystemProperties.get("java.version")); log.info("Server is installed on " + hostname + "/" + hostAddress); log.info("Configuration: " + CONF_DIR.getAbsolutePath()); } if (SystemProperties.get("java.vm.name", "").indexOf("GNU") > -1 || SystemProperties.get("java.vm.name", "").indexOf("libgcj") > -1) { System.out.println("********** WARNING **********"); System.out.println("The system has detected that the Java runtime is GNU/GCJ"); System.out.println("SSL-Explorer does not work correctly with this Java runtime"); System.out.println("you should reconfigure with a different runtime"); System.out.println("*****************************"); log.warn("********** WARNING **********"); log.warn("The system has detected that the Java runtime is GNU/GCJ"); log.warn("SSL-Explorer may not work correctly with this Java runtime"); log.warn("you should reconfigure with a different runtime"); log.warn("*****************************"); } Enumeration e = NetworkInterface.getNetworkInterfaces(); while (e.hasMoreElements()) { NetworkInterface netface = (NetworkInterface) e.nextElement(); if (log.isInfoEnabled()) log.info("Net interface: " + netface.getName()); Enumeration e2 = netface.getInetAddresses(); while (e2.hasMoreElements()) { InetAddress ip = (InetAddress) e2.nextElement(); if (log.isInfoEnabled()) log.info("IP address: " + ip.toString()); } } if (log.isInfoEnabled()) log.info("System properties follow:"); Properties sysProps = System.getProperties(); for (Iterator i = sysProps.entrySet().iterator(); i.hasNext();) { Map.Entry entry = (Map.Entry) i.next(); int idx = 0; String val = (String) entry.getValue(); while (true) { if 
(entry.getKey().equals("java.class.path")) { StringTokenizer t = new StringTokenizer(entry.getValue().toString(), SystemProperties.get("path.separator", ",")); while (t.hasMoreTokens()) { String s = t.nextToken(); if (log.isInfoEnabled()) log.info("java.class.path=" + s); } break; } else { if ((val.length() - idx) > 256) { if (log.isInfoEnabled()) log.info(" " + entry.getKey() + "=" + val.substring(idx, idx + 256)); idx += 256; } else { if (log.isInfoEnabled()) log.info(" " + entry.getKey() + "=" + val.substring(idx)); break; } } } } }
From source file: org.jahia.services.usermanager.ldap.LDAPUserGroupProvider.java
/**
 * Translates Jahia search-criteria property names into their LDAP attribute
 * names using the configured name mapping.
 *
 * @param searchCriteria   the Jahia-side criteria; returned as-is when empty
 * @param configProperties mapping from Jahia property name to LDAP attribute name
 * @return the translated criteria, or null when a criterion (other than the
 *         "*" wildcard or the multi-criteria operation marker) has no mapping
 */
private Properties mapJahiaPropertiesToLDAP(Properties searchCriteria, Map<String, String> configProperties) {
    if (searchCriteria.size() == 0) {
        return searchCriteria;
    }
    Properties mapped = new Properties();
    if (searchCriteria.containsKey("*")) {
        mapped.setProperty("*", searchCriteria.getProperty("*"));
        if (searchCriteria.size() == 1) {
            // Wildcard was the only criterion; nothing left to translate.
            return mapped;
        }
    }
    for (Map.Entry<Object, Object> criterion : searchCriteria.entrySet()) {
        Object jahiaName = criterion.getKey();
        if (configProperties.containsKey(jahiaName)) {
            mapped.setProperty(configProperties.get(jahiaName), (String) criterion.getValue());
        } else {
            boolean wildcard = jahiaName.equals("*");
            boolean multiCriteriaMarker = jahiaName
                    .equals(JahiaUserManagerService.MULTI_CRITERIA_SEARCH_OPERATION);
            if (!wildcard && !multiCriteriaMarker) {
                // Unmappable criterion: the LDAP search cannot honour it.
                return null;
            }
        }
    }
    return mapped;
}
From source file: org.apache.oozie.action.hadoop.PigMain.java
/**
 * Entry point for the Oozie Pig action launcher: reads the action
 * configuration prepared by Oozie, materialises pig.properties and a log4j
 * configuration on local disk, assembles the Pig command-line arguments,
 * then invokes Pig (first "-version", then the real job) and finally
 * records the Hadoop child job IDs when Pig stats are unavailable.
 *
 * @param args unused; all inputs come from system properties and the action conf
 * @throws Exception if required configuration is missing or the Pig run fails
 */
@Override
protected void run(String[] args) throws Exception {
    System.out.println();
    System.out.println("Oozie Pig action configuration");
    System.out.println("=================================================================");

    // loading action conf prepared by Oozie
    Configuration actionConf = new Configuration(false);
    String actionXml = System.getProperty("oozie.action.conf.xml");

    if (actionXml == null) {
        throw new RuntimeException("Missing Java System Property [oozie.action.conf.xml]");
    }
    if (!new File(actionXml).exists()) {
        throw new RuntimeException("Action Configuration XML file [" + actionXml + "] does not exist");
    }

    actionConf.addResource(new Path("file:///", actionXml));
    setYarnTag(actionConf);
    setApplicationTags(actionConf, TEZ_APPLICATION_TAGS);

    // Copy every action-conf entry into the Pig properties file.
    Properties pigProperties = new Properties();
    for (Map.Entry<String, String> entry : actionConf) {
        pigProperties.setProperty(entry.getKey(), entry.getValue());
    }

    // propagate delegation related props from launcher job to Pig job
    String jobTokenFile = getFilePathFromEnv("HADOOP_TOKEN_FILE_LOCATION");
    if (jobTokenFile != null) {
        pigProperties.setProperty("mapreduce.job.credentials.binary", jobTokenFile);
        pigProperties.setProperty("tez.credentials.path", jobTokenFile);
        System.out.println("------------------------");
        System.out.println("Setting env property for mapreduce.job.credentials.binary to:" + jobTokenFile);
        System.out.println("------------------------");
        System.setProperty("mapreduce.job.credentials.binary", jobTokenFile);
    } else {
        System.out.println("Non-kerberos execution");
    }

    // setting oozie workflow id as caller context id for pig
    String callerId = "oozie:" + System.getProperty("oozie.job.id");
    pigProperties.setProperty("pig.log.trace.id", callerId);

    // NOTE(review): 'os' is not closed if store() throws — consider
    // try-with-resources, as is already done for 'os1' below.
    OutputStream os = new FileOutputStream("pig.properties");
    pigProperties.store(os, "");
    os.close();

    // Log the generated properties with password values masked.
    logMasking("pig.properties:", Arrays.asList("password"), pigProperties.entrySet());

    List<String> arguments = new ArrayList<String>();
    String script = actionConf.get(PigActionExecutor.PIG_SCRIPT);

    if (script == null) {
        throw new RuntimeException("Action Configuration does not have [oozie.pig.script] property");
    }

    if (!new File(script).exists()) {
        throw new RuntimeException("Error: Pig script file [" + script + "] does not exist");
    }

    printScript(script, "");

    arguments.add("-file");
    arguments.add(script);
    String[] params = MapReduceMain.getStrings(actionConf, PigActionExecutor.PIG_PARAMS);
    for (String param : params) {
        arguments.add("-param");
        arguments.add(param);
    }

    String hadoopJobId = System.getProperty("oozie.launcher.job.id");
    if (hadoopJobId == null) {
        throw new RuntimeException("Launcher Hadoop Job ID system property not set");
    }

    String logFile = new File("pig-oozie-" + hadoopJobId + ".log").getAbsolutePath();

    String pigLogLevel = actionConf.get("oozie.pig.log.level", "INFO");
    String rootLogLevel = actionConf.get("oozie.action." + LauncherMapper.ROOT_LOGGER_LEVEL, "INFO");

    // append required PIG properties to the default hadoop log4j file
    // Appender A = console, appender B = the per-job log file.
    log4jProperties.setProperty("log4j.rootLogger", rootLogLevel + ", A, B");
    log4jProperties.setProperty("log4j.logger.org.apache.pig", pigLogLevel + ", A, B");
    log4jProperties.setProperty("log4j.additivity.org.apache.pig", "false");
    log4jProperties.setProperty("log4j.appender.A", "org.apache.log4j.ConsoleAppender");
    log4jProperties.setProperty("log4j.appender.A.layout", "org.apache.log4j.PatternLayout");
    log4jProperties.setProperty("log4j.appender.A.layout.ConversionPattern", "%d [%t] %-5p %c %x - %m%n");
    log4jProperties.setProperty("log4j.appender.B", "org.apache.log4j.FileAppender");
    log4jProperties.setProperty("log4j.appender.B.file", logFile);
    log4jProperties.setProperty("log4j.appender.B.layout", "org.apache.log4j.PatternLayout");
    log4jProperties.setProperty("log4j.appender.B.layout.ConversionPattern", "%d [%t] %-5p %c %x - %m%n");
    log4jProperties.setProperty("log4j.logger.org.apache.hadoop.yarn.client.api.impl.YarnClientImpl", "INFO, B");

    String localProps = new File("piglog4j.properties").getAbsolutePath();
    try (OutputStream os1 = new FileOutputStream(localProps)) {
        log4jProperties.store(os1, "");
    }

    arguments.add("-log4jconf");
    arguments.add(localProps);

    // print out current directory
    File localDir = new File(localProps).getParentFile();
    System.out.println("Current (local) dir = " + localDir.getAbsolutePath());

    String pigLog = "pig-" + hadoopJobId + ".log";
    arguments.add("-logfile");
    arguments.add(pigLog);

    String[] pigArgs = MapReduceMain.getStrings(actionConf, PigActionExecutor.PIG_ARGS);
    for (String pigArg : pigArgs) {
        if (DISALLOWED_PIG_OPTIONS.contains(pigArg)) {
            throw new RuntimeException("Error: Pig argument " + pigArg + " is not supported");
        }
        arguments.add(pigArg);
    }

    if (actionConf.getBoolean(LOG_EXPANDED_PIG_SCRIPT, true)
            // To avoid Pig running the embedded scripts on dryrun
            && ScriptEngine.getSupportedScriptLang(script) == null) {
        logExpandedScript(script, arguments);
    }

    System.out.println("Pig command arguments :");
    for (String arg : arguments) {
        System.out.println("             " + arg);
    }

    LauncherMainHadoopUtils.killChildYarnJobs(actionConf);

    System.out.println("=================================================================");
    System.out.println();
    System.out.println(">>> Invoking Pig command line now >>>");
    System.out.println();
    System.out.flush();

    // Dry "-version" run first, then the real job.
    System.out.println();
    runPigJob(new String[] { "-version" }, null, true, false);
    System.out.println();
    System.out.flush();
    boolean hasStats = Boolean.parseBoolean(actionConf.get(EXTERNAL_STATS_WRITE));
    runPigJob(arguments.toArray(new String[arguments.size()]), pigLog, false, hasStats);

    System.out.println();
    System.out.println("<<< Invocation of Pig command completed <<<");
    System.out.println();

    // For embedded python or for version of pig lower than 0.8, pig stats are not supported.
    // So retrieving hadoop Ids here
    File file = new File(System.getProperty(EXTERNAL_CHILD_IDS));
    if (!file.exists()) {
        writeExternalChildIDs(logFile, PIG_JOB_IDS_PATTERNS, "Pig");
    }
}
From source file: org.alfresco.repo.content.metadata.AbstractMappingMetadataExtracter.java
/** * A utility method to convert mapping properties to the Map form. * /* w w w .j a v a 2 s. c o m*/ * @see #setMappingProperties(Properties) */ protected Map<String, Set<QName>> readMappingProperties(Properties mappingProperties) { return readMappingProperties(mappingProperties.entrySet()); }
From source file: org.alfresco.repo.content.metadata.AbstractMappingMetadataExtracter.java
/**
 * Converts embed mapping properties to the Map form, delegating to the
 * entry-set based overload.
 * <p>
 * Different from readMappingProperties in that keys are the Alfresco QNames
 * and values are file metadata properties.
 *
 * @param mappingProperties the raw embed mapping properties to convert
 * @return the mapping of Alfresco QNames to file metadata property names
 * @see #setMappingProperties(Properties)
 */
protected Map<QName, Set<String>> readEmbedMappingProperties(Properties mappingProperties) {
    Set<Map.Entry<Object, Object>> embedEntries = mappingProperties.entrySet();
    return readEmbedMappingProperties(embedEntries);
}
From source file: org.jets3t.apps.uploader.Uploader.java
/** * Retrieves a signed PUT URL from the given URL address. * The URL must point at a server-side script or service that accepts POST messages. * The POST message will include parameters for all the items in uploaderProperties, * that is everything in the file uploader.properties plus all the applet's parameters. * Based on this input, the server-side script decides whether to allow access and return * a signed PUT URL.// w ww .jav a 2s . c o m * * @param credsProviderParamName * the name of the parameter containing the server URL target for the PUT request. * @return * the AWS credentials provided by the server-side script if access was allowed, null otherwise. * * @throws HttpException * @throws Exception */ private GatekeeperMessage contactGatewayServer(S3Object[] objects) throws Exception { // Retrieve credentials from URL location value by the property 'credentialsServiceUrl'. String gatekeeperUrl = uploaderProperties.getStringProperty("gatekeeperUrl", "Missing required property gatekeeperUrl"); /* * Build Gatekeeper request. */ GatekeeperMessage gatekeeperMessage = new GatekeeperMessage(); gatekeeperMessage.addApplicationProperties(userInputProperties); // Add User inputs as application properties. gatekeeperMessage.addApplicationProperties(parametersMap); // Add any Applet/Application parameters as application properties. // Make the Uploader's identifier available to Gatekeeper for version compatibility checking (if necessary) gatekeeperMessage.addApplicationProperty(GatekeeperMessage.PROPERTY_CLIENT_VERSION_ID, UPLOADER_VERSION_ID); // If a prior failure has occurred, add information about this failure. if (priorFailureException != null) { gatekeeperMessage.addApplicationProperty(GatekeeperMessage.PROPERTY_PRIOR_FAILURE_MESSAGE, priorFailureException.getMessage()); // Now reset the prior failure variable. priorFailureException = null; } // Add all S3 objects as candiates for PUT signing. 
for (int i = 0; i < objects.length; i++) { SignatureRequest signatureRequest = new SignatureRequest(SignatureRequest.SIGNATURE_TYPE_PUT, objects[i].getKey()); signatureRequest.setObjectMetadata(objects[i].getMetadataMap()); gatekeeperMessage.addSignatureRequest(signatureRequest); } /* * Build HttpClient POST message. */ // Add all properties/parameters to credentials POST request. HttpPost postMethod = new HttpPost(gatekeeperUrl); Properties properties = gatekeeperMessage.encodeToProperties(); Iterator<Map.Entry<Object, Object>> propsIter = properties.entrySet().iterator(); List<NameValuePair> parameters = new ArrayList<NameValuePair>(properties.size()); while (propsIter.hasNext()) { Map.Entry<Object, Object> entry = propsIter.next(); String fieldName = (String) entry.getKey(); String fieldValue = (String) entry.getValue(); parameters.add(new BasicNameValuePair(fieldName, fieldValue)); } postMethod.setEntity(new UrlEncodedFormEntity(parameters)); // Create Http Client if necessary, and include User Agent information. if (httpClientGatekeeper == null) { httpClientGatekeeper = initHttpConnection(); } // Try to detect any necessary proxy configurations. try { HttpHost proxyHost = PluginProxyUtil.detectProxy(new URL(gatekeeperUrl)); if (proxyHost != null) { httpClientGatekeeper.getParams().setParameter(ConnRoutePNames.DEFAULT_PROXY, proxyHost); } ((DefaultHttpClient) httpClientGatekeeper).setCredentialsProvider(this); } catch (Throwable t) { log.debug("No proxy detected"); } // Perform Gateway request. 
log.debug("Contacting Gatekeeper at: " + gatekeeperUrl); HttpResponse response = null; try { response = httpClientGatekeeper.execute(postMethod); int responseCode = response.getStatusLine().getStatusCode(); String contentType = response.getFirstHeader("Content-Type").getValue(); if (responseCode == 200) { InputStream responseInputStream = null; Header encodingHeader = response.getFirstHeader("Content-Encoding"); if (encodingHeader != null && "gzip".equalsIgnoreCase(encodingHeader.getValue())) { log.debug("Inflating gzip-encoded response"); responseInputStream = new GZIPInputStream(response.getEntity().getContent()); } else { responseInputStream = response.getEntity().getContent(); } if (responseInputStream == null) { throw new IOException("No response input stream available from Gatekeeper"); } Properties responseProperties = new Properties(); try { responseProperties.load(responseInputStream); } finally { responseInputStream.close(); } GatekeeperMessage gatekeeperResponseMessage = GatekeeperMessage .decodeFromProperties(responseProperties); // Check for Gatekeeper Error Code in response. String gatekeeperErrorCode = gatekeeperResponseMessage.getApplicationProperties() .getProperty(GatekeeperMessage.APP_PROPERTY_GATEKEEPER_ERROR_CODE); if (gatekeeperErrorCode != null) { log.warn("Received Gatekeeper error code: " + gatekeeperErrorCode); failWithFatalError(gatekeeperErrorCode); return null; } if (gatekeeperResponseMessage.getSignatureRequests().length != objects.length) { throw new Exception("The Gatekeeper service did not provide the necessary " + objects.length + " response items"); } return gatekeeperResponseMessage; } else { log.debug("The Gatekeeper did not permit a request. Response code: " + responseCode + ", Response content type: " + contentType); throw new Exception("The Gatekeeper did not permit your request"); } } catch (Exception e) { throw new Exception("Gatekeeper did not respond", e); } finally { EntityUtils.consume(response.getEntity()); } }
From source file: org.apache.nifi.minifi.bootstrap.RunMiNiFi.java
public void env() { final Logger logger = cmdLogger; final Status status = getStatus(logger); if (status.getPid() == null) { logger.info("Apache MiNiFi is not running"); return;// ww w. j ava 2 s. co m } final Class<?> virtualMachineClass; try { virtualMachineClass = Class.forName("com.sun.tools.attach.VirtualMachine"); } catch (final ClassNotFoundException cnfe) { logger.error( "Seems tools.jar (Linux / Windows JDK) or classes.jar (Mac OS) is not available in classpath"); return; } final Method attachMethod; final Method detachMethod; try { attachMethod = virtualMachineClass.getMethod("attach", String.class); detachMethod = virtualMachineClass.getDeclaredMethod("detach"); } catch (final Exception e) { logger.error("Methods required for getting environment not available", e); return; } final Object virtualMachine; try { virtualMachine = attachMethod.invoke(null, status.getPid()); } catch (final Throwable t) { logger.error("Problem attaching to MiNiFi", t); return; } try { final Method getSystemPropertiesMethod = virtualMachine.getClass().getMethod("getSystemProperties"); final Properties sysProps = (Properties) getSystemPropertiesMethod.invoke(virtualMachine); for (Entry<Object, Object> syspropEntry : sysProps.entrySet()) { logger.info(syspropEntry.getKey().toString() + " = " + syspropEntry.getValue().toString()); } } catch (Throwable t) { throw new RuntimeException(t); } finally { try { detachMethod.invoke(virtualMachine); } catch (final Exception e) { logger.warn("Caught exception detaching from process", e); } } }
From source file: gemlite.core.internal.db.DBSynchronizer.java
/** * Initialize this {@link DBSynchronizer} instance, creating a new JDBC * connection to the backend database as per the provided parameter.<BR> * /*from w ww . j av a2s . c om*/ * The recommended format of the parameter string is: <BR> * * file=<path> <BR> * * The file is a properties file specifying the driver, JDBC URL, user and * password.<BR> * * Driver=<driver-class><BR> * URL=<JDBC URL><BR> * User=<user name><BR> * <BR> * Secret=<encrypted password><BR> * Transformation=<transformation for the encryption cipher><BR> * KeySize=<size of the private key to use for encryption><BR> * -- OR --<BR> * Password=<password><BR> * * The password provided in the "Secret" property should be an encrypted one * generated using the "gfxd encrypt-password external" command, else the * "Password" property can be used to specify the password in plain-text. * The "Transformation" and "KeySize" properties optionally specify the * transformation and key size used for encryption else the defaults are * used ("AES" and 128 respectively). User and password are optional and * when not provided then JDBC URL will be used as is for connection. * * The above properties may also be provided inline like below:<BR> * <BR> * <driver-class>,<JDBC * URL>[,<user>[,<password>|secret * =<secret>][,transformation=<transformation>][,keysize=<key * size>]<BR> * <BR> * The user and password parts are optional and can be possibly embedded in * the JDBC URL itself. The password can be encrypted one generated using * the "gfxd encrypt-password external" command in which case it should be * prefixed with "secret=". It can also specify the transformation and * keysize using the optional "transformation=..." and "keysize=..." * properties. 
*/ public void init(String initParamStr) { this.driver = null; this.driverClass = null; this.dbUrl = null; this.userName = null; this.passwd = null; this.numErrorTries = 0; // check the new "file=<properties file>" option first if (initParamStr.startsWith("file=")) { String propsFile = initParamStr.substring("file=".length()); FileInputStream fis = null; final Properties props = new Properties(); try { fis = new FileInputStream(propsFile); props.load(fis); } catch (Exception e) { throw helper.newRuntimeException(String.format(DB_SYNCHRONIZER__9, propsFile), e); } finally { try { if (fis != null) { fis.close(); } } catch (Exception e) { // ignored } } try { for (Map.Entry<Object, Object> entry : props.entrySet()) { String key = ((String) entry.getKey()).trim(); String value = ((String) entry.getValue()).trim(); if (DBDRIVER.equalsIgnoreCase(key)) { this.driverClass = value; } else if (DBURL.equalsIgnoreCase(key)) { this.dbUrl = value; } else if (USER.equalsIgnoreCase(key)) { this.userName = value; } else if (PASSWORD.equalsIgnoreCase(key)) { this.passwd = value; } else if (ERRORFILE.equalsIgnoreCase(key)) { this.errorFile = value; } else if (ERRORTRIES.equalsIgnoreCase(key)) { this.numErrorTries = Integer.parseInt(value); } else { throw new IllegalArgumentException(String.format(DB_SYNCHRONIZER__11, key, propsFile)); } } } catch (IllegalArgumentException e) { throw e; } catch (Exception e) { throw helper.newRuntimeException(String.format(DB_SYNCHRONIZER__9, propsFile), e); } if (this.driverClass == null || this.driverClass.length() == 0 || this.dbUrl == null || this.dbUrl.length() == 0) { throw new IllegalArgumentException(String.format(DB_SYNCHRONIZER__10, propsFile)); } } else { inlineInit(initParamStr); } // helper.createEventErrorLogger(errorFile); this.initConnection(); }
From source file: org.apache.hadoop.hive.metastore.PersistenceManagerProvider.java
/**
 * Builds the JDO/DataNucleus datasource properties from the Hive
 * configuration. Properties specified in hive-default.xml override the
 * properties specified in jpox.properties.
 *
 * @param conf the metastore configuration to read values from
 * @return the assembled datasource properties, including the metastore
 *         password resolved via {@code MetastoreConf.getPassword}
 */
@SuppressWarnings("nls")
private static Properties getDataSourceProps(Configuration conf) {
    Properties prop = new Properties();
    correctAutoStartMechanism(conf);

    // First, go through and set all our values for datanucleus and javax.jdo parameters. This
    // has to be a separate first step because we don't set the default values in the config object.
    for (ConfVars var : MetastoreConf.dataNucleusAndJdoConfs) {
        String confVal = MetastoreConf.getAsString(conf, var);
        String varName = var.getVarname();
        Object prevVal = prop.setProperty(varName, confVal);
        // Only log printable (non-secret) property names/values.
        if (MetastoreConf.isPrintable(varName)) {
            LOG.debug("Overriding {} value {} from jpox.properties with {}", varName, prevVal, confVal);
        }
    }

    // Now, we need to look for any values that the user set that MetastoreConf doesn't know about.
    // TODO Commenting this out for now, as it breaks because the conf values aren't getting properly
    // interpolated in case of variables. See HIVE-17788.
    /*
    for (Map.Entry<String, String> e : conf) {
      if (e.getKey().startsWith("datanucleus.") || e.getKey().startsWith("javax.jdo.")) {
        // We have to handle this differently depending on whether it is a value known to
        // MetastoreConf or not. If it is, we need to get the default value if a value isn't
        // provided. If not, we just set whatever the user has set.
        Object prevVal = prop.setProperty(e.getKey(), e.getValue());
        if (LOG.isDebugEnabled() && MetastoreConf.isPrintable(e.getKey())) {
          LOG.debug("Overriding " + e.getKey() + " value " + prevVal + " from jpox.properties with "
              + e.getValue());
        }
      }
    }
    */

    // Password may no longer be in the conf, use getPassword()
    try {
        String passwd = MetastoreConf.getPassword(conf, MetastoreConf.ConfVars.PWD);
        if (org.apache.commons.lang.StringUtils.isNotEmpty(passwd)) {
            // We can get away with the use of varname here because varname == hiveName for PWD
            prop.setProperty(ConfVars.PWD.getVarname(), passwd);
        }
    } catch (IOException err) {
        throw new RuntimeException("Error getting metastore password: " + err.getMessage(), err);
    }

    // Dump the final (printable) properties for debugging.
    if (LOG.isDebugEnabled()) {
        for (Entry<Object, Object> e : prop.entrySet()) {
            if (MetastoreConf.isPrintable(e.getKey().toString())) {
                LOG.debug("{} = {}", e.getKey(), e.getValue());
            }
        }
    }
    return prop;
}