List of usage examples for java.util Properties containsKey
@Override public boolean containsKey(Object key)
From source file:gobblin.scheduler.JobScheduler.java
/** * Schedule a job.//from w w w . ja va 2 s.c o m * * <p> * This method does what {@link #scheduleJob(Properties, JobListener)} does, and additionally it * allows the caller to pass in additional job data and the {@link Job} implementation class. * </p> * * @param jobProps Job configuration properties * @param jobListener {@link JobListener} used for callback, * can be <em>null</em> if no callback is needed. * @param additionalJobData additional job data in a {@link Map} * @param jobClass Quartz job class * @throws JobException when there is anything wrong * with scheduling the job */ public void scheduleJob(Properties jobProps, JobListener jobListener, Map<String, Object> additionalJobData, Class<? extends Job> jobClass) throws JobException { Preconditions.checkArgument(jobProps.containsKey(ConfigurationKeys.JOB_NAME_KEY), "A job must have a job name specified by job.name"); String jobName = jobProps.getProperty(ConfigurationKeys.JOB_NAME_KEY); if (this.scheduledJobs.containsKey(jobName)) { LOG.warn("Job " + jobName + " has already been scheduled"); return; } // Check if the job has been disabled boolean disabled = Boolean.valueOf(jobProps.getProperty(ConfigurationKeys.JOB_DISABLED_KEY, "false")); if (disabled) { LOG.info("Skipping disabled job " + jobName); return; } if (!jobProps.containsKey(ConfigurationKeys.JOB_SCHEDULE_KEY)) { // A job without a cron schedule is considered a one-time job jobProps.setProperty(ConfigurationKeys.JOB_RUN_ONCE_KEY, "true"); // Submit the job to run this.jobExecutor.execute(new NonScheduledJobRunner(jobProps, jobListener)); return; } if (jobListener != null) { this.jobListenerMap.put(jobName, jobListener); } // Build a data map that gets passed to the job JobDataMap jobDataMap = new JobDataMap(); jobDataMap.put(JOB_SCHEDULER_KEY, this); jobDataMap.put(PROPERTIES_KEY, jobProps); jobDataMap.put(JOB_LISTENER_KEY, jobListener); jobDataMap.putAll(additionalJobData); // Build a Quartz job JobDetail job = JobBuilder.newJob(jobClass) 
.withIdentity(jobName, Strings.nullToEmpty(jobProps.getProperty(ConfigurationKeys.JOB_GROUP_KEY))) .withDescription(Strings.nullToEmpty(jobProps.getProperty(ConfigurationKeys.JOB_DESCRIPTION_KEY))) .usingJobData(jobDataMap).build(); try { // Schedule the Quartz job with a trigger built from the job configuration Trigger trigger = getTrigger(job.getKey(), jobProps); this.scheduler.getScheduler().scheduleJob(job, trigger); LOG.info(String.format("Scheduled job %s. Next run: %s.", job.getKey(), trigger.getNextFireTime())); } catch (SchedulerException se) { LOG.error("Failed to schedule job " + jobName, se); throw new JobException("Failed to schedule job " + jobName, se); } this.scheduledJobs.put(jobName, job.getKey()); }
From source file:gobblin.data.management.copy.hive.HiveDatasetFinder.java
/**
 * Creates a {@link HiveDatasetFinder}.
 *
 * <p>Requires either a database key ({@code DB_KEY}) or a hive-dataset whitelist entry in
 * {@code properties}; fails fast via Preconditions otherwise. Also wires up the optional
 * config-store URI and the dataset-URI builder used to map Hive tables to config-store URIs.</p>
 *
 * @param fs filesystem the datasets live on
 * @param properties job properties driving whitelist/blacklist and config-store settings
 * @param clientPool pool of Hive metastore clients
 * @param eventSubmitter event submitter for reporting; may be null (wrapped in Optional)
 * @param configClient client used to talk to the config store
 * @throws IOException declared for subclass/caller compatibility
 */
@SuppressWarnings("unchecked")
// SuppressWarnings justification: CONFIG_STORE_DATASET_URI_BUILDER_CLASS must be of type
// Function<DbAndTable, String>. It is safe to throw RuntimeException otherwise.
protected HiveDatasetFinder(FileSystem fs, Properties properties, HiveMetastoreClientPool clientPool,
    EventSubmitter eventSubmitter, ConfigClient configClient) throws IOException {
  this.properties = properties;
  this.clientPool = clientPool;
  this.fs = fs;
  String whitelistKey = HIVE_DATASET_PREFIX + "." + WhitelistBlacklist.WHITELIST;
  // Exactly one of the two configuration styles must be present
  Preconditions.checkArgument(properties.containsKey(DB_KEY) || properties.containsKey(whitelistKey),
      String.format("Must specify %s or %s.", DB_KEY, whitelistKey));
  Config config = ConfigFactory.parseProperties(properties);
  if (properties.containsKey(DB_KEY)) {
    // Legacy style: build the whitelist from "<db>.<tablePattern>" with an empty blacklist
    this.whitelistBlacklist = new WhitelistBlacklist(this.properties.getProperty(DB_KEY) + "."
        + this.properties.getProperty(TABLE_PATTERN_KEY, DEFAULT_TABLE_PATTERN), "");
  } else {
    // Config style: read whitelist/blacklist from the hive-dataset config subtree
    this.whitelistBlacklist = new WhitelistBlacklist(config.getConfig(HIVE_DATASET_PREFIX));
  }
  this.eventSubmitter = Optional.fromNullable(eventSubmitter);
  // Config-store URI is only set when the property is present and non-blank
  this.configStoreUri = StringUtils
      .isNotBlank(properties.getProperty(ConfigurationKeys.CONFIG_MANAGEMENT_STORE_URI))
          ? Optional.of(properties.getProperty(ConfigurationKeys.CONFIG_MANAGEMENT_STORE_URI))
          : Optional.<String>absent();
  this.datasetConfigPrefix = properties.getProperty(HIVE_DATASET_CONFIG_PREFIX_KEY,
      DEFAULT_HIVE_DATASET_CONIFG_PREFIX);
  this.configClient = configClient;
  try {
    // Instantiate the user-supplied URI-builder class reflectively, or fall back to the default.
    // The unchecked cast is what the class-level @SuppressWarnings covers.
    this.configStoreDatasetUriBuilder = properties.containsKey(CONFIG_STORE_DATASET_URI_BUILDER_CLASS)
        ? (Function<Table, String>) ConstructorUtils.invokeConstructor(
            Class.forName(properties.getProperty(CONFIG_STORE_DATASET_URI_BUILDER_CLASS)))
        : DEFAULT_CONFIG_STORE_DATASET_URI_BUILDER;
  } catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException | InstantiationException
      | ClassNotFoundException e) {
    // Any reflective failure is a misconfiguration; surface it immediately
    throw new RuntimeException(e);
  }
  this.jobConfig = ConfigUtils.propertiesToConfig(properties);
}
From source file:org.apache.axis2.transport.mail.MailTransportSender.java
/** * Initialize the Mail sender and be ready to send messages * @param cfgCtx the axis2 configuration context * @param transportOut the transport-out description * @throws org.apache.axis2.AxisFault on error *//*from w w w . j a v a2 s.com*/ public void init(ConfigurationContext cfgCtx, TransportOutDescription transportOut) throws AxisFault { super.init(cfgCtx, transportOut); // initialize SMTP session Properties props = new Properties(); List<Parameter> params = transportOut.getParameters(); for (Parameter p : params) { props.put(p.getName(), p.getValue()); } if (props.containsKey(MailConstants.MAIL_SMTP_FROM)) { try { smtpFromAddress = new InternetAddress((String) props.get(MailConstants.MAIL_SMTP_FROM)); } catch (AddressException e) { handleException("Invalid default 'From' address : " + props.get(MailConstants.MAIL_SMTP_FROM), e); } } if (props.containsKey(MailConstants.MAIL_SMTP_BCC)) { try { smtpBccAddresses = InternetAddress.parse((String) props.get(MailConstants.MAIL_SMTP_BCC)); } catch (AddressException e) { handleException("Invalid default 'Bcc' address : " + props.get(MailConstants.MAIL_SMTP_BCC), e); } } if (props.containsKey(MailConstants.TRANSPORT_MAIL_FORMAT)) { defaultMailFormat = (String) props.get(MailConstants.TRANSPORT_MAIL_FORMAT); } smtpUsername = (String) props.get(MailConstants.MAIL_SMTP_USERNAME); smtpPassword = (String) props.get(MailConstants.MAIL_SMTP_PASSWORD); if (smtpUsername != null && smtpPassword != null) { session = Session.getInstance(props, new Authenticator() { public PasswordAuthentication getPasswordAuthentication() { return new PasswordAuthentication(smtpUsername, smtpPassword); } }); } else { session = Session.getInstance(props, null); } MailUtils.setupLogging(session, log, transportOut); // set the synchronise callback table if (cfgCtx.getProperty(BaseConstants.CALLBACK_TABLE) == null) { cfgCtx.setProperty(BaseConstants.CALLBACK_TABLE, new ConcurrentHashMap()); } }
From source file:com.baidu.qa.service.test.parser.CaseFolderParserImpl.java
/** * ?suitecase//from ww w .ja v a2 s .c om * @param casepath case * @return ?case? * @throws Exception */ private CaseData parsecase(String casepath) throws Exception { CaseData casedata = new CaseData(); File caseinfo = new File(casepath + Constant.FILENAME_CASEINFO); File input = new File(casepath + Constant.FILENAME_INPUT); // ?case? InputStream in_caseinfo = new BufferedInputStream( new FileInputStream(casepath + Constant.FILENAME_CASEINFO)); Properties Info_caseinfo = new Properties(); try { Info_caseinfo.load(in_caseinfo); casedata.setCaseid(Info_caseinfo.getProperty("caseid")); casedata.setDesc(Info_caseinfo.getProperty("desc")); casedata.setAction(Info_caseinfo.getProperty("action")); casedata.setStatus(Info_caseinfo.getProperty("status")); if (Info_caseinfo.containsKey("requesttype")) { casedata.setRequesttype(Info_caseinfo.getProperty("requesttype")); } //?var? if (Info_caseinfo.containsKey("var") && Info_caseinfo.getProperty("var").trim().length() != 0) { casedata.setHasVar(true); } in_caseinfo.close(); // ?case??vargen?? casedata.setVarGen(VariableGenerator.getGenerator(casepath + Constant.FILENAME_CASEINFO)); casedata = parseCaseinfo(casedata, casepath); return casedata; } catch (IOException e) { throw new RuntimeException("parse case folder error", e.getCause()); } }
From source file:org.apache.geode.internal.util.CollectionUtilsJUnitTest.java
@Test public void testCreateMultipleProperties() { Map<String, String> map = new HashMap<>(3); map.put("one", "A"); map.put("two", "B"); map.put("six", "C"); Properties properties = CollectionUtils.createProperties(map); assertNotNull(properties);//from w w w. j a v a2 s. co m assertFalse(properties.isEmpty()); assertEquals(map.size(), properties.size()); for (Entry<String, String> entry : map.entrySet()) { assertTrue(properties.containsKey(entry.getKey())); assertEquals(entry.getValue(), properties.get(entry.getKey())); } }
From source file:org.accada.reader.hal.ControllerProperties.java
/** * Gets the parameter with the specified name from the appropriate properties file. * /*w w w .j av a 2 s . c o m*/ * @param param parameter name. * @return the value of the parameter. * @throws Exception. */ public String getParameter(String param) throws Exception { String value = null; InputStream in; Properties props = new Properties(); log.debug("Trying to get Parameter " + param + " from file " + propsFile); //possible Errors are propageted and further processed as HardwareExceptions in = this.getClass().getResourceAsStream("/props/" + propsFile); if (in == null) { log.debug("Properties-File not found."); throw new IOException("Properties file not found."); } //possible Errors are propageted and further processed as HardwareExceptions props.load(in); in.close(); if (props.containsKey(param)) { value = props.getProperty(param); log.debug("Property found: " + param + " = " + value); return value; } else { String message = "Property not found: " + param; log.debug(message); throw new Exception(message); } }
From source file:com.liferay.portal.configuration.ConfigurationImpl.java
protected void updateBasePath(ClassLoader classLoader, String name) { InputStream inputStream = null; try {//from ww w . j a v a 2 s.c o m URL url = classLoader.getResource(name + Conventions.PROPERTIES_EXTENSION); if (url == null) { return; } String protocol = url.getProtocol(); if (!protocol.equals("file")) { return; } Properties properties = new Properties(); inputStream = url.openStream(); properties.load(inputStream); if (properties.containsKey("base.path")) { return; } String fileName = StringUtil.replace(url.getFile(), "%20", StringPool.SPACE); File file = new File(fileName); if (!file.exists() || !file.canWrite()) { if (_log.isWarnEnabled()) { _log.warn("Unable to write " + file); } return; } Writer writer = new FileWriter(file, true); StringBundler sb = new StringBundler(4); sb.append(StringPool.OS_EOL); sb.append(StringPool.OS_EOL); sb.append("base.path="); String basePath = url.getPath(); int pos = basePath.lastIndexOf(StringPool.SLASH + name + Conventions.PROPERTIES_EXTENSION); if (pos != -1) { basePath = basePath.substring(0, pos); } sb.append(basePath); writer.write(sb.toString()); writer.close(); } catch (Exception e) { _log.error(e, e); } finally { StreamUtil.cleanUp(inputStream); } }
From source file:org.apache.gobblin.data.management.copy.hive.HiveDatasetFinder.java
/**
 * Creates a {@link HiveDatasetFinder}.
 *
 * <p>Requires either a database key ({@code DB_KEY}) or a hive-dataset whitelist entry in
 * {@code properties}; fails fast via Preconditions otherwise. Also wires up the optional
 * config-store URI (cleared when the config-management store is disabled) and the dataset-URI
 * builder used to map Hive tables to config-store URIs.</p>
 *
 * @param fs filesystem the datasets live on
 * @param properties job properties driving whitelist/blacklist and config-store settings
 * @param clientPool pool of Hive metastore clients
 * @param eventSubmitter event submitter for reporting; may be null (wrapped in Optional)
 * @param configClient client used to talk to the config store
 * @throws IOException declared for subclass/caller compatibility
 */
@SuppressWarnings("unchecked")
// SuppressWarnings justification: CONFIG_STORE_DATASET_URI_BUILDER_CLASS must be of type
// Function<DbAndTable, String>. It is safe to throw RuntimeException otherwise.
protected HiveDatasetFinder(FileSystem fs, Properties properties, HiveMetastoreClientPool clientPool,
    EventSubmitter eventSubmitter, ConfigClient configClient) throws IOException {
  this.properties = properties;
  this.clientPool = clientPool;
  this.fs = fs;
  String whitelistKey = HIVE_DATASET_PREFIX + "." + WhitelistBlacklist.WHITELIST;
  // Exactly one of the two configuration styles must be present
  Preconditions.checkArgument(properties.containsKey(DB_KEY) || properties.containsKey(whitelistKey),
      String.format("Must specify %s or %s.", DB_KEY, whitelistKey));
  Config config = ConfigFactory.parseProperties(properties);
  if (properties.containsKey(DB_KEY)) {
    // Legacy style: build the whitelist from "<db>.<tablePattern>" with an empty blacklist
    this.whitelistBlacklist = new WhitelistBlacklist(this.properties.getProperty(DB_KEY) + "."
        + this.properties.getProperty(TABLE_PATTERN_KEY, DEFAULT_TABLE_PATTERN), "");
  } else {
    // Config style: read whitelist/blacklist from the hive-dataset config subtree
    this.whitelistBlacklist = new WhitelistBlacklist(config.getConfig(HIVE_DATASET_PREFIX));
  }
  this.eventSubmitter = Optional.fromNullable(eventSubmitter);
  // Config-store URI is only set when the property is present and non-blank
  this.configStoreUri = StringUtils
      .isNotBlank(properties.getProperty(ConfigurationKeys.CONFIG_MANAGEMENT_STORE_URI))
          ? Optional.of(properties.getProperty(ConfigurationKeys.CONFIG_MANAGEMENT_STORE_URI))
          : Optional.<String>absent();
  // A disabled config-management store overrides any configured URI
  if (!Boolean.valueOf(properties.getProperty(ConfigurationKeys.CONFIG_MANAGEMENT_STORE_ENABLED,
      ConfigurationKeys.DEFAULT_CONFIG_MANAGEMENT_STORE_ENABLED))) {
    this.configStoreUri = Optional.<String>absent();
  }
  this.datasetConfigPrefix = properties.getProperty(HIVE_DATASET_CONFIG_PREFIX_KEY,
      DEFAULT_HIVE_DATASET_CONIFG_PREFIX);
  this.configClient = configClient;
  try {
    // Instantiate the user-supplied URI-builder class reflectively, or fall back to the default.
    // The unchecked cast is what the class-level @SuppressWarnings covers.
    this.configStoreDatasetUriBuilder = properties.containsKey(CONFIG_STORE_DATASET_URI_BUILDER_CLASS)
        ? (Function<Table, String>) ConstructorUtils.invokeConstructor(
            Class.forName(properties.getProperty(CONFIG_STORE_DATASET_URI_BUILDER_CLASS)))
        : DEFAULT_CONFIG_STORE_DATASET_URI_BUILDER;
  } catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException | InstantiationException
      | ClassNotFoundException e) {
    // Any reflective failure is a misconfiguration; surface it immediately
    throw new RuntimeException(e);
  }
  this.jobConfig = ConfigUtils.propertiesToConfig(properties);
}
From source file:com.piusvelte.hydra.ConnectionManager.java
/**
 * Builds the ConnectionManager: resolves the Hydra working directory for the
 * current OS, then loads the Hydra properties file (creating defaults via
 * {@code initProps()} when absent) and registers the configured databases.
 *
 * @param ctx servlet context used for path resolution and logging
 */
private ConnectionManager(ServletContext ctx) {
    ctx.log("Hydra ConnectionManager instantiated");
    String[] fullPathParts;
    // Resolve the base working directory differently on Windows vs. *nix
    if (System.getProperty("os.name").startsWith("Windows")) {
        fullPathParts = ctx.getRealPath(File.separator).split("\\\\", -1);
        sHydraDir = fullPathParts[0] + File.separator + WIN_DIR;
    } else {
        fullPathParts = ctx.getRealPath(File.separator).split(File.separator, -1);
        sHydraDir = fullPathParts[0] + File.separator + NIX_DIR;
    }
    sHydraDir += File.separator + "hydra";
    ctx.log("Working Directory: " + sHydraDir);
    // Append webapp-specific path segments so multiple deployments do not collide
    if (fullPathParts.length > 2) {
        if (fullPathParts.length > 3) {
            sHydraDir += File.separator + fullPathParts[fullPathParts.length - 3];
        }
        sHydraDir += File.separator + fullPathParts[fullPathParts.length - 2];
    }
    mQueueRetryInterval = QueueThread.DEFAULT_QUEUERETRYINTERVAL;
    File hydraDir = new File(sHydraDir);
    if (hydraDir.exists()) {
        sHydraDir += File.separator;
        InputStream is = null;
        try {
            is = new FileInputStream(sHydraDir + HYDRA_PROPERTIES);
        } catch (FileNotFoundException e1) {
            ctx.log("The properties file at " + (sHydraDir + HYDRA_PROPERTIES) + " could not be found.");
            e1.printStackTrace();
        }
        if (is != null) {
            Properties properties = new Properties();
            try {
                properties.load(is);
                ctx.log("Hydra properties file read");
                if (properties.containsKey(sPassphrase))
                    passphrase = properties.getProperty(sPassphrase);
                sQueueFile = sHydraDir + "queue";
                tokenFile = sHydraDir + "tokens";
                try {
                    loadTokens();
                } catch (Exception e) {
                    e.printStackTrace();
                }
                if (properties.containsKey(sQueueRetryInterval))
                    mQueueRetryInterval = Integer.parseInt(properties.getProperty(sQueueRetryInterval));
                if (properties.containsKey(sDatabases)) {
                    // Each alias gets its own settings map, connection list and request counter
                    String[] databaseAliases = properties.getProperty(sDatabases).split(",", -1);
                    String[] databaseProperties = new String[] { sType, sDatabase, sHost, sPort, sUsername,
                            sPassword, sConnections, sDASU, sDASP, sSQLENVINIT };
                    for (String databaseAlias : databaseAliases) {
                        HashMap<String, String> database = new HashMap<String, String>();
                        for (String databaseProperty : databaseProperties) {
                            database.put(databaseProperty,
                                    properties.getProperty(databaseAlias + "." + databaseProperty, ""));
                        }
                        synchronized (databaseLock) {
                            sDatabaseSettings.put(databaseAlias, database);
                            sDatabaseConnections.put(databaseAlias, new ArrayList<DatabaseConnection>());
                            queuedDatabaseRequests.put(databaseAlias, 0);
                        }
                    }
                }
            } catch (IOException e) {
                e.printStackTrace();
            } finally {
                // Fix: the original never closed this stream on any path
                try {
                    is.close();
                } catch (IOException ignored) {
                    // best-effort close
                }
            }
        } else {
            initProps();
        }
    } else if (hydraDir.mkdirs()) {
        sHydraDir += File.separator;
        initProps();
    } else {
        ctx.log("properties doesn't exist, and creating it failed at: " + sHydraDir);
    }
}
From source file:com.mcreations.usb.windows.WindowsUsbServices.java
/** Set variables from user-specified properties */ private void checkProperties() { Properties p = null; try {//from w w w .j a va2 s. co m p = UsbHostManager.getProperties(); } catch (Exception e) { return; } try { if (p.containsKey(TOPOLOGY_UPDATE_DELAY_KEY)) topologyUpdateDelay = Integer.decode(p.getProperty(TOPOLOGY_UPDATE_DELAY_KEY)).intValue(); } catch (Exception e) { } try { if (p.containsKey(TOPOLOGY_UPDATE_NEW_DEVICE_DELAY_KEY)) topologyUpdateNewDeviceDelay = Integer.decode(p.getProperty(TOPOLOGY_UPDATE_NEW_DEVICE_DELAY_KEY)) .intValue(); } catch (Exception e) { } try { if (p.containsKey(TOPOLOGY_UPDATE_USE_POLLING_KEY)) topologyUpdateUsePolling = Boolean.valueOf(p.getProperty(TOPOLOGY_UPDATE_USE_POLLING_KEY)) .booleanValue(); } catch (Exception e) { } try { if (p.containsKey(TRACING_KEY)) JavaxUsb.setTracing(Boolean.valueOf(p.getProperty(TRACING_KEY)).booleanValue()); } catch (Exception e) { } //FIXME - the names of the tracers should be more generically processed try { if (p.containsKey(TRACE_DEFAULT_KEY)) JavaxUsb.setTraceType(Boolean.valueOf(p.getProperty(TRACE_DEFAULT_KEY)).booleanValue(), "default"); } catch (Exception e) { } try { if (p.containsKey(TRACE_HOTPLUG_KEY)) JavaxUsb.setTraceType(Boolean.valueOf(p.getProperty(TRACE_HOTPLUG_KEY)).booleanValue(), "hotplug"); } catch (Exception e) { } try { if (p.containsKey(TRACE_XFER_KEY)) JavaxUsb.setTraceType(Boolean.valueOf(p.getProperty(TRACE_XFER_KEY)).booleanValue(), "xfer"); } catch (Exception e) { } try { if (p.containsKey(TRACE_URB_KEY)) JavaxUsb.setTraceType(Boolean.valueOf(p.getProperty(TRACE_URB_KEY)).booleanValue(), "urb"); } catch (Exception e) { } try { if (p.containsKey(TRACE_LEVEL_KEY)) JavaxUsb.setTraceLevel(Integer.decode(p.getProperty(TRACE_LEVEL_KEY)).intValue()); } catch (Exception e) { } }