List of usage examples for java.util.Properties.containsKey
@Override public boolean containsKey(Object key)
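Before the project examples below, here is a minimal, self-contained sketch (hypothetical property names, chosen only for illustration) of the basic semantics: containsKey inspects only the entries stored directly in the Properties table, so defaults supplied through the Properties(Properties defaults) constructor are visible to getProperty but not to containsKey.

    import java.util.Properties;

    public class ContainsKeyDemo {
        public static void main(String[] args) {
            Properties defaults = new Properties();
            defaults.setProperty("timeout", "30"); // hypothetical default

            Properties props = new Properties(defaults);
            props.setProperty("host", "localhost"); // hypothetical direct entry

            System.out.println(props.containsKey("host"));     // true  - set directly
            System.out.println(props.containsKey("timeout"));  // false - lives only in the defaults table
            System.out.println(props.getProperty("timeout"));  // "30"  - getProperty does consult defaults
        }
    }

This asymmetry explains the guard-then-set pattern that recurs throughout the examples below.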
From source file: gobblin.runtime.local.LocalJobManager.java

    /**
     * Schedule a job.
     *
     * <p>
     * This method calls the Quartz scheduler to schedule the job.
     * </p>
     *
     * @param jobProps Job configuration properties
     * @param jobListener {@link JobListener} used for callback,
     *                    can be <em>null</em> if no callback is needed.
     * @throws gobblin.runtime.JobException when there is anything wrong
     *                                      with scheduling the job
     */
    public void scheduleJob(Properties jobProps, JobListener jobListener) throws JobException {
        Preconditions.checkNotNull(jobProps);

        String jobName = jobProps.getProperty(ConfigurationKeys.JOB_NAME_KEY);

        // Check if the job has been disabled
        boolean disabled = Boolean.valueOf(jobProps.getProperty(ConfigurationKeys.JOB_DISABLED_KEY, "false"));
        if (disabled) {
            LOG.info("Skipping disabled job " + jobName);
            return;
        }

        if (!jobProps.containsKey(ConfigurationKeys.JOB_SCHEDULE_KEY)) {
            // A job without a cron schedule is considered a one-time job
            jobProps.setProperty(ConfigurationKeys.JOB_RUN_ONCE_KEY, "true");
            // Run the job without going through the scheduler
            runJob(jobProps, jobListener);
            return;
        }

        if (jobListener != null) {
            this.jobListenerMap.put(jobName, jobListener);
        }

        // Build a data map that gets passed to the job
        JobDataMap jobDataMap = new JobDataMap();
        jobDataMap.put(JOB_MANAGER_KEY, this);
        jobDataMap.put(PROPERTIES_KEY, jobProps);
        jobDataMap.put(JOB_LISTENER_KEY, jobListener);

        // Build a Quartz job
        JobDetail job = JobBuilder.newJob(GobblinJob.class)
                .withIdentity(jobName, Strings.nullToEmpty(jobProps.getProperty(ConfigurationKeys.JOB_GROUP_KEY)))
                .withDescription(Strings.nullToEmpty(jobProps.getProperty(ConfigurationKeys.JOB_DESCRIPTION_KEY)))
                .usingJobData(jobDataMap)
                .build();

        try {
            if (this.scheduler.checkExists(job.getKey())) {
                throw new JobException(String.format("Job %s has already been scheduled", jobName));
            }

            // Schedule the Quartz job with a trigger built from the job configuration
            this.scheduler.scheduleJob(job, getTrigger(job.getKey(), jobProps));
        } catch (SchedulerException se) {
            LOG.error("Failed to schedule job " + jobName, se);
            throw new JobException("Failed to schedule job " + jobName, se);
        }

        this.scheduledJobs.put(jobName, job.getKey());
    }
From source file: org.apache.zeppelin.interpreter.InterpreterSetting.java

    public Properties getJavaProperties() {
        Properties jProperties = new Properties();
        Map<String, InterpreterProperty> iProperties = (Map<String, InterpreterProperty>) properties;
        for (Map.Entry<String, InterpreterProperty> entry : iProperties.entrySet()) {
            if (entry.getValue().getValue() != null) {
                jProperties.setProperty(entry.getKey().trim(), entry.getValue().getValue().toString().trim());
            }
        }

        if (!jProperties.containsKey("zeppelin.interpreter.output.limit")) {
            jProperties.setProperty("zeppelin.interpreter.output.limit",
                    conf.getInt(ZEPPELIN_INTERPRETER_OUTPUT_LIMIT) + "");
        }

        if (!jProperties.containsKey("zeppelin.interpreter.max.poolsize")) {
            jProperties.setProperty("zeppelin.interpreter.max.poolsize",
                    conf.getInt(ZEPPELIN_INTERPRETER_MAX_POOL_SIZE) + "");
        }

        String interpreterLocalRepoPath = conf.getInterpreterLocalRepoPath();
        // TODO(zjffdu) change it to interpreterDir/{interpreter_name}
        jProperties.setProperty("zeppelin.interpreter.localRepo", interpreterLocalRepoPath + "/" + id);
        return jProperties;
    }
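The example above uses the common idiom of probing with containsKey and writing a fallback only when the key is absent. Since Properties extends Hashtable, which implements Map, putIfAbsent can collapse the check-then-set pair into one call. A minimal sketch, with hypothetical keys and values that merely mirror the Zeppelin example:

    import java.util.Properties;

    public class DefaultIfAbsent {
        public static void main(String[] args) {
            Properties props = new Properties();
            props.setProperty("zeppelin.interpreter.output.limit", "204800"); // pre-existing entry

            // Equivalent to: if (!props.containsKey(k)) { props.setProperty(k, v); }
            props.putIfAbsent("zeppelin.interpreter.output.limit", "102400"); // no-op, key present
            props.putIfAbsent("zeppelin.interpreter.max.poolsize", "10");     // inserted

            System.out.println(props.getProperty("zeppelin.interpreter.output.limit")); // 204800
            System.out.println(props.getProperty("zeppelin.interpreter.max.poolsize")); // 10
        }
    }

One caveat: putIfAbsent takes Object arguments, so unlike setProperty it does not enforce String keys and values at compile time.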
From source file: org.apache.sqoop.orm.ClassWriter.java

    private String toJavaType(String columnName, int sqlType) {
        Properties mapping = options.getMapColumnJava();
        if (mapping.containsKey(columnName)) {
            String type = mapping.getProperty(columnName);
            if (LOG.isDebugEnabled()) {
                LOG.info("Overriding type of column " + columnName + " to " + type);
            }
            return type;
        }
        return connManager.toJavaType(tableName, columnName, sqlType);
    }
From source file: com.mirth.connect.server.controllers.DefaultExtensionController.java

    @Override
    public void initPlugins() {
        // Order all the plugins by their weight before loading any of them.
        Map<String, String> pluginNameMap = new HashMap<String, String>();
        NavigableMap<Integer, List<String>> weightedPlugins = new TreeMap<Integer, List<String>>();
        for (PluginMetaData pmd : getPluginMetaData().values()) {
            if (isExtensionEnabled(pmd.getName())) {
                if (pmd.getServerClasses() != null) {
                    for (PluginClass pluginClass : pmd.getServerClasses()) {
                        String clazzName = pluginClass.getName();
                        int weight = pluginClass.getWeight();
                        pluginNameMap.put(clazzName, pmd.getName());

                        List<String> classList = weightedPlugins.get(weight);
                        if (classList == null) {
                            classList = new ArrayList<String>();
                            weightedPlugins.put(weight, classList);
                        }
                        classList.add(clazzName);
                    }
                }
            } else {
                logger.warn("Plugin \"" + pmd.getName() + "\" is not enabled.");
            }
        }

        // Load the plugins in order of their weight
        for (List<String> classList : weightedPlugins.descendingMap().values()) {
            for (String clazzName : classList) {
                String pluginName = pluginNameMap.get(clazzName);

                try {
                    ServerPlugin serverPlugin = (ServerPlugin) Class.forName(clazzName).newInstance();

                    if (serverPlugin instanceof ServicePlugin) {
                        ServicePlugin servicePlugin = (ServicePlugin) serverPlugin;

                        /* load any properties that may currently be in the database */
                        Properties currentProperties = getPluginProperties(pluginName);
                        /* get the default properties for the plugin */
                        Properties defaultProperties = servicePlugin.getDefaultProperties();

                        /* if there are any properties that are not currently set, set them to the default */
                        for (Object key : defaultProperties.keySet()) {
                            if (!currentProperties.containsKey(key)) {
                                currentProperties.put(key, defaultProperties.get(key));
                            }
                        }

                        /* save the properties to the database */
                        setPluginProperties(pluginName, currentProperties);

                        /*
                         * initialize the plugin with those properties and add it to the list of
                         * loaded plugins
                         */
                        servicePlugin.init(currentProperties);
                        servicePlugins.put(servicePlugin.getPluginPointName(), servicePlugin);
                        serverPlugins.add(servicePlugin);
                        logger.debug("successfully loaded server plugin: " + serverPlugin.getPluginPointName());
                    }

                    if (serverPlugin instanceof ChannelPlugin) {
                        ChannelPlugin channelPlugin = (ChannelPlugin) serverPlugin;
                        channelPlugins.put(channelPlugin.getPluginPointName(), channelPlugin);
                        serverPlugins.add(channelPlugin);
                        logger.debug("successfully loaded server channel plugin: " + serverPlugin.getPluginPointName());
                    }

                    if (serverPlugin instanceof CodeTemplateServerPlugin) {
                        CodeTemplateServerPlugin codeTemplateServerPlugin = (CodeTemplateServerPlugin) serverPlugin;
                        codeTemplateServerPlugins.put(codeTemplateServerPlugin.getPluginPointName(), codeTemplateServerPlugin);
                        serverPlugins.add(codeTemplateServerPlugin);
                        logger.debug("successfully loaded server code template plugin: " + serverPlugin.getPluginPointName());
                    }

                    if (serverPlugin instanceof DataTypeServerPlugin) {
                        DataTypeServerPlugin dataTypePlugin = (DataTypeServerPlugin) serverPlugin;
                        dataTypePlugins.put(dataTypePlugin.getPluginPointName(), dataTypePlugin);
                        serverPlugins.add(dataTypePlugin);
                        logger.debug("successfully loaded server data type plugin: " + serverPlugin.getPluginPointName());
                    }

                    if (serverPlugin instanceof ResourcePlugin) {
                        ResourcePlugin resourcePlugin = (ResourcePlugin) serverPlugin;
                        resourcePlugins.put(resourcePlugin.getPluginPointName(), resourcePlugin);
                        serverPlugins.add(resourcePlugin);
                        logger.debug("Successfully loaded resource plugin: " + resourcePlugin.getPluginPointName());
                    }

                    if (serverPlugin instanceof TransmissionModeProvider) {
                        TransmissionModeProvider transmissionModeProvider = (TransmissionModeProvider) serverPlugin;
                        transmissionModeProviders.put(transmissionModeProvider.getPluginPointName(), transmissionModeProvider);
                        serverPlugins.add(transmissionModeProvider);
                        logger.debug("Successfully loaded transmission mode provider plugin: " + transmissionModeProvider.getPluginPointName());
                    }

                    if (serverPlugin instanceof AuthorizationPlugin) {
                        AuthorizationPlugin authorizationPlugin = (AuthorizationPlugin) serverPlugin;

                        if (this.authorizationPlugin != null) {
                            throw new Exception("Multiple Authorization Plugins are not permitted.");
                        }

                        this.authorizationPlugin = authorizationPlugin;
                        serverPlugins.add(authorizationPlugin);
                        logger.debug("successfully loaded server authorization plugin: " + serverPlugin.getPluginPointName());
                    }
                } catch (Exception e) {
                    logger.error("Error instantiating plugin: " + pluginName, e);
                }
            }
        }
    }
From source file: jdao.JDAO.java

    public static DataSource createDataSourceByProperties(String file, DataSource dataSource, Properties properties) {
        try {
            adjustPropertiesForEnvParameters(properties);

            if (dataSource == null) {
                if (properties.containsKey("jdaoDriverClassName")) {
                    Class.forName(properties.getProperty("jdaoDriverClassName"));
                }

                if (properties.containsKey("jdaoDataSourceClassName")) {
                    dataSource = (DataSource) Thread.currentThread().getContextClassLoader()
                            .loadClass(properties.getProperty("jdaoDataSourceClassName")).newInstance();
                } else {
                    return (BasicDataSource) BasicDataSourceFactory.createDataSource(properties);
                }
            }

            BeanUtils.populate(dataSource, (Map) properties);
        } catch (Exception xe) {
            log("Error processing datasource: " + file, xe);
            return null;
        }
        return dataSource;
    }
From source file: org.apache.zeppelin.interpreter.launcher.SparkInterpreterLauncher.java

    @Override
    protected Map<String, String> buildEnvFromProperties(InterpreterLaunchContext context) {
        Map<String, String> env = super.buildEnvFromProperties(context);
        Properties sparkProperties = new Properties();
        String sparkMaster = getSparkMaster(properties);
        for (String key : properties.stringPropertyNames()) {
            if (RemoteInterpreterUtils.isEnvString(key)) {
                env.put(key, properties.getProperty(key));
            }
            if (isSparkConf(key, properties.getProperty(key))) {
                sparkProperties.setProperty(key, toShellFormat(properties.getProperty(key)));
            }
        }

        setupPropertiesForPySpark(sparkProperties);
        setupPropertiesForSparkR(sparkProperties);
        if (isYarnMode() && getDeployMode().equals("cluster")) {
            env.put("ZEPPELIN_SPARK_YARN_CLUSTER", "true");
            sparkProperties.setProperty("spark.yarn.submit.waitAppCompletion", "false");
        }

        StringBuilder sparkConfBuilder = new StringBuilder();
        if (sparkMaster != null) {
            sparkConfBuilder.append(" --master " + sparkMaster);
        }
        if (isYarnMode() && getDeployMode().equals("cluster")) {
            if (sparkProperties.containsKey("spark.files")) {
                sparkProperties.put("spark.files", sparkProperties.getProperty("spark.files") + ","
                        + zConf.getConfDir() + "/log4j_yarn_cluster.properties");
            } else {
                sparkProperties.put("spark.files", zConf.getConfDir() + "/log4j_yarn_cluster.properties");
            }
        }
        for (String name : sparkProperties.stringPropertyNames()) {
            sparkConfBuilder.append(" --conf " + name + "=" + sparkProperties.getProperty(name));
        }
        String useProxyUserEnv = System.getenv("ZEPPELIN_IMPERSONATE_SPARK_PROXY_USER");
        if (context.getOption().isUserImpersonate()
                && (StringUtils.isBlank(useProxyUserEnv) || !useProxyUserEnv.equals("false"))) {
            sparkConfBuilder.append(" --proxy-user " + context.getUserName());
        }
        Path localRepoPath = Paths.get(zConf.getInterpreterLocalRepoPath(), context.getInterpreterSettingId());
        if (isYarnMode() && getDeployMode().equals("cluster") && Files.exists(localRepoPath)
                && Files.isDirectory(localRepoPath)) {
            try {
                StreamSupport
                        .stream(Files.newDirectoryStream(localRepoPath, entry -> Files.isRegularFile(entry))
                                .spliterator(), false)
                        .map(jar -> jar.toAbsolutePath().toString())
                        .reduce((x, y) -> x.concat(",").concat(y))
                        .ifPresent(extraJars -> sparkConfBuilder.append(" --jars ").append(extraJars));
            } catch (IOException e) {
                LOGGER.error("Cannot make a list of additional jars from localRepo: {}", localRepoPath, e);
            }
        }

        env.put("ZEPPELIN_SPARK_CONF", sparkConfBuilder.toString());

        // set these env in the order of
        // 1. interpreter-setting
        // 2. zeppelin-env.sh
        // It is encouraged to set env in interpreter setting, but just for backward compatibility,
        // we also fall back to zeppelin-env.sh if it is not specified in interpreter setting.
        for (String envName : new String[] { "SPARK_HOME", "SPARK_CONF_DIR", "HADOOP_CONF_DIR" }) {
            String envValue = getEnv(envName);
            if (envValue != null) {
                env.put(envName, envValue);
            }
        }

        String keytab = zConf.getString(ZeppelinConfiguration.ConfVars.ZEPPELIN_SERVER_KERBEROS_KEYTAB);
        String principal = zConf.getString(ZeppelinConfiguration.ConfVars.ZEPPELIN_SERVER_KERBEROS_PRINCIPAL);

        if (!StringUtils.isBlank(keytab) && !StringUtils.isBlank(principal)) {
            env.put("ZEPPELIN_SERVER_KERBEROS_KEYTAB", keytab);
            env.put("ZEPPELIN_SERVER_KERBEROS_PRINCIPAL", principal);
            LOGGER.info("Run Spark under secure mode with keytab: " + keytab + ", principal: " + principal);
        } else {
            LOGGER.info("Run Spark under non-secure mode as no keytab and principal is specified");
        }
        LOGGER.debug("buildEnvFromProperties: " + env);
        return env;
    }
From source file: org.apache.juddi.v3.client.mapping.wsdl.WSDL2UDDI.java

    /**
     * Required properties are: businessName, for example: 'Apache';
     * nodeName, for example: 'uddi.example.org_80'; keyDomain, for example:
     * juddi.apache.org
     *
     * Optional properties are: lang, for example: 'nl'
     *
     * @param clerk - can be null if register/unregister methods are not used.
     * @param urlLocalizer - a reference to a custom {@link URLLocalizer}
     * @param properties - required values keyDomain, businessKey, nodeName
     * @throws ConfigurationException
     */
    public WSDL2UDDI(UDDIClerk clerk, URLLocalizer urlLocalizer, Properties properties)
            throws ConfigurationException {
        super();
        if (properties == null) {
            throw new IllegalArgumentException("properties");
        }
        this.clerk = clerk;
        this.urlLocalizer = urlLocalizer;
        this.properties = properties;

        if (clerk != null) {
            if (!properties.containsKey("keyDomain")) {
                throw new ConfigurationException("Property keyDomain is a required property when using WSDL2UDDI.");
            }
            if (!properties.containsKey("businessKey") && !properties.containsKey("businessName")) {
                throw new ConfigurationException(
                        "Either property businessKey, or businessName, is a required property when using WSDL2UDDI.");
            }
            if (!properties.containsKey("nodeName")) {
                if (properties.containsKey("serverName") && properties.containsKey("serverPort")) {
                    String nodeName = properties.getProperty("serverName") + "_"
                            + properties.getProperty("serverPort");
                    properties.setProperty("nodeName", nodeName);
                } else {
                    throw new ConfigurationException(
                            "Property nodeName is not defined and is a required property when using WSDL2UDDI.");
                }
            }
        }

        // Obtaining values from the properties
        this.keyDomainURI = "uddi:" + properties.getProperty("keyDomain") + ":";
        // Note: the original source calls properties.contains(Property.BUSINESS_KEY) here,
        // but Hashtable.contains() tests *values*, not keys; containsKey is the correct probe.
        if (properties.containsKey(Property.BUSINESS_KEY)) {
            this.businessKey = properties.getProperty(Property.BUSINESS_KEY);
        } else {
            // using the BusinessKey template and the businessName to construct the key
            this.businessKey = UDDIKeyConvention.getBusinessKey(properties);
        }
        this.lang = properties.getProperty(Property.LANG, Property.DEFAULT_LANG);
    }
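The fix noted in the comment above deserves a standalone illustration: because Properties extends Hashtable, it inherits contains(Object value), which searches values, so a call that meant containsKey compiles without complaint and silently misbehaves. A minimal sketch, with a hypothetical key and value:

    import java.util.Properties;

    public class ContainsVsContainsKey {
        public static void main(String[] args) {
            Properties props = new Properties();
            props.setProperty("businessKey", "uddi:juddi.apache.org:mybusiness"); // hypothetical entry

            System.out.println(props.containsKey("businessKey")); // true  - matches the key
            System.out.println(props.contains("businessKey"));    // false - contains() searches values only
            System.out.println(props.contains("uddi:juddi.apache.org:mybusiness")); // true - matched as a value
        }
    }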
From source file: iDynoOptimizer.MOEAFramework26.src.org.moeaframework.algorithm.pisa.PISAAlgorithm.java

    /**
     * Constructs an adapter for a PISA selector.
     *
     * @param name the name of the PISA selector
     * @param problem the problem being solved
     * @param variation the variation operator
     * @param properties additional properties for the PISA selector configuration file
     * @throws IOException if an I/O error occurred
     */
    public PISAAlgorithm(String name, Problem problem, Variation variation, Properties properties)
            throws IOException {
        super(problem);
        this.variation = variation;

        TypedProperties typedProperties = new TypedProperties(properties);
        String command = Settings.getPISACommand(name);
        String configuration = Settings.getPISAConfiguration(name);
        int pollRate = Settings.getPISAPollRate();

        if (command == null) {
            throw new IllegalArgumentException("missing command");
        }

        // This is slightly unsafe since the actual files used by
        // PISA add the arc, cfg, ini, sel and sta extensions. This
        // dependency on files for communication is part of PISA's design.
        filePrefix = File.createTempFile("pisa", "").getCanonicalPath();

        // ensure the seed property is set
        if (!properties.containsKey("seed")) {
            properties.setProperty("seed", Integer.toString(PRNG.nextInt()));
        }

        // write the configuration file if one is not specified
        if (configuration == null) {
            PrintWriter writer = null;
            configuration = new File(filePrefix + "par").getCanonicalPath();

            try {
                writer = new PrintWriter(new BufferedWriter(new FileWriter(configuration)));

                for (String parameter : Settings.getPISAParameters(name)) {
                    writer.print(parameter);
                    writer.print(' ');
                    writer.println(typedProperties.getString(parameter,
                            Settings.getPISAParameterDefaultValue(name, parameter)));
                }
            } finally {
                if (writer != null) {
                    writer.close();
                }
            }
        }

        // construct the command line call to start the PISA selector
        selector = new ProcessBuilder(ArrayUtils.addAll(Settings.parseCommand(command), configuration,
                filePrefix, Double.toString(pollRate / (double) 1000)));

        // ensure population size is a multiple of the # of parents
        int populationSize = (int) typedProperties.getDouble("populationSize", 100);

        while (populationSize % variation.getArity() != 0) {
            populationSize++;
        }

        // configure the remaining options
        alpha = populationSize;
        mu = (int) typedProperties.getDouble("mu", alpha);
        lambda = (int) typedProperties.getDouble("lambda", alpha);
        state = new State(new File(filePrefix + "sta"));
        solutions = new HashMap<Integer, Solution>();
    }
From source file: org.apache.geode.distributed.internal.ClusterConfigurationService.java

    private void persistSecuritySettings(final Region<String, Configuration> configRegion) {
        Properties securityProps = cache.getDistributedSystem().getSecurityProperties();

        Configuration clusterPropertiesConfig = configRegion.get(ClusterConfigurationService.CLUSTER_CONFIG);
        if (clusterPropertiesConfig == null) {
            clusterPropertiesConfig = new Configuration(ClusterConfigurationService.CLUSTER_CONFIG);
            configRegion.put(ClusterConfigurationService.CLUSTER_CONFIG, clusterPropertiesConfig);
        }

        // put security-manager and security-post-processor in the cluster config
        Properties clusterProperties = clusterPropertiesConfig.getGemfireProperties();
        if (securityProps.containsKey(SECURITY_MANAGER)) {
            clusterProperties.setProperty(SECURITY_MANAGER, securityProps.getProperty(SECURITY_MANAGER));
        }
        if (securityProps.containsKey(SECURITY_POST_PROCESSOR)) {
            clusterProperties.setProperty(SECURITY_POST_PROCESSOR,
                    securityProps.getProperty(SECURITY_POST_PROCESSOR));
        }
    }
From source file: de.tu_dortmund.ub.hb_ng.SolRDF.java

    private String doSearchRequest(Properties query, String format) throws LinkedDataStorageException {
        String result = null;

        int timeout = Integer.parseInt(this.config.getProperty("storage.lmf.timeout"));

        try {
            String solr_url = this.config.getProperty("storage.lmf.endpoint.search") + "q="
                    + URLEncoder.encode(query.getProperty("q"), "UTF-8");

            if (query.containsKey("start")) {
                solr_url += "&start=" + query.getProperty("start");
            }

            if (query.containsKey("fq")) {
                for (String f : query.getProperty("fq").split(";")) {
                    if (!f.contains(":")) {
                        solr_url += "&fq=" + URLEncoder.encode(f, "UTF-8");
                    } else {
                        String[] facet = f.split(":");
                        solr_url += "&fq=" + facet[0] + ":\""
                                + URLEncoder.encode(f.replace(facet[0] + ":", "").replaceAll("\"", ""), "UTF-8")
                                + "\"";
                    }
                }
            }

            if (query.containsKey("rows")) {
                solr_url += "&rows=" + query.getProperty("rows");
            }

            if (query.containsKey("sort")) {
                solr_url += "&sort=" + query.getProperty("sort");
            }

            switch (format) {
            case "html": {
                solr_url += "&wt=xml&indent=true";
                break;
            }
            case "xml": {
                solr_url += "&wt=xml&indent=true";
                break;
            }
            case "json": {
                solr_url += "&wt=json&indent=true";
                break;
            }
            }

            String solr_params = "&facet=true&facet.field=lmf.type&facet.missing=true&facet.mincount=1";
            solr_url += solr_params;

            this.logger.info("Solr-URL: " + solr_url);

            RequestConfig defaultRequestConfig = RequestConfig.custom().setSocketTimeout(timeout)
                    .setConnectTimeout(timeout).setConnectionRequestTimeout(timeout).build();

            CloseableHttpClient httpclient = HttpClients.custom().setDefaultRequestConfig(defaultRequestConfig)
                    .build();

            try {
                HttpGet httpGet = new HttpGet(solr_url);

                CloseableHttpResponse httpResponse = null;

                long start = System.nanoTime();
                try {
                    httpResponse = httpclient.execute(httpGet);
                } catch (ConnectTimeoutException | SocketTimeoutException e) {
                    this.logger.info("[" + this.getClass().getName() + "] " + e.getClass().getName() + ": "
                            + e.getMessage());
                    httpResponse = httpclient.execute(httpGet);
                }
                long elapsed = System.nanoTime() - start;
                this.logger.info("[" + this.getClass().getName() + "] lmf request - "
                        + (elapsed / 1000.0 / 1000.0 / 1000.0) + " s");

                try {
                    int statusCode = httpResponse.getStatusLine().getStatusCode();
                    HttpEntity httpEntity = httpResponse.getEntity();

                    switch (statusCode) {
                    case 200: {
                        StringWriter writer = new StringWriter();
                        IOUtils.copy(httpEntity.getContent(), writer, "UTF-8");
                        result = writer.toString();
                        break;
                    }
                    default: {
                        this.logger.error("[" + this.getClass().getName() + "] " + new Date()
                                + " - ERROR: Requesting Linked Media Framework: " + solr_url + " - " + statusCode
                                + " : " + httpResponse.getStatusLine().getReasonPhrase());
                        throw new LinkedDataStorageException(
                                statusCode + " : " + httpResponse.getStatusLine().getReasonPhrase());
                    }
                    }

                    EntityUtils.consume(httpEntity);
                } finally {
                    httpResponse.close();
                }
            } catch (ConnectTimeoutException | SocketTimeoutException e) {
                this.logger.error("[" + this.getClass().getName() + "] " + new Date()
                        + " - ERROR: Requesting Linked Media Framework: " + solr_url + " - timeout");
                throw new LinkedDataStorageException(e.getMessage(), e.getCause());
            } finally {
                httpclient.close();
            }
        } catch (Exception e) {
            this.logger.error("[" + this.getClass().getName() + "] " + new Date()
                    + " - ERROR: Requesting Linked Media Framework " + " - " + e.getMessage());
            throw new LinkedDataStorageException(e.getMessage(), e.getCause());
        }

        return result;
    }