List of usage examples for java.util.Properties.containsKey(Object)
@Override public boolean containsKey(Object key)
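Before the project examples below, here is a minimal, self-contained sketch (not taken from any of the listed projects) of the behaviour most of these snippets rely on: containsKey reports only keys stored directly in the Properties table and does not consult the defaults table, whereas getProperty does fall back to the defaults.

import java.util.Properties;

public class ContainsKeyDemo {
    public static void main(String[] args) {
        Properties defaults = new Properties();
        defaults.setProperty("prefix", "ipdb");          // reachable only via getProperty

        Properties props = new Properties(defaults);     // 'defaults' backs 'props'
        props.setProperty("host", "localhost");

        System.out.println(props.containsKey("host"));   // true  - stored directly in 'props'
        System.out.println(props.containsKey("prefix")); // false - lives only in the defaults table
        System.out.println(props.getProperty("prefix")); // "ipdb" - getProperty falls back to defaults
    }
}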
From source file:org.apache.falcon.oozie.process.NativeOozieProcessWorkflowBuilder.java
private void copyPropsWithoutOverride(Properties buildProps, Properties suppliedProps) {
    if (suppliedProps == null || suppliedProps.isEmpty()) {
        return;
    }
    for (String propertyName : suppliedProps.stringPropertyNames()) {
        if (buildProps.containsKey(propertyName)) {
            LOG.warn("User provided property {} is already declared in the entity and will be ignored.",
                    propertyName);
            continue;
        }
        String propertyValue = suppliedProps.getProperty(propertyName);
        buildProps.put(propertyName, propertyValue);
    }
}
From source file:iddb.runtime.cache.impl.CacheImpl.java
public CacheImpl() throws UnavailableCacheException {
    Properties props = new Properties();
    try {
        props.load(getClass().getClassLoader().getResourceAsStream("memcache.properties"));
        client = new MemcachedClient(
                new InetSocketAddress(props.getProperty("host"), Integer.parseInt(props.getProperty("port"))));
        if (props.containsKey("expiration"))
            expiration = Integer.parseInt(props.getProperty("expiration"));
        if (props.containsKey("prefix"))
            prefix = props.getProperty("prefix");
        else
            prefix = "ipdb";
    } catch (Exception e) {
        log.error("Unable to load cache properties [{}]", e.getMessage());
        throw new UnavailableCacheException();
    }
    setNamespace("default");
    log.debug("Initialized memcache instance.");
}
From source file:org.cruxframework.crux.tools.servicemap.ServiceMapper.java
/**
 * Generates Remote Service map
 */
public void generateRestServicesMap() {
    try {
        File metaInfFile = getMetaInfFile();
        File serviceMapFile = new File(metaInfFile, "crux-rest");
        if (serviceMapFile.exists() && !isOverride()) {
            logger.info("REST Service map already exists. Skipping generation...");
            return;
        }
        initializeScannerURLs();

        Set<String> restServices = ClassScanner.searchClassesByAnnotation(RestService.class);
        Properties cruxRest = new Properties();
        if (restServices != null) {
            for (String service : restServices) {
                try {
                    Class<?> serviceClass = Class.forName(service);
                    RestService annot = serviceClass.getAnnotation(RestService.class);
                    if (cruxRest.containsKey(annot.value())) {
                        throw new ServiceMapperException("Duplicated rest service [{" + annot.value()
                                + "}]. Overiding previous registration...");
                    }
                    cruxRest.put(annot.value(), service);
                } catch (ClassNotFoundException e) {
                    throw new ServiceMapperException("Error initializing rest service class.", e);
                }
            }
        }
        cruxRest.store(new FileOutputStream(serviceMapFile), "Crux RestServices implementations");
    } catch (IOException e) {
        throw new ServiceMapperException("Error creating rest service map", e);
    }
}
From source file:org.string_db.psicquic.AppProperties.java
/**
 * Read all property files and fill in the fields
 *
 * @throws ExceptionInInitializerError
 */
private AppProperties() throws ExceptionInInitializerError {
    Properties props = new Properties();
    try {
        final FileInputStream inStream = new FileInputStream(CONFIG_DIR + "psicquic.properties");
        props.load(inStream);
        inStream.close();
    } catch (Exception e) {
        throw new ExceptionInInitializerError("can't load properties: " + e.getMessage());
    }
    if (!props.containsKey("solr_url")) {
        throw new ExceptionInInitializerError("solr_url property missing!");
    }
    solrUrl = props.getProperty("solr_url");
    logger.info("solr at: " + solrUrl);
    ctx = new AnnotationConfigApplicationContext(AppConfig.class, DriverDataSourceConfig.class);
}
From source file:org.apache.ode.daohib.bpel.BpelDAOConnectionFactoryImpl.java
/**
 * @see org.apache.ode.bpel.dao.BpelDAOConnectionFactory#init(java.util.Properties)
 */
public void init(Properties initialProps) {
    if (_ds == null) {
        String errmsg = "setDataSource() not called!";
        __log.fatal(errmsg);
        throw new IllegalStateException(errmsg);
    }
    if (_tm == null) {
        String errmsg = "setTransactionManager() not called!";
        __log.fatal(errmsg);
        throw new IllegalStateException(errmsg);
    }

    if (initialProps == null)
        initialProps = new Properties();

    // Don't want to pollute original properties
    Properties properties = new Properties();
    for (Object prop : initialProps.keySet()) {
        properties.put(prop, initialProps.get(prop));
    }

    // Note that we don't allow the following properties to be overridden by
    // the client.
    if (properties.containsKey(Environment.CONNECTION_PROVIDER))
        __log.warn("Ignoring user-specified Hibernate property: " + Environment.CONNECTION_PROVIDER);
    if (properties.containsKey(Environment.TRANSACTION_MANAGER_STRATEGY))
        __log.warn("Ignoring user-specified Hibernate property: " + Environment.TRANSACTION_MANAGER_STRATEGY);
    if (properties.containsKey(Environment.SESSION_FACTORY_NAME))
        __log.warn("Ignoring user-specified Hibernate property: " + Environment.SESSION_FACTORY_NAME);

    properties.put(Environment.CONNECTION_PROVIDER, DataSourceConnectionProvider.class.getName());
    properties.put(Environment.TRANSACTION_MANAGER_STRATEGY, HibernateTransactionManagerLookup.class.getName());
    properties.put(Environment.TRANSACTION_STRATEGY, "org.hibernate.transaction.JTATransactionFactory");
    properties.put(Environment.CURRENT_SESSION_CONTEXT_CLASS, "jta");

    // Isolation levels override; when you use a ConnectionProvider, this has no effect
    String level = System.getProperty("ode.connection.isolation", "2");
    properties.put(Environment.ISOLATION, level);

    if (__log.isDebugEnabled()) {
        Enumeration<?> names = properties.propertyNames();
        __log.debug("Properties passed to Hibernate:");
        while (names.hasMoreElements()) {
            String name = (String) names.nextElement();
            __log.debug(name + "=" + properties.getProperty(name));
        }
    }

    _sessionManager = createSessionManager(properties, _ds, _tm);
}
From source file:org.apache.hive.hcatalog.pig.HCatStorer.java
/**
 * @param location databaseName.tableName
 */
@Override
public void setStoreLocation(String location, Job job) throws IOException {
    Configuration config = job.getConfiguration();
    config.set(INNER_SIGNATURE, INNER_SIGNATURE_PREFIX + "_" + sign);
    Properties udfProps = UDFContext.getUDFContext().getUDFProperties(this.getClass(), new String[] { sign });
    String[] userStr = location.split("\\.");

    if (udfProps.containsKey(HCatConstants.HCAT_PIG_STORER_LOCATION_SET)) {
        for (Enumeration<Object> emr = udfProps.keys(); emr.hasMoreElements();) {
            PigHCatUtil.getConfigFromUDFProperties(udfProps, config, emr.nextElement().toString());
        }
        Credentials crd = jobCredentials.get(INNER_SIGNATURE_PREFIX + "_" + sign);
        if (crd != null) {
            job.getCredentials().addAll(crd);
        }
    } else {
        Job clone = new Job(job.getConfiguration());
        OutputJobInfo outputJobInfo;
        if (userStr.length == 2) {
            outputJobInfo = OutputJobInfo.create(userStr[0], userStr[1], partitions);
        } else if (userStr.length == 1) {
            outputJobInfo = OutputJobInfo.create(null, userStr[0], partitions);
        } else {
            throw new FrontendException(
                    "location " + location + " is invalid. It must be of the form [db.]table",
                    PigHCatUtil.PIG_EXCEPTION_CODE);
        }
        Schema schema = (Schema) ObjectSerializer.deserialize(udfProps.getProperty(PIG_SCHEMA));
        if (schema != null) {
            pigSchema = schema;
        }
        if (pigSchema == null) {
            throw new FrontendException("Schema for data cannot be determined.", PigHCatUtil.PIG_EXCEPTION_CODE);
        }
        String externalLocation = (String) udfProps.getProperty(HCatConstants.HCAT_PIG_STORER_EXTERNAL_LOCATION);
        if (externalLocation != null) {
            outputJobInfo.setLocation(externalLocation);
        }
        try {
            HCatOutputFormat.setOutput(job, outputJobInfo);
        } catch (HCatException he) {
            // Pass the message to the user - essentially something about the
            // table information passed to HCatOutputFormat was not right.
            throw new PigException(he.getMessage(), PigHCatUtil.PIG_EXCEPTION_CODE, he);
        }
        HCatSchema hcatTblSchema = HCatOutputFormat.getTableSchema(job.getConfiguration());
        try {
            doSchemaValidations(pigSchema, hcatTblSchema);
        } catch (HCatException he) {
            throw new FrontendException(he.getMessage(), PigHCatUtil.PIG_EXCEPTION_CODE, he);
        }
        computedSchema = convertPigSchemaToHCatSchema(pigSchema, hcatTblSchema);
        HCatOutputFormat.setSchema(job, computedSchema);
        udfProps.setProperty(COMPUTED_OUTPUT_SCHEMA, ObjectSerializer.serialize(computedSchema));

        // We will store all the new/changed properties in the job in the
        // udf context, so that the HCatOutputFormat.setOutput and setSchema
        // methods need not be called many times.
        for (Entry<String, String> keyValue : job.getConfiguration()) {
            String oldValue = clone.getConfiguration().getRaw(keyValue.getKey());
            if ((oldValue == null) || (keyValue.getValue().equals(oldValue) == false)) {
                udfProps.put(keyValue.getKey(), keyValue.getValue());
            }
        }

        // Store credentials in a private hash map and not the udf context to
        // make sure they are not public.
        jobCredentials.put(INNER_SIGNATURE_PREFIX + "_" + sign, job.getCredentials());
        udfProps.put(HCatConstants.HCAT_PIG_STORER_LOCATION_SET, true);
    }
}
From source file:com.mongodb.hadoop.hive.MongoStorageHandler.java
/**
 * Helper function to copy properties
 */
private void copyJobProperties(final Properties from, final Map<String, String> to) {
    // Copy Hive-specific properties used directly by
    // HiveMongoInputFormat, BSONSerDe.
    if (from.containsKey(serdeConstants.LIST_COLUMNS)) {
        to.put(serdeConstants.LIST_COLUMNS, (String) from.get(serdeConstants.LIST_COLUMNS));
    }
    if (from.containsKey(serdeConstants.LIST_COLUMN_TYPES)) {
        to.put(serdeConstants.LIST_COLUMN_TYPES, (String) from.get(serdeConstants.LIST_COLUMN_TYPES));
    }
    if (from.containsKey(MONGO_COLS)) {
        to.put(MONGO_COLS, (String) from.get(MONGO_COLS));
    }
    if (from.containsKey(TABLE_LOCATION)) {
        to.put(TABLE_LOCATION, (String) from.get(TABLE_LOCATION));
    }

    // First, merge properties from the given properties file, if there
    // was one. These can be overwritten by other table properties later.
    String propertiesFilePathString = from.getProperty(PROPERTIES_FILE_PATH);
    if (propertiesFilePathString != null) {
        try {
            Properties properties = getProperties(getConf(), propertiesFilePathString);
            for (Map.Entry<Object, Object> prop : properties.entrySet()) {
                String key = (String) prop.getKey();
                String value = (String) prop.getValue();
                if (key.equals(MONGO_URI)) {
                    // Copy to input/output URI.
                    to.put(MongoConfigUtil.INPUT_URI, value);
                    to.put(MongoConfigUtil.OUTPUT_URI, value);
                } else {
                    to.put(key, value);
                }
            }
        } catch (IOException e) {
            LOG.error("Error while trying to read properties file " + propertiesFilePathString, e);
        }
    }

    // Copy general connector properties, such as ones defined in
    // MongoConfigUtil. These are all prefixed with "mongo.".
    for (Entry<Object, Object> entry : from.entrySet()) {
        String key = (String) entry.getKey();
        if (key.startsWith("mongo.")) {
            to.put(key, (String) from.get(key));
        }
    }

    // Update the keys for MONGO_URI per MongoConfigUtil.
    if (from.containsKey(MONGO_URI)) {
        String mongoURIStr = (String) from.get(MONGO_URI);
        to.put(MongoConfigUtil.INPUT_URI, mongoURIStr);
        to.put(MongoConfigUtil.OUTPUT_URI, mongoURIStr);
    }
}
From source file:org.commonjava.maven.ext.manip.io.ModelIO.java
/**
 * Recursively resolve a property value.
 *
 * @param userProperties
 * @param p
 * @param key
 * @return the value of the key
 */
private String resolveProperty(Properties userProperties, Properties p, String key) {
    String result = "";
    String child = key.substring(2, key.length() - 1);
    if (p.containsKey(child) && !userProperties.containsKey(child)) {
        result = p.getProperty(child);
        if (result.startsWith("${")) {
            result = resolveProperty(userProperties, p, result);
        }
    }
    return result;
}
From source file:de.qucosa.servlet.MetsDisseminatorServlet.java
/**
 * Writes a METS representation to the response object if a proper PID was given as query parameter.
 *
 * @param request  The HTTP request
 * @param response The HTTP response
 * @throws IOException if something fails when writing the response
 */
protected void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException {
    try {
        final Set<String> allowedRequestParameters = new LinkedHashSet<>();
        allowedRequestParameters.add("pid");
        allowedRequestParameters.add("supplement");

        final Properties requestProperties = new PropertyCollector().source(startupProperties)
                .source(request, allowedRequestParameters).collect();

        if (!requestProperties.containsKey("pid")) {
            throw new MissingRequiredParameter("pid");
        }
        if (!requestProperties.containsKey(PROP_FEDORA_HOST_URL)) {
            String targetUrlFromRequest = extractTargetUrlFromRequest(request);
            requestProperties.setProperty(PROP_FEDORA_HOST_URL, targetUrlFromRequest);
        }
        if (!requestProperties.containsKey(PROP_FEDORA_CONTENT_URL)) {
            requestProperties.setProperty(PROP_FEDORA_CONTENT_URL,
                    requestProperties.getProperty(PROP_FEDORA_HOST_URL));
        }

        final String basicAuthCredentials = extractBasicAuthCredentials(request);
        if (!basicAuthCredentials.isEmpty()) {
            requestProperties.setProperty(PROP_FEDORA_CREDENTIALS, basicAuthCredentials);
        }

        final FedoraClient client;
        if (basicAuthCredentials.isEmpty() && fedoraClientPool != null) {
            client = fedoraClientPool.borrowObject();
        } else {
            client = attemptToCreateFedoraClientFrom(requestProperties);
        }
        if (client == null) {
            throw new AuthenticationException("No connection obtainable. Credentials missing?");
        }

        final FedoraRepository fedoraRepository = new FedoraRepository(client, cache);
        if (fedoraClientPool != null) {
            fedoraClientPool.returnObject(client);
        }

        final MetsDisseminator metsDisseminator = new MetsDisseminator(fedoraRepository,
                requestProperties.getProperty(PROP_FEDORA_CONTENT_URL), implementationVersion());
        metsDisseminator.disseminate(requestProperties.getProperty("pid"),
                "yes".equals(requestProperties.getProperty("supplement")), response.getOutputStream());

        response.setStatus(HttpServletResponse.SC_OK);
        response.setContentType("application/mets+xml");
        response.setCharacterEncoding("UTF-8");
    } catch (AuthenticationException e) {
        final String msg = "Authentication failed";
        log.warn(msg, e.getMessage());
        response.sendError(SC_UNAUTHORIZED, msg);
    } catch (IdentifierCannotBeResolved e) {
        response.sendError(SC_NOT_FOUND, e.getMessage());
    } catch (MissingRequiredParameter e) {
        response.sendError(SC_BAD_REQUEST, e.getMessage());
    } catch (Exception e) {
        log.error("Unexpected error", e);
        response.sendError(SC_INTERNAL_SERVER_ERROR, "Internal Server Error");
    }
}
From source file:org.apache.oodt.cas.workflow.misc.WingsTask.java
private ArrayList<String> fetchFromProps(Properties props, String argtype) {
    ArrayList<String> args = new ArrayList<String>();
    int i = 1;
    while (props.containsKey(argtype + i)) {
        args.add(props.getProperty(argtype + i));
        i++;
    }
    return args;
}