Example usage for java.util Properties containsKey

List of usage examples for java.util Properties containsKey

Introduction

On this page you can find example usages of java.util.Properties.containsKey.

Prototype

@Override
public boolean containsKey(Object key)
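Note that containsKey is inherited from Hashtable: it reports only entries stored directly in the Properties object and, unlike getProperty, never consults the defaults table passed to the constructor. A minimal, self-contained sketch of the distinction:

import java.util.Properties;

public class ContainsKeyDemo {
    public static void main(String[] args) {
        Properties defaults = new Properties();
        defaults.setProperty("timeout", "30");

        Properties props = new Properties(defaults); // "timeout" lives only in the defaults table
        props.setProperty("host", "localhost");

        System.out.println(props.containsKey("host"));    // true  -- stored directly
        System.out.println(props.containsKey("timeout")); // false -- defaults are not searched
        System.out.println(props.getProperty("timeout")); // 30    -- getProperty does search defaults
    }
}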

Usage

From source file:com.jaspersoft.jasperserver.api.metadata.common.util.RepositoryFileObject.java

public RepositoryService getRepositoryService() {
    if (repositoryService != null) {
        return repositoryService;
    }

    ApplicationContext ctx = StaticApplicationContext.getApplicationContext();

    if (ctx == null) {
        throw new JSException("StaticApplicationContext not configured in Spring");
    }

    Properties springConfiguration;
    try {
        springConfiguration = ((Properties) ctx.getBean("springConfiguration"));
    } catch (NoSuchBeanDefinitionException e) {
        springConfiguration = new Properties();
        log.debug("RepositoryFileObject#doAttach : no spring configuration properties");
    }

    /*
     * For the "repo" scheme, we expect an "external" representation of the URI
     * that has to be transformed.
     * Otherwise we use the "repoint" scheme, which expects an "internal" representation of the URI
     * that does not have to be transformed so that the same file is used across tenants.
     */
    String repositoryServiceName = "repositoryService";

    if (scheme.equals(RepositoryFileProvider.REPOSITORY_SCHEME)) {
        if (springConfiguration.containsKey("bean.repositoryService")) {
            repositoryServiceName = springConfiguration.getProperty("bean.repositoryService");
        }
    } else {
        if (springConfiguration.containsKey("bean.internalRepositoryService")) {
            repositoryServiceName = springConfiguration.getProperty("bean.internalRepositoryService");
        }
    }

    repositoryService = (RepositoryService) ctx.getBean(repositoryServiceName);
    return repositoryService;
}
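When no defaults table is in play, the containsKey/getProperty pair above can be collapsed into the two-argument getProperty, which returns a fallback for absent keys (a sketch, not the project's actual code):

String repositoryServiceName =
        springConfiguration.getProperty("bean.repositoryService", "repositoryService");

The behavior differs only if the Properties object has a defaults table, which getProperty searches but containsKey does not.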

From source file:com.google.api.ads.adwords.jaxws.extensions.processors.onfile.ReportProcessorOnFile.java

/**
 * Generate all the mapped reports to the given account IDs.
 *
 * @param userId the user ID.
 * @param mccAccountId the MCC account ID.
 * @param dateRangeType the date range type.
 * @param dateStart the starting date.
 * @param dateEnd the ending date.
 * @param accountIdsSet the account IDs.
 * @param properties the configuration properties.
 * @throws Exception error reaching the API.
 */
@Override
public void generateReportsForMCC(String userId, String mccAccountId,
        ReportDefinitionDateRangeType dateRangeType, String dateStart, String dateEnd, Set<Long> accountIdsSet,
        Properties properties) throws Exception {

    LOGGER.info("*** Retrieving account IDs ***");

    if (accountIdsSet == null || accountIdsSet.size() == 0) {
        accountIdsSet = this.retrieveAccountIds(userId, mccAccountId);
    } else {
        LOGGER.info("Accounts loaded from file.");
    }

    AdWordsSession.Builder builder = authenticator.authenticate(userId, mccAccountId, false);

    LOGGER.info("*** Generating Reports for " + accountIdsSet.size() + " accounts ***");

    Stopwatch stopwatch = Stopwatch.createStarted();

    Set<ReportDefinitionReportType> reports = this.csvReportEntitiesMapping.getDefinedReports();

    // reports
    for (ReportDefinitionReportType reportType : reports) {
        if (properties.containsKey(reportType.name())) {
            this.downloadAndProcess(userId, mccAccountId, builder, reportType, dateRangeType, dateStart,
                    dateEnd, accountIdsSet, properties);
        }
    }

    this.multipleClientReportDownloader.finalizeExecutorService();

    stopwatch.stop();
    LOGGER.info("*** Finished processing all reports in " + (stopwatch.elapsed(TimeUnit.MILLISECONDS) / 1000)
            + " seconds ***\n");
}

From source file:org.apache.hive.hcatalog.pig.HCatLoader.java

private void setProjectionSchemaInfoInUDFContext(Job job, Properties udfProps) throws IOException {
    Job localJob = getLocalJobClone(job);
    RequiredFieldList requiredFieldsInfo = (RequiredFieldList) udfProps.get(PRUNE_PROJECTION_INFO);
    boolean localJobConfHasChanged = false;
    final String PROJECTIONS_PUSHED_DOWN_TO_JOB_CONF = "hcat.loader.projections.pushed.down.to.job.conf";
    if (requiredFieldsInfo != null) {
        // pushProjection() was called.
        if (!udfProps.containsKey(PROJECTIONS_PUSHED_DOWN_TO_JOB_CONF)) { // Protect against pushing projections twice.
            // Required-fields were never set.
            // Store projection information in local job-instance.
            ArrayList<Integer> columnIds = Lists
                    .newArrayListWithExpectedSize(requiredFieldsInfo.getFields().size());
            ArrayList<String> columnNames = Lists
                    .newArrayListWithExpectedSize(requiredFieldsInfo.getFields().size());
            for (RequiredField rf : requiredFieldsInfo.getFields()) {
                columnIds.add(rf.getIndex());
                columnNames.add(rf.getAlias());
            }
            ColumnProjectionUtils.appendReadColumns(localJob.getConfiguration(), columnIds, columnNames);
            outputSchema = phutil.getHCatSchema(requiredFieldsInfo.getFields(), signature, this.getClass());
            HCatInputFormat.setOutputSchema(localJob, outputSchema);
            udfProps.put(PROJECTIONS_PUSHED_DOWN_TO_JOB_CONF, true);
            localJobConfHasChanged = true;
        } else {
            // OutputSchema was already serialized. Skip serialization. Restore from requiredFieldsInfo.
            outputSchema = phutil.getHCatSchema(requiredFieldsInfo.getFields(), signature, this.getClass());
        }
    } else {
        // pushProjection() hasn't been called yet.
        // If this is the Pig backend, no projections were ever pushed. Assume all columns have to be read.
        if (HCatUtil.checkJobContextIfRunningFromBackend(job)) {
            ColumnProjectionUtils.setReadAllColumns(localJob.getConfiguration());
            outputSchema = (HCatSchema) udfProps.get(HCatConstants.HCAT_TABLE_SCHEMA);
            HCatInputFormat.setOutputSchema(localJob, outputSchema);
            localJobConfHasChanged = true;
        }
        // If this is the Pig frontend, pushProjection() might still be called later.
    }

    LOG.debug("outputSchema=" + outputSchema);

    // Store modified localJobConf settings to UDFContext.
    if (localJobConfHasChanged) {
        storeDifferenceToUDFProperties(localJob.getConfiguration(), job.getConfiguration(), udfProps);
    }
}

From source file:com.shootoff.config.Configuration.java

private void readConfigurationFile() throws IOException, ConfigurationException {
    Properties prop = new Properties();

    InputStream inputStream;

    if (configInput != null) {
        inputStream = configInput;
    } else {
        inputStream = new FileInputStream(configName);
    }

    if (inputStream != null) {
        prop.load(inputStream);
    } else {
        throw new FileNotFoundException("Could not read configuration file " + configName);
    }

    if (prop.containsKey(WEBCAMS_PROP)) {
        List<String> webcamNames = new ArrayList<String>();
        List<String> webcamInternalNames = new ArrayList<String>();

        for (String nameString : prop.getProperty(WEBCAMS_PROP).split(",")) {
            String[] names = nameString.split(":");
            if (names.length > 1) {
                webcamNames.add(names[0]);
                webcamInternalNames.add(names[1]);
            }
        }

        for (Camera webcam : Camera.getWebcams()) {
            int cameraIndex = webcamInternalNames.indexOf(webcam.getName());
            if (cameraIndex >= 0) {
                webcams.put(webcamNames.get(cameraIndex), webcam);
            }
        }
    }

    if (prop.containsKey(DETECTION_RATE_PROP)) {
        setDetectionRate(Integer.parseInt(prop.getProperty(DETECTION_RATE_PROP)));
    }

    if (prop.containsKey(LASER_INTENSITY_PROP)) {
        setLaserIntensity(Integer.parseInt(prop.getProperty(LASER_INTENSITY_PROP)));
    }

    if (prop.containsKey(MARKER_RADIUS_PROP)) {
        setMarkerRadius(Integer.parseInt(prop.getProperty(MARKER_RADIUS_PROP)));
    }

    if (prop.containsKey(IGNORE_LASER_COLOR_PROP)) {
        String colorName = prop.getProperty(IGNORE_LASER_COLOR_PROP);

        if (!colorName.equals("None")) {
            setIgnoreLaserColor(true);
            setIgnoreLaserColorName(colorName);
        }
    }

    if (prop.containsKey(USE_RED_LASER_SOUND_PROP)) {
        setUseRedLaserSound(Boolean.parseBoolean(prop.getProperty(USE_RED_LASER_SOUND_PROP)));
    }

    if (prop.containsKey(RED_LASER_SOUND_PROP)) {
        setRedLaserSound(new File(prop.getProperty(RED_LASER_SOUND_PROP)));
    }

    if (prop.containsKey(USE_GREEN_LASER_SOUND_PROP)) {
        setUseGreenLaserSound(Boolean.parseBoolean(prop.getProperty(USE_GREEN_LASER_SOUND_PROP)));
    }

    if (prop.containsKey(GREEN_LASER_SOUND_PROP)) {
        setGreenLaserSound(new File(prop.getProperty(GREEN_LASER_SOUND_PROP)));
    }

    if (prop.containsKey(USE_VIRTUAL_MAGAZINE_PROP)) {
        setUseVirtualMagazine(Boolean.parseBoolean(prop.getProperty(USE_VIRTUAL_MAGAZINE_PROP)));
    }

    if (prop.containsKey(VIRTUAL_MAGAZINE_CAPACITY_PROP)) {
        setVirtualMagazineCapacity(Integer.parseInt(prop.getProperty(VIRTUAL_MAGAZINE_CAPACITY_PROP)));
    }

    if (prop.containsKey(USE_MALFUNCTIONS_PROP)) {
        setMalfunctions(Boolean.parseBoolean(prop.getProperty(USE_MALFUNCTIONS_PROP)));
    }

    if (prop.containsKey(MALFUNCTIONS_PROBABILITY_PROP)) {
        setMalfunctionsProbability(Float.parseFloat(prop.getProperty(MALFUNCTIONS_PROBABILITY_PROP)));
    }

    validateConfiguration();
}
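Each containsKey guard above keeps Integer.parseInt and friends from receiving the null that getProperty returns for an absent key, and leaves the corresponding setting untouched when the property is missing. Where overwriting with a fixed fallback is acceptable, a default value does the same job (a sketch with a hypothetical key name):

setDetectionRate(Integer.parseInt(prop.getProperty("shootoff.detectionrate", "100")));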

From source file:com.impetus.kundera.ejb.EntityManagerFactoryBuilder.java

/**
* Builds up EntityManagerFactory for a given persistenceUnitName and
* overriding properties.
* 
* @param persistenceUnitName
*            the persistence unit name
* @param override
*            the override
* @return the entity manager factory
*/
public EntityManagerFactory buildEntityManagerFactory(String persistenceUnitName,
        Map<Object, Object> override) {
    PersistenceMetadata metadata = getPersistenceMetadata(persistenceUnitName);

    Properties props = new Properties();
    // Override properties
    Properties metadataProperties = metadata.getProps();
    // Make sure it's empty or unmodifiable
    override = override == null ? Collections.EMPTY_MAP : Collections.unmodifiableMap(override);

    // Take all from Metadata and override with supplied map 
    for (Map.Entry<Object, Object> entry : metadataProperties.entrySet()) {
        Object key = entry.getKey();
        Object value = entry.getValue();

        if (override.containsKey(key)) {
            value = override.get(key);
        }
        props.put(key, value);
    }

    // Now take all the remaining ones from override
    for (Map.Entry<Object, Object> entry : override.entrySet()) {
        Object key = entry.getKey();
        Object value = entry.getValue();

        if (!props.containsKey(key)) {
            props.put(key, value);
        }
    }

    log.info("Building EntityManagerFactory for name: " + metadata.getName() + ", and Properties:" + props);
    return new EntityManagerFactoryImpl(metadata, props);
}
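Since both loops iterate entrySet views, which ignore any defaults table, the merge above is functionally equivalent to two putAll calls with the override applied last (a condensed sketch):

Properties props = new Properties();
props.putAll(metadataProperties); // base values from the persistence metadata
props.putAll(override);           // supplied overrides win

The explicit loops do make the precedence rule more visible, which may be why the author spelled them out.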

From source file:org.bultreebank.labpipe.converters.LineConverter.java

/**
 * @throws ArrayIndexOutOfBoundsException 
 * @deprecated since v1.0
 */
public static String toConllLine(String line, int id, Properties conllMap)
        throws ArrayIndexOutOfBoundsException {

    StringBuilder conll = new StringBuilder();

    line = line.replaceAll(" ", "\t");

    String[] columns = line.split("\t");

    String token = columns[0];
    String tag = columns[1];
    String lemma = null;
    if (columns.length > 2) {
        lemma = columns[2];
    }

    // ID number
    conll.append(id);
    conll.append("\t");

    // Token
    conll.append(token);
    conll.append("\t");

    // Lemma
    if (lemma != null) {
        conll.append(lemma);
    } else {
        conll.append("_");
    }
    conll.append("\t");

    // Short tag (BTB first letter)
    if (tag.contains("punct")) {
        conll.append("Punct");
    } else {
        conll.append(tag.charAt(0));
    }
    conll.append("\t");

    // Long tag
    if (tag.contains("punct") || tag.contains("Punct")) {
        conll.append("Punct");
    } else if (tag.length() > 2 && tag.charAt(1) != '-') {
        conll.append(tag.substring(0, 2));
    } else if (tag.length() > 2 && tag.charAt(1) == '-') {
        conll.append(tag.charAt(0));
    } else {
        conll.append(tag);
    }
    conll.append("\t");

    // Features (rest of the tag separated with pipe signs)
    if (conllMap.containsKey(tag)) { // using the map configuration

        conll.append(conllMap.getProperty(tag));

    } else { // tags not listed in the map -- failsafe

        if (tag.length() > 2 && !tag.contains("unct")) {

            conll.append(StringUtils.join(tag.substring(2).split(""), "|").substring(1));

        } else {

            conll.append("_");

        }

    }

    return conll.toString();

}

From source file:com.noelios.restlet.ext.jdbc.JdbcClientHelper.java

/**
 * Returns a JDBC connection.
 * 
 * @param uri
 *            The connection URI.
 * @param properties
 *            The connection properties.
 * @param usePooling
 *            Indicates if the connection pooling should be used.
 * @return The JDBC connection.
 * @throws SQLException
 */
protected Connection getConnection(String uri, Properties properties, boolean usePooling) throws SQLException {
    Connection result = null;

    if (usePooling) {
        for (final ConnectionSource c : this.connectionSources) {
            // Check if the connection URI is identical
            // and if the same number of properties is present
            if ((result == null) && c.getUri().equalsIgnoreCase(uri)
                    && (properties.size() == c.getProperties().size())) {
                // Check that the properties tables are equivalent
                boolean equal = true;
                for (final Object key : c.getProperties().keySet()) {
                    if (equal && properties.containsKey(key)) {
                        equal = equal && (properties.get(key).equals(c.getProperties().get(key)));
                    } else {
                        equal = false;
                    }
                }

                if (equal) {
                    result = c.getConnection();
                }
            }
        }

        if (result == null) {
            // No existing connection source found
            final ConnectionSource cs = new ConnectionSource(uri, properties);
            this.connectionSources.add(cs);
            result = cs.getConnection();
        }
    } else {
        result = DriverManager.getConnection(uri, properties);
    }

    return result;
}
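Because Properties inherits equals from Hashtable, the size check plus key-by-key containsKey comparison above amounts to a plain Map equality test; a condensed sketch:

if (result == null && c.getUri().equalsIgnoreCase(uri)
        && properties.equals(c.getProperties())) {
    result = c.getConnection();
}

As with containsKey, Map.equals only sees entries stored directly in each Properties object; defaults tables are ignored on both sides.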

From source file:org.apache.gobblin.metrics.GobblinMetrics.java

private void buildFileMetricReporter(Properties properties) {
    if (!Boolean.valueOf(properties.getProperty(ConfigurationKeys.METRICS_REPORTING_FILE_ENABLED_KEY,
            ConfigurationKeys.DEFAULT_METRICS_REPORTING_FILE_ENABLED))) {
        return;
    }
    LOGGER.info("Reporting metrics to log files");

    if (!properties.containsKey(ConfigurationKeys.METRICS_LOG_DIR_KEY)) {
        LOGGER.error("Not reporting metrics to log files because " + ConfigurationKeys.METRICS_LOG_DIR_KEY
                + " is undefined");
        return;
    }

    try {
        String fsUri = properties.getProperty(ConfigurationKeys.FS_URI_KEY, ConfigurationKeys.LOCAL_FS_URI);
        FileSystem fs = FileSystem.get(URI.create(fsUri), new Configuration());

        // Each job gets its own metric log subdirectory
        Path metricsLogDir = new Path(properties.getProperty(ConfigurationKeys.METRICS_LOG_DIR_KEY),
                this.getName());
        if (!fs.exists(metricsLogDir) && !fs.mkdirs(metricsLogDir)) {
            LOGGER.error("Failed to create metric log directory for metrics " + this.getName());
            return;
        }

        // Add a suffix to file name if specified in properties.
        String metricsFileSuffix = properties.getProperty(ConfigurationKeys.METRICS_FILE_SUFFIX,
                ConfigurationKeys.DEFAULT_METRICS_FILE_SUFFIX);
        if (!Strings.isNullOrEmpty(metricsFileSuffix) && !metricsFileSuffix.startsWith(".")) {
            metricsFileSuffix = "." + metricsFileSuffix;
        }

        // Each job run gets its own metric log file
        Path metricLogFile = new Path(metricsLogDir, this.id + metricsFileSuffix + ".metrics.log");
        boolean append = false;
        // Append to the metric file if it already exists
        if (fs.exists(metricLogFile)) {
            LOGGER.info(String.format("Metric log file %s already exists, appending to it", metricLogFile));
            append = true;
        }

        OutputStream output = append ? fs.append(metricLogFile) : fs.create(metricLogFile, true);
        // Add metrics reporter
        OutputStreamReporter.Factory.newBuilder().outputTo(output).build(properties);
        // Set up events reporter at the same time!!
        this.codahaleScheduledReporters.add(this.codahaleReportersCloser.register(
                OutputStreamEventReporter.forContext(RootMetricContext.get()).outputTo(output).build()));

        LOGGER.info("Will start reporting metrics to directory " + metricsLogDir);
    } catch (IOException ioe) {
        LOGGER.error("Failed to build file metric reporter for job " + this.id, ioe);
    }
}

From source file:org.apache.hadoop.hbase.util.LoadTestTool.java

private void addAuthInfoToConf(Properties authConfig, Configuration conf, String owner, String userList)
        throws IOException {
    List<String> users = new ArrayList<>(Arrays.asList(userList.split(",")));
    users.add(owner); // Arrays.asList alone yields a fixed-size list that rejects add()
    for (String user : users) {
        String keyTabFileConfKey = "hbase." + user + ".keytab.file";
        String principalConfKey = "hbase." + user + ".kerberos.principal";
        if (!authConfig.containsKey(keyTabFileConfKey) || !authConfig.containsKey(principalConfKey)) {
            throw new IOException("Authentication configs missing for user : " + user);
        }
    }
    for (String key : authConfig.stringPropertyNames()) {
        conf.set(key, authConfig.getProperty(key));
    }
    LOG.debug("Added authentication properties to config successfully.");
}

From source file:org.apache.solr.update.InvenioKeepRecidUpdated.java

@SuppressWarnings("unchecked")
protected Map<String, Object> retrieveRecids(Properties prop, SolrQueryRequest req, SolrQueryResponse rsp) {

    HashMap<String, Object> retData = new HashMap<String, Object>();

    SolrParams params = req.getParams();

    Integer lastRecid = null;
    String lastUpdate = null;
    if (prop.containsKey(LAST_RECID)) {
        lastRecid = Integer.valueOf(prop.getProperty(LAST_RECID));
    }
    if (prop.containsKey(LAST_UPDATE)) {
        lastUpdate = prop.getProperty(LAST_UPDATE);
    }

    Map<String, int[]> dictData;
    // we'll generate empty records (good just to have a mapping between invenio
    // and lucene docids; necessary for search operations)
    if (params.getBool("generate", false)) {
        Integer max_recid = params.getInt(PARAM_MAX_RECID, 0);
        if (max_recid == 0 || max_recid < lastRecid) {
            throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "The max_recid parameter missing!");
        }

        dictData = new HashMap<String, int[]>();
        int[] a = new int[max_recid - lastRecid];
        for (int i = 0, ii = lastRecid + 1; ii < max_recid + 1; i++, ii++) {
            a[i] = ii;
        }
        dictData.put("ADDED", a);
        retData.put(LAST_UPDATE, null);
        retData.put(LAST_RECID, max_recid);

    } else {
        // get recids from Invenio {'ADDED': int, 'UPDATED': int, 'DELETED':
        // int }

        PythonMessage message = MontySolrVM.INSTANCE.createMessage(pythonFunctionName)
                .setSender(this.getClass().getSimpleName())
                .setParam("max_records", params.getInt(PARAM_BATCHSIZE)).setParam("request", req)
                .setParam("response", rsp);

        if (lastRecid != null)
            message.setParam(LAST_RECID, lastRecid);
        if (lastUpdate != null)
            message.setParam(LAST_UPDATE, lastUpdate);

        if (lastRecid == null && lastUpdate == null) {
            message.setParam(LAST_UPDATE, getLastIndexUpdate(req));
        }

        log.info("Retrieving changed recs: max_records=" + params.getInt(PARAM_BATCHSIZE) + " last_recid="
                + lastRecid + " last_update=" + lastUpdate);

        MontySolrVM.INSTANCE.sendMessage(message);

        Object results = message.getResults();
        if (results == null) {
            rsp.add("message", "No new/updated/deleted records inside Invenio.");
            rsp.add("importStatus", "idle");
            return null;
        }
        dictData = (HashMap<String, int[]>) results;
        retData.put(LAST_UPDATE, (String) message.getParam(LAST_UPDATE));
        retData.put(LAST_RECID, (Integer) message.getParam(LAST_RECID));

        log.info("Retrieved: last_update=" + retData.get(LAST_UPDATE) + " last_recid="
                + retData.get(LAST_RECID));
    }
    retData.put("dictData", dictData);
    return retData;
}