Example usage for java.util Properties keySet

List of usage examples for java.util Properties keySet

Introduction

On this page you can find example usage of java.util Properties keySet.

Prototype

@Override
public Set<Object> keySet()
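
A minimal sketch (not taken from any of the projects below) of the basic iteration pattern. keySet() is inherited from Hashtable and returns Set<Object>, so keys normally have to be cast to String; it also omits keys supplied only through a defaults Properties table, which stringPropertyNames() includes. The helper name and property values are illustrative.

private static void printAll(Properties props) {
    // keySet() returns Set<Object>; cast each key before use
    for (Object k : props.keySet()) {
        String key = (String) k;
        System.out.println(key + "=" + props.getProperty(key));
    }

    // Unlike keySet(), stringPropertyNames() also includes keys that
    // come only from a defaults Properties table
    for (String name : props.stringPropertyNames()) {
        System.out.println(name + "=" + props.getProperty(name));
    }
}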

Usage

From source file:net.jetrix.config.ServerConfig.java

/**
 * Write the configuration of the specified filter.
 */
private void saveFilter(FilterConfig filter, PrintWriter out, String indent) {
    Properties props = filter.getProperties();
    if (props == null || props.isEmpty()) {
        if (filter.getName() != null) {
            out.println(indent + "<filter name=\"" + filter.getName() + "\"/>");
        } else {
            out.println(indent + "<filter class=\"" + filter.getClassname() + "\"/>");
        }
    } else {
        if (filter.getName() != null) {
            out.println(indent + "<filter name=\"" + filter.getName() + "\">");
        } else {
            out.println(indent + "<filter class=\"" + filter.getClassname() + "\">");
        }

        for (Object name : props.keySet()) {
            out.println(indent + "  <param name=\"" + name + "\" value=\"" + props.get(name) + "\"/>");
        }
        out.println(indent + "</filter>");
    }
}

From source file:org.apache.zeppelin.spark.SparkInterpreter.java

/**
 * Spark 2.x
 * Create SparkSession
 */
public Object createSparkSession() {
    logger.info("------ Create new SparkContext {} -------", getProperty("master"));
    String execUri = System.getenv("SPARK_EXECUTOR_URI");
    conf.setAppName(getProperty("spark.app.name"));

    if (outputDir != null) {
        conf.set("spark.repl.class.outputDir", outputDir.getAbsolutePath());
    }

    if (execUri != null) {
        conf.set("spark.executor.uri", execUri);
    }

    if (System.getenv("SPARK_HOME") != null) {
        conf.setSparkHome(System.getenv("SPARK_HOME"));
    }

    conf.set("spark.scheduler.mode", "FAIR");
    conf.setMaster(getProperty("master"));
    if (isYarnMode()) {
        conf.set("master", "yarn");
        conf.set("spark.submit.deployMode", "client");
    }

    Properties intpProperty = getProperty();

    for (Object k : intpProperty.keySet()) {
        String key = (String) k;
        String val = toString(intpProperty.get(key));
        if (key.startsWith("spark.") && !val.trim().isEmpty()) {
            logger.debug(String.format("SparkConf: key = [%s], value = [%s]", key, val));
            conf.set(key, val);
        }
    }

    setupConfForPySpark(conf);
    setupConfForSparkR(conf);
    Class SparkSession = Utils.findClass("org.apache.spark.sql.SparkSession");
    Object builder = Utils.invokeStaticMethod(SparkSession, "builder");
    Utils.invokeMethod(builder, "config", new Class[] { SparkConf.class }, new Object[] { conf });

    if (useHiveContext()) {
        if (hiveClassesArePresent()) {
            Utils.invokeMethod(builder, "enableHiveSupport");
            sparkSession = Utils.invokeMethod(builder, "getOrCreate");
            logger.info("Created Spark session with Hive support");
        } else {
            Utils.invokeMethod(builder, "config", new Class[] { String.class, String.class },
                    new Object[] { "spark.sql.catalogImplementation", "in-memory" });
            sparkSession = Utils.invokeMethod(builder, "getOrCreate");
            logger.info("Created Spark session with Hive support use in-memory catalogImplementation");
        }
    } else {
        sparkSession = Utils.invokeMethod(builder, "getOrCreate");
        logger.info("Created Spark session");
    }

    return sparkSession;
}

From source file:org.apache.hadoop.tools.rumen.TestRumenJobTraces.java

/**
 * Test if the {@link JobConfigurationParser} can correctly extract out 
 * key-value pairs from the job configuration.
 */
@Test
public void testJobConfigurationParsing() throws Exception {
    final FileSystem lfs = FileSystem.getLocal(new Configuration());

    final Path rootTempDir = new Path(System.getProperty("test.build.data", "/tmp")).makeQualified(lfs.getUri(),
            lfs.getWorkingDirectory());

    final Path tempDir = new Path(rootTempDir, "TestJobConfigurationParser");
    lfs.delete(tempDir, true);

    // Add some configuration parameters to the conf
    JobConf jConf = new JobConf(false);
    String key = "test.data";
    String value = "hello world";
    jConf.set(key, value);

    // create the job conf file
    Path jobConfPath = new Path(tempDir.toString(), "job.xml");
    lfs.delete(jobConfPath, false);
    DataOutputStream jobConfStream = lfs.create(jobConfPath);
    jConf.writeXml(jobConfStream);
    jobConfStream.close();

    // now read the job conf file using the job configuration parser
    Properties properties = JobConfigurationParser.parse(lfs.open(jobConfPath));

    // check if the required parameter is loaded
    assertEquals(
            "Total number of extracted properties (" + properties.size()
                    + ") doesn't match the expected size of 1 [" + "JobConfigurationParser]",
            1, properties.size());
    // check if the key is present in the extracted configuration
    assertTrue("Key " + key + " is missing in the configuration extracted " + "[JobConfigurationParser]",
            properties.keySet().contains(key));
    // check if the desired property has the correct value
    assertEquals("JobConfigurationParser couldn't recover the parameters" + " correctly", value,
            properties.get(key));

    // Test ZombieJob
    LoggedJob job = new LoggedJob();
    job.setJobProperties(properties);

    ZombieJob zjob = new ZombieJob(job, null);
    Configuration zconf = zjob.getJobConf();
    // check if the required parameter is loaded
    assertEquals("ZombieJob couldn't recover the parameters correctly", value, zconf.get(key));
}

From source file:org.apache.pig.tools.grunt.GruntParser.java

@Override
protected void processSet() throws IOException, ParseException {
    filter.validate(PigCommandFilter.Command.SET);
    Properties jobProps = mPigServer.getPigContext().getProperties();
    Properties sysProps = System.getProperties();

    List<String> jobPropsList = Lists.newArrayList();
    List<String> sysPropsList = Lists.newArrayList();

    for (Object key : jobProps.keySet()) {
        String propStr = key + "=" + jobProps.getProperty((String) key);
        if (sysProps.containsKey(key)) {
            sysPropsList.add("system: " + propStr);
        } else {
            jobPropsList.add(propStr);
        }
    }
    Collections.sort(jobPropsList);
    Collections.sort(sysPropsList);
    jobPropsList.addAll(sysPropsList);
    for (String prop : jobPropsList) {
        System.out.println(prop);
    }
}

From source file:org.apache.sqoop.mapreduce.hcat.SqoopHCatUtilities.java

public void configureHCat(final SqoopOptions opts, final Job job, final ConnManager connMgr,
        final String dbTable, final Configuration config) throws IOException {
    if (configured) {
        LOG.info("Ignoring configuration request for HCatalog info");
        return;
    }
    options = opts;
    checkHomeDirs(opts);
    connManager = connMgr;
    dbTableName = dbTable;
    configuration = config;
    hCatJob = job;
    hCatDatabaseName = options.getHCatDatabaseName() != null ? options.getHCatDatabaseName() : DEFHCATDB;
    hCatDatabaseName = hCatDatabaseName.toLowerCase();

    String optHCTabName = options.getHCatTableName();
    hCatTableName = optHCTabName.toLowerCase();

    if (!hCatTableName.equals(optHCTabName)) {
        LOG.warn("Provided HCatalog table name " + optHCTabName + " will be mapped to  " + hCatTableName);
    }

    StringBuilder sb = new StringBuilder();
    sb.append(hCatDatabaseName);
    sb.append('.').append(hCatTableName);
    hCatQualifiedTableName = sb.toString();

    String principalID = System.getProperty(HCatConstants.HCAT_METASTORE_PRINCIPAL);
    if (principalID != null) {
        configuration.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
    }
    hCatStaticPartitionKeys = new ArrayList<String>();
    hCatStaticPartitionValues = new ArrayList<String>();
    String partKeysString = options.getHCatalogPartitionKeys();
    String partKeysVals = options.getHCatalogPartitionValues();
    // Already validated
    if (partKeysString != null) {
        String[] keys = partKeysString.split(",");
        for (int i = 0; i < keys.length; ++i) {
            String k = keys[i].trim();
            hCatStaticPartitionKeys.add(k);
        }
        String[] vals = partKeysVals.split(",");
        for (int i = 0; i < vals.length; ++i) {
            String v = vals[i].trim();
            hCatStaticPartitionValues.add(v);
        }
    } else {
        partKeysString = options.getHivePartitionKey();
        if (partKeysString != null) {
            hCatStaticPartitionKeys.add(partKeysString);
        }
        partKeysVals = options.getHivePartitionValue();
        hCatStaticPartitionValues.add(partKeysVals);

    }
    Properties userMapping = options.getMapColumnHive();
    userHiveMapping = new LCKeyMap<String>();
    for (Object o : userMapping.keySet()) {
        String v = (String) userMapping.get(o);
        userHiveMapping.put((String) o, v);
    }
    // Get the partition key filter if needed
    Map<String, String> filterMap = getHCatSPFilterMap();
    String filterStr = getHCatSPFilterStr();
    initDBColumnInfo();
    if (options.doCreateHCatalogTable()) {
        LOG.info("Creating HCatalog table " + hCatQualifiedTableName + " for import");
        createHCatTable(false);
    } else if (options.doDropAndCreateHCatalogTable()) {
        LOG.info("Dropping and Creating HCatalog table " + hCatQualifiedTableName + " for import");
        createHCatTable(true);
    }
    // For serializing the schema to conf
    HCatInputFormat hif = HCatInputFormat.setInput(hCatJob, hCatDatabaseName, hCatTableName);
    // For serializing the schema to conf
    if (filterStr != null) {
        LOG.info("Setting hCatInputFormat filter to " + filterStr);
        hif.setFilter(filterStr);
    }

    hCatFullTableSchema = HCatInputFormat.getTableSchema(configuration);
    hCatFullTableSchemaFieldNames = hCatFullTableSchema.getFieldNames();

    LOG.info("HCatalog full table schema fields = "
            + Arrays.toString(hCatFullTableSchema.getFieldNames().toArray()));

    if (filterMap != null) {
        LOG.info("Setting hCatOutputFormat filter to " + filterStr);
    }

    HCatOutputFormat.setOutput(hCatJob, OutputJobInfo.create(hCatDatabaseName, hCatTableName, filterMap));
    hCatOutputSchema = HCatOutputFormat.getTableSchema(configuration);
    List<HCatFieldSchema> hCatPartitionSchemaFields = new ArrayList<HCatFieldSchema>();
    int totalFieldsCount = hCatFullTableSchema.size();
    int dataFieldsCount = hCatOutputSchema.size();
    if (totalFieldsCount > dataFieldsCount) {
        for (int i = dataFieldsCount; i < totalFieldsCount; ++i) {
            hCatPartitionSchemaFields.add(hCatFullTableSchema.get(i));
        }
    }

    hCatPartitionSchema = new HCatSchema(hCatPartitionSchemaFields);
    for (HCatFieldSchema hfs : hCatPartitionSchemaFields) {
        if (hfs.getType() != HCatFieldSchema.Type.STRING) {
            throw new IOException("The table provided " + getQualifiedHCatTableName()
                    + " uses unsupported  partitioning key type  for column " + hfs.getName() + " : "
                    + hfs.getTypeString() + ".  Only string "
                    + "fields are allowed in partition columns in HCatalog");
        }

    }
    LOG.info("HCatalog table partitioning key fields = "
            + Arrays.toString(hCatPartitionSchema.getFieldNames().toArray()));

    List<HCatFieldSchema> outputFieldList = new ArrayList<HCatFieldSchema>();
    for (String col : dbColumnNames) {
        try {
            HCatFieldSchema hfs = hCatFullTableSchema.get(col);
            if (hfs == null) {
                throw new IOException("Database column " + col + " not found in " + " hcatalog table.");
            }
        } catch (Exception e) {
            throw new IOException(
                    "Caught Exception checking database column " + col + " in " + " hcatalog table.", e);
        }
        boolean skip = false;
        if (hCatStaticPartitionKeys != null) {
            for (String key : hCatStaticPartitionKeys) {
                if (col.equals(key)) {
                    skip = true;
                    break;
                }
            }
        }
        if (skip) {
            continue;
        }
        outputFieldList.add(hCatFullTableSchema.get(col));
    }

    projectedSchema = new HCatSchema(outputFieldList);

    LOG.info(
            "HCatalog projected schema fields = " + Arrays.toString(projectedSchema.getFieldNames().toArray()));

    validateStaticPartitionKey();
    validateHCatTableFieldTypes();

    HCatOutputFormat.setSchema(configuration, hCatFullTableSchema);

    addJars(hCatJob, options);
    config.setBoolean(DEBUG_HCAT_IMPORT_MAPPER_PROP, Boolean.getBoolean(DEBUG_HCAT_IMPORT_MAPPER_PROP));
    config.setBoolean(DEBUG_HCAT_EXPORT_MAPPER_PROP, Boolean.getBoolean(DEBUG_HCAT_EXPORT_MAPPER_PROP));
    configured = true;
}

From source file:org.apache.zeppelin.spark.SparkInterpreter.java

public SparkContext createSparkContext_1() {
    logger.info("------ Create new SparkContext {} -------", getProperty("master"));

    String execUri = System.getenv("SPARK_EXECUTOR_URI");
    String[] jars = null;

    if (Utils.isScala2_10()) {
        jars = (String[]) Utils.invokeStaticMethod(SparkILoop.class, "getAddedJars");
    } else {
        jars = (String[]) Utils.invokeStaticMethod(Utils.findClass("org.apache.spark.repl.Main"),
                "getAddedJars");
    }

    String classServerUri = null;
    String replClassOutputDirectory = null;

    try { // in case of spark 1.1x, spark 1.2x
        Method classServer = intp.getClass().getMethod("classServer");
        Object httpServer = classServer.invoke(intp);
        classServerUri = (String) Utils.invokeMethod(httpServer, "uri");
    } catch (NoSuchMethodException | SecurityException | IllegalAccessException | IllegalArgumentException
            | InvocationTargetException e) {
        // continue
    }

    if (classServerUri == null) {
        try { // for spark 1.3x
            Method classServer = intp.getClass().getMethod("classServerUri");
            classServerUri = (String) classServer.invoke(intp);
        } catch (NoSuchMethodException | SecurityException | IllegalAccessException | IllegalArgumentException
                | InvocationTargetException e) {
            // continue instead of: throw new InterpreterException(e);
            // Newer Spark versions (like the patched CDH5.7.0 one) don't contain this method
            logger.warn(
                    String.format("Spark method classServerUri not available due to: [%s]", e.getMessage()));
        }
    }

    if (classServerUri == null) {
        try { // for RcpEnv
            Method getClassOutputDirectory = intp.getClass().getMethod("getClassOutputDirectory");
            File classOutputDirectory = (File) getClassOutputDirectory.invoke(intp);
            replClassOutputDirectory = classOutputDirectory.getAbsolutePath();
        } catch (NoSuchMethodException | SecurityException | IllegalAccessException | IllegalArgumentException
                | InvocationTargetException e) {
            // continue
        }
    }

    if (Utils.isScala2_11()) {
        classServer = createHttpServer(outputDir);
        Utils.invokeMethod(classServer, "start");
        classServerUri = (String) Utils.invokeMethod(classServer, "uri");
    }

    conf.setMaster(getProperty("master")).setAppName(getProperty("spark.app.name"));

    if (classServerUri != null) {
        conf.set("spark.repl.class.uri", classServerUri);
    }

    if (replClassOutputDirectory != null) {
        conf.set("spark.repl.class.outputDir", replClassOutputDirectory);
    }

    if (jars.length > 0) {
        conf.setJars(jars);
    }

    if (execUri != null) {
        conf.set("spark.executor.uri", execUri);
    }
    if (System.getenv("SPARK_HOME") != null) {
        conf.setSparkHome(System.getenv("SPARK_HOME"));
    }
    conf.set("spark.scheduler.mode", "FAIR");

    Properties intpProperty = getProperty();

    for (Object k : intpProperty.keySet()) {
        String key = (String) k;
        String val = toString(intpProperty.get(key));
        if (key.startsWith("spark.") && !val.trim().isEmpty()) {
            logger.debug(String.format("SparkConf: key = [%s], value = [%s]", key, val));
            conf.set(key, val);
        }
    }
    setupConfForPySpark(conf);
    setupConfForSparkR(conf);
    SparkContext sparkContext = new SparkContext(conf);
    return sparkContext;
}

From source file:org.apache.synapse.securevault.secret.repository.FileBaseSecretRepository.java

/**
 * Initializes the repository based on provided properties
 *
 * @param properties Configuration properties
 * @param id         Identifier to identify properties related to the corresponding repository
 */
public void init(Properties properties, String id) {
    StringBuffer sb = new StringBuffer();
    sb.append(id);
    sb.append(DOT);
    sb.append(LOCATION);

    String filePath = MiscellaneousUtil.getProperty(properties, sb.toString(), DEFAULT_CONF_LOCATION);

    Properties cipherProperties = MiscellaneousUtil.loadProperties(filePath);
    if (cipherProperties.isEmpty()) {
        if (log.isDebugEnabled()) {
            log.debug("Cipher texts cannot be loaded form : " + filePath);
        }
        return;
    }

    StringBuffer sbTwo = new StringBuffer();
    sbTwo.append(id);
    sbTwo.append(DOT);
    sbTwo.append(ALGORITHM);
    //Load algorithm
    String algorithm = MiscellaneousUtil.getProperty(properties, sbTwo.toString(), DEFAULT_ALGORITHM);
    StringBuffer buffer = new StringBuffer();
    buffer.append(DOT);
    buffer.append(KEY_STORE);

    //Load keyStore
    String keyStore = MiscellaneousUtil.getProperty(properties, buffer.toString(), null);
    KeyStoreWrapper keyStoreWrapper;
    if (TRUSTED.equals(keyStore)) {
        keyStoreWrapper = trust;

    } else {
        keyStoreWrapper = identity;
    }

    //Creates a cipherInformation

    CipherInformation cipherInformation = new CipherInformation();
    cipherInformation.setAlgorithm(algorithm);
    cipherInformation.setCipherOperationMode(CipherOperationMode.DECRYPT);
    cipherInformation.setInType(EncodingType.BASE64); //TODO
    DecryptionProvider baseCipher = CipherFactory.createCipher(cipherInformation, keyStoreWrapper);

    for (Object alias : cipherProperties.keySet()) {
        //decrypt the encrypted text 
        String key = String.valueOf(alias);
        String encryptedText = cipherProperties.getProperty(key);
        encryptedData.put(key, encryptedText);
        if (encryptedText == null || "".equals(encryptedText.trim())) {
            if (log.isDebugEnabled()) {
                log.debug("There is no secret for the alias : " + alias);
            }
            continue;
        }

        String decryptedText = new String(baseCipher.decrypt(encryptedText.trim().getBytes()));
        secrets.put(key, decryptedText);
    }
    initialize = true;
}

From source file:org.apache.maven.archetype.creator.FilesetArchetypeCreator.java

private String getReversedPlainContent(String content, Properties properties) {
    String result = content;

    for (Iterator<?> propertyIterator = properties.keySet().iterator(); propertyIterator.hasNext();) {
        String propertyKey = (String) propertyIterator.next();

        result = StringUtils.replace(result, properties.getProperty(propertyKey), "${" + propertyKey + "}");
    }
    return result;
}

From source file:org.apache.maven.archetype.creator.FilesetArchetypeCreator.java

private String getReversedFilename(String filename, Properties properties) {
    String result = filename;

    for (Iterator<?> propertyIterator = properties.keySet().iterator(); propertyIterator.hasNext();) {
        String propertyKey = (String) propertyIterator.next();

        result = StringUtils.replace(result, properties.getProperty(propertyKey), "__" + propertyKey + "__");
    }

    return result;
}

From source file:com.liferay.ide.project.core.tests.ProjectCoreBase.java

private void persistAppServerProperties() throws FileNotFoundException, IOException, ConfigurationException {
    Properties initProps = new Properties();
    initProps.put("app.server.type", "tomcat");
    initProps.put("app.server.tomcat.dir", getLiferayRuntimeDir().toPortableString());
    initProps.put("app.server.tomcat.deploy.dir", getLiferayRuntimeDir().append("webapps").toPortableString());
    initProps.put("app.server.tomcat.lib.global.dir",
            getLiferayRuntimeDir().append("lib/ext").toPortableString());
    initProps.put("app.server.parent.dir", getLiferayRuntimeDir().removeLastSegments(1).toPortableString());
    initProps.put("app.server.tomcat.portal.dir",
            getLiferayRuntimeDir().append("webapps/ROOT").toPortableString());

    IPath loc = getLiferayPluginsSdkDir();
    String userName = System.getProperty("user.name"); //$NON-NLS-1$
    File userBuildFile = loc.append("build." + userName + ".properties").toFile(); //$NON-NLS-1$ //$NON-NLS-2$

    try (FileOutputStream fileOutput = new FileOutputStream(userBuildFile)) {
        if (userBuildFile.exists()) {
            PropertiesConfiguration propsConfig = new PropertiesConfiguration(userBuildFile);
            for (Object key : initProps.keySet()) {
                propsConfig.setProperty((String) key, initProps.get(key));
            }
            propsConfig.setHeader("");
            propsConfig.save(fileOutput);

        } else {
            Properties props = new Properties();
            props.putAll(initProps);
            props.store(fileOutput, StringPool.EMPTY);
        }
    }
}