Example usage for java.util.Properties.remove

Introduction

On this page you can find usage examples for java.util.Properties.remove, collected from open source projects.

Prototype

@Override
public synchronized Object remove(Object key)
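
Properties inherits remove from Hashtable: it deletes the entry for the given key and returns the previous value, or null if no mapping existed. A minimal sketch of that behavior (the class and key names below are illustrative only):

import java.util.Properties;

public class RemoveDemo {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.setProperty("db.user", "admin");
        props.setProperty("db.password", "secret");

        // remove returns the value previously associated with the key
        Object removed = props.remove("db.password");
        System.out.println(removed); // secret

        // removing an absent key returns null
        System.out.println(props.remove("no.such.key")); // null
    }
}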

Usage

From source file: org.echocat.jomon.resources.FileResource.java

@Override
public void removeProperty(@Nonnull String name) throws IOException {
    synchronized (this) {
        final Properties properties = getPropertiesInternal();
        properties.remove(name);
        saveProperties(properties);
    }
}
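
getPropertiesInternal and saveProperties are not shown in this snippet. A minimal sketch of what such load/store helpers could look like, assuming the properties live in a plain .properties file (the class and member names are assumptions, not the echocat implementation):

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Properties;

public class PropertiesStore {
    private final File file;

    public PropertiesStore(File file) {
        this.file = file;
    }

    // Load the backing file, or return an empty Properties if it does not exist yet
    Properties load() throws IOException {
        Properties props = new Properties();
        if (file.exists()) {
            try (InputStream in = new FileInputStream(file)) {
                props.load(in);
            }
        }
        return props;
    }

    // Persist the current state back to the file
    void save(Properties props) throws IOException {
        try (OutputStream out = new FileOutputStream(file)) {
            props.store(out, null);
        }
    }
}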

From source file: org.ops4j.pax.jdbc.pool.dbcp2.impl.DbcpPooledDataSourceFactory.java

protected Properties getNonPoolProps(Properties props) {
    Properties dsProps = new Properties();
    for (Object keyO : props.keySet()) {
        String key = (String) keyO;
        if (!key.startsWith(POOL_PREFIX) && !key.startsWith(FACTORY_PREFIX)) {
            dsProps.put(key, props.get(key));
        }
    }
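    // Drop the datasource name so it is not passed along with the remaining properties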
    dsProps.remove(DataSourceFactory.JDBC_DATASOURCE_NAME);
    return dsProps;
}

From source file: org.wso2.carbon.identity.notification.mgt.json.bean.JsonSubscription.java

/**
 * Sets authentication information on the given JsonEndpointInfo object
 *
 * @param prefix             json.subscribe.eventName.endpoint.endpointName
 * @param endpointProperties Set of properties which has keys with above prefix
 * @param endpointInfo       JsonEndpointInfo object on which the authentication
 *                           params should be set
 * @return The JsonEndpointInfo object with the relevant authentication
 * information set, if authentication is required
 * @throws NotificationManagementException if credentials are missing for an
 * endpoint that requires authentication
 */
public JsonEndpointInfo setAuthenticationInfo(String prefix, Properties endpointProperties,
        JsonEndpointInfo endpointInfo) throws NotificationManagementException {

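    // Note: remove() (rather than getProperty()) consumes each entry, leaving only unprocessed properties behind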
    String authenticationRequired = (String) endpointProperties
            .remove(prefix + "." + JsonModuleConstants.Config.AUTH_REQUIRED_QNAME);

    // If authentication required
    if (Boolean.parseBoolean(authenticationRequired)) {
        endpointInfo.setAuthenticationRequired(true);
        String username = (String) endpointProperties
                .remove(prefix + "." + JsonModuleConstants.Config.USERNAME_QNAME);
        String password = (String) endpointProperties
                .remove(prefix + "." + JsonModuleConstants.Config.PASSWORD_QNAME);

        if (StringUtils.isEmpty(username) || StringUtils.isEmpty(password)) {
            throw new NotificationManagementException(
                    "No authentication information found for authentication required endpoint");
        }
        endpointInfo.setUsername(username);
        endpointInfo.setPassword(password.trim().toCharArray());
    } else {
        endpointInfo.setAuthenticationRequired(false);
    }
    return endpointInfo;
}

From source file: org.wso2.carbon.identity.notification.mgt.email.bean.EmailSubscription.java

/**
 * Builds an EmailEndpointInfo from the properties relevant to an endpoint
 *
 * @param prefix             Prefix of the endpoint, e.g. email.subscription.subscriptionName.endpoint
 * @param endpointProperties Properties which are specific to the endpoint, e.g. email.subscription
 *                           .subscriptionName.endpoint.property
 * @return EmailEndpointInfo object which consists of email endpoint information
 * @throws NotificationManagementException
 */
private EmailEndpointInfo buildEndpoint(String prefix, Properties endpointProperties)
        throws NotificationManagementException {

    EmailEndpointInfo emailEndpointInfo = new EmailEndpointInfo();
    String emailAddress = (String) endpointProperties
            .remove(prefix + "." + EmailModuleConstants.Config.ADDRESS_QNAME);
    // If no configured email address is found, check in event properties for an email address.
    if (StringUtils.isEmpty(emailAddress)) {
        emailAddress = (String) endpointProperties.remove(EmailModuleConstants.Config.ADDRESS_QNAME);
    }
    // If a configured address was found, register the endpoint with it; otherwise expect the address to arrive with the event
    if (StringUtils.isNotEmpty(emailAddress)) {
        emailAddress = emailAddress.trim();
        if (log.isDebugEnabled()) {
            log.debug("Registering email endpoint with address " + emailAddress);
        }
        emailEndpointInfo.setEmailAddress(emailAddress);
    } else {
        if (log.isDebugEnabled()) {
            log.debug("Registering email endpoint with prefix " + prefix
                    + " without address. Expecting the email address at event time");
        }
    }

    String template = (String) endpointProperties
            .remove(prefix + "." + EmailModuleConstants.Config.MAIL_TEMPLATE_QNAME);
    if (template != null) {
        //Set the email template configured for endpoint.
        emailEndpointInfo.setTemplate(NotificationManagementUtils.readMessageTemplate(template.trim()));
    } else {
        // If template is null, event level template will be used
        if (log.isDebugEnabled()) {
            log.debug("No template configured for endpoint " + emailAddress);
        }
    }
    // Set endpoint specific properties
    emailEndpointInfo.setProperties(
            NotificationManagementUtils.buildSingleWordKeyProperties(prefix, endpointProperties));
    return emailEndpointInfo;
}

From source file: org.pentaho.metadata.util.LocalizationUtil.java

/**
 * Returns a list of messages describing the missing and extra keys found when
 * comparing an imported properties bundle against the domain's exported properties
 *
 * @param domain
 *          the domain object to analyze
 * @param props
 *          the imported properties to analyze
 * @param locale
 *          the locale to analyze
 * @return messages
 */
public List<String> analyzeImport(Domain domain, Properties props, String locale) {
    ArrayList<String> messages = new ArrayList<String>();

    // determine missing strings
    Properties origProps = exportLocalizedProperties(domain, locale);
    Properties cloneOrig = (Properties) origProps.clone();
    for (Object key : origProps.keySet()) {
        if (props.containsKey(key)) {
            cloneOrig.remove(key);
        }
    }

    // anything left in cloneOrig was missing
    for (Object key : cloneOrig.keySet()) {
        messages.add(Messages.getString("LocalizationUtil.MISSING_KEY_MESSAGE", key));
    }

    // determine extra strings
    Properties cloneProps = (Properties) props.clone();

    for (Object key : props.keySet()) {
        if (origProps.containsKey(key)) {
            cloneProps.remove(key);
        }
    }

    // anything left in cloneProps was extra
    for (Object key : cloneProps.keySet()) {
        messages.add(Messages.getString("LocalizationUtil.EXTRA_KEY_MESSAGE", key));
    }

    return messages;
}
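
The clone-and-remove diff above can also be expressed with set operations on copies of the key sets. A hedged alternative sketch (PropertiesDiff is a hypothetical helper, not part of the Pentaho code):

import java.util.HashSet;
import java.util.Properties;
import java.util.Set;

public class PropertiesDiff {
    // Keys present in orig but absent from the import (the "missing" keys)
    static Set<Object> missingKeys(Properties orig, Properties imported) {
        Set<Object> missing = new HashSet<Object>(orig.keySet());
        missing.removeAll(imported.keySet());
        return missing;
    }

    // Keys present in the import but absent from orig (the "extra" keys)
    static Set<Object> extraKeys(Properties orig, Properties imported) {
        return missingKeys(imported, orig);
    }
}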

From source file: org.jumpmind.symmetric.util.SnapshotUtil.java

public static File createSnapshot(ISymmetricEngine engine) {

    String dirName = engine.getEngineName().replaceAll(" ", "-") + "-"
            + new SimpleDateFormat("yyyyMMddHHmmss").format(new Date());

    IParameterService parameterService = engine.getParameterService();
    File tmpDir = new File(parameterService.getTempDirectory(), dirName);
    tmpDir.mkdirs();

    File logDir = null;

    String parameterizedLogDir = parameterService.getString("server.log.dir");
    if (isNotBlank(parameterizedLogDir)) {
        logDir = new File(parameterizedLogDir);
    }

    if (logDir != null && logDir.exists()) {
        log.info("Using server.log.dir setting as the location of the log files");
    } else {
        logDir = new File("logs");

        if (!logDir.exists()) {
            File file = findSymmetricLogFile();
            if (file != null) {
                logDir = file.getParentFile();
            }
        }

        if (!logDir.exists()) {
            logDir = new File("../logs");
        }

        if (!logDir.exists()) {
            logDir = new File("target");
        }

        if (logDir.exists()) {
            File[] files = logDir.listFiles();
            if (files != null) {
                for (File file : files) {
                    if (file.getName().toLowerCase().endsWith(".log")) {
                        try {
                            FileUtils.copyFileToDirectory(file, tmpDir);
                        } catch (IOException e) {
                            log.warn("Failed to copy " + file.getName() + " to the snapshot directory", e);
                        }
                    }
                }
            }
        }

    }

    ITriggerRouterService triggerRouterService = engine.getTriggerRouterService();
    List<TriggerHistory> triggerHistories = triggerRouterService.getActiveTriggerHistories();
    TreeSet<Table> tables = new TreeSet<Table>();
    for (TriggerHistory triggerHistory : triggerHistories) {
        Table table = engine.getDatabasePlatform().getTableFromCache(triggerHistory.getSourceCatalogName(),
                triggerHistory.getSourceSchemaName(), triggerHistory.getSourceTableName(), false);
        if (table != null && !table.getName().toUpperCase()
                .startsWith(engine.getSymmetricDialect().getTablePrefix().toUpperCase())) {
            tables.add(table);
        }
    }

    List<Trigger> triggers = triggerRouterService.getTriggers(true);
    for (Trigger trigger : triggers) {
        Table table = engine.getDatabasePlatform().getTableFromCache(trigger.getSourceCatalogName(),
                trigger.getSourceSchemaName(), trigger.getSourceTableName(), false);
        if (table != null) {
            tables.add(table);
        }
    }

    FileWriter fwriter = null;
    try {
        fwriter = new FileWriter(new File(tmpDir, "config-export.csv"));
        engine.getDataExtractorService().extractConfigurationStandalone(engine.getNodeService().findIdentity(),
                fwriter, TableConstants.SYM_NODE, TableConstants.SYM_NODE_SECURITY,
                TableConstants.SYM_NODE_IDENTITY, TableConstants.SYM_NODE_HOST,
                TableConstants.SYM_NODE_CHANNEL_CTL, TableConstants.SYM_CONSOLE_USER);
    } catch (IOException e) {
        log.warn("Failed to export symmetric configuration", e);
    } finally {
        IOUtils.closeQuietly(fwriter);
    }

    FileOutputStream fos = null;
    try {
        fos = new FileOutputStream(new File(tmpDir, "table-definitions.xml"));
        DbExport export = new DbExport(engine.getDatabasePlatform());
        export.setFormat(Format.XML);
        export.setNoData(true);
        export.exportTables(fos, tables.toArray(new Table[tables.size()]));
    } catch (IOException e) {
        log.warn("Failed to export table definitions", e);
    } finally {
        IOUtils.closeQuietly(fos);
    }

    String tablePrefix = engine.getTablePrefix();

    DbExport export = new DbExport(engine.getDatabasePlatform());
    export.setFormat(Format.CSV);
    export.setNoCreateInfo(true);

    extract(export, new File(tmpDir, "identity.csv"),
            TableConstants.getTableName(tablePrefix, TableConstants.SYM_NODE_IDENTITY));

    extract(export, new File(tmpDir, "node.csv"),
            TableConstants.getTableName(tablePrefix, TableConstants.SYM_NODE));

    extract(export, new File(tmpDir, "nodesecurity.csv"),
            TableConstants.getTableName(tablePrefix, TableConstants.SYM_NODE_SECURITY));

    extract(export, new File(tmpDir, "nodehost.csv"),
            TableConstants.getTableName(tablePrefix, TableConstants.SYM_NODE_HOST));

    extract(export, new File(tmpDir, "triggerhist.csv"),
            TableConstants.getTableName(tablePrefix, TableConstants.SYM_TRIGGER_HIST));

    extract(export, new File(tmpDir, "lock.csv"),
            TableConstants.getTableName(tablePrefix, TableConstants.SYM_LOCK));

    extract(export, new File(tmpDir, "nodecommunication.csv"),
            TableConstants.getTableName(tablePrefix, TableConstants.SYM_NODE_COMMUNICATION));

    extract(export, 5000, new File(tmpDir, "outgoingbatch.csv"),
            TableConstants.getTableName(tablePrefix, TableConstants.SYM_OUTGOING_BATCH));

    extract(export, 5000, new File(tmpDir, "incomingbatch.csv"),
            TableConstants.getTableName(tablePrefix, TableConstants.SYM_INCOMING_BATCH));

    final int THREAD_INDENT_SPACE = 50;
    fwriter = null;
    try {
        fwriter = new FileWriter(new File(tmpDir, "threads.txt"));
        ThreadMXBean threadBean = ManagementFactory.getThreadMXBean();
        long[] threadIds = threadBean.getAllThreadIds();
        for (long l : threadIds) {
            ThreadInfo info = threadBean.getThreadInfo(l, 100);
            if (info != null) {
                String threadName = info.getThreadName();
                fwriter.append(StringUtils.rightPad(threadName, THREAD_INDENT_SPACE));
                StackTraceElement[] trace = info.getStackTrace();
                boolean first = true;
                for (StackTraceElement stackTraceElement : trace) {
                    if (!first) {
                        fwriter.append(StringUtils.rightPad("", THREAD_INDENT_SPACE));
                    } else {
                        first = false;
                    }
                    fwriter.append(stackTraceElement.getClassName());
                    fwriter.append(".");
                    fwriter.append(stackTraceElement.getMethodName());
                    fwriter.append("()");
                    int lineNumber = stackTraceElement.getLineNumber();
                    if (lineNumber > 0) {
                        fwriter.append(": ");
                        fwriter.append(Integer.toString(lineNumber));
                    }
                    fwriter.append("\n");
                }
                fwriter.append("\n");
            }
        }
    } catch (IOException e) {
        log.warn("Failed to export thread information", e);
    } finally {
        IOUtils.closeQuietly(fwriter);
    }

    fos = null;
    try {
        fos = new FileOutputStream(new File(tmpDir, "parameters.properties"));
        Properties effectiveParameters = engine.getParameterService().getAllParameters();
        SortedProperties parameters = new SortedProperties();
        parameters.putAll(effectiveParameters);
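        // Redact the database password so it does not end up in the snapshot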
        parameters.remove("db.password");
        parameters.store(fos, "parameters.properties");
    } catch (IOException e) {
        log.warn("Failed to export parameter information", e);
    } finally {
        IOUtils.closeQuietly(fos);
    }

    fos = null;
    try {
        fos = new FileOutputStream(new File(tmpDir, "parameters-changed.properties"));
        Properties defaultParameters = new Properties();
        InputStream in = SnapshotUtil.class.getResourceAsStream("/symmetric-default.properties");
        defaultParameters.load(in);
        IOUtils.closeQuietly(in);
        in = SnapshotUtil.class.getResourceAsStream("/symmetric-console-default.properties");
        if (in != null) {
            defaultParameters.load(in);
            IOUtils.closeQuietly(in);
        }
        Properties effectiveParameters = engine.getParameterService().getAllParameters();
        Properties changedParameters = new SortedProperties();
        Map<String, ParameterMetaData> parameters = ParameterConstants.getParameterMetaData();
        for (String key : parameters.keySet()) {
            String defaultValue = defaultParameters.getProperty(key);
            String currentValue = effectiveParameters.getProperty(key);
            if (defaultValue == null && currentValue != null
                    || (defaultValue != null && !defaultValue.equals(currentValue))) {
                changedParameters.put(key, currentValue == null ? "" : currentValue);
            }
        }
        changedParameters.remove("db.password");
        changedParameters.store(fos, "parameters-changed.properties");
    } catch (IOException e) {
        log.warn("Failed to export parameters-changed information", e);
    } finally {
        IOUtils.closeQuietly(fos);
    }

    writeRuntimeStats(engine, tmpDir);
    writeJobsStats(engine, tmpDir);

    if ("true".equals(System.getProperty(SystemConstants.SYSPROP_STANDALONE_WEB))) {
        writeDirectoryListing(engine, tmpDir);
    }

    fos = null;
    try {
        fos = new FileOutputStream(new File(tmpDir, "system.properties"));
        SortedProperties props = new SortedProperties();
        props.putAll(System.getProperties());
        props.store(fos, "system.properties");
    } catch (IOException e) {
        log.warn("Failed to export system properties", e);
    } finally {
        IOUtils.closeQuietly(fos);
    }

    try {
        File jarFile = new File(getSnapshotDirectory(engine), tmpDir.getName() + ".zip");
        JarBuilder builder = new JarBuilder(tmpDir, jarFile, new File[] { tmpDir }, Version.version());
        builder.build();
        FileUtils.deleteDirectory(tmpDir);
        return jarFile;
    } catch (IOException e) {
        throw new IoException("Failed to package snapshot files into archive", e);
    }
}

From source file: org.ops4j.pax.jdbc.pool.dbcp2.impl.ds.PooledDataSourceFactory.java

private Properties getNonPoolProps(Properties props) {
    Properties dsProps = new Properties();
    for (Object keyO : props.keySet()) {
        String key = (String) keyO;
        if (!key.startsWith(POOL_PREFIX)) {
            dsProps.put(key, props.get(key));
        }
    }
    dsProps.remove(DataSourceFactory.JDBC_DATASOURCE_NAME);
    return dsProps;
}

From source file: net.sf.jabb.util.db.impl.OracleCachedDataSourceProvider.java

public DataSource createDataSource(String source, String config) {
    DataSource ds = null;
    Properties props;
    try {
        props = propLoader.load(config);
        OracleDataSource ods = new OracleDataSource();

        // see http://download.oracle.com/docs/cd/B28359_01/java.111/b31224/urls.htm
        ods.setDriverType(props.getProperty("_driverType"));
        props.remove("_driverType");
        ods.setNetworkProtocol(props.getProperty("_networkProtocol"));
        props.remove("_networkProtocol");
        ods.setPortNumber(Integer.parseInt(props.getProperty("_portNumber")));
        props.remove("_portNumber");
        ods.setServerName(props.getProperty("_serverName"));
        props.remove("_serverName");
        ods.setDatabaseName(props.getProperty("_databaseName"));
        props.remove("_databaseName");

        ods.setUser(props.getProperty("user"));
        props.remove("user");
        ods.setPassword(props.getProperty("password"));
        props.remove("password");

        ods.setConnectionProperties(props);
        // see http://download.oracle.com/docs/cd/B14117_01/java.101/b10979/conncache.htm
        ods.setConnectionCachingEnabled(true);
        ods.setConnectionCacheName(source);

        ds = ods;
    } catch (InvalidPropertiesFormatException e) {
        log.error(
                "Wrong configuration properties file format for '" + source + "' with configuration: " + config,
                e);
    } catch (IOException e) {
        log.error("Error loading configuration file for '" + source + "' with configuration: " + config, e);
    } catch (SQLException e) {
        log.error("Error creating Oracle cached data source for '" + source + "' with configuration: " + config,
                e);
    } catch (Exception e) {
        log.error("Error creating data source for '" + source + "' with configuration: " + config, e);
    }

    return ds;
}
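
Each configuration key above is read with getProperty and then removed, so that only generic connection properties remain for setConnectionProperties. Since remove itself returns the previous value, a small helper can do both in one step; a hypothetical sketch (consume is not part of the original code):

import java.util.Properties;

public class PropertyConsumer {
    // Returns the value for key and deletes the entry in a single call, so the
    // remaining entries can be passed through as generic connection properties.
    static String consume(Properties props, String key) {
        return (String) props.remove(key);
    }
}

With such a helper, the getProperty/remove pairs collapse to calls like ods.setUser(consume(props, "user")).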

From source file: org.ebayopensource.turmeric.eclipse.utils.test.io.TestPropertiesFileUtil.java

/**
 * Test method for {@link org.ebayopensource.turmeric.eclipse.utils.io.PropertiesFileUtil#isEqual(java.util.Properties, java.util.Properties)}.
 */
@Test
public void testIsEqual() {
    Map<String, String> data = new ConcurrentHashMap<String, String>();
    data.put("nikon", "D3x");
    data.put("canon", "1D Mark III");
    data.put("pentax", "K7");

    Properties props1 = new Properties();
    props1.putAll(data);

    Properties props2 = new Properties();
    props2.putAll(new ConcurrentHashMap<String, String>(data));
    Assert.assertTrue(PropertiesFileUtil.isEqual(props1, props2));

    props2.put("sony", "a900");
    Assert.assertFalse(PropertiesFileUtil.isEqual(props1, props2));

    props2.remove("sony");
    props2.put("canon", "sucks");
    System.out.println(props1);
    System.out.println(props2);
    Assert.assertFalse(PropertiesFileUtil.isEqual(props1, props2));

}

From source file: org.elasticsearch.hadoop.integration.hive.HiveEmbeddedServer2.java

private HiveConf configure() throws Exception {
    String scratchDir = NTFSLocalFileSystem.SCRATCH_DIR;

    File scratchDirFile = new File(scratchDir);
    TestUtils.delete(scratchDirFile);

    Configuration cfg = new Configuration();
    HiveConf conf = new HiveConf(cfg, HiveConf.class);
    conf.addToRestrictList("columns.comments");
    refreshConfig(conf);

    HdpBootstrap.hackHadoopStagingOnWin();

    // work-around for NTFS FS
    // set permissive permissions since otherwise, on some OS it fails
    if (TestUtils.isWindows()) {
        conf.set("fs.file.impl", NTFSLocalFileSystem.class.getName());
        conf.set("hive.scratch.dir.permission", "650");
        conf.setVar(ConfVars.SCRATCHDIRPERMISSION, "650");
        conf.set("hive.server2.enable.doAs", "false");
        conf.set("hive.execution.engine", "mr");
        //conf.set("hadoop.bin.path", getClass().getClassLoader().getResource("hadoop.cmd").getPath());
        System.setProperty("path.separator", ";");
        conf.setVar(HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER,
                DummyHiveAuthenticationProvider.class.getName());
    } else {
        conf.set("hive.scratch.dir.permission", "777");
        conf.setVar(ConfVars.SCRATCHDIRPERMISSION, "777");
        scratchDirFile.mkdirs();
        // also set the permissions manually since Hive doesn't do it...
        scratchDirFile.setWritable(true, false);
    }

    int random = new Random().nextInt();

    conf.set("hive.metastore.warehouse.dir", scratchDir + "/warehouse" + random);
    conf.set("hive.metastore.metadb.dir", scratchDir + "/metastore_db" + random);
    conf.set("hive.exec.scratchdir", scratchDir);
    conf.set("fs.permissions.umask-mode", "022");
    conf.set("javax.jdo.option.ConnectionURL",
            "jdbc:derby:;databaseName=" + scratchDir + "/metastore_db" + random + ";create=true");
    conf.set("hive.metastore.local", "true");
    conf.set("hive.aux.jars.path", "");
    conf.set("hive.added.jars.path", "");
    conf.set("hive.added.files.path", "");
    conf.set("hive.added.archives.path", "");
    conf.set("fs.default.name", "file:///");

    // clear mapred.job.tracker - Hadoop defaults to 'local' if not defined. Hive however expects this to be set to 'local' - if it's not, it does a remote execution (i.e. no child JVM)
    Field field = Configuration.class.getDeclaredField("properties");
    field.setAccessible(true);
    Properties props = (Properties) field.get(conf);
    props.remove("mapred.job.tracker");
    props.remove("mapreduce.framework.name");
    props.setProperty("fs.default.name", "file:///");

    // intercept SessionState to clean the threadlocal
    Field tss = SessionState.class.getDeclaredField("tss");
    tss.setAccessible(true);
    //tss.set(null, new InterceptingThreadLocal());

    return new HiveConf(conf);
}
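
The reflective access to Configuration's private properties field above has a public alternative in newer Hadoop versions: Configuration.unset(String) removes a key directly. A hedged sketch, assuming a Hadoop release that provides unset:

import org.apache.hadoop.conf.Configuration;

public class ConfUnsetDemo {
    public static void main(String[] args) {
        Configuration conf = new Configuration(false); // skip loading default resources
        conf.set("mapred.job.tracker", "local");

        // unset removes the key without reflecting on the private properties field
        conf.unset("mapred.job.tracker");
        System.out.println(conf.get("mapred.job.tracker")); // null
    }
}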