Example usage for java.util Properties remove

Introduction

On this page you can find example usages of java.util.Properties.remove(Object key), collected from open-source projects.

Prototype

@Override
public synchronized Object remove(Object key)
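Properties overrides Hashtable.remove: it deletes the mapping for the given key and returns the previous value, or null if the key was absent. A minimal sketch (the key names are illustrative):

import java.util.Properties;

public class RemoveDemo {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.setProperty("cache.enabled", "true");

        // remove returns the previous value, or null if no mapping existed
        Object removed = props.remove("cache.enabled");
        System.out.println(removed);                  // prints: true
        System.out.println(props.remove("missing"));  // prints: null
    }
}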

Usage

From source file: org.freeplane.main.application.UserPropertiesUpdater.java
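Freeplane's preference updater drops obsolete version-specific keys (such as lastOpened_1.0.20) before rewriting the user preferences file: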

private void importOldPreferences(final File userPreferencesFile, final File oldUserPreferencesFile) {
    try {
        Properties userProp = loadProperties(userPreferencesFile);
        userProp.remove("lastOpened_1.0.20");
        userProp.remove("openedNow_1.0.20");
        userProp.remove("browse_url_storage");
        fixFontSize(userProp, "defaultfontsize");
        fixFontSize(userProp, "label_font_size");
        saveProperties(userProp, userPreferencesFile);
    } catch (IOException e) {
        // ignore: if the preferences cannot be read or written, they are left unchanged
    }
}

From source file: org.freeplane.main.application.UserPropertiesUpdater.java
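Its fixFontSize helper removes a font-size entry, rescales the value by UITools.FONT_SCALE_FACTOR, and stores the result back under the same key: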

private void fixFontSize(Properties userProp, String name) {
    final Object defaultFontSizeObj = userProp.remove(name);
    if (defaultFontSizeObj == null)
        return;
    try {
        int oldDefaultFontSize = Integer.parseInt(defaultFontSizeObj.toString());
        int newDefaultFontSize = Math.round(oldDefaultFontSize / UITools.FONT_SCALE_FACTOR);
        userProp.put(name, Integer.toString(newDefaultFontSize));
    } catch (NumberFormatException e) {
        // non-numeric value: the entry stays removed
    }
}

From source file: org.sonar.batch.scan.DefaultProjectBootstrapper.java
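SonarQube's bootstrapper cleans an aggregator project definition: it removes the source, test, binary, and library properties, plus every key prefixed by a module id: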

@VisibleForTesting
protected static void cleanAndCheckAggregatorProjectProperties(ProjectDefinition project) {
    Properties properties = project.getProperties();

    // SONARPLUGINS-2295
    String[] sourceDirs = getListFromProperty(properties, PROPERTY_SOURCES);
    for (String path : sourceDirs) {
        File sourceFolder = getFileFromPath(path, project.getBaseDir());
        if (sourceFolder.isDirectory()) {
            LOG.warn(
                    "/!\\ A multi-module project can't have source folders, so '{}' won't be used for the analysis. "
                            + "If you want to analyse files of this folder, you should create another sub-module and move them inside it.",
                    sourceFolder.toString());
        }
    }

    // "aggregator" project must not have the following properties:
    properties.remove(PROPERTY_SOURCES);
    properties.remove(PROPERTY_TESTS);
    properties.remove(PROPERTY_BINARIES);
    properties.remove(PROPERTY_LIBRARIES);

    // and they don't need properties related to their modules either
    Properties clone = (Properties) properties.clone();
    List<String> moduleIds = Lists.newArrayList(getListFromProperty(properties, PROPERTY_MODULES));
    for (Entry<Object, Object> entry : clone.entrySet()) {
        String key = (String) entry.getKey();
        if (isKeyPrefixedByModuleId(key, moduleIds)) {
            properties.remove(key);
        }
    }
}

From source file: com.pivotal.gemfire.tools.pulse.internal.data.DataBrowser.java
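GemFire Pulse deletes a stored query from the query history by removing its id from the loaded properties and persisting the result: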

/**
 * Deletes a query from the query history file.
 *
 * @param queryId
 *          unique id of the query to be deleted
 * @return true if the query was removed and the history file was updated
 */
public boolean deleteQueryById(String queryId) {

    boolean operationStatus = false;

    if (StringUtils.isNotNullNotEmptyNotWhiteSpace(queryId)) {
        // Fetch all queries from query log file
        Properties properties = fetchAllQueriesFromFile();

        // Remove query in properties
        properties.remove(queryId);

        // Store queries in file
        operationStatus = storeQueriesInFile(properties);
    }

    return operationStatus;
}

From source file: de.micromata.genome.logging.spi.FileLogConfigurationDAOImpl.java
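This log configuration DAO treats a null log level as a request to remove the pattern's entry entirely: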

@Override
public void setLogLevel(LogLevel logLevel, String patternString) {
    Properties p = loadProperties();
    if (p == null) {
        p = new Properties();
    }
    if (logLevel == null) {
        p.remove(patternString);
    } else {
        p.setProperty(patternString, logLevel.toString());
    }
    storeProperties(p);
    buildPattern();
}

From source file: org.kie.workbench.common.services.backend.compiler.impl.external339.ReusableAFMavenCli.java
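A small helper that purges from one Properties object every key present in another: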

private static void purgeBannedProperties(Properties toPurge, Properties itemsToRemove) {
    for (Object key : itemsToRemove.keySet()) {
        toPurge.remove(key);
    }
}

From source file: com.norconex.commons.lang.map.PropertiesTest.java
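This test exercises Norconex's Properties variant (a case-insensitive map type, not java.util.Properties), where remove on a case-insensitive instance returns the combined values of all keys matching regardless of case: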

@Test
public void testRemoveCaseInsensitiveMultiple() throws Exception {
    Properties properties = new Properties(true);
    List<String> list1 = asList("a", "b", "c");
    List<String> list2 = asList("d", "e", "f");
    properties.put("Key", list1);
    properties.put("KEy", list2);
    assertEquals(ListUtils.sum(list1, list2), properties.remove("key"));
}

From source file: org.elasticsearch.hadoop.integration.hive.HiveEmbeddedServer.java
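The embedded Hive server uses reflection to reach Hadoop Configuration's backing Properties and removes the job-tracker keys so that execution stays local: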

private HiveConf configure() throws Exception {
    String scratchDir = NTFSLocalFileSystem.SCRATCH_DIR;

    File scratchDirFile = new File(scratchDir);
    TestUtils.delete(scratchDirFile);

    Configuration cfg = new Configuration();
    HiveConf conf = new HiveConf(cfg, HiveConf.class);
    conf.addToRestrictList("columns.comments");
    refreshConfig(conf);

    HdpBootstrap.hackHadoopStagingOnWin();

    // work-around for NTFS FS
    // set permissive permissions since otherwise, on some OS it fails
    if (TestUtils.isWindows()) {
        conf.set("fs.file.impl", NTFSLocalFileSystem.class.getName());
        conf.set("hive.scratch.dir.permission", "650");
        conf.setVar(ConfVars.SCRATCHDIRPERMISSION, "650");
        conf.set("hive.server2.enable.doAs", "false");
        conf.set("hive.execution.engine", "mr");
        //conf.set("hadoop.bin.path", getClass().getClassLoader().getResource("hadoop.cmd").getPath());
        System.setProperty("path.separator", ";");
        conf.setVar(HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER,
                DummyHiveAuthenticationProvider.class.getName());
    } else {
        conf.set("hive.scratch.dir.permission", "777");
        conf.setVar(ConfVars.SCRATCHDIRPERMISSION, "777");
        scratchDirFile.mkdirs();
        // also set the permissions manually since Hive doesn't do it...
        scratchDirFile.setWritable(true, false);
    }

    int random = new Random().nextInt();

    conf.set("hive.metastore.warehouse.dir", scratchDir + "/warehouse" + random);
    conf.set("hive.metastore.metadb.dir", scratchDir + "/metastore_db" + random);
    conf.set("hive.exec.scratchdir", scratchDir);
    conf.set("fs.permissions.umask-mode", "022");
    conf.set("javax.jdo.option.ConnectionURL",
            "jdbc:derby:;databaseName=" + scratchDir + "/metastore_db" + random + ";create=true");
    conf.set("hive.metastore.local", "true");
    conf.set("hive.aux.jars.path", "");
    conf.set("hive.added.jars.path", "");
    conf.set("hive.added.files.path", "");
    conf.set("hive.added.archives.path", "");
    conf.set("fs.default.name", "file:///");

    // clear mapred.job.tracker - Hadoop defaults to 'local' if not defined. Hive however expects this to be set to 'local' - if it's not, it does a remote execution (i.e. no child JVM)
    Field field = Configuration.class.getDeclaredField("properties");
    field.setAccessible(true);
    Properties props = (Properties) field.get(conf);
    props.remove("mapred.job.tracker");
    props.remove("mapreduce.framework.name");
    props.setProperty("fs.default.name", "file:///");

    // intercept SessionState to clean the threadlocal
    Field tss = SessionState.class.getDeclaredField("tss");
    tss.setAccessible(true);
    tss.set(null, new InterceptingThreadLocal());

    return new HiveConf(conf);
}

From source file: org.xwiki.extension.internal.converter.ExtensionConverter.java
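XWiki's converter reads a Maven-prefixed property and can optionally consume it: with delete set, remove returns the value and discards the entry in one step: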

private String getProperty(Properties properties, String propertyName, boolean delete) {
    return delete ? (String) properties.remove(MavenUtils.MPKEYPREFIX + propertyName)
            : properties.getProperty(MavenUtils.MPKEYPREFIX + propertyName);
}

From source file: io.warp10.worf.WorfTemplate.java
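Worf's template loader removes the worf.template marker from the configuration before reading the template file: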

public WorfTemplate(Properties config, String templateFilePath) throws WorfException {
    try {
        this.config = config;

        if (isTemplate(config)) {
            config.remove("worf.template");
        }

        // load template file
        Path templatePath = Paths.get(templateFilePath);
        Charset charset = StandardCharsets.UTF_8;

        content = new String(Files.readAllBytes(templatePath), charset);
    } catch (Exception exp) {
        throw new WorfException("Unable to load template cause=" + exp.getMessage());
    }
}