Example usage for java.util Properties equals

List of usage examples for java.util Properties equals

Introduction

On this page you can find example usages of java.util.Properties.equals.

Prototype

@Override
    public synchronized boolean equals(Object o) 

Source Link

Usage

From source file:com.alibaba.rocketmq.common.MixAll.java

/**
 * Compares two {@link Properties} instances for equality.
 *
 * <p>Null-safe: two {@code null} references compare equal, and a single
 * {@code null} compares unequal (the previous version threw a
 * {@link NullPointerException} when {@code p1} was {@code null}).
 * Non-null instances are compared via {@link Properties#equals(Object)},
 * which performs a full key/value comparison.
 *
 * @param p1 first properties instance, may be {@code null}
 * @param p2 second properties instance, may be {@code null}
 * @return {@code true} if both hold the same key/value entries (or both are {@code null})
 */
public static boolean isPropertiesEqual(final Properties p1, final Properties p2) {
    if (p1 == p2) {
        return true;
    }
    if (p1 == null || p2 == null) {
        return false;
    }
    return p1.equals(p2);
}

From source file:ws.salient.session.Session.java

/**
 * Reports whether the supplied knowledge base or properties differ from the
 * ones currently held by this session.
 *
 * @param knowledgeBase candidate knowledge base to compare against the session's
 * @param properties candidate properties to compare against the session's
 * @return {@code true} if either argument differs from the session's current state
 */
public boolean hasChanged(KnowledgeBase knowledgeBase, Properties properties) {
    boolean sameKnowledgeBase = knowledgeBase.equals(this.knowledgeBase);
    boolean sameProperties = properties.equals(this.properties);
    // Changed when at least one of the two no longer matches.
    return !sameKnowledgeBase || !sameProperties;
}

From source file:gobblin.aws.AWSJobConfigurationManager.java

/**
 * Pulls the latest job configuration archive from S3, extracts it locally and
 * posts a new-job-config-arrival event for every job configuration that is new
 * or has changed since the last fetch.
 *
 * @throws IOException if the archive cannot be downloaded or extracted
 * @throws ConfigurationException if the extracted job config files cannot be parsed
 */
private void fetchJobConf() throws IOException, ConfigurationException {
    // Refresh job config pull details from config
    fetchJobConfSettings();

    // TODO: Eventually when config store supports job files as well
    // .. we can replace this logic with config store
    if (this.jobConfS3Uri.isPresent() && this.jobConfDirPath.isPresent()) {

        // Download the zip file, named after the last path segment of the S3 URI
        final String zipFile = appendSlash(this.jobConfDirPath.get())
                + StringUtils.substringAfterLast(this.jobConfS3Uri.get(), File.separator);
        LOGGER.debug("Downloading to zip: " + zipFile + " from uri: " + this.jobConfS3Uri.get());

        FileUtils.copyURLToFile(new URL(this.jobConfS3Uri.get()), new File(zipFile));
        final String extractedPullFilesPath = appendSlash(this.jobConfDirPath.get()) + "files";

        // Extract the zip file
        LOGGER.debug("Extracting to directory: " + extractedPullFilesPath + " from zip: " + zipFile);
        unzipArchive(zipFile, new File(extractedPullFilesPath));

        // Load all new job configurations
        // TODO: Currently new and updated jobs are handled, we should un-schedule deleted jobs as well
        final File jobConfigDir = new File(extractedPullFilesPath);
        if (jobConfigDir.exists()) {
            LOGGER.info("Loading job configurations from " + jobConfigDir);
            final Properties properties = new Properties();
            properties.setProperty(ConfigurationKeys.JOB_CONFIG_FILE_GENERAL_PATH_KEY,
                    jobConfigDir.getAbsolutePath());

            final List<Properties> jobConfigs = SchedulerUtils.loadGenericJobConfigs(properties);
            LOGGER.info("Loaded " + jobConfigs.size() + " job configuration(s)");
            for (Properties config : jobConfigs) {
                LOGGER.debug("Config value: " + config);

                // If new config or existing config got updated, then post new job config arrival event.
                // The config file path uniquely identifies a job across refreshes.
                final String jobConfigPathIdentifier = config
                        .getProperty(ConfigurationKeys.JOB_CONFIG_FILE_PATH_KEY);
                if (!jobConfFiles.containsKey(jobConfigPathIdentifier)) {
                    jobConfFiles.put(jobConfigPathIdentifier, config);

                    postNewJobConfigArrival(config.getProperty(ConfigurationKeys.JOB_NAME_KEY), config);
                    LOGGER.info("New config arrived for job: " + jobConfigPathIdentifier);
                } else if (!config.equals(jobConfFiles.get(jobConfigPathIdentifier))) {
                    // Properties.equals compares the full key/value set, so any changed entry re-posts the job
                    jobConfFiles.put(jobConfigPathIdentifier, config);

                    postNewJobConfigArrival(config.getProperty(ConfigurationKeys.JOB_NAME_KEY), config);
                    LOGGER.info("Config updated for job: " + jobConfigPathIdentifier);
                } else {
                    LOGGER.info("Config not changed for job: " + jobConfigPathIdentifier);
                }
            }
        } else {
            LOGGER.warn("Job configuration directory " + jobConfigDir + " not found");
        }
    }
}

From source file:pl.project13.maven.git.GitCommitIdMojo.java

/**
 * Writes {@code localProperties} to the configured output file (json or
 * java-properties format), but only when the content actually differs from
 * what is already on disk, so unchanged builds do not rewrite the file.
 *
 * <p>The build-time property is excluded from the comparison because it
 * changes on every build and would otherwise force a rewrite each time.
 *
 * @param localProperties the freshly computed git properties to persist
 * @param base base directory used to resolve the output file
 * @param propertiesFilename name of the output file
 * @throws GitCommitIdExecutionException if the file cannot be read or written
 */
void maybeGeneratePropertiesFile(@NotNull Properties localProperties, File base, String propertiesFilename)
        throws GitCommitIdExecutionException {
    try {
        final File gitPropsFile = craftPropertiesOutputFile(base, propertiesFilename);
        final boolean isJsonFormat = "json".equalsIgnoreCase(format);

        boolean shouldGenerate = true;

        if (gitPropsFile.exists()) {
            final Properties persistedProperties;

            try {
                if (isJsonFormat) {
                    log.info("Reading existing json file [{}] (for module {})...",
                            gitPropsFile.getAbsolutePath(), project.getName());

                    persistedProperties = readJsonProperties(gitPropsFile);
                } else {
                    log.info("Reading existing properties file [{}] (for module {})...",
                            gitPropsFile.getAbsolutePath(), project.getName());

                    persistedProperties = readProperties(gitPropsFile);
                }

                // Compare on a copy so the caller's properties are not mutated.
                final Properties propertiesCopy = (Properties) localProperties.clone();

                final String buildTimeProperty = prefixDot + BUILD_TIME;

                // Drop the always-changing build timestamp from both sides before comparing.
                propertiesCopy.remove(buildTimeProperty);
                persistedProperties.remove(buildTimeProperty);

                shouldGenerate = !propertiesCopy.equals(persistedProperties);
            } catch (CannotReadFileException ex) {
                // Read has failed, regenerate file
                log.info("Cannot read properties file [{}] (for module {})...", gitPropsFile.getAbsolutePath(),
                        project.getName());
                shouldGenerate = true;
            }
        }

        if (shouldGenerate) {
            Files.createParentDirs(gitPropsFile);
            Writer outputWriter = null;
            // Guava Closeables.close idiom: swallow the close() exception only
            // when the try block already threw, to avoid masking the real error.
            boolean threw = true;

            try {
                outputWriter = new OutputStreamWriter(new FileOutputStream(gitPropsFile), sourceCharset);
                if (isJsonFormat) {
                    log.info("Writing json file to [{}] (for module {})...", gitPropsFile.getAbsolutePath(),
                            project.getName());
                    ObjectMapper mapper = new ObjectMapper();
                    mapper.writerWithDefaultPrettyPrinter().writeValue(outputWriter, localProperties);
                } else {
                    log.info("Writing properties file to [{}] (for module {})...",
                            gitPropsFile.getAbsolutePath(), project.getName());
                    localProperties.store(outputWriter, "Generated by Git-Commit-Id-Plugin");
                }
                threw = false;
            } catch (final IOException ex) {
                throw new RuntimeException("Cannot create custom git properties file: " + gitPropsFile, ex);
            } finally {
                Closeables.close(outputWriter, threw);
            }
        } else {
            log.info("Properties file [{}] is up-to-date (for module {})...", gitPropsFile.getAbsolutePath(),
                    project.getName());
        }
    } catch (IOException e) {
        throw new GitCommitIdExecutionException(e);
    }
}

From source file:de.dal33t.powerfolder.Controller.java

/**
 * Saves the current config to disk.
 *
 * <p>Both the regular config and the folders config are written via a
 * temp-file-then-move sequence, and each is only rewritten when its content
 * differs from what is already on disk.
 */
public synchronized void saveConfig() {
    if (!started) {
        return;
    }
    logFine("Saving config (" + getConfigName() + ".config)");

    Path file;
    Path tempFile;
    Path folderFile;
    Path tempFolderFile;
    Path backupFile;
    // Resolve all paths either relative to the working directory (no config
    // location base) or under the configured base directory.
    if (getConfigLocationBase() == null) {
        file = Paths.get(getConfigName() + ".config").toAbsolutePath();
        tempFile = Paths.get(getConfigName() + ".writing.config").toAbsolutePath();
        folderFile = Paths.get(getConfigName() + "-Folder.config").toAbsolutePath();
        tempFolderFile = Paths.get(getConfigName() + "-Folder.writing.config").toAbsolutePath();
        backupFile = Paths.get(getConfigName() + ".config.backup").toAbsolutePath();
    } else {
        file = getConfigLocationBase().resolve(getConfigName() + ".config");
        tempFile = getConfigLocationBase().resolve(getConfigName() + ".writing.config").toAbsolutePath();
        backupFile = getConfigLocationBase().resolve(getConfigName() + ".config.backup");
        folderFile = getConfigLocationBase().resolve(getConfigName() + "-Folder.config");
        tempFolderFile = getConfigLocationBase().resolve(getConfigName() + "-Folder.writing.config")
                .toAbsolutePath();
    }

    try {
        // Backup is done in #backupConfigAssets
        Files.deleteIfExists(backupFile);

        String distName = "PowerFolder";
        if (distribution != null && StringUtils.isNotBlank(distribution.getName())) {
            distName = distribution.getName();
        }

        // Load the previously saved config (if any) to detect changes.
        Properties prev = new Properties();
        if (Files.exists(file)) {
            try (BufferedInputStream in = new BufferedInputStream(Files.newInputStream(file))) {
                prev.load(in);
            }
        }

        if (!prev.equals(config.getRegular())) {
            // Store config in misc base: write to a temp file first, then
            // move it into place; fall back to copy+delete when the move
            // fails (e.g. across file systems).
            PropertiesUtil.saveConfig(tempFile, config.getRegular(),
                    distName + " config file (v" + PROGRAM_VERSION + ')');
            Files.deleteIfExists(file);
            try {
                Files.move(tempFile, file);
            } catch (IOException e) {
                Files.copy(tempFile, file);
                Files.delete(tempFile);
            }
        } else {
            if (isFine()) {
                logFine("Not storing config to " + file + ". Base config remains unchanged");
            }
        }

        if (!config.getFolders().isEmpty()) {
            // Same change-detection and temp-file-then-move sequence for the
            // folders config.
            Properties prevFolders = new Properties();
            if (Files.exists(folderFile)) {
                try (BufferedInputStream in = new BufferedInputStream(Files.newInputStream(folderFile))) {
                    prevFolders.load(in);
                }
            }
            if (!prevFolders.equals(config.getFolders())) {
                PropertiesUtil.saveConfig(tempFolderFile, config.getFolders(),
                        distName + " folders config file (v" + PROGRAM_VERSION + ')');
                Files.deleteIfExists(folderFile);
                try {
                    Files.move(tempFolderFile, folderFile);
                } catch (IOException e) {
                    Files.copy(tempFolderFile, folderFile);
                    Files.delete(tempFolderFile);
                }
            }
        }
    } catch (IOException e) {
        // FATAL
        logSevere("Unable to save config. " + e, e);
        exit(1);
    } catch (Exception e) {
        // major problem , setting code is wrong
        e.printStackTrace();
        logSevere("major problem , setting code is wrong", e);
    }
}

From source file:com.tremolosecurity.provisioning.core.ProvisioningEngineImpl.java

/**
 * Initializes the Quartz scheduler from the provisioning configuration.
 *
 * <p>Builds the Quartz properties (instance id picked from the first local IP
 * matching the configured mask, thread pool size, RAM or clustered JDBC job
 * store), persists them to {@code quartz.properties}, starts the scheduler,
 * then reconciles configured jobs against scheduled ones: new jobs are added,
 * jobs whose class, cron schedule or properties changed are reloaded, and
 * scheduled jobs no longer present in the configuration are deleted.
 *
 * @throws ProvisioningException if network interfaces cannot be read, the
 *         properties file cannot be written, or the scheduler fails to initialize
 */
@Override
public void initScheduler() throws ProvisioningException {
    if (this.cfgMgr.getCfg().getProvisioning() == null
            || this.cfgMgr.getCfg().getProvisioning().getScheduler() == null) {
        logger.warn("Scheduler not defined");
        return;
    }

    SchedulingType sct = this.cfgMgr.getCfg().getProvisioning().getScheduler();

    Properties scheduleProps = new Properties();

    scheduleProps.setProperty("org.quartz.scheduler.instanceName", sct.getInstanceLabel());

    // Use the last local address matching the configured IP mask as this
    // node's instance id; fall back to Quartz's "AUTO" when none matches.
    String instanceLabel = null;
    try {
        Enumeration<NetworkInterface> enumer = NetworkInterface.getNetworkInterfaces();
        while (enumer.hasMoreElements()) {
            NetworkInterface ni = enumer.nextElement();
            Enumeration<InetAddress> enumeri = ni.getInetAddresses();
            while (enumeri.hasMoreElements()) {
                InetAddress addr = enumeri.nextElement();
                if (addr.getHostAddress().startsWith(sct.getInstanceIPMask())) {
                    instanceLabel = addr.getHostAddress();
                }
            }
        }
    } catch (SocketException e) {
        throw new ProvisioningException("Could not read network addresses", e);
    }

    if (instanceLabel == null) {
        logger.warn("No IP starts with '" + sct.getInstanceIPMask() + "'");
        instanceLabel = "AUTO";
    }

    scheduleProps.setProperty("org.quartz.scheduler.instanceId", instanceLabel);
    scheduleProps.setProperty("org.quartz.threadPool.threadCount", Integer.toString(sct.getThreadCount()));

    if (sct.isUseDB()) {
        // Clustered, database-backed job store
        scheduleProps.setProperty("org.quartz.jobStore.class", "org.quartz.impl.jdbcjobstore.JobStoreTX");
        scheduleProps.setProperty("org.quartz.jobStore.driverDelegateClass",
                sct.getScheduleDB().getDelegateClassName());
        scheduleProps.setProperty("org.quartz.jobStore.dataSource", "scheduleDB");
        scheduleProps.setProperty("org.quartz.dataSource.scheduleDB.driver", sct.getScheduleDB().getDriver());
        scheduleProps.setProperty("org.quartz.dataSource.scheduleDB.URL", sct.getScheduleDB().getUrl());
        scheduleProps.setProperty("org.quartz.dataSource.scheduleDB.user", sct.getScheduleDB().getUser());
        scheduleProps.setProperty("org.quartz.dataSource.scheduleDB.password",
                sct.getScheduleDB().getPassword());
        scheduleProps.setProperty("org.quartz.dataSource.scheduleDB.maxConnections",
                Integer.toString(sct.getScheduleDB().getMaxConnections()));
        scheduleProps.setProperty("org.quartz.dataSource.scheduleDB.validationQuery",
                sct.getScheduleDB().getValidationQuery());
        scheduleProps.setProperty("org.quartz.jobStore.useProperties", "true");
        scheduleProps.setProperty("org.quartz.jobStore.isClustered", "true");
    } else {
        // In-memory job store for single-node setups
        scheduleProps.setProperty("org.quartz.jobStore.class", "org.quartz.simpl.RAMJobStore");
    }

    // Persist the generated settings; try-with-resources guarantees the stream
    // is closed even when store() throws (the previous version leaked it).
    try (PrintStream out = new PrintStream(new FileOutputStream(
            System.getProperty(OpenUnisonConstants.UNISON_CONFIG_QUARTZDIR) + "/quartz.properties"))) {
        scheduleProps.store(out, "Unison internal scheduler properties");
        out.flush();
    } catch (IOException e) {
        throw new ProvisioningException("Could not write to quartz.properties", e);
    }

    try {
        this.scheduler = StdSchedulerFactory.getDefaultScheduler();
        this.scheduler.start();
        this.cfgMgr.addThread(new StopScheduler(this.scheduler));
        HashSet<String> jobKeys = new HashSet<String>();

        for (JobType jobType : sct.getJob()) {
            jobKeys.add(jobType.getName() + "-" + jobType.getGroup());
            JobKey jk = new JobKey(jobType.getName(), jobType.getGroup());
            JobDetail jd = this.scheduler.getJobDetail(jk);
            if (jd == null) {
                logger.info("Adding new job '" + jobType.getName() + "' / '" + jobType.getGroup() + "'");
                try {
                    addJob(jobType, jk);

                } catch (ClassNotFoundException e) {
                    throw new ProvisioningException("Could not initialize job", e);
                }

            } else {
                // Job already scheduled: rebuild the configured cron expression
                // and properties, then reload if anything differs.
                StringBuilder cron = new StringBuilder();
                cron.append(jobType.getCronSchedule().getSeconds()).append(' ')
                        .append(jobType.getCronSchedule().getMinutes()).append(' ')
                        .append(jobType.getCronSchedule().getHours()).append(' ')
                        .append(jobType.getCronSchedule().getDayOfMonth()).append(' ')
                        .append(jobType.getCronSchedule().getMonth()).append(' ')
                        .append(jobType.getCronSchedule().getDayOfWeek()).append(' ')
                        .append(jobType.getCronSchedule().getYear());

                Properties configProps = new Properties();
                for (ParamType pt : jobType.getParam()) {
                    configProps.setProperty(pt.getName(), pt.getValue());
                }

                Properties jobProps = new Properties();
                for (String key : jd.getJobDataMap().getKeys()) {
                    jobProps.setProperty(key, jd.getJobDataMap().getString(key));
                }

                List<Trigger> triggers = (List<Trigger>) scheduler.getTriggersOfJob(jd.getKey());
                CronTrigger trigger = (CronTrigger) triggers.get(0);

                if (!jobType.getClassName().equals(jd.getJobClass().getName())) {
                    logger.info("Reloading job '" + jobType.getName() + "' / '" + jobType.getGroup()
                            + "' - change in class name");
                    reloadJob(jobType, jd);
                } else if (!cron.toString().equalsIgnoreCase(trigger.getCronExpression())) {
                    logger.info("Reloading job '" + jobType.getName() + "' / '" + jobType.getGroup()
                            + "' - change in schedule");
                    reloadJob(jobType, jd);
                } else if (!configProps.equals(jobProps)) {
                    logger.info("Reloading job '" + jobType.getName() + "' / '" + jobType.getGroup()
                            + "' - change in properties");
                    reloadJob(jobType, jd);
                }
            }
        }

        // Remove scheduled jobs that are no longer present in the configuration.
        for (String groupName : scheduler.getJobGroupNames()) {

            for (JobKey jobKey : scheduler.getJobKeys(GroupMatcher.jobGroupEquals(groupName))) {

                String jobName = jobKey.getName();
                String jobGroup = jobKey.getGroup();

                if (!jobKeys.contains(jobName + "-" + jobGroup)) {
                    logger.info("Removing job '" + jobName + "' / '" + jobGroup + "'");
                    scheduler.deleteJob(jobKey);
                }

            }

        }

    } catch (SchedulerException e) {
        throw new ProvisioningException("Could not initialize scheduler", e);
    } catch (ClassNotFoundException e) {
        throw new ProvisioningException("Could not initialize scheduler", e);
    }

}

From source file:org.apache.gobblin.aws.AWSJobConfigurationManager.java

/**
 * Retrieves the latest job configuration archive via the configured retriever,
 * extracts it locally and posts a new-job-config-arrival event for every job
 * configuration that is new or has changed since the last fetch.
 *
 * @throws IOException if the archive cannot be retrieved or extracted
 * @throws ConfigurationException if the extracted job config files cannot be parsed
 */
private void fetchJobConf() throws IOException, ConfigurationException {
    // Refresh job config pull details from config
    fetchJobConfSettings();

    // TODO: Eventually when config store supports job files as well
    // .. we can replace this logic with config store
    if (this.jobArchiveRetriever.isPresent() && this.jobConfDirPath.isPresent()) {
        // Download the zip file
        final String zipFile = this.jobArchiveRetriever.get().retrieve(this.config, this.jobConfDirPath.get());

        final String extractedPullFilesPath = appendSlash(this.jobConfDirPath.get()) + "files";

        // Extract the zip file
        LOGGER.debug("Extracting to directory: " + extractedPullFilesPath + " from zip: " + zipFile);
        unzipArchive(zipFile, new File(extractedPullFilesPath));

        // Load all new job configurations
        // TODO: Currently new and updated jobs are handled, we should un-schedule deleted jobs as well
        final File jobConfigDir = new File(extractedPullFilesPath);
        if (jobConfigDir.exists()) {
            LOGGER.info("Loading job configurations from " + jobConfigDir);
            final Properties properties = ConfigUtils.configToProperties(this.config);
            properties.setProperty(ConfigurationKeys.JOB_CONFIG_FILE_GENERAL_PATH_KEY,
                    jobConfigDir.getAbsolutePath());

            final List<Properties> jobConfigs = SchedulerUtils.loadGenericJobConfigs(properties);
            LOGGER.info("Loaded " + jobConfigs.size() + " job configuration(s)");
            for (Properties config : jobConfigs) {
                LOGGER.debug("Config value: " + config);

                // If new config or existing config got updated, then post new job config arrival event.
                // The config file path uniquely identifies a job across refreshes.
                final String jobConfigPathIdentifier = config
                        .getProperty(ConfigurationKeys.JOB_CONFIG_FILE_PATH_KEY);
                if (!jobConfFiles.containsKey(jobConfigPathIdentifier)) {
                    jobConfFiles.put(jobConfigPathIdentifier, config);

                    postNewJobConfigArrival(config.getProperty(ConfigurationKeys.JOB_NAME_KEY), config);
                    LOGGER.info("New config arrived for job: " + jobConfigPathIdentifier);
                } else if (!config.equals(jobConfFiles.get(jobConfigPathIdentifier))) {
                    // Properties.equals compares the full key/value set, so any changed entry re-posts the job
                    jobConfFiles.put(jobConfigPathIdentifier, config);

                    postNewJobConfigArrival(config.getProperty(ConfigurationKeys.JOB_NAME_KEY), config);
                    LOGGER.info("Config updated for job: " + jobConfigPathIdentifier);
                } else {
                    LOGGER.info("Config not changed for job: " + jobConfigPathIdentifier);
                }
            }
        } else {
            LOGGER.warn("Job configuration directory " + jobConfigDir + " not found");
        }
    }
}

From source file:org.apache.hadoop.hive.metastore.ObjectStore.java

/**
 * Called whenever this object is instantiated using ReflectionUtils, and also
 * on connection retries. In cases of connection retries, conf will usually
 * contain modified values.
 */
@Override
@SuppressWarnings("nls")
public void setConf(Configuration conf) {
    // Although an instance of ObjectStore is accessed by one thread, there may
    // be many threads with ObjectStore instances. So the static variables
    // pmf and prop need to be protected with locks.
    pmfPropLock.lock();
    try {
        isInitialized = false;
        hiveConf = conf;
        configureSSL(conf);
        Properties propsFromConf = getDataSourceProps(conf);
        // Properties.equals does a full key/value comparison against the
        // previously cached datasource properties.
        boolean propsChanged = !propsFromConf.equals(prop);

        if (propsChanged) {
            // Datasource config changed: tear down the shared factory so it is
            // rebuilt with the new properties.
            if (pmf != null) {
                clearOutPmfClassLoaderCache(pmf);
                // close the underlying connection pool to avoid leaks
                pmf.close();
            }
            pmf = null;
            prop = null;
        }

        assert (!isActiveTransaction());
        shutdown();
        // Always want to re-create pm as we don't know if it were created by the
        // most recent instance of the pmf
        pm = null;
        directSql = null;
        expressionProxy = null;
        openTrasactionCalls = 0;
        currentTransaction = null;
        transactionStatus = TXN_STATUS.NO_STATE;

        initialize(propsFromConf);

        // Optional whitelist pattern used to validate partition names.
        String partitionValidationRegex = hiveConf
                .get(HiveConf.ConfVars.METASTORE_PARTITION_NAME_WHITELIST_PATTERN.name());
        if (partitionValidationRegex != null && !partitionValidationRegex.isEmpty()) {
            partitionValidationPattern = Pattern.compile(partitionValidationRegex);
        } else {
            partitionValidationPattern = null;
        }

        if (!isInitialized) {
            throw new RuntimeException("Unable to create persistence manager. Check dss.log for details");
        } else {
            LOG.info("Initialized ObjectStore");
        }
    } finally {
        pmfPropLock.unlock();
    }
}

From source file:org.apache.hadoop.hive.metastore.PersistenceManagerProvider.java

/**
 * This method updates the PersistenceManagerFactory and its properties if the given
 * configuration is different from its current set of properties. Most common case is that
 * the persistenceManagerFactory properties do not change, and hence this method is optimized to
 * be non-blocking in such cases. However, if the properties are different, this method blocks
 * other threads until the properties are updated, current pmf is closed and
 * a new pmf is re-initialized. Note that when a PersistenceManagerFactory is re-initialized all
 * the PersistenceManagers which are instantiated using old factory become invalid and will throw
 * JDOUserException. Hence it is recommended that this method is called in the setup/init phase
 * of the Metastore service when there are no other active threads serving clients.
 *
 * @param conf Configuration which provides the datanucleus/datasource properties for comparison
 */
public static void updatePmfProperties(Configuration conf) {
    // take a read lock to check if the datasource properties changed.
    // Most common case is that datasource properties do not change
    Properties propsFromConf = PersistenceManagerProvider.getDataSourceProps(conf);
    pmfReadLock.lock();
    // keep track of if the read-lock is acquired by this thread
    // so that we can unlock it before leaving this method
    // this is needed because pmf methods below could throw JDOException (unchecked exception)
    // which can lead to readLock not being acquired at the end of the inner try-finally
    // block below
    boolean readLockAcquired = true;
    try {
        // if pmf properties change, need to update, release read lock and take write lock
        if (prop == null || pmf == null || !propsFromConf.equals(prop)) {
            pmfReadLock.unlock();
            readLockAcquired = false;
            pmfWriteLock.lock();
            try {
                // check if we need to update pmf again here in case some other thread already did it
                // for us after releasing readlock and before acquiring write lock above
                if (prop == null || pmf == null || !propsFromConf.equals(prop)) {
                    // OK, now we really need to re-initialize pmf and pmf properties
                    if (LOG.isInfoEnabled()) {
                        LOG.info("Updating the pmf due to property change");
                        if (prop == null) {
                            LOG.info("Current pmf properties are uninitialized");
                        } else {
                            for (String key : prop.stringPropertyNames()) {
                                String oldVal = prop.getProperty(key);
                                String newVal = propsFromConf.getProperty(key);
                                // Compare old value vs new value. (The previous code compared
                                // the KEY against the new value, which logged nearly every
                                // property as "different".)
                                boolean valueChanged = (oldVal == null) ? (newVal != null)
                                        : !oldVal.equals(newVal);
                                if (valueChanged) {
                                    if (LOG.isDebugEnabled() && MetastoreConf.isPrintable(key)) {
                                        // The jdbc connection url can contain sensitive information like username and password
                                        // which should be masked out before logging.
                                        if (key.equals(ConfVars.CONNECT_URL_KEY.getVarname())) {
                                            oldVal = MetaStoreServerUtils.anonymizeConnectionURL(oldVal);
                                            newVal = MetaStoreServerUtils.anonymizeConnectionURL(newVal);
                                        }
                                        LOG.debug("Found {} to be different. Old val : {} : New Val : {}", key,
                                                oldVal, newVal);
                                    } else {
                                        LOG.debug("Found masked property {} to be different", key);
                                    }
                                }
                            }
                        }
                    }
                    if (pmf != null) {
                        clearOutPmfClassLoaderCache();
                        if (!forTwoMetastoreTesting) {
                            // close the underlying connection pool to avoid leaks
                            LOG.debug("Closing PersistenceManagerFactory");
                            pmf.close();
                            LOG.debug("PersistenceManagerFactory closed");
                        }
                        pmf = null;
                    }
                    // update the pmf properties object then initialize pmf using them
                    prop = propsFromConf;
                    retryLimit = MetastoreConf.getIntVar(conf, ConfVars.HMS_HANDLER_ATTEMPTS);
                    retryInterval = MetastoreConf.getTimeVar(conf, ConfVars.HMS_HANDLER_INTERVAL,
                            TimeUnit.MILLISECONDS);
                    // init PMF with retry logic
                    retry(() -> {
                        initPMF(conf);
                        return null;
                    });
                }
                // downgrade by acquiring read lock before releasing write lock
                pmfReadLock.lock();
                readLockAcquired = true;
            } finally {
                pmfWriteLock.unlock();
            }
        }
    } finally {
        if (readLockAcquired) {
            pmfReadLock.unlock();
        }
    }
}

From source file:org.apache.openaz.xacml.rest.XACMLPapServlet.java

/**
 * Determines whether a PDP's reported properties match what the PAP currently
 * expects: the same root policies, the same referenced policies, and an
 * identical PIP configuration.
 *
 * @param policies the PAP's policy properties (root + referenced)
 * @param pipconfig the PAP's PIP configuration properties
 * @param pdpProperties the full property set reported by the PDP
 * @return {@code true} only when all three aspects match
 */
private boolean isPDPCurrent(Properties policies, Properties pipconfig, Properties pdpProperties) {
    String localRootPolicies = policies.getProperty(XACMLProperties.PROP_ROOTPOLICIES);
    String localReferencedPolicies = policies.getProperty(XACMLProperties.PROP_REFERENCEDPOLICIES);
    // Without both local policy lists there is nothing valid to compare against.
    if (localRootPolicies == null || localReferencedPolicies == null) {
        logger.warn("Missing property on PAP server: RootPolicies=" + localRootPolicies
                + "  ReferencedPolicies=" + localReferencedPolicies);
        return false;
    }
    //
    // Compare the policies and pipconfig properties to the pdpProperties
    //
    try {
        //
        // the policy properties includes only xacml.rootPolicies and
        // xacml.referencedPolicies without any .url entries
        //
        Properties pdpPolicies = XACMLProperties.getPolicyProperties(pdpProperties, false);
        Properties pdpPipConfig = XACMLProperties.getPipProperties(pdpProperties);
        boolean rootMatches = localRootPolicies
                .equals(pdpPolicies.getProperty(XACMLProperties.PROP_ROOTPOLICIES));
        boolean referencedMatches = localReferencedPolicies
                .equals(pdpPolicies.getProperty(XACMLProperties.PROP_REFERENCEDPOLICIES));
        boolean pipMatches = pdpPipConfig.equals(pipconfig);
        if (rootMatches && referencedMatches && pipMatches) {
            //
            // The PDP is current
            //
            return true;
        }
    } catch (Exception e) { //NOPMD
        // we get here if the PDP did not include either xacml.rootPolicies or xacml.pip.engines,
        // or if there are policies that do not have a corresponding ".url" property.
        // Either of these cases means that the PDP is not up-to-date, so just drop-through to return
        // false.
    }
    return false;
}