List of usage examples for com.google.common.collect.Maps#difference
public static <K, V> SortedMapDifference<K, V> difference(SortedMap<K, ? extends V> left, Map<? extends K, ? extends V> right)
From source file:com.thinkbiganalytics.metadata.rest.model.nifi.NiFiFlowCacheSync.java
/**
 * Returns the processor-id to processor-name entries that are present in the supplied map but
 * absent from the last-synced snapshot.
 *
 * <p>NOTE(review): {@code entriesOnlyOnRight()} reports only brand-new keys; entries whose value
 * changed ({@code entriesDiffering()}) are not included — confirm that matches the intent of the
 * "UpdatedSinceLastSync" name.
 *
 * @param processorIdToProcessorName the current processor-id to processor-name mapping
 * @return the entries found only in {@code processorIdToProcessorName}
 */
public Map<String, String> getProcessorIdToProcessorNameUpdatedSinceLastSync(
        Map<String, String> processorIdToProcessorName) {
    final MapDifference<String, String> delta =
            Maps.difference(snapshot.getProcessorIdToProcessorName(), processorIdToProcessorName);
    return delta.entriesOnlyOnRight();
}
From source file:org.jfrog.hudson.AbstractBuildInfoDeployer.java
/**
 * Assembles the Artifactory {@code Build} info object for the current build: name, number, agent,
 * timing, principals, environment properties, license control, retention, optional issue-tracker
 * data and (for release builds) a staging status.
 *
 * <p>Environment handling: when {@code isIncludeEnvVars()} all of {@code env} is copied with the
 * build-info environment prefix; otherwise only entries of {@code env} that are NOT plain process
 * environment variables (per {@code Maps.difference(env, System.getenv())}) are copied.
 *
 * <p>NOTE(review): {@code buildAgentName.compareTo("Generic") == 0} is an equality check —
 * {@code equals} would be the idiomatic form; behavior preserved here.
 *
 * @param buildAgentName    name of the build agent (e.g. "Generic")
 * @param buildAgentVersion version string of the build agent
 * @param buildType         the type recorded in the build info
 * @return the populated {@code Build}; its parent build id is also set for backwards
 *         compatibility with Artifactory 2.2.3 when an upstream cause exists
 */
protected Build createBuildInfo(String buildAgentName, String buildAgentVersion, BuildType buildType) {
    String buildinfoName = build.getParent().getFullName();
    if (buildAgentName.compareTo("Generic") == 0) {
        // Generic builds may override the build-info name via configuration (macro-expanded).
        buildinfoName = Util.replaceMacro(((ArtifactoryGenericConfigurator) configurator).getBuildInfoName(), env);
    }
    BuildInfoBuilder builder = new BuildInfoBuilder(ExtractorUtils.sanitizeBuildName(buildinfoName))
            .number(build.getNumber() + "").type(buildType)
            .buildAgent(new BuildAgent(buildAgentName, buildAgentVersion))
            .agent(new Agent("hudson", build.getHudsonVersion()));
    String buildUrl = ActionableHelper.getBuildUrl(build);
    if (StringUtils.isNotBlank(buildUrl)) {
        builder.url(buildUrl);
    }
    // Build timing: started timestamp plus elapsed duration up to now.
    Calendar startedTimestamp = build.getTimestamp();
    builder.startedDate(startedTimestamp.getTime());
    long duration = System.currentTimeMillis() - startedTimestamp.getTimeInMillis();
    builder.durationMillis(duration);
    // Artifactory principal defaults to empty string when no resolving username is configured.
    String artifactoryPrincipal = configurator.getArtifactoryServer().getResolvingCredentials().getUsername();
    if (StringUtils.isBlank(artifactoryPrincipal)) {
        artifactoryPrincipal = "";
    }
    builder.artifactoryPrincipal(artifactoryPrincipal);
    String userCause = ActionableHelper.getUserCausePrincipal(build);
    if (userCause != null) {
        builder.principal(userCause);
    }
    // Upstream (parent) build linkage; principal falls back to "auto" for triggered builds.
    Cause.UpstreamCause parent = ActionableHelper.getUpstreamCause(build);
    if (parent != null) {
        String parentProject = ExtractorUtils.sanitizeBuildName(parent.getUpstreamProject());
        int parentNumber = parent.getUpstreamBuild();
        builder.parentName(parentProject);
        builder.parentNumber(parentNumber + "");
        if (StringUtils.isBlank(userCause)) {
            builder.principal("auto");
        }
    }
    gatherSysPropInfo(builder);
    addBuildInfoVariables(builder);
    String revision = ExtractorUtils.getVcsRevision(env);
    if (StringUtils.isNotBlank(revision)) {
        builder.vcsRevision(revision);
    }
    if (configurator.isIncludeEnvVars()) {
        // Copy every env entry under the build-info environment prefix.
        for (Map.Entry<String, String> entry : env.entrySet()) {
            builder.addProperty(BuildInfoProperties.BUILD_INFO_ENVIRONMENT_PREFIX + entry.getKey(),
                    entry.getValue());
        }
    } else {
        // Keep only env entries that are not ordinary process environment variables.
        MapDifference<String, String> difference = Maps.difference(env, System.getenv());
        Map<String, String> filteredEnvVars = difference.entriesOnlyOnLeft();
        for (Map.Entry<String, String> entry : filteredEnvVars.entrySet()) {
            builder.addProperty(BuildInfoProperties.BUILD_INFO_ENVIRONMENT_PREFIX + entry.getKey(),
                    entry.getValue());
        }
    }
    // License-control settings, recipients/scopes only apply when checks are enabled.
    LicenseControl licenseControl = new LicenseControl(configurator.isRunChecks());
    if (configurator.isRunChecks()) {
        if (StringUtils.isNotBlank(configurator.getViolationRecipients())) {
            licenseControl.setLicenseViolationsRecipientsList(configurator.getViolationRecipients());
        }
        if (StringUtils.isNotBlank(configurator.getScopes())) {
            licenseControl.setScopesList(configurator.getScopes());
        }
    }
    licenseControl.setIncludePublishedArtifacts(configurator.isIncludePublishArtifacts());
    licenseControl.setAutoDiscover(configurator.isLicenseAutoDiscovery());
    builder.licenseControl(licenseControl);
    BuildRetention buildRetention = new BuildRetention(configurator.isDiscardBuildArtifacts());
    if (configurator.isDiscardOldBuilds()) {
        buildRetention = BuildRetentionFactory.createBuildRetention(build, configurator.isDiscardBuildArtifacts());
    }
    builder.buildRetention(buildRetention);
    // JIRA integration only when the plugin is installed and the feature is enabled.
    if ((Hudson.getInstance().getPlugin("jira") != null) && configurator.isEnableIssueTrackerIntegration()) {
        new IssuesTrackerHelper(build, listener, configurator.isAggregateBuildIssues(),
                configurator.getAggregationBuildStatus()).setIssueTrackerInfo(builder);
    }
    // add staging status if it is a release build
    ReleaseAction release = ActionableHelper.getLatestAction(build, ReleaseAction.class);
    if (release != null) {
        String stagingRepoKey = release.getStagingRepositoryKey();
        if (StringUtils.isBlank(stagingRepoKey)) {
            stagingRepoKey = configurator.getRepositoryKey();
        }
        builder.addStatus(new PromotionStatusBuilder(Promotion.STAGED).timestampDate(startedTimestamp.getTime())
                .comment(release.getStagingComment()).repository(stagingRepoKey).ciUser(userCause)
                .user(artifactoryPrincipal).build());
    }
    Build buildInfo = builder.build();
    // for backwards compatibility for Artifactory 2.2.3
    if (parent != null) {
        buildInfo.setParentBuildId(parent.getUpstreamProject());
    }
    return buildInfo;
}
From source file:com.netflix.paas.cassandra.tasks.ClusterRefreshTask.java
/**
 * Refreshes the persisted keyspace and column-family option snapshots for the Cassandra cluster
 * entity supplied in the task context, then writes the updated entity back via {@code clusterDao}.
 * Any error while iterating the live cluster disables the entity instead of failing the task.
 *
 * <p>NOTE(review): {@code execte} and {@code getParamater} look like typos, but they presumably
 * mirror the Task/TaskContext API declared elsewhere — confirm before renaming.
 * NOTE(review): {@code ignoreSystem} and {@code changed} are computed but never used in this
 * method; removed keyspaces/CFs ({@code ksRemoved}/{@code cfRemoved}) are only logged, never
 * pruned from the entity maps — verify whether that is intentional.
 *
 * @param context task context carrying the "cluster" name and the "entity" parameter
 * @throws Exception propagated from parameter access or the final DAO write
 */
@Override
public void execte(TaskContext context) throws Exception {
    // Get parameters from the context
    String clusterName = context.getStringParameter("cluster");
    Boolean ignoreSystem = context.getBooleanParameter("ignoreSystem", true);
    CassandraClusterEntity entity = (CassandraClusterEntity) context.getParamater("entity");
    LOG.info("Refreshing cluster " + clusterName);
    // Read the current state from the DAO
    // CassandraClusterEntity entity = clusterDao.read(clusterName);
    Map<String, String> existingKeyspaces = entity.getKeyspaces();
    if (existingKeyspaces == null) {
        // Lazily initialize the cached keyspace map on first refresh.
        existingKeyspaces = Maps.newHashMap();
        entity.setKeyspaces(existingKeyspaces);
    }
    Map<String, String> existingColumnFamilies = entity.getColumnFamilies();
    if (existingColumnFamilies == null) {
        existingColumnFamilies = Maps.newHashMap();
        entity.setColumnFamilies(existingColumnFamilies);
    }
    Set<String> foundKeyspaces = Sets.newHashSet();
    Set<String> foundColumnFamilies = Sets.newHashSet();
    Cluster cluster = provider
            .acquireCluster(new ClusterKey(entity.getClusterName(), entity.getDiscoveryType()));
    boolean changed = false;
    // Iterate found keyspaces
    try {
        for (KeyspaceDefinition keyspace : cluster.describeKeyspaces()) {
            // Extract data from the KeyspaceDefinition
            String ksName = keyspace.getName();
            MapStringToObject keyspaceOptions = getKeyspaceOptions(keyspace);
            if (existingKeyspaces.containsKey(ksName)) {
                // Compare live options against the previously serialized snapshot.
                MapStringToObject previousOptions = JsonSerializer.fromString(existingKeyspaces.get(ksName),
                        MapStringToObject.class);
                MapDifference keyspaceDiff = Maps.difference(keyspaceOptions, previousOptions);
                if (keyspaceDiff.areEqual()) {
                    LOG.info("Keyspace '{}' didn't change", new Object[] { ksName });
                } else {
                    changed = true;
                    LOG.info("CF Changed: " + keyspaceDiff.entriesDiffering());
                }
            } else {
                changed = true;
            }
            String strKeyspaceOptions = JsonSerializer.toString(keyspaceOptions);
            // Keep track of keyspace
            foundKeyspaces.add(keyspace.getName());
            existingKeyspaces.put(ksName, strKeyspaceOptions);
            LOG.info("Found keyspace '{}|{}' : {}",
                    new Object[] { entity.getClusterName(), ksName, keyspaceOptions });
            // Iterate found column families
            for (ColumnFamilyDefinition cf : keyspace.getColumnFamilyList()) {
                // Extract data from the ColumnFamilyDefinition; key is "<keyspace>|<cf>".
                String cfName = String.format("%s|%s", keyspace.getName(), cf.getName());
                MapStringToObject cfOptions = getColumnFamilyOptions(cf);
                String strCfOptions = JsonSerializer.toString(cfOptions);
                // Check for changes
                if (existingColumnFamilies.containsKey(cfName)) {
                    MapStringToObject previousOptions = JsonSerializer
                            .fromString(existingColumnFamilies.get(cfName), MapStringToObject.class);
                    LOG.info("Old options: " + previousOptions);
                    MapDifference cfDiff = Maps.difference(cfOptions, previousOptions);
                    if (cfDiff.areEqual()) {
                        LOG.info("CF '{}' didn't change", new Object[] { cfName });
                    } else {
                        changed = true;
                        LOG.info("CF Changed: " + cfDiff.entriesDiffering());
                    }
                } else {
                    changed = true;
                }
                // Keep track of the cf
                foundColumnFamilies.add(cfName);
                existingColumnFamilies.put(cfName, strCfOptions);
                LOG.info("Found column family '{}|{}|{}' : {}", new Object[] { entity.getClusterName(),
                        keyspace.getName(), cf.getName(), strCfOptions });
            }
        }
    } catch (Exception e) {
        // Best-effort refresh: log and disable the entity rather than failing the task.
        LOG.info("Error refreshing cluster: " + entity.getClusterName(), e);
        entity.setEnabled(false);
    }
    SetView<String> ksRemoved = Sets.difference(existingKeyspaces.keySet(), foundKeyspaces);
    LOG.info("Keyspaces removed: " + ksRemoved);
    SetView<String> cfRemoved = Sets.difference(existingColumnFamilies.keySet(), foundColumnFamilies);
    LOG.info("CF removed: " + cfRemoved);
    clusterDao.write(entity);
}
From source file:org.locationtech.geogig.remotes.pack.DiffRemoteRefsOp.java
/**
 * Computes the {@link RefDiff}s between the refs currently advertised by the remote and this
 * repository's local copies of those remote refs. Depending on {@code formatAsRemoteRefs}, both
 * sides are normalized either to this repository's remotes namespace or to the remote's local
 * namespace before diffing, so the two maps share the same key space.
 *
 * @return one {@code RefDiff} per added, removed or changed ref
 */
@Override
protected List<RefDiff> _call() {
    checkState(remote != null, "no remote provided");
    // list of refs/remotes/<remote>/<refname> or refs/heads according to formatAsRemoteRefs
    Map<String, Ref> remotes;
    Map<String, Ref> locals;
    {
        // current live remote refs in the remote's local namespace (e.g. refs/heads/<branch>)
        Iterable<Ref> remoteRefs = getRemoteRefs();
        if (formatAsRemoteRefs) {
            // format refs returned by the remote in its local namespaces to our repository's
            // remotes namespace
            remoteRefs = command(MapRef.class)
                    .setRemote(remote.getInfo())
                    .convertToRemote()
                    .addAll(remoteRefs)
                    .call();
        }
        // current local copy of the remote refs (e.g. refs/remotes/<remote>/<branch>)
        List<Ref> remoteLocalRefs = Lists.newArrayList(getRemoteLocalRefs());
        if (!formatAsRemoteRefs) {
            // format local repository copies of the remote refs to the remote's local namespace
            remoteLocalRefs = command(MapRef.class)
                    .setRemote(remote.getInfo())
                    .convertToLocal()
                    .addAll(remoteLocalRefs)
                    .call();
        }
        if (this.getTags) {
            // Treat locally-known tags that the remote also advertises as already-present,
            // so they don't show up as additions.
            Map<String, RevTag> tags = Maps.uniqueIndex(command(TagListOp.class).call(), (t) -> t.getName());
            for (Ref rf : remoteRefs) {
                if (rf.getName().startsWith(Ref.TAGS_PREFIX) && tags.containsKey(rf.localName())) {
                    RevTag tag = tags.get(rf.localName());
                    remoteLocalRefs.add(new Ref(Ref.TAGS_PREFIX + tag.getName(), tag.getId()));
                }
            }
        }
        // Index both sides by full ref name for the map diff below.
        remotes = Maps.uniqueIndex(remoteRefs, (r) -> r.getName());
        locals = Maps.uniqueIndex(remoteLocalRefs, (r) -> r.getName());
    }
    final boolean mapped = remote.getInfo().getMapped();
    if (mapped) {
        // for a mapped remote, we are only interested in the branch we are mapped to
        final String mappedBranch = remote.getInfo().getMappedBranch();
        checkNotNull(mappedBranch);
        final String mappedBranchName = Ref.localName(mappedBranch);
        remotes = Maps.filterKeys(remotes, (name) -> Ref.localName(name).equals(mappedBranchName));
        locals = Maps.filterKeys(locals, (name) -> Ref.localName(name).equals(mappedBranchName));
    }
    MapDifference<String, Ref> difference = Maps.difference(remotes, locals);
    // refs existing on the remote and not on the local repo
    Collection<Ref> newRemoteRefs = difference.entriesOnlyOnLeft().values();
    // remote refs existing on the local repo and not existing on the remote anymore
    Collection<Ref> removedRemoteRefs = difference.entriesOnlyOnRight().values();
    // refs existing both in local and remote with different objectIds
    Collection<ValueDifference<Ref>> changes = difference.entriesDiffering().values();
    List<RefDiff> diffs = new ArrayList<>();
    newRemoteRefs.forEach((r) -> diffs.add(RefDiff.added(r)));
    removedRemoteRefs.forEach((r) -> diffs.add(RefDiff.removed(r)));
    // v.leftValue() == new (remote copy), v.rightValue() == old (local copy)
    changes.forEach((v) -> diffs.add(RefDiff.updated(v.rightValue(), v.leftValue())));
    return diffs;
}
From source file:net.shibboleth.idp.consent.flow.ar.impl.ReleaseAttributes.java
/** {@inheritDoc} */ @Override//from w w w. ja v a 2 s. c o m protected void doExecute(@Nonnull final ProfileRequestContext profileRequestContext, @Nonnull final ProfileInterceptorContext interceptorContext) { final Map<String, Consent> consents = getConsentContext().getCurrentConsents().isEmpty() ? getConsentContext().getPreviousConsents() : getConsentContext().getCurrentConsents(); log.debug("{} Consents '{}'", getLogPrefix(), consents); final Map<String, IdPAttribute> attributes = getAttributeContext().getIdPAttributes(); log.debug("{} Attributes before release '{}'", getLogPrefix(), attributes); final Map<String, IdPAttribute> releasedAttributes = new HashMap<>(attributes.size()); for (final IdPAttribute attribute : attributes.values()) { if (!getAttributeReleaseContext().getConsentableAttributes().containsKey(attribute.getId())) { log.debug("{} Attribute '{}' will be released because it is excluded from consent", getLogPrefix(), attribute); releasedAttributes.put(attribute.getId(), attribute); continue; } if (!consents.containsKey(attribute.getId())) { log.debug("{} Attribute '{}' will not be released because consent for it does not exist", getLogPrefix(), attribute); continue; } final Consent consent = consents.get(attribute.getId()); if (consent.isApproved()) { log.debug("{} Attribute '{}' will be released because consent is approved", getLogPrefix(), attribute); releasedAttributes.put(attribute.getId(), attribute); } else { log.debug("{} Attribute '{}' will not be released because consent is not approved", getLogPrefix(), attribute); } } if (log.isDebugEnabled()) { log.debug("{} Releasing attributes '{}'", getLogPrefix(), releasedAttributes); final MapDifference<String, IdPAttribute> diff = Maps.difference(attributes, releasedAttributes); log.debug("{} Not releasing attributes '{}'", getLogPrefix(), diff.entriesOnlyOnLeft()); } getAttributeContext().setIdPAttributes(releasedAttributes.values()); }
From source file:com.palantir.atlasdb.keyvalue.impl.RowResults.java
/**
 * Combines two results for the same row; wherever both define a column, the value from
 * {@code overwrite} wins.
 *
 * @param base      the lower-precedence row result
 * @param overwrite the higher-precedence row result; must have the same row name as {@code base}
 * @return a new {@code RowResult} over the union of both column sets
 */
public static <T> RowResult<T> merge(RowResult<T> base, RowResult<T> overwrite) {
    Validate.isTrue(Arrays.equals(base.getRowName(), overwrite.getRowName()));
    // Columns present only in base; shared columns keep the overwrite value.
    Map<byte[], T> baseOnly = Maps.difference(base.getColumns(), overwrite.getColumns()).entriesOnlyOnLeft();
    Builder<byte[], T> merged = ImmutableSortedMap.orderedBy(UnsignedBytes.lexicographicalComparator());
    merged.putAll(baseOnly);
    merged.putAll(overwrite.getColumns());
    return RowResult.create(base.getRowName(), merged.build());
}
From source file:com.example.getstarted.util.DatastoreSessionFilter.java
@Override public void doFilter(ServletRequest servletReq, ServletResponse servletResp, FilterChain chain) throws IOException, ServletException { HttpServletRequest req = (HttpServletRequest) servletReq; HttpServletResponse resp = (HttpServletResponse) servletResp; // Check if the session cookie is there, if not there, make a session cookie using a unique // identifier. String sessionId = getCookieValue(req, "bookshelfSessionId"); if (sessionId.equals("")) { String sessionNum = new BigInteger(130, new SecureRandom()).toString(32); Cookie session = new Cookie("bookshelfSessionId", sessionNum); session.setPath("/"); resp.addCookie(session);/*from ww w . java2s. c o m*/ } Map<String, String> datastoreMap = loadSessionVariables(req); // session variables for request chain.doFilter(servletReq, servletResp); // Allow the servlet to process request and response HttpSession session = req.getSession(); // Create session map Map<String, String> sessionMap = new HashMap<>(); Enumeration<String> attrNames = session.getAttributeNames(); while (attrNames.hasMoreElements()) { String attrName = attrNames.nextElement(); sessionMap.put(attrName, (String) session.getAttribute(attrName)); } // Create a diff between the new session variables and the existing session variables // to minimize datastore access MapDifference<String, String> diff = Maps.difference(sessionMap, datastoreMap); Map<String, String> setMap = diff.entriesOnlyOnLeft(); Map<String, String> deleteMap = diff.entriesOnlyOnRight(); // Apply the diff setSessionVariables(sessionId, setMap); deleteSessionVariables(sessionId, FluentIterable.from(deleteMap.keySet()).toArray(String.class)); }
From source file:org.openengsb.core.services.internal.deployer.connector.ConnectorFile.java
/**
 * Computes the difference between this connector file's current attributes/properties and the
 * contents of the given file.
 *
 * @param file the connector configuration file to compare against
 * @return a {@code ChangeSet} carrying the domain type, connector type and both diffs
 */
public ChangeSet getChanges(File file) {
    final ImmutableMap<String, String> updated = readProperties(file);
    final MapDifference<String, String> attributeDiff =
            Maps.difference(attributes, getAttributesFromMap(updated));
    final MapDifference<String, Object> propertyDiff =
            Maps.difference(properties, getPropertiesFromMap(updated));
    return new ChangeSet(updated.get("domainType"), updated.get("connectorType"), attributeDiff,
            propertyDiff);
}
From source file:com.thinkbiganalytics.metadata.rest.model.nifi.NiFiFlowCacheSync.java
/**
 * Returns the processor-id to feed-process-group-id entries that are present in the supplied map
 * but absent from the last-synced snapshot.
 *
 * <p>NOTE(review): as with the processor-name variant, {@code entriesOnlyOnRight()} omits
 * entries whose value changed — confirm that is intended.
 *
 * @param processorIdToFeedProcessGroupId the current processor-id to process-group-id mapping
 * @return the entries found only in {@code processorIdToFeedProcessGroupId}
 */
public Map<String, String> getProcessorIdToProcessGroupIdUpdatedSinceLastSync(
        Map<String, String> processorIdToFeedProcessGroupId) {
    final MapDifference<String, String> delta = Maps.difference(
            snapshot.getProcessorIdToFeedProcessGroupId(), processorIdToFeedProcessGroupId);
    return delta.entriesOnlyOnRight();
}
From source file:org.jfrog.build.extractor.BuildInfoExtractorUtils.java
public static Properties getEnvProperties(Properties startProps, Log log) { IncludeExcludePatterns patterns = new IncludeExcludePatterns( startProps.getProperty(BuildInfoConfigProperties.PROP_ENV_VARS_INCLUDE_PATTERNS), startProps.getProperty(BuildInfoConfigProperties.PROP_ENV_VARS_EXCLUDE_PATTERNS)); Properties props = new Properties(); // Add all the startProps that starts with BuildInfoProperties.BUILD_INFO_ENVIRONMENT_PREFIX for (Map.Entry<Object, Object> startEntry : startProps.entrySet()) { if (StringUtils.startsWith((String) startEntry.getKey(), BuildInfoProperties.BUILD_INFO_ENVIRONMENT_PREFIX)) { props.put(startEntry.getKey(), startEntry.getValue()); }/*w w w .j a va 2s . c om*/ } // Add all system environment that match the patterns Map<String, String> envMap = System.getenv(); for (Map.Entry<String, String> entry : envMap.entrySet()) { String varKey = entry.getKey(); if (PatternMatcher.pathConflicts(varKey, patterns)) { continue; } props.put(BuildInfoProperties.BUILD_INFO_ENVIRONMENT_PREFIX + varKey, entry.getValue()); } Map<String, String> sysProps = new HashMap(System.getProperties()); Map<String, String> filteredSysProps = Maps.difference(sysProps, System.getenv()).entriesOnlyOnLeft(); for (Map.Entry<String, String> entry : filteredSysProps.entrySet()) { String varKey = entry.getKey(); if (PatternMatcher.pathConflicts(varKey, patterns)) { continue; } props.put(varKey, entry.getValue()); } // TODO: [by FSI] Test if this is needed! 
Since start props are used now String propertiesFilePath = getAdditionalPropertiesFile(startProps, log); if (StringUtils.isNotBlank(propertiesFilePath)) { File propertiesFile = new File(propertiesFilePath); InputStream inputStream = null; try { if (propertiesFile.exists()) { inputStream = new FileInputStream(propertiesFile); Properties propertiesFromFile = new Properties(); propertiesFromFile.load(inputStream); props.putAll(filterDynamicProperties(propertiesFromFile, ENV_PREDICATE)); } } catch (IOException e) { throw new RuntimeException( "Unable to load build info properties from file: " + propertiesFile.getAbsolutePath(), e); } finally { IOUtils.closeQuietly(inputStream); } } return props; }