List of usage examples for com.google.common.collect.Sets.intersection
public static <E> SetView<E> intersection(final Set<E> set1, final Set<?> set2)
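Sets.intersection does not copy anything: it returns an unmodifiable SetView that is evaluated lazily against the two backing sets and contains every element of set1 that is also contained in set2. A minimal sketch of the basic call (the set contents are invented for illustration; assumes com.google.common.collect.ImmutableSet, com.google.common.collect.Sets and java.util.Set are imported):

Set<String> first = ImmutableSet.of("a", "b", "c");
Set<String> second = ImmutableSet.of("b", "c", "d");
Sets.SetView<String> common = Sets.intersection(first, second);
// the view contains "b" and "c" and keeps tracking its backing sets
System.out.println(common);                    // [b, c]
Set<String> snapshot = common.immutableCopy(); // materialize when a detached copy is needed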
From source file:org.eclipse.sw360.moderation.db.ModerationRequestGenerator.java
protected <S> void dealWithEnumMap(U field, Class<? extends TEnum> S) {
    Map<String, S> addedMap = (Map<String, S>) updateDocument.getFieldValue(field);
    if (addedMap == null) {
        addedMap = new HashMap<>();
    }
    Map<String, S> actualMap = (Map<String, S>) actualDocument.getFieldValue(field);
    for (Map.Entry<String, S> entry : actualMap.entrySet()) {
        // drop keys that already exist in the actual document (Map.remove expects the key, not the Map.Entry)
        addedMap.remove(entry.getKey());
    }
    Map<String, S> deletedMap = (Map<String, S>) actualDocument.getFieldValue(field);
    if (deletedMap == null) {
        deletedMap = new HashMap<>();
    }
    Map<String, S> updateMap = (Map<String, S>) updateDocument.getFieldValue(field);
    for (Map.Entry<String, S> entry : updateMap.entrySet()) {
        // drop keys that are still present in the update document
        deletedMap.remove(entry.getKey());
    }

    // determine changes in common linkedProjects
    Set<String> commonKeys = Sets.intersection(updateMap.keySet(), actualMap.keySet());
    for (String id : commonKeys) {
        S actual = actualMap.get(id);
        S update = updateMap.get(id);
        if (!actual.equals(update)) {
            addedMap.put(id, update);
            deletedMap.put(id, actual);
        }
    }
    if (!addedMap.isEmpty()) {
        documentAdditions.setFieldValue(field, addedMap);
    }
    if (!deletedMap.isEmpty()) {
        documentDeletions.setFieldValue(field, deletedMap);
    }
}
From source file:com.android.tools.idea.run.LaunchCompatibility.java
/**
 * Returns whether an application with the given requirements can be run on the given device.
 *
 * @param minSdkVersion    minSdkVersion specified by the application
 * @param projectTarget    android target corresponding to the targetSdkVersion
 * @param requiredFeatures required list of hardware features
 * @param device           the device to check compatibility against
 * @return a {@link ThreeState} indicating whether the application can be run on the device, and a reason if it isn't
 *         compatible.
 */
@NotNull
public static LaunchCompatibility canRunOnDevice(@NotNull AndroidVersion minSdkVersion,
        @NotNull IAndroidTarget projectTarget, @NotNull EnumSet<IDevice.HardwareFeature> requiredFeatures,
        @Nullable Set<String> supportedAbis, @NotNull AndroidDevice device) {
    // check if the device has the required minApi
    // note that in cases where targetSdk is a preview platform, gradle sets minsdk to be the same as targetsdk,
    // so as to only allow running on those systems
    AndroidVersion deviceVersion = device.getVersion();
    if (!deviceVersion.equals(AndroidVersion.DEFAULT) && !deviceVersion.canRun(minSdkVersion)) {
        String reason = String.format("minSdk(%1$s) %3$s deviceSdk(%2$s)", minSdkVersion, deviceVersion,
                minSdkVersion.getCodename() == null ? ">" : "!=");
        return new LaunchCompatibility(ThreeState.NO, reason);
    }

    // check if the device provides the required features
    for (IDevice.HardwareFeature feature : requiredFeatures) {
        if (!device.supportsFeature(feature)) {
            return new LaunchCompatibility(ThreeState.NO, "missing feature: " + feature);
        }
    }

    // Typically, we only need to check that features required by the apk are supported by the device, which is done above.
    // In the case of watch though, we do an explicit check in the other direction: if the device is a watch, we don't want
    // non-watch apks to be installed on it.
    if (device.supportsFeature(IDevice.HardwareFeature.WATCH)) {
        if (!requiredFeatures.contains(IDevice.HardwareFeature.WATCH)) {
            return new LaunchCompatibility(ThreeState.NO,
                    "missing uses-feature watch, non-watch apks cannot be launched on a watch");
        }
    }

    // Verify that the device ABI matches one of the target ABIs for JNI apps.
    if (supportedAbis != null) {
        Set<String> deviceAbis = Sets.newLinkedHashSet();
        for (Abi abi : device.getAbis()) {
            deviceAbis.add(abi.toString());
        }
        if (!supportedAbis.isEmpty() && Sets.intersection(supportedAbis, deviceAbis).isEmpty()) {
            return new LaunchCompatibility(ThreeState.NO, "Device supports " + Joiner.on(", ").join(deviceAbis)
                    + ", but APK only supports " + Joiner.on(", ").join(supportedAbis));
        }
    }

    // we are done with checks for platform targets
    if (projectTarget.isPlatform()) {
        return YES;
    }

    // Add-ons specify a list of libraries. We need to check that the required libraries are available on the device.
    // See AddOnTarget#canRunOn
    List<IAndroidTarget.OptionalLibrary> additionalLibs = projectTarget.getAdditionalLibraries();
    if (additionalLibs.isEmpty()) {
        return YES;
    }

    String targetName = projectTarget.getName();
    if (GOOGLE_APIS_TARGET_NAME.equals(targetName)) {
        // We'll assume that Google APIs are available on all devices.
        return YES;
    } else {
        // Unsure because we don't have an easy way of determining whether those libraries are on a device
        return new LaunchCompatibility(ThreeState.UNSURE, "unsure if device supports addon: " + targetName);
    }
}
From source file:com.siemens.sw360.portal.tags.DisplayReleaseChanges.java
private void renderReleaseIdToRelationship(StringBuilder display) {
    if (ensureSomethingTodoAndNoNull(Release._Fields.RELEASE_ID_TO_RELATIONSHIP)) {
        Set<String> changedReleaseIds = Sets.intersection(additions.getReleaseIdToRelationship().keySet(),
                deletions.getReleaseIdToRelationship().keySet());
        Set<String> releaseIdsInDb = nullToEmptyMap(actual.getReleaseIdToRelationship()).keySet();
        // keep only releases that are still in the database
        changedReleaseIds = Sets.intersection(changedReleaseIds, releaseIdsInDb);
        Set<String> removedReleaseIds = Sets.difference(deletions.getReleaseIdToRelationship().keySet(),
                changedReleaseIds);
        removedReleaseIds = Sets.intersection(removedReleaseIds, releaseIdsInDb);
        Set<String> addedReleaseIds = Sets.difference(additions.getReleaseIdToRelationship().keySet(),
                changedReleaseIds);

        display.append("<h3> Changes in linked releases </h3>");
        LinkedReleaseRenderer renderer = new LinkedReleaseRenderer(display, tableClasses, idPrefix,
                actual.getCreatedBy());
        renderer.renderReleaseLinkList(display, deletions.getReleaseIdToRelationship(), removedReleaseIds,
                "Removed Release Links");
        renderer.renderReleaseLinkList(display, additions.getReleaseIdToRelationship(), addedReleaseIds,
                "Added Release Links");
        renderer.renderReleaseLinkListCompare(display, actual.getReleaseIdToRelationship(),
                deletions.getReleaseIdToRelationship(), additions.getReleaseIdToRelationship(), changedReleaseIds);
    }
}
From source file:ai.grakn.graql.internal.reasoner.rule.InferenceRule.java
/**
 * @return true if head and body do not share any variables
 */
public boolean hasDisconnectedHead() {
    return Sets.intersection(body.getVarNames(), head.getVarNames()).isEmpty();
}
From source file:com.dangdang.ddframe.job.cloud.scheduler.ha.ReconcileScheduledService.java
private void processRemaining() {
    Set<TaskContext> runningTasks = Sets.intersection(remainingTasks, filterRunningTask()).immutableCopy();
    remainingTasks.clear();
    if (runningTasks.isEmpty()) {
        log.info("Elastic Job - Reconcile: All tasks have been reconciled");
        return;
    }
    remainingTasks.addAll(runningTasks);
    long nextTriggerReconcileMilliSeconds = latestReconcileMilliSeconds + (long) currentRePostInterval;
    if (System.currentTimeMillis() < nextTriggerReconcileMilliSeconds) {
        log.debug("Elastic Job - Reconcile: Next trigger time : {}", new Date(nextTriggerReconcileMilliSeconds));
        return;
    }
    if (postTimes < maxPostTimes) {
        postReconcile();
        return;
    }
    log.warn("Elastic Job - Reconcile: Reconcile retrying reaches max times, clear task {}", remainingTasks.size());
    for (TaskContext taskContext : remainingTasks) {
        facadeService.removeRunning(taskContext);
        facadeService.recordFailoverTask(taskContext);
        String hostname = facadeService.popMapping(taskContext.getId());
        if (!Strings.isNullOrEmpty(hostname)) {
            taskScheduler.getTaskUnAssigner().call(TaskContext.getIdForUnassignedSlave(taskContext.getId()),
                    hostname);
        }
        statisticManager.taskRunFailed();
    }
    remainingTasks.clear();
}
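Note on the immutableCopy() call above: because Sets.intersection only returns a view backed by its arguments, clearing remainingTasks immediately afterwards would also empty the result. Copying first detaches it, as this hedged sketch illustrates (set names and contents are illustrative only; assumes Guava's Sets and ImmutableSet plus java.util.Set are imported):

Set<String> backing = Sets.newHashSet("a", "b");
Set<String> other = ImmutableSet.of("b", "c");
Sets.SetView<String> view = Sets.intersection(backing, other);
ImmutableSet<String> copy = view.immutableCopy(); // detached snapshot: [b]
backing.clear();
// the view is now empty because it tracks the backing set; copy still contains "b"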
From source file:com.palantir.atlasdb.keyvalue.impl.AbstractTableMappingService.java
@Override
public Set<TableReference> mapToFullTableNames(Set<String> tableNames) {
    Set<TableReference> newSet = Sets.newHashSet();
    Set<String> tablesToReload = Sets.newHashSet();
    for (String name : tableNames) {
        if (name.contains(PERIOD)) {
            newSet.add(TableReference.createFromFullyQualifiedName(name));
        } else if (tableMap.get().containsValue(name)) {
            newSet.add(getFullTableName(name));
        } else if (unmappedTables.containsKey(name)) {
            newSet.add(TableReference.createWithEmptyNamespace(name));
        } else {
            tablesToReload.add(name);
        }
    }
    if (!tablesToReload.isEmpty()) {
        updateTableMap();
        for (String tableName : Sets.difference(tablesToReload, tableMap.get().values())) {
            unmappedTables.put(tableName, true);
            newSet.add(TableReference.createWithEmptyNamespace(tableName));
        }
        for (String tableName : Sets.intersection(tablesToReload, tableMap.get().values())) {
            newSet.add(getFullTableName(tableName));
        }
    }
    return newSet;
}
From source file:org.fenixedu.academic.domain.serviceRequests.documentRequests.RegistryDiplomaRequest.java
@Override
public EventType getEventType() {
    final SetView<EventType> eventTypesToUse = Sets.intersection(getPossibleEventTypes(),
            getProgramConclusion().getEventTypes().getTypes());
    if (eventTypesToUse.size() != 1) {
        throw new DomainException("error.program.conclusion.many.event.types");
    }
    return eventTypesToUse.iterator().next();
}
From source file:com.yahoo.yqlplus.engine.rules.JoinFilterPushDown.java
@Override
public OperatorNode<SequenceOperator> visitSequenceOperator(OperatorNode<SequenceOperator> node) {
    // if this is a FILTER AND it contains a JOIN (perhaps with some other transforms in the way)
    // AND the filter contains only references to the left side of the join
    if (node.getOperator() != SequenceOperator.FILTER) {
        return super.visitSequenceOperator(node);
    }
    // we have a FILTER, see if there's a JOIN underneath
    OperatorNode<SequenceOperator> target = node.getArgument(0);
    OperatorNode<ExpressionOperator> filter = node.getArgument(1);
    // It has to be *directly* underneath due to the way logical operands are constructed by the current parser;
    // there may be a stack of JOINs, but we can attack each one in sequence
    if (target.getOperator() == SequenceOperator.JOIN || target.getOperator() == SequenceOperator.LEFT_JOIN) {
        List<OperatorNode<ExpressionOperator>> top = Lists.newArrayList();
        List<OperatorNode<ExpressionOperator>> leftFilter = Lists.newArrayList();
        List<OperatorNode<ExpressionOperator>> rightFilter = Lists.newArrayList();
        OperatorNode<SequenceOperator> leftSide = target.getArgument(0);
        OperatorNode<SequenceOperator> rightSide = target.getArgument(1);
        OperatorNode<ExpressionOperator> joinExpr = target.getArgument(2);
        if (filter.getOperator() == ExpressionOperator.AND) {
            flatten(top, (List<OperatorNode<ExpressionOperator>>) filter.getArgument(0));
        } else {
            top.add(filter);
        }
        Iterator<OperatorNode<ExpressionOperator>> topIterator = top.iterator();
        while (topIterator.hasNext()) {
            OperatorNode<ExpressionOperator> clause = topIterator.next();
            Set<String> left = findSources(leftSide);
            Set<String> right = findSources(rightSide);
            Set<String> referencedFilter = findReferencedSources(clause);
            boolean useLeft = !Sets.intersection(referencedFilter, left).isEmpty();
            boolean useRight = !Sets.intersection(referencedFilter, right).isEmpty();
            if (useLeft && useRight) {
                // can't do anything
            } else if (useLeft) {
                leftFilter.add(clause);
                topIterator.remove();
            } else if (useRight) {
                rightFilter.add(clause);
                topIterator.remove();
            }
        }
        OperatorNode<SequenceOperator> result = node;
        if (rightFilter.size() > 0) {
            rightSide = visitSequenceOperator(new MergeFilters().visitSequenceOperator(OperatorNode.create(
                    node.getLocation(), SequenceOperator.FILTER, rightSide, createFilter(rightFilter))));
        }
        if (leftFilter.size() > 0) {
            leftSide = visitSequenceOperator(new MergeFilters().visitSequenceOperator(OperatorNode.create(
                    node.getLocation(), SequenceOperator.FILTER, leftSide, createFilter(leftFilter))));
        }
        if (rightFilter.size() > 0 || leftFilter.size() > 0) {
            result = OperatorNode.create(target.getLocation(), target.getAnnotations(), target.getOperator(),
                    leftSide, rightSide, joinExpr);
            if (top.size() > 0) {
                result = OperatorNode.create(node.getLocation(), node.getAnnotations(), node.getOperator(),
                        result, createFilter(top));
            } else {
                for (Map.Entry<String, Object> e : node.getAnnotations().entrySet()) {
                    result.putAnnotation(e.getKey(), e.getValue());
                }
            }
        }
        return super.visitSequenceOperator(result);
    }
    return node;
}
From source file:org.diqube.execution.steps.OverwritingRowIdAndStep.java
@Override
protected void execute() {
    int leftRowIdSize = leftRowIds.size();
    int rightRowIdSize = rightRowIds.size();
    Set<Long> res = Sets.intersection(leftRowIds, rightRowIds);
    Long[] resArray = res.toArray(new Long[res.size()]);

    ExecutionEnvironment activeEnv;
    synchronized (latestEnvSync) {
        activeEnv = latestEnv;
    }

    if (activeEnv != null)
        forEachOutputConsumerOfType(OverwritingRowIdConsumer.class, c -> c.consume(activeEnv, resArray));

    if (leftSourceIsDone.get() && rightSourceIsDone.get() && leftRowIds.size() == leftRowIdSize
            && rightRowIds.size() == rightRowIdSize) {
        forEachOutputConsumerOfType(GenericConsumer.class, c -> c.sourceIsDone());
        doneProcessing();
    }
}
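Here res is still a lazy view; the toArray call is what actually walks it. Since the intersection view iterates its first argument and probes the second with contains(...), the Guava documentation notes that it performs better when the smaller set is passed first. A hedged sketch of that pattern (the sets are hypothetical; assumes Guava's Sets and ImmutableSet plus java.util.Set are imported):

Set<Long> small = ImmutableSet.of(1L, 2L);
Set<Long> large = ImmutableSet.of(1L, 2L, 3L, 4L, 5L);
// prefer the smaller set as set1: iteration cost is proportional to set1's size
Long[] common = Sets.intersection(small, large).toArray(new Long[0]);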
From source file:com.mysema.query.mongodb.MongodbSerializer.java
@Override
public Object visit(Operation<?> expr, Void context) {
    Operator<?> op = expr.getOperator();
    if (op == Ops.EQ) {
        if (expr.getArg(0) instanceof Operation) {
            Operation<?> lhs = (Operation<?>) expr.getArg(0);
            if (lhs.getOperator() == Ops.COL_SIZE || lhs.getOperator() == Ops.ARRAY_SIZE) {
                return asDBObject(asDBKey(lhs, 0), asDBObject("$size", asDBValue(expr, 1)));
            } else {
                throw new UnsupportedOperationException("Illegal operation " + expr);
            }
        } else if (isReference(expr, 0)) {
            return asDBObject(asDBKey(expr, 0), asReference(expr, 1));
        } else {
            return asDBObject(asDBKey(expr, 0), asDBValue(expr, 1));
        }
    } else if (op == Ops.STRING_IS_EMPTY) {
        return asDBObject(asDBKey(expr, 0), "");
    } else if (op == Ops.AND) {
        BSONObject lhs = (BSONObject) handle(expr.getArg(0));
        BSONObject rhs = (BSONObject) handle(expr.getArg(1));
        if (Sets.intersection(lhs.keySet(), rhs.keySet()).isEmpty()) {
            lhs.putAll(rhs);
            return lhs;
        } else {
            BasicDBList list = new BasicDBList();
            list.add(handle(expr.getArg(0)));
            list.add(handle(expr.getArg(1)));
            return asDBObject("$and", list);
        }
    } else if (op == Ops.NOT) {
        // Handle the not's child
        BasicDBObject arg = (BasicDBObject) handle(expr.getArg(0));
        // Only support the first key, let's see if there
        // is cases where this will get broken
        String key = arg.keySet().iterator().next();
        Operation<?> subOperation = (Operation<?>) expr.getArg(0);
        Operator<?> subOp = subOperation.getOperator();
        if (subOp == Ops.IN) {
            return visit(OperationImpl.create(Boolean.class, Ops.NOT_IN, subOperation.getArg(0),
                    subOperation.getArg(1)), context);
        } else if (subOp != Ops.EQ && subOp != Ops.STRING_IS_EMPTY) {
            return asDBObject(key, asDBObject("$not", arg.get(key)));
        } else {
            return asDBObject(key, asDBObject("$ne", arg.get(key)));
        }
    } else if (op == Ops.OR) {
        BasicDBList list = new BasicDBList();
        list.add(handle(expr.getArg(0)));
        list.add(handle(expr.getArg(1)));
        return asDBObject("$or", list);
    } else if (op == Ops.NE) {
        if (isReference(expr, 0)) {
            return asDBObject(asDBKey(expr, 0), asDBObject("$ne", asReference(expr, 1)));
        } else {
            return asDBObject(asDBKey(expr, 0), asDBObject("$ne", asDBValue(expr, 1)));
        }
    } else if (op == Ops.STARTS_WITH) {
        return asDBObject(asDBKey(expr, 0), Pattern.compile("^" + regexValue(expr, 1)));
    } else if (op == Ops.STARTS_WITH_IC) {
        return asDBObject(asDBKey(expr, 0), Pattern.compile("^" + regexValue(expr, 1), Pattern.CASE_INSENSITIVE));
    } else if (op == Ops.ENDS_WITH) {
        return asDBObject(asDBKey(expr, 0), Pattern.compile(regexValue(expr, 1) + "$"));
    } else if (op == Ops.ENDS_WITH_IC) {
        return asDBObject(asDBKey(expr, 0), Pattern.compile(regexValue(expr, 1) + "$", Pattern.CASE_INSENSITIVE));
    } else if (op == Ops.EQ_IGNORE_CASE) {
        return asDBObject(asDBKey(expr, 0),
                Pattern.compile("^" + regexValue(expr, 1) + "$", Pattern.CASE_INSENSITIVE));
    } else if (op == Ops.STRING_CONTAINS) {
        return asDBObject(asDBKey(expr, 0), Pattern.compile(".*" + regexValue(expr, 1) + ".*"));
    } else if (op == Ops.STRING_CONTAINS_IC) {
        return asDBObject(asDBKey(expr, 0),
                Pattern.compile(".*" + regexValue(expr, 1) + ".*", Pattern.CASE_INSENSITIVE));
    } else if (op == Ops.MATCHES) {
        return asDBObject(asDBKey(expr, 0), Pattern.compile(asDBValue(expr, 1).toString()));
    } else if (op == Ops.MATCHES_IC) {
        return asDBObject(asDBKey(expr, 0),
                Pattern.compile(asDBValue(expr, 1).toString(), Pattern.CASE_INSENSITIVE));
    } else if (op == Ops.LIKE) {
        String regex = ExpressionUtils.likeToRegex((Expression) expr.getArg(1)).toString();
        return asDBObject(asDBKey(expr, 0), Pattern.compile(regex));
    } else if (op == Ops.BETWEEN) {
        BasicDBObject value = new BasicDBObject("$gte", asDBValue(expr, 1));
        value.append("$lte", asDBValue(expr, 2));
        return asDBObject(asDBKey(expr, 0), value);
    } else if (op == Ops.IN) {
        int constIndex = 0;
        int exprIndex = 1;
        if (expr.getArg(1) instanceof Constant<?>) {
            constIndex = 1;
            exprIndex = 0;
        }
        if (Collection.class.isAssignableFrom(expr.getArg(constIndex).getType())) {
            Collection<?> values = (Collection<?>) ((Constant<?>) expr.getArg(constIndex)).getConstant();
            return asDBObject(asDBKey(expr, exprIndex), asDBObject("$in", values.toArray()));
        } else {
            if (isReference(expr, exprIndex)) {
                return asDBObject(asDBKey(expr, exprIndex), asReference(expr, constIndex));
            } else {
                return asDBObject(asDBKey(expr, exprIndex), asDBValue(expr, constIndex));
            }
        }
    } else if (op == Ops.NOT_IN) {
        int constIndex = 0;
        int exprIndex = 1;
        if (expr.getArg(1) instanceof Constant<?>) {
            constIndex = 1;
            exprIndex = 0;
        }
        if (Collection.class.isAssignableFrom(expr.getArg(constIndex).getType())) {
            Collection<?> values = (Collection<?>) ((Constant<?>) expr.getArg(constIndex)).getConstant();
            return asDBObject(asDBKey(expr, exprIndex), asDBObject("$nin", values.toArray()));
        } else {
            if (isReference(expr, exprIndex)) {
                return asDBObject(asDBKey(expr, exprIndex), asDBObject("$ne", asReference(expr, constIndex)));
            } else {
                return asDBObject(asDBKey(expr, exprIndex), asDBObject("$ne", asDBValue(expr, constIndex)));
            }
        }
    } else if (op == Ops.COL_IS_EMPTY) {
        BasicDBList list = new BasicDBList();
        list.add(asDBObject(asDBKey(expr, 0), new BasicDBList()));
        list.add(asDBObject(asDBKey(expr, 0), asDBObject("$exists", false)));
        return asDBObject("$or", list);
    } else if (op == Ops.LT) {
        return asDBObject(asDBKey(expr, 0), asDBObject("$lt", asDBValue(expr, 1)));
    } else if (op == Ops.GT) {
        return asDBObject(asDBKey(expr, 0), asDBObject("$gt", asDBValue(expr, 1)));
    } else if (op == Ops.LOE) {
        return asDBObject(asDBKey(expr, 0), asDBObject("$lte", asDBValue(expr, 1)));
    } else if (op == Ops.GOE) {
        return asDBObject(asDBKey(expr, 0), asDBObject("$gte", asDBValue(expr, 1)));
    } else if (op == Ops.IS_NULL) {
        return asDBObject(asDBKey(expr, 0), asDBObject("$exists", false));
    } else if (op == Ops.IS_NOT_NULL) {
        return asDBObject(asDBKey(expr, 0), asDBObject("$exists", true));
    } else if (op == Ops.CONTAINS_KEY) {
        Path<?> path = (Path<?>) expr.getArg(0);
        Expression<?> key = expr.getArg(1);
        return asDBObject(visit(path, context) + "." + key.toString(), asDBObject("$exists", true));
    } else if (op == MongodbOps.NEAR) {
        return asDBObject(asDBKey(expr, 0), asDBObject("$near", asDBValue(expr, 1)));
    } else if (op == MongodbOps.ELEM_MATCH) {
        return asDBObject(asDBKey(expr, 0), asDBObject("$elemMatch", asDBValue(expr, 1)));
    }
    throw new UnsupportedOperationException("Illegal operation " + expr);
}
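In the Ops.AND branch above, Sets.intersection over the two key sets decides whether the serialized sub-criteria can be merged flat: if the key sets are disjoint a simple putAll suffices, but if both sides constrain the same field a flat merge would overwrite one constraint, so the serializer wraps them in an explicit $and. A minimal sketch of the same check with plain maps (the contents are placeholders, not real MongoDB documents; assumes Guava's Sets, java.util.HashMap and java.util.Map are imported):

Map<String, Object> lhs = new HashMap<>();
lhs.put("age", "{$gt: 18}"); // stands in for the serialized left criterion
Map<String, Object> rhs = new HashMap<>();
rhs.put("age", "{$lt: 65}"); // stands in for the serialized right criterion
// both sides constrain "age", so merging the maps would drop one constraint; use $and instead
boolean mustUseAnd = !Sets.intersection(lhs.keySet(), rhs.keySet()).isEmpty();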