List of usage examples for com.google.common.collect.Sets.difference
public static <E> SetView<E> difference(final Set<E> set1, final Set<?> set2)
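Sets.difference returns an unmodifiable, live view of the elements in set1 that are not in set2: later changes to the backing sets are reflected in the view. A minimal, self-contained sketch of this behavior (class and variable names are illustrative, not taken from the sources below):

import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;

import java.util.HashSet;
import java.util.Set;

public class SetsDifferenceDemo {
    public static void main(String[] args) {
        Set<String> set1 = new HashSet<>(ImmutableSet.of("a", "b", "c"));
        Set<String> set2 = ImmutableSet.of("b", "c", "d");

        // Elements of set1 that are not in set2.
        Sets.SetView<String> diff = Sets.difference(set1, set2);
        System.out.println(diff); // [a]

        // The view is live: it tracks later changes to the backing sets.
        set1.add("e");
        System.out.println(diff); // [a, e] (HashSet iteration order may vary)

        // Take a snapshot when the result must not change afterwards.
        ImmutableSet<String> snapshot = diff.immutableCopy();
        System.out.println(snapshot);
    }
}

Several of the examples below copy their inputs with ImmutableSet.copyOf before calling difference, precisely because the returned view would otherwise track the original collections.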
From source file:org.dllearner.utilities.ReasoningUtils.java
/**
 * Binary partition a list of sets into true and false, depending on whether they satisfy concept.
 *
 * @param concept the OWL concept used for partition
 * @param sets list of sets to partition
 * @return an array of Coverage data, one entry for each input set
 */
@SafeVarargs
public final Coverage[] getCoverage(OWLClassExpression concept, Set<OWLIndividual>... sets) {
    Coverage[] rv = new Coverage[sets.length];

    if (!reasoner.isUseInstanceChecks()) {
        if (reasoner instanceof SPARQLReasoner && ((SPARQLReasoner) reasoner).isUseValueLists()) {
            for (int i = 0; i < sets.length; ++i) {
                SortedSet<OWLIndividual> trueSet = reasoner.hasType(concept, sets[i]);

                rv[i] = new Coverage();
                rv[i].total = sets[i].size();
                rv[i].trueSet.addAll(trueSet);
                rv[i].falseSet.addAll(Sets.difference(sets[i], trueSet));
                rv[i].trueCount = rv[i].trueSet.size();
                rv[i].falseCount = rv[i].falseSet.size();
            }
        } else {
            SortedSet<OWLIndividual> individuals = reasoner.getIndividuals(concept);
            for (int i = 0; i < sets.length; ++i) {
                rv[i] = new Coverage();
                rv[i].total = sets[i].size();
                rv[i].trueSet.addAll(Sets.intersection(sets[i], individuals));
                rv[i].falseSet.addAll(Sets.difference(sets[i], individuals));
                rv[i].trueCount = rv[i].trueSet.size();
                rv[i].falseCount = rv[i].falseSet.size();
            }
        }
    } else {
        for (int i = 0; i < sets.length; ++i) {
            rv[i] = new Coverage();
            rv[i].total = sets[i].size();
            for (OWLIndividual example : sets[i]) {
                if (getReasoner().hasType(concept, example)) {
                    rv[i].trueSet.add(example);
                } else {
                    rv[i].falseSet.add(example);
                }
                if (interrupted()) {
                    return null;
                }
            }
            rv[i].trueCount = rv[i].trueSet.size();
            rv[i].falseCount = rv[i].falseSet.size();
        }
    }
    return rv;
}
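The first two branches above split each input set with a matched intersection/difference pair. A hedged sketch of that partition idiom with plain integers (names and values are illustrative):

import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;

import java.util.Set;

public class PartitionSketch {
    public static void main(String[] args) {
        Set<Integer> input = ImmutableSet.of(1, 2, 3, 4);
        Set<Integer> satisfying = ImmutableSet.of(2, 4, 6); // stand-in for the individuals matching the concept

        // intersection and difference against the same set split the input
        // into two disjoint parts that together cover it.
        Set<Integer> trueSet = Sets.intersection(input, satisfying);
        Set<Integer> falseSet = Sets.difference(input, satisfying);
        System.out.println(trueSet + " / " + falseSet); // [2, 4] / [1, 3]
    }
}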
From source file:com.github.sevntu.checkstyle.checks.annotation.RequiredParameterForAnnotationCheck.java
@Override
public void visitToken(DetailAST annotationNode) {
    String annotationName = getAnnotationName(annotationNode);
    if (annotationName.equals(this.annotationName)) {
        Set<String> missingParameters = Sets.difference(requiredParameters,
                getAnnotationParameters(annotationNode));
        if (!missingParameters.isEmpty()) {
            String missingParametersAsString = Joiner.on(", ").join(missingParameters);
            log(annotationNode, MSG_KEY, this.annotationName, missingParametersAsString);
        }
    }
}
From source file:org.apache.cassandra.db.compaction.TimeWindowCompactionStrategy.java
/**
 * @param gcBefore timestamp used when checking for fully expired SSTables
 * @return the SSTables to include in the next background compaction, including any fully expired ones
 */
private synchronized List<SSTableReader> getNextBackgroundSSTables(final int gcBefore) {
    if (Iterables.isEmpty(cfs.getSSTables(SSTableSet.LIVE)))
        return Collections.emptyList();

    Set<SSTableReader> uncompacting = ImmutableSet
            .copyOf(filter(cfs.getUncompactingSSTables(), sstables::contains));

    // Find fully expired SSTables. Those will be included no matter what.
    Set<SSTableReader> expired = Collections.emptySet();

    if (System.currentTimeMillis() - lastExpiredCheck > options.expiredSSTableCheckFrequency) {
        logger.debug("TWCS expired check sufficiently far in the past, checking for fully expired SSTables");
        expired = CompactionController.getFullyExpiredSSTables(cfs, uncompacting,
                cfs.getOverlappingLiveSSTables(uncompacting), gcBefore);
        lastExpiredCheck = System.currentTimeMillis();
    } else {
        logger.debug("TWCS skipping check for fully expired SSTables");
    }

    Set<SSTableReader> candidates = Sets.newHashSet(filterSuspectSSTables(uncompacting));

    List<SSTableReader> compactionCandidates = new ArrayList<>(
            getNextNonExpiredSSTables(Sets.difference(candidates, expired), gcBefore));
    if (!expired.isEmpty()) {
        logger.debug("Including expired sstables: {}", expired);
        compactionCandidates.addAll(expired);
    }
    return compactionCandidates;
}
From source file:org.obiba.mica.web.model.NetworkDtos.java
@NotNull
Mica.NetworkDto.Builder asDtoBuilder(@NotNull Network network, boolean asDraft) {
    Mica.NetworkDto.Builder builder = Mica.NetworkDto.newBuilder();
    if (network.hasModel())
        builder.setContent(JSONUtils.toJSON(network.getModel()));

    builder.setId(network.getId()) //
        .addAllName(localizedStringDtos.asDto(network.getName())) //
        .addAllDescription(localizedStringDtos.asDto(network.getDescription())) //
        .addAllAcronym(localizedStringDtos.asDto(network.getAcronym()));

    Mica.PermissionsDto permissionsDto = permissionsDtos.asDto(network);

    NetworkState networkState = networkService.getEntityState(network.getId());
    builder.setPublished(networkState.isPublished());

    if (asDraft) {
        builder.setTimestamps(TimestampsDtos.asDto(network)) //
            .setPublished(networkState.isPublished()) //
            .setExtension(Mica.EntityStateDto.state,
                entityStateDtos.asDto(networkState).setPermissions(permissionsDto).build());
    }

    builder.setPermissions(permissionsDto);

    List<String> roles = micaConfigService.getConfig().getRoles();

    if (network.getMemberships() != null) {
        List<Mica.MembershipsDto> memberships = network.getMemberships().entrySet().stream()
            .filter(e -> roles.contains(e.getKey()))
            .map(e -> Mica.MembershipsDto.newBuilder().setRole(e.getKey())
                .addAllMembers(e.getValue().stream()
                    .map(m -> personDtos.asDto(m.getPerson(), asDraft))
                    .collect(toList()))
                .build())
            .collect(toList());

        builder.addAllMemberships(memberships);
    }

    List<BaseStudy> publishedStudies = publishedStudyService.findByIds(network.getStudyIds());
    Set<String> publishedStudyIds = publishedStudies.stream()
        .map(AbstractGitPersistable::getId)
        .collect(Collectors.toSet());
    Sets.SetView<String> unpublishedStudyIds = Sets.difference(
        ImmutableSet.copyOf(network.getStudyIds().stream()
            .filter(sId -> asDraft && subjectAclService.isPermitted("/draft/individual-study", "VIEW", sId)
                || subjectAclService.isAccessible("/individual-study", sId))
            .collect(toList())),
        publishedStudyIds);

    if (!publishedStudies.isEmpty()) {
        Map<String, Long> datasetVariableCounts = asDraft ? null
            : datasetVariableService.getCountByStudyIds(Lists.newArrayList(publishedStudyIds));

        publishedStudies.forEach(study -> {
            builder.addStudyIds(study.getId());
            builder.addStudySummaries(studySummaryDtos.asDtoBuilder(study, true,
                datasetVariableCounts == null ? 0 : datasetVariableCounts.get(study.getId())));
        });
    }

    unpublishedStudyIds.forEach(studyId -> {
        try {
            builder.addStudySummaries(studySummaryDtos.asDto(studyId));
            builder.addStudyIds(studyId);
        } catch (NoSuchEntityException e) {
            log.warn("Study not found in network {}: {}", network.getId(), studyId);
            // ignore
        }
    });

    if (network.getLogo() != null) {
        builder.setLogo(attachmentDtos.asDto(network.getLogo()));
    }

    network.getNetworkIds().stream()
        .filter(nId -> asDraft && subjectAclService.isPermitted("/draft/network", "VIEW", nId)
            || subjectAclService.isAccessible("/network", nId))
        .forEach(nId -> {
            try {
                builder.addNetworkSummaries(networkSummaryDtos.asDtoBuilder(nId, asDraft));
                builder.addNetworkIds(nId);
            } catch (NoSuchEntityException e) {
                log.warn("Network not found in network {}: {}", network.getId(), nId);
                // ignore
            }
        });

    return builder;
}
From source file:com.google.errorprone.bugpatterns.RedundantOverride.java
@Override
public Description matchMethod(MethodTree tree, VisitorState state) {
    MethodSymbol methodSymbol = getSymbol(tree);
    if (methodSymbol == null) {
        return NO_MATCH;
    }
    Optional<MethodSymbol> maybeSuperMethod = findSuperMethod(methodSymbol, state.getTypes());
    if (!maybeSuperMethod.isPresent()) {
        return NO_MATCH;
    }
    MethodSymbol superMethod = maybeSuperMethod.get();
    if (tree.getBody() == null || tree.getBody().getStatements().size() != 1) {
        return NO_MATCH;
    }
    StatementTree statement = tree.getBody().getStatements().get(0);
    ExpressionTree expression = getSingleInvocation(statement);
    if (expression == null) {
        return NO_MATCH;
    }
    MethodInvocationTree methodInvocationTree = (MethodInvocationTree) expression;
    if (!getSymbol(methodInvocationTree).equals(superMethod)) {
        return NO_MATCH;
    }
    ExpressionTree receiver = getReceiver(methodInvocationTree);
    if (!(receiver instanceof IdentifierTree)) {
        return NO_MATCH;
    }
    if (!((IdentifierTree) receiver).getName().contentEquals("super")) {
        return NO_MATCH;
    }
    // Exempt Javadocs; the override might be here to add documentation.
    DocCommentTree docCommentTree = JavacTrees.instance(state.context).getDocCommentTree(state.getPath());
    if (docCommentTree != null) {
        return NO_MATCH;
    }
    // Exempt broadening of visibility.
    if (!methodSymbol.getModifiers().equals(superMethod.getModifiers())) {
        return NO_MATCH;
    }
    // Overriding a protected member in another package broadens the visibility to the new package.
    if (methodSymbol.getModifiers().contains(Modifier.PROTECTED)
            && !Objects.equals(superMethod.packge(), methodSymbol.packge())) {
        return NO_MATCH;
    }
    // Exempt any change in annotations (aside from @Override).
    ImmutableSet<Symbol> superAnnotations = getAnnotations(superMethod);
    ImmutableSet<Symbol> methodAnnotations = getAnnotations(methodSymbol);
    if (!Sets.difference(Sets.symmetricDifference(superAnnotations, methodAnnotations),
            ImmutableSet.of(state.getSymtab().overrideType.tsym)).isEmpty()) {
        return NO_MATCH;
    }
    for (int i = 0; i < tree.getParameters().size(); ++i) {
        if (!(methodInvocationTree.getArguments().get(i) instanceof IdentifierTree)) {
            return NO_MATCH;
        }
        if (!getSymbol(tree.getParameters().get(i))
                .equals(getSymbol(methodInvocationTree.getArguments().get(i)))) {
            return NO_MATCH;
        }
    }
    // Exempt if there are comments within the body. (Do this last, as it's expensive.)
    if (ErrorProneTokens.getTokens(state.getSourceForNode(tree.getBody()), state.context).stream()
            .anyMatch(t -> !t.comments().isEmpty())) {
        return NO_MATCH;
    }
    return describeMatch(tree, SuggestedFix.delete(tree));
}
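The annotation comparison above chains two set operations: symmetricDifference collects the annotations present on exactly one of the two methods, and difference then discards the exempt @Override symbol. A hedged sketch of that algebra using plain strings in place of Symbols (all names and values are illustrative):

import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;

public class AnnotationDiffSketch {
    public static void main(String[] args) {
        ImmutableSet<String> superAnnotations = ImmutableSet.of("Override", "Nullable");
        ImmutableSet<String> methodAnnotations = ImmutableSet.of("Override", "Deprecated");
        ImmutableSet<String> exempt = ImmutableSet.of("Override");

        // symmetricDifference: elements in exactly one set -> [Nullable, Deprecated].
        // difference then removes exempt elements (a no-op here, since Override
        // appears on both methods and so never reaches the symmetric difference).
        boolean changed = !Sets
                .difference(Sets.symmetricDifference(superAnnotations, methodAnnotations), exempt)
                .isEmpty();
        System.out.println(changed); // true: the sets differ beyond @Override
    }
}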
From source file:dagger.internal.codegen.SubcomponentFactoryMethodValidator.java
private SetView<TypeElement> ownedModules(ComponentNode component, BindingGraph graph) {
    return Sets.difference(
        ((ComponentNodeImpl) component).componentDescriptor().moduleTypes(),
        inheritedModules(component, graph));
}
From source file:org.apache.beam.fn.harness.DoFnPTransformRunnerFactory.java
@Override
public final RunnerT createRunnerForPTransform(PipelineOptions pipelineOptions,
        BeamFnDataClient beamFnDataClient, BeamFnStateClient beamFnStateClient, String ptransformId,
        PTransform pTransform, Supplier<String> processBundleInstructionId,
        Map<String, PCollection> pCollections, Map<String, RunnerApi.Coder> coders,
        Map<String, RunnerApi.WindowingStrategy> windowingStrategies,
        ListMultimap<String, FnDataReceiver<WindowedValue<?>>> pCollectionIdsToConsumers,
        Consumer<ThrowingRunnable> addStartFunction, Consumer<ThrowingRunnable> addFinishFunction,
        BundleSplitListener splitListener) {
    Context<FnInputT, OutputT> context = new Context<>(pipelineOptions, beamFnStateClient, ptransformId,
            pTransform, processBundleInstructionId, pCollections, coders, windowingStrategies,
            pCollectionIdsToConsumers, splitListener);

    RunnerT runner = createRunner(context);

    // Register the appropriate handlers.
    addStartFunction.accept(runner::startBundle);
    Iterable<String> mainInput = Sets.difference(pTransform.getInputsMap().keySet(),
            Sets.union(context.parDoPayload.getSideInputsMap().keySet(),
                    context.parDoPayload.getTimerSpecsMap().keySet()));
    for (String localInputName : mainInput) {
        pCollectionIdsToConsumers.put(pTransform.getInputsOrThrow(localInputName),
                (FnDataReceiver) (FnDataReceiver<WindowedValue<TransformInputT>>) runner::processElement);
    }

    // Register as a consumer for each timer PCollection.
    for (String localName : context.parDoPayload.getTimerSpecsMap().keySet()) {
        TimeDomain timeDomain = DoFnSignatures
                .getTimerSpecOrThrow(context.doFnSignature.timerDeclarations().get(localName), context.doFn)
                .getTimeDomain();
        pCollectionIdsToConsumers.put(pTransform.getInputsOrThrow(localName),
                timer -> runner.processTimer(localName, timeDomain, (WindowedValue<KV<Object, Timer>>) timer));
    }

    addFinishFunction.accept(runner::finishBundle);
    return runner;
}
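The main input above is isolated by subtracting the union of side-input and timer keys from the full input-key set. A minimal sketch of that union-then-difference pattern (key names are illustrative):

import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;

public class MainInputSketch {
    public static void main(String[] args) {
        ImmutableSet<String> allInputs = ImmutableSet.of("main", "side1", "timer1");
        ImmutableSet<String> sideInputs = ImmutableSet.of("side1");
        ImmutableSet<String> timers = ImmutableSet.of("timer1");

        // Whatever is neither a side input nor a timer is a main input.
        Sets.SetView<String> mainInputs = Sets.difference(allInputs, Sets.union(sideInputs, timers));
        System.out.println(mainInputs); // [main]
    }
}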
From source file:edu.harvard.med.screensaver.service.screenresult.ScreenResultLoader.java
/**
 * Load the screen results from a workbook into the database.
 *
 * @param workbook
 * @param admin
 * @param finalPlateNumberRange
 * @param incrementalFlush force the loader to periodically flush cached ResultValues and other Entities
 *          being held by the Hibernate session. Use this value to limit memory requirements for large
 *          datasets.
 * @throws ParseErrorsException
 * @throws EntityNotFoundException
 * @throws EntityExistsException
 */
@Transactional(propagation = Propagation.REQUIRES_NEW /* to ensure that errors cause rollback */,
        rollbackFor = { ParseErrorsException.class, EntityNotFoundException.class, EntityExistsException.class })
public ScreenResult parseAndLoad(Screen screen, Workbook workbook, AdministratorUser admin, String comments,
        IntRange finalPlateNumberRange, boolean incrementalFlush) throws ParseErrorsException {
    screen = _dao.reloadEntity(screen);
    ScreenResultParser screenResultParser = createScreenResultParser();
    screenResultParser.setIgnoreDuplicateErrors(_ignoreDuplicateErrors);
    ScreenResult screenResult = screenResultParser.parse(screen, workbook, finalPlateNumberRange,
            incrementalFlush);
    if (screenResultParser.getHasErrors()) {
        // we communicate back any parse errors as a ParseErrorsException, as this
        // serves to rollback the transaction, preventing persistence of invalid
        // screen result entity
        throw new ParseErrorsException(screenResultParser.getErrors());
    }
    if (incrementalFlush) {
        _dao.flush();
        _dao.clear();
        screen = _dao.reloadEntity(screen);
    }
    admin = _dao.reloadEntity(admin);
    AdministrativeActivity screenResultDataLoading = screen.getScreenResult().createScreenResultDataLoading(
            admin, screenResultParser.getPlateNumbersLoadedWithMaxReplicates(), comments);
    int assayPlatesCreated = Sets.difference(screen.getAssayPlatesDataLoaded(), screen.getAssayPlatesScreened())
            .size();
    if (assayPlatesCreated > 0) {
        log.info("created " + assayPlatesCreated
                + " assay plate(s) that were not previously recorded as having been screened: "
                + Sets.difference(screen.getAssayPlatesDataLoaded(), screen.getAssayPlatesScreened()));
    }
    _dao.persistEntity(screenResultDataLoading);
    _screenDerivedPropertiesUpdater.updateScreeningStatistics(screen);
    log.info("Screen result data loading completed successfully!");
    return screenResult;
}
From source file:org.obiba.mica.micaConfig.service.MicaConfigService.java
@CacheEvict(value = "micaConfig", allEntries = true)
public void save(@NotNull @Valid MicaConfig micaConfig) {
    MicaConfig savedConfig = getOrCreateMicaConfig();

    ArrayList<String> removedRoles = Lists.newArrayList(
            Sets.difference(Sets.newHashSet(savedConfig.getRoles()), Sets.newHashSet(micaConfig.getRoles())));

    BeanUtils.copyProperties(micaConfig, savedConfig, "id", "version", "createdBy", "createdDate",
            "lastModifiedBy", "lastModifiedDate", "secretKey", "micaVersion");

    if (micaConfig.getMicaVersion() != null)
        savedConfig.setMicaVersion(micaConfig.getMicaVersion());

    micaConfigRepository.save(savedConfig);
    eventBus.post(new MicaConfigUpdatedEvent(getConfig(), removedRoles));
}
From source file:org.atlasapi.query.content.UriFetchingQueryExecutor.java
private static Set<String> missingUris(Iterable<String> content, Iterable<String> uris) {
    return Sets.difference(ImmutableSet.copyOf(uris), ImmutableSet.copyOf(content));
}
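A possible call site for this last idiom (URIs are illustrative): the ImmutableSet.copyOf calls both satisfy the Set parameter types of difference and detach the result from the caller's collections.

import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;

import java.util.Arrays;
import java.util.List;
import java.util.Set;

public class MissingUrisSketch {
    static Set<String> missingUris(Iterable<String> content, Iterable<String> uris) {
        return Sets.difference(ImmutableSet.copyOf(uris), ImmutableSet.copyOf(content));
    }

    public static void main(String[] args) {
        List<String> requested = Arrays.asList("uri:a", "uri:b", "uri:c");
        List<String> resolved = Arrays.asList("uri:a", "uri:c");
        System.out.println(missingUris(resolved, requested)); // [uri:b]
    }
}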