List of usage examples for java.util.function.Predicate#test
boolean test(T t);
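Before the project-specific examples below, here is a minimal self-contained sketch of how test is typically called and how predicates compose; the predicate and sample values are illustrative only:

import java.util.function.Predicate;

public class PredicateTestExample {
    public static void main(String[] args) {
        // A Predicate is a boolean-valued function of one argument; test() evaluates it.
        Predicate<String> isBlank = s -> s == null || s.trim().isEmpty();

        System.out.println(isBlank.test(""));      // true
        System.out.println(isBlank.test("   "));   // true
        System.out.println(isBlank.test("hello")); // false

        // Predicates compose with the default methods and(), or() and negate().
        Predicate<String> hasText = isBlank.negate();
        Predicate<String> shortText = s -> s.length() < 10;
        System.out.println(hasText.and(shortText).test("hello")); // true
    }
}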
From source file:io.pravega.service.server.containers.StreamSegmentMapperTests.java
/**
 * Tests the ability of the StreamSegmentMapper to generate/return the Id of an existing StreamSegment, as well as
 * retrieving existing attributes.
 */
@Test
public void testGetOrAssignStreamSegmentId() {
    final int segmentCount = 10;
    final int transactionsPerSegment = 5;

    @Cleanup
    TestContext context = new TestContext();
    HashSet<String> storageSegments = new HashSet<>();
    for (int i = 0; i < segmentCount; i++) {
        String segmentName = getName(i);
        storageSegments.add(segmentName);
        setAttributes(segmentName, storageSegments.size() % ATTRIBUTE_COUNT, context);

        for (int j = 0; j < transactionsPerSegment; j++) {
            // There is a small chance of a name conflict here, but we don't care. As long as we get at least one
            // Transaction per segment, we should be fine.
            String transactionName = StreamSegmentNameUtils.getTransactionNameFromId(segmentName, UUID.randomUUID());
            storageSegments.add(transactionName);
            setAttributes(transactionName, storageSegments.size() % ATTRIBUTE_COUNT, context);
        }
    }

    // We setup all necessary handlers, except the one for create. We do not need to create new Segments here.
    setupOperationLog(context);
    Predicate<String> isSealed = segmentName -> segmentName.hashCode() % 2 == 0;
    Function<String, Long> getInitialLength = segmentName -> (long) Math.abs(segmentName.hashCode());
    setupStorageGetHandler(context, storageSegments,
            segmentName -> new StreamSegmentInformation(segmentName, getInitialLength.apply(segmentName),
                    isSealed.test(segmentName), false, new ImmutableDate()));

    // First, map all the parents (stand-alone segments).
    for (String name : storageSegments) {
        if (StreamSegmentNameUtils.getParentStreamSegmentName(name) == null) {
            long id = context.mapper.getOrAssignStreamSegmentId(name, TIMEOUT).join();
            Assert.assertNotEquals("No id was assigned for StreamSegment " + name,
                    ContainerMetadata.NO_STREAM_SEGMENT_ID, id);
            SegmentMetadata sm = context.metadata.getStreamSegmentMetadata(id);
            Assert.assertNotNull("No metadata was created for StreamSegment " + name, sm);

            long expectedLength = getInitialLength.apply(name);
            boolean expectedSeal = isSealed.test(name);
            Assert.assertEquals("Metadata does not have the expected length for StreamSegment " + name,
                    expectedLength, sm.getDurableLogLength());
            Assert.assertEquals("Metadata does not have the expected value for isSealed for StreamSegment " + name,
                    expectedSeal, sm.isSealed());

            val segmentState = context.stateStore.get(name, TIMEOUT).join();
            Map<UUID, Long> expectedAttributes = segmentState == null ? null : segmentState.getAttributes();
            SegmentMetadataComparer.assertSameAttributes(
                    "Unexpected attributes in metadata for StreamSegment " + name, expectedAttributes, sm);
        }
    }

    // Now, map all the Transactions.
    for (String name : storageSegments) {
        String parentName = StreamSegmentNameUtils.getParentStreamSegmentName(name);
        if (parentName != null) {
            long id = context.mapper.getOrAssignStreamSegmentId(name, TIMEOUT).join();
            Assert.assertNotEquals("No id was assigned for Transaction " + name,
                    ContainerMetadata.NO_STREAM_SEGMENT_ID, id);
            SegmentMetadata sm = context.metadata.getStreamSegmentMetadata(id);
            Assert.assertNotNull("No metadata was created for Transaction " + name, sm);

            long expectedLength = getInitialLength.apply(name);
            boolean expectedSeal = isSealed.test(name);
            Assert.assertEquals("Metadata does not have the expected length for Transaction " + name,
                    expectedLength, sm.getDurableLogLength());
            Assert.assertEquals("Metadata does not have the expected value for isSealed for Transaction " + name,
                    expectedSeal, sm.isSealed());

            val segmentState = context.stateStore.get(name, TIMEOUT).join();
            Map<UUID, Long> expectedAttributes = segmentState == null ? null : segmentState.getAttributes();
            SegmentMetadataComparer.assertSameAttributes(
                    "Unexpected attributes in metadata for Transaction " + name, expectedAttributes, sm);

            // Check parenthood.
            Assert.assertNotEquals("No parent defined in metadata for Transaction " + name,
                    ContainerMetadata.NO_STREAM_SEGMENT_ID, sm.getParentId());
            long parentId = context.metadata.getStreamSegmentId(parentName, false);
            Assert.assertEquals("Unexpected parent defined in metadata for Transaction " + name,
                    parentId, sm.getParentId());
        }
    }
}
From source file:io.pravega.segmentstore.server.containers.StreamSegmentMapperTests.java
/**
 * Tests the ability of the StreamSegmentMapper to generate/return the Id of an existing StreamSegment, as well as
 * retrieving existing attributes.
 */
@Test
public void testGetOrAssignStreamSegmentId() {
    final int segmentCount = 10;
    final int transactionsPerSegment = 5;
    final long noSegmentId = ContainerMetadata.NO_STREAM_SEGMENT_ID;
    AtomicLong currentSegmentId = new AtomicLong(Integer.MAX_VALUE);
    Supplier<Long> nextSegmentId = () -> currentSegmentId.decrementAndGet() % 2 == 0
            ? noSegmentId
            : currentSegmentId.get();

    @Cleanup
    TestContext context = new TestContext();
    HashSet<String> storageSegments = new HashSet<>();
    for (int i = 0; i < segmentCount; i++) {
        String segmentName = getName(i);
        storageSegments.add(segmentName);
        setAttributes(segmentName, nextSegmentId.get(), storageSegments.size() % ATTRIBUTE_COUNT, context);

        for (int j = 0; j < transactionsPerSegment; j++) {
            // There is a small chance of a name conflict here, but we don't care. As long as we get at least one
            // Transaction per segment, we should be fine.
            String transactionName = StreamSegmentNameUtils.getTransactionNameFromId(segmentName, UUID.randomUUID());
            storageSegments.add(transactionName);
            setAttributes(transactionName, nextSegmentId.get(), storageSegments.size() % ATTRIBUTE_COUNT, context);
        }
    }

    // We setup all necessary handlers, except the one for create. We do not need to create new Segments here.
    setupOperationLog(context);
    Predicate<String> isSealed = segmentName -> segmentName.hashCode() % 2 == 0;
    Function<String, Long> getInitialLength = segmentName -> (long) Math.abs(segmentName.hashCode());
    setupStorageGetHandler(context, storageSegments,
            segmentName -> new StreamSegmentInformation(segmentName, getInitialLength.apply(segmentName),
                    isSealed.test(segmentName), false, new ImmutableDate()));

    // First, map all the parents (stand-alone segments).
    for (String name : storageSegments) {
        if (StreamSegmentNameUtils.getParentStreamSegmentName(name) == null) {
            long id = context.mapper.getOrAssignStreamSegmentId(name, TIMEOUT).join();
            Assert.assertNotEquals("No id was assigned for StreamSegment " + name,
                    ContainerMetadata.NO_STREAM_SEGMENT_ID, id);
            SegmentMetadata sm = context.metadata.getStreamSegmentMetadata(id);
            Assert.assertNotNull("No metadata was created for StreamSegment " + name, sm);

            long expectedLength = getInitialLength.apply(name);
            boolean expectedSeal = isSealed.test(name);
            Assert.assertEquals("Metadata does not have the expected length for StreamSegment " + name,
                    expectedLength, sm.getDurableLogLength());
            Assert.assertEquals("Metadata does not have the expected value for isSealed for StreamSegment " + name,
                    expectedSeal, sm.isSealed());

            val segmentState = context.stateStore.get(name, TIMEOUT).join();
            Map<UUID, Long> expectedAttributes = segmentState == null ? null : segmentState.getAttributes();
            SegmentMetadataComparer.assertSameAttributes(
                    "Unexpected attributes in metadata for StreamSegment " + name, expectedAttributes, sm);
        }
    }

    // Now, map all the Transactions.
    for (String name : storageSegments) {
        String parentName = StreamSegmentNameUtils.getParentStreamSegmentName(name);
        if (parentName != null) {
            long id = context.mapper.getOrAssignStreamSegmentId(name, TIMEOUT).join();
            Assert.assertNotEquals("No id was assigned for Transaction " + name,
                    ContainerMetadata.NO_STREAM_SEGMENT_ID, id);
            SegmentMetadata sm = context.metadata.getStreamSegmentMetadata(id);
            Assert.assertNotNull("No metadata was created for Transaction " + name, sm);

            long expectedLength = getInitialLength.apply(name);
            boolean expectedSeal = isSealed.test(name);
            Assert.assertEquals("Metadata does not have the expected length for Transaction " + name,
                    expectedLength, sm.getDurableLogLength());
            Assert.assertEquals("Metadata does not have the expected value for isSealed for Transaction " + name,
                    expectedSeal, sm.isSealed());

            val segmentState = context.stateStore.get(name, TIMEOUT).join();
            Map<UUID, Long> expectedAttributes = segmentState == null ? null : segmentState.getAttributes();
            SegmentMetadataComparer.assertSameAttributes(
                    "Unexpected attributes in metadata for Transaction " + name, expectedAttributes, sm);

            // Check parenthood.
            Assert.assertNotEquals("No parent defined in metadata for Transaction " + name,
                    ContainerMetadata.NO_STREAM_SEGMENT_ID, sm.getParentId());
            long parentId = context.metadata.getStreamSegmentId(parentName, false);
            Assert.assertEquals("Unexpected parent defined in metadata for Transaction " + name,
                    parentId, sm.getParentId());
        }
    }
}
From source file:org.briljantframework.data.vector.AbstractVector.java
@Override
public <T> boolean any(Class<T> cls, Predicate<? super T> predicate) {
    for (int i = 0; i < size(); i++) {
        if (predicate.test(loc().get(cls, i))) {
            return true;
        }
    }
    return false;
}
From source file:org.opensingular.flow.core.ProcessInstance.java
/**
 * Returns the most recent task instance that satisfies the given condition.
 *
 * @param condicao the condition to test each task against.
 */
@Nonnull
public Optional<TaskInstance> getTaskNewer(@Nonnull Predicate<TaskInstance> condicao) {
    Objects.requireNonNull(condicao);
    List<? extends IEntityTaskInstance> lista = getEntity().getTasks();
    for (int i = lista.size() - 1; i != -1; i--) {
        TaskInstance task = getTaskInstance(lista.get(i));
        if (condicao.test(task)) {
            return Optional.of(task);
        }
    }
    return Optional.empty();
}
From source file:nl.systemsgenetics.genenetworkbackend.hpo.DiseaseGeneHpoData.java
public DiseaseGeneHpoData(final File diseaseGeneHpoFile, HashMap<String, ArrayList<String>> ncbiToEnsgMap,
        HashMap<String, ArrayList<String>> hgncToEnsgMap, HashSet<String> exludedHpo, HashSet<String> includeGenes,
        String diseasePrefix) throws FileNotFoundException, IOException {

    geneToHpos = new HashMap<>();
    diseaseToGenes = new HashMap<>();
    diseaseGeneToHpos = new HashMap<>();

    Predicate<String> diseasePattern;
    if (diseasePrefix != null) {
        diseasePattern = Pattern.compile("^" + diseasePrefix).asPredicate();
    } else {
        diseasePattern = null;
    }

    final CSVParser hpoParser = new CSVParserBuilder().withSeparator('\t').withIgnoreQuotations(true).build();
    final CSVReader hpoReader = new CSVReaderBuilder(new BufferedReader(new FileReader(diseaseGeneHpoFile)))
            .withSkipLines(1).withCSVParser(hpoParser).build();

    String[] nextLine;
    while ((nextLine = hpoReader.readNext()) != null) {
        String disease = nextLine[0];
        String hgcnId = nextLine[1];
        String ncbiId = nextLine[2];
        String hpo = nextLine[3];

        if (diseasePattern != null && !diseasePattern.test(disease)) {
            continue;
        }
        if (exludedHpo != null && exludedHpo.contains(hpo)) {
            continue;
        }

        ArrayList<String> ensgIds = ncbiToEnsgMap.get(ncbiId);
        if (ensgIds == null) {
            ensgIds = hgncToEnsgMap.get(hgcnId);
        }
        if (ensgIds == null) {
            System.err.println("Missing mapping for gene: " + ncbiId + " " + hgcnId);
        } else if (ensgIds.size() > 1) {
            System.err.println("Skipping because multiple ENSG IDs for gene: " + ncbiId + " " + hgcnId);
        } else if (!includeGenes.contains(ensgIds.get(0))) {
            System.err.println("Skipping because gene not in include list: " + ncbiId + " " + hgcnId);
        } else {
            String ensgId = ensgIds.get(0);

            HashSet<String> geneHpos = geneToHpos.get(ensgId);
            if (geneHpos == null) {
                geneHpos = new HashSet<>();
                geneToHpos.put(ensgId, geneHpos);
            }
            geneHpos.add(hpo);

            HashSet<String> diseaseGenes = diseaseToGenes.get(disease);
            if (diseaseGenes == null) {
                diseaseGenes = new HashSet<>();
                diseaseToGenes.put(disease, diseaseGenes);
            }
            diseaseGenes.add(ensgId);

            DiseaseGene diseaseGene = new DiseaseGene(disease, ensgId);
            HashSet<String> diseaseGeneHpos = diseaseGeneToHpos.get(diseaseGene);
            if (diseaseGeneHpos == null) {
                diseaseGeneHpos = new HashSet<>();
                diseaseGeneToHpos.put(diseaseGene, diseaseGeneHpos);
            }
            diseaseGeneHpos.add(hpo);
        }
    }
}
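The constructor above builds its Predicate from a regular expression via Pattern.asPredicate(). A minimal standalone illustration of that idiom (the prefix and sample identifiers are illustrative only):

import java.util.function.Predicate;
import java.util.regex.Pattern;

public class PrefixPredicateExample {
    public static void main(String[] args) {
        // asPredicate() wraps Matcher.find(), so "^OMIM" accepts any string starting with OMIM.
        Predicate<String> isOmimDisease = Pattern.compile("^OMIM").asPredicate();

        System.out.println(isOmimDisease.test("OMIM:101200")); // true
        System.out.println(isOmimDisease.test("ORPHA:87"));    // false
    }
}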
From source file:org.briljantframework.data.vector.AbstractVector.java
@Override
public <T> boolean all(Class<T> cls, Predicate<? super T> predicate) {
    VectorLocationGetter getter = loc();
    for (int i = 0, size = size(); i < size; i++) {
        if (!predicate.test(getter.get(cls, i))) {
            return false;
        }
    }
    return true;
}
From source file:ddf.common.test.ServiceManager.java
public void waitForFeature(String featureName, Predicate<FeatureState> predicate) throws Exception {
    boolean ready = false;
    long timeoutLimit = System.currentTimeMillis() + REQUIRED_BUNDLES_TIMEOUT;
    FeaturesService featuresService = getFeaturesService();

    while (!ready) {
        if (featuresService != null) {
            Feature feature = featuresService.getFeature(featureName);
            FeatureState state = featuresService.getState(feature.getName() + "/" + feature.getVersion());
            if (state == null) {
                LOGGER.warn("No Feature found for featureName: {}", featureName);
                return;
            } else if (predicate.test(state)) {
                ready = true;
            }
        }

        if (!ready) {
            if (System.currentTimeMillis() > timeoutLimit) {
                printInactiveBundles();
                fail(String.format(
                        "Feature did not change to State [" + predicate.toString() + "] within %d minutes.",
                        TimeUnit.MILLISECONDS.toMinutes(REQUIRED_BUNDLES_TIMEOUT)));
            }
            LOGGER.info("Feature [{}] not [{}], sleeping...", featureName, predicate.toString());
            Thread.sleep(1000);
        }
    }
}
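The example above uses the predicate to poll for a state change until a timeout expires. A stripped-down, self-contained sketch of the same polling pattern follows; the helper name, supplier, and timeout values are illustrative and not part of the DDF API:

import java.util.function.Predicate;
import java.util.function.Supplier;

public class AwaitExample {
    /** Polls the supplier until the predicate accepts its value or the timeout elapses. */
    static <T> boolean await(Supplier<T> source, Predicate<T> condition, long timeoutMillis)
            throws InterruptedException {
        long deadline = System.currentTimeMillis() + timeoutMillis;
        while (System.currentTimeMillis() < deadline) {
            T value = source.get();
            if (value != null && condition.test(value)) {
                return true;
            }
            Thread.sleep(100);
        }
        return false;
    }

    public static void main(String[] args) throws InterruptedException {
        long start = System.currentTimeMillis();
        // The condition becomes true roughly half a second after start.
        Supplier<Long> elapsed = () -> System.currentTimeMillis() - start;
        System.out.println(await(elapsed, millis -> millis > 500, 2000)); // true
    }
}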
From source file:org.briljantframework.data.vector.AbstractVector.java
@Override
public <T> BooleanArray where(Class<T> cls, Predicate<? super T> predicate) {
    BooleanArray array = Arrays.booleanArray(size());
    for (int i = 0; i < size(); i++) {
        array.set(i, predicate.test(loc().get(cls, i)));
    }
    return array;
}
From source file:org.finra.herd.service.impl.TagServiceImpl.java
/**
 * A helper method that will validate a list of tags.
 *
 * @param tagEntityList the list of tags that will be validated
 *
 * @return true if all of the tags are valid in the index
 */
private boolean indexValidateTagsList(final List<TagEntity> tagEntityList) {
    final String indexName = SearchIndexTypeEntity.SearchIndexTypes.TAG.name().toLowerCase();
    final String documentType = configurationHelper.getProperty(ConfigurationValue.ELASTICSEARCH_BDEF_DOCUMENT_TYPE,
            String.class);

    Predicate<TagEntity> validInIndexPredicate = tagEntity -> {
        // Fetch Join with .size()
        tagEntity.getChildrenTagEntities().size();

        // Convert the tag entity to a JSON string
        final String jsonString = tagHelper.safeObjectMapperWriteValueAsString(tagEntity);

        return this.indexFunctionsDao.isValidDocumentIndex(indexName, documentType, tagEntity.getId().toString(),
                jsonString);
    };

    boolean isValid = true;
    for (TagEntity tagEntity : tagEntityList) {
        if (!validInIndexPredicate.test(tagEntity)) {
            isValid = false;
        }
    }

    return isValid;
}
From source file:com.ikanow.aleph2.management_db.services.DataBucketStatusCrudService.java
/** Tries to distribute a request to listening data import managers to notify their harvesters that the bucket state has been updated
 * @param update_reply - the future reply to the find-and-update
 * @param suspended_predicate - takes the status bean (must exist at this point) and checks whether the bucket should be suspended
 * @param underlying_data_bucket_db - the data bucket bean db store
 * @param actor_context - actor context for distributing out requests
 * @param retry_store - the retry store for handling data import manager connectivity problems
 * @return a collection of success/error messages from either this function or the
 */
private static <T> CompletableFuture<Collection<BasicMessageBean>> getOperationFuture(
        final CompletableFuture<Optional<DataBucketStatusBean>> update_reply,
        final Predicate<DataBucketStatusBean> suspended_predicate,
        final ICrudService<DataBucketBean> underlying_data_bucket_db,
        final ICrudService<DataBucketStatusBean> underlying_data_bucket_status_db,
        final ManagementDbActorContext actor_context,
        final ICrudService<BucketActionRetryMessage> retry_store) {

    return update_reply.thenCompose(sb -> {
        return sb.isPresent()
                ? underlying_data_bucket_db.getObjectById(sb.get()._id())
                : CompletableFuture.completedFuture(Optional.empty());
    }).thenCompose(bucket -> {
        if (!bucket.isPresent()) {
            return CompletableFuture.completedFuture(Arrays.asList(new BasicMessageBean(new Date(), // date
                    false, // success
                    IManagementDbService.CORE_MANAGEMENT_DB.get(),
                    BucketActionMessage.UpdateBucketActionMessage.class.getSimpleName(),
                    null, // message code
                    ErrorUtils.get(ManagementDbErrorUtils.MISSING_STATUS_BEAN_OR_BUCKET,
                            update_reply.join().map(s -> s._id()).orElse("(unknown)")),
                    null) // details
            ));
        } else {
            // If we're here we've retrieved both the bucket and bucket status, so we're good to go
            final DataBucketStatusBean status_bean = update_reply.join().get();
            // (as above, if we're here then must exist)

            // Once we have the bucket, issue the update command
            final BucketActionMessage.UpdateBucketActionMessage update_message = new BucketActionMessage.UpdateBucketActionMessage(
                    bucket.get(),
                    suspended_predicate.test(status_bean), // (ie user picks whether to suspend or unsuspend here)
                    bucket.get(),
                    new HashSet<String>(
                            Optional.ofNullable(status_bean.node_affinity()).orElse(Collections.emptyList())));

            // Collect message and handle retries
            final CompletableFuture<Collection<BasicMessageBean>> management_results = MgmtCrudUtils
                    .applyRetriableManagementOperation(bucket.get(), actor_context, retry_store, update_message,
                            source -> {
                                return new BucketActionMessage.UpdateBucketActionMessage(update_message.bucket(),
                                        status_bean.suspended(), update_message.bucket(),
                                        new HashSet<String>(Arrays.asList(source)));
                            });

            return MgmtCrudUtils.handleUpdatingStatus(bucket.get(), status_bean,
                    suspended_predicate.test(status_bean), management_results, underlying_data_bucket_status_db);
        }
    });
}