List of usage examples for java.util.Collection.forEach
default void forEach(Consumer<? super T> action)
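The method is inherited from java.lang.Iterable and applies the given Consumer to each element, in iteration order when the collection defines one. Before the examples from real projects, a minimal self-contained sketch (class and variable names here are illustrative, not taken from the examples below):

import java.util.Arrays;
import java.util.Collection;

public class ForEachDemo {
    public static void main(String[] args) {
        Collection<String> names = Arrays.asList("alpha", "beta", "gamma");
        // The Consumer is invoked once per element; for collections with a
        // defined order, the default implementation behaves like an enhanced for loop.
        names.forEach(name -> System.out.println(name.toUpperCase()));
    }
}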
From source file:org.apache.hadoop.hbase.client.SimpleRequestController.java
@Override
public void incTaskCounters(Collection<byte[]> regions, ServerName sn) {
    tasksInProgress.incrementAndGet();
    computeIfAbsent(taskCounterPerServer, sn, AtomicInteger::new).incrementAndGet();
    regions.forEach((regBytes) -> computeIfAbsent(taskCounterPerRegion, regBytes, AtomicInteger::new)
            .incrementAndGet());
}
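Note that computeIfAbsent above appears to be a statically imported HBase utility (it takes the map as its first argument), not the java.util.Map method. The same per-key counter idiom can be written against the standard JDK API; a minimal sketch with illustrative names:

import java.util.Collection;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicInteger;

public class CounterDemo {
    private final ConcurrentMap<String, AtomicInteger> counters = new ConcurrentHashMap<>();

    public void incCounters(Collection<String> keys) {
        // Map.computeIfAbsent creates the counter on first use, then increments it.
        keys.forEach(key -> counters.computeIfAbsent(key, k -> new AtomicInteger()).incrementAndGet());
    }

    public static void main(String[] args) {
        CounterDemo demo = new CounterDemo();
        demo.incCounters(List.of("r1", "r2", "r1"));
        System.out.println(demo.counters); // r1=2, r2=1 (map iteration order not guaranteed)
    }
}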
From source file:org.onosproject.store.ecmap.EventuallyConsistentMapImpl.java
private void queueUpdate(UpdateEntry<K, V> event, Collection<NodeId> peers) {
    if (peers == null) {
        // we have no friends :(
        return;
    }
    peers.forEach(node ->
            senderPending.computeIfAbsent(node, unusedKey -> new EventAccumulator(node)).add(event));
}
From source file:io.mandrel.data.export.DelimiterSeparatedValuesExporter.java
@Override
public void export(Collection<Document> documents, List<FieldExtractor> fields) {
    List<String> headers = fields.stream().map(field -> field.getName()).collect(Collectors.toList());

    if (addHeader && !headerAdded) {
        try {
            csvWriter.writeHeader(headers.toArray(new String[] {}));
        } catch (Exception e) {
            log.debug("Can not write header {}", csvWriter.getLineNumber(), e);
        }
        headerAdded = true;
    }

    List<String> buffer = new ArrayList<>(fields.size());
    documents.forEach(doc -> {
        for (String header : headers) {
            List<? extends Object> values = doc.get(header);
            if (!CollectionUtils.isEmpty(values)) {
                if (keepOnlyFirstValue) {
                    buffer.add(values.get(0).toString());
                } else {
                    buffer.add(StringUtils.join(values, delimiterMultiValuesChar));
                }
            } else {
                buffer.add(StringUtils.EMPTY);
            }
        }
        try {
            csvWriter.write(buffer);
        } catch (Exception e) {
            log.debug("Can not write line {}", csvWriter.getLineNumber(), e);
        }
        buffer.clear();
    });
}
From source file:com.haulmont.restapi.service.EntitiesControllerManager.java
public CreatedEntityInfo updateEntity(String entityJson, String entityName, String entityId,
        String modelVersion) {
    String transformedEntityName = restControllerUtils.transformEntityNameIfRequired(entityName,
            modelVersion, JsonTransformationDirection.FROM_VERSION);
    MetaClass metaClass = restControllerUtils.getMetaClass(transformedEntityName);
    checkCanUpdateEntity(metaClass);
    Object id = getIdFromString(entityId, metaClass);

    LoadContext loadContext = new LoadContext(metaClass).setId(id);
    @SuppressWarnings("unchecked")
    Entity existingEntity = dataManager.load(loadContext);
    checkEntityIsNotNull(transformedEntityName, entityId, existingEntity);

    entityJson = restControllerUtils.transformJsonIfRequired(entityName, modelVersion,
            JsonTransformationDirection.FROM_VERSION, entityJson);

    Entity entity;
    try {
        entity = entitySerializationAPI.entityFromJson(entityJson, metaClass);
    } catch (Exception e) {
        throw new RestAPIException("Cannot deserialize an entity from JSON", "", HttpStatus.BAD_REQUEST, e);
    }

    if (entity instanceof BaseGenericIdEntity) {
        //noinspection unchecked
        ((BaseGenericIdEntity) entity).setId(id);
    }

    EntityImportView entityImportView = entityImportViewBuilderAPI.buildFromJson(entityJson, metaClass);
    Collection<Entity> importedEntities;
    try {
        importedEntities = entityImportExportService.importEntities(Collections.singletonList(entity),
                entityImportView, true);
        importedEntities.forEach(it -> restControllerUtils.applyAttributesSecurity(it));
    } catch (EntityImportException e) {
        throw new RestAPIException("Entity update failed", e.getMessage(), HttpStatus.BAD_REQUEST, e);
    }

    // there may be multiple entities in importedEntities (because of @Composition references),
    // so we must find the main entity that will be returned
    return getMainEntityInfo(importedEntities, metaClass, modelVersion);
}
From source file:com.evolveum.midpoint.wf.impl.util.MiscDataUtil.java
public void resolveAndStoreObjectReferences(@NotNull Collection<ObjectReferenceType> references,
        OperationResult result) {
    references.forEach(ref -> resolveObjectReference(ref, true, result));
}
From source file:com.intuit.wasabi.api.AssignmentsResource.java
/**
 * Converts a collection of Assignment objects to the list of response maps expected by the end user.
 *
 * @param assignments
 * @return
 */
protected List<Map<String, Object>> toBatchAssignmentResponseMap(Collection<Assignment> assignments) {
    List<Map<String, Object>> responseList = new ArrayList<>();
    assignments.forEach(assignment -> {
        responseList.add(toBatchAssignmentResponseMap(assignment));
    });
    return responseList;
}
From source file:org.apache.eagle.alert.engine.spark.function.StreamRouteBoltFunction.java
public void onStreamRouteBoltSpecChange(RouterSpec spec, Map<String, StreamDefinition> sds,
        StreamRouterImpl router, StreamRouterBoltOutputCollector routeCollector,
        final Map<StreamPartition, StreamSortSpec> cachedSSS,
        final Map<StreamPartition, List<StreamRouterSpec>> cachedSRS, int partitionNum) {
    //sanityCheck(spec);

    // figure out added, removed, modified StreamSortSpec
    Map<StreamPartition, StreamSortSpec> newSSS = spec.makeSSS();

    Set<StreamPartition> newStreamIds = newSSS.keySet();
    Set<StreamPartition> cachedStreamIds = cachedSSS.keySet();
    Collection<StreamPartition> addedStreamIds = CollectionUtils.subtract(newStreamIds, cachedStreamIds);
    Collection<StreamPartition> removedStreamIds = CollectionUtils.subtract(cachedStreamIds, newStreamIds);
    Collection<StreamPartition> modifiedStreamIds = CollectionUtils.intersection(newStreamIds, cachedStreamIds);

    Map<StreamPartition, StreamSortSpec> added = new HashMap<>();
    Map<StreamPartition, StreamSortSpec> removed = new HashMap<>();
    Map<StreamPartition, StreamSortSpec> modified = new HashMap<>();
    addedStreamIds.forEach(s -> added.put(s, newSSS.get(s)));
    removedStreamIds.forEach(s -> removed.put(s, cachedSSS.get(s)));
    modifiedStreamIds.forEach(s -> {
        if (!newSSS.get(s).equals(cachedSSS.get(s))) {
            // this means StreamSortSpec is changed for one specific streamId
            modified.put(s, newSSS.get(s));
        }
    });
    if (LOG.isDebugEnabled()) {
        LOG.debug("added StreamSortSpec " + added);
        LOG.debug("removed StreamSortSpec " + removed);
        LOG.debug("modified StreamSortSpec " + modified);
    }
    router.onStreamSortSpecChange(added, removed, modified);
    // switch cache
    this.cachedSSS = newSSS;

    // figure out added, removed, modified StreamRouterSpec
    Map<StreamPartition, List<StreamRouterSpec>> newSRS = spec.makeSRS();

    Set<StreamPartition> newStreamPartitions = newSRS.keySet();
    Set<StreamPartition> cachedStreamPartitions = cachedSRS.keySet();
    Collection<StreamPartition> addedStreamPartitions =
            CollectionUtils.subtract(newStreamPartitions, cachedStreamPartitions);
    Collection<StreamPartition> removedStreamPartitions =
            CollectionUtils.subtract(cachedStreamPartitions, newStreamPartitions);
    Collection<StreamPartition> modifiedStreamPartitions =
            CollectionUtils.intersection(newStreamPartitions, cachedStreamPartitions);

    Collection<StreamRouterSpec> addedRouterSpecs = new ArrayList<>();
    Collection<StreamRouterSpec> removedRouterSpecs = new ArrayList<>();
    Collection<StreamRouterSpec> modifiedRouterSpecs = new ArrayList<>();
    addedStreamPartitions.forEach(s -> addedRouterSpecs.addAll(newSRS.get(s)));
    removedStreamPartitions.forEach(s -> removedRouterSpecs.addAll(cachedSRS.get(s)));
    modifiedStreamPartitions.forEach(s -> {
        if (!CollectionUtils.isEqualCollection(newSRS.get(s), cachedSRS.get(s))) {
            // this means StreamRouterSpec is changed for one specific StreamPartition
            modifiedRouterSpecs.addAll(newSRS.get(s));
        }
    });
    if (LOG.isDebugEnabled()) {
        LOG.debug("added StreamRouterSpec " + addedRouterSpecs);
        LOG.debug("removed StreamRouterSpec " + removedRouterSpecs);
        LOG.debug("modified StreamRouterSpec " + modifiedRouterSpecs);
    }
    routeCollector.onStreamRouterSpecChange(addedRouterSpecs, removedRouterSpecs, modifiedRouterSpecs, sds);
    // switch cache
    this.cachedSRS = newSRS;
    routeState.store(routeCollector, cachedSSS, cachedSRS, partitionNum);
}
From source file:org.apache.eagle.alert.engine.runner.StreamRouterBolt.java
/**
 * Compares the incoming spec with the metadata snapshot cache to generate a diff (added, removed
 * and modified) between versions.
 *
 * @param spec
 */
@SuppressWarnings("unchecked")
@Override
public synchronized void onStreamRouteBoltSpecChange(RouterSpec spec, Map<String, StreamDefinition> sds) {
    sanityCheck(spec);

    // figure out added, removed, modified StreamSortSpec
    Map<StreamPartition, StreamSortSpec> newSSS = spec.makeSSS();

    Set<StreamPartition> newStreamIds = newSSS.keySet();
    Set<StreamPartition> cachedStreamIds = cachedSSS.keySet();
    Collection<StreamPartition> addedStreamIds = CollectionUtils.subtract(newStreamIds, cachedStreamIds);
    Collection<StreamPartition> removedStreamIds = CollectionUtils.subtract(cachedStreamIds, newStreamIds);
    Collection<StreamPartition> modifiedStreamIds = CollectionUtils.intersection(newStreamIds, cachedStreamIds);

    Map<StreamPartition, StreamSortSpec> added = new HashMap<>();
    Map<StreamPartition, StreamSortSpec> removed = new HashMap<>();
    Map<StreamPartition, StreamSortSpec> modified = new HashMap<>();
    addedStreamIds.forEach(s -> added.put(s, newSSS.get(s)));
    removedStreamIds.forEach(s -> removed.put(s, cachedSSS.get(s)));
    modifiedStreamIds.forEach(s -> {
        if (!newSSS.get(s).equals(cachedSSS.get(s))) {
            // this means StreamSortSpec is changed for one specific streamId
            modified.put(s, newSSS.get(s));
        }
    });
    if (LOG.isDebugEnabled()) {
        LOG.debug("added StreamSortSpec " + added);
        LOG.debug("removed StreamSortSpec " + removed);
        LOG.debug("modified StreamSortSpec " + modified);
    }
    router.onStreamSortSpecChange(added, removed, modified);
    // switch cache
    cachedSSS = newSSS;

    // figure out added, removed, modified StreamRouterSpec
    Map<StreamPartition, List<StreamRouterSpec>> newSRS = spec.makeSRS();

    Set<StreamPartition> newStreamPartitions = newSRS.keySet();
    Set<StreamPartition> cachedStreamPartitions = cachedSRS.keySet();
    Collection<StreamPartition> addedStreamPartitions =
            CollectionUtils.subtract(newStreamPartitions, cachedStreamPartitions);
    Collection<StreamPartition> removedStreamPartitions =
            CollectionUtils.subtract(cachedStreamPartitions, newStreamPartitions);
    Collection<StreamPartition> modifiedStreamPartitions =
            CollectionUtils.intersection(newStreamPartitions, cachedStreamPartitions);

    Collection<StreamRouterSpec> addedRouterSpecs = new ArrayList<>();
    Collection<StreamRouterSpec> removedRouterSpecs = new ArrayList<>();
    Collection<StreamRouterSpec> modifiedRouterSpecs = new ArrayList<>();
    addedStreamPartitions.forEach(s -> addedRouterSpecs.addAll(newSRS.get(s)));
    removedStreamPartitions.forEach(s -> removedRouterSpecs.addAll(cachedSRS.get(s)));
    modifiedStreamPartitions.forEach(s -> {
        if (!CollectionUtils.isEqualCollection(newSRS.get(s), cachedSRS.get(s))) {
            // this means StreamRouterSpec is changed for one specific StreamPartition
            modifiedRouterSpecs.addAll(newSRS.get(s));
        }
    });
    if (LOG.isDebugEnabled()) {
        LOG.debug("added StreamRouterSpec " + addedRouterSpecs);
        LOG.debug("removed StreamRouterSpec " + removedRouterSpecs);
        LOG.debug("modified StreamRouterSpec " + modifiedRouterSpecs);
    }
    routeCollector.onStreamRouterSpecChange(addedRouterSpecs, removedRouterSpecs, modifiedRouterSpecs, sds);
    // switch cache
    cachedSRS = newSRS;
    sdf = sds;
    specVersion = spec.getVersion();
}
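The two Eagle examples above follow the same pattern: compute added, removed and modified key sets with CollectionUtils, then use forEach to materialize the diff maps. A generic distillation of that pattern (a sketch only; MapDiff and its names are illustrative, CollectionUtils here is Apache Commons Collections 4, and values are assumed non-null):

import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import org.apache.commons.collections4.CollectionUtils;

public class MapDiff {
    /** Splits latest vs. cached into "added", "removed" and "modified" entry maps. */
    public static <K, V> Map<String, Map<K, V>> diff(Map<K, V> cached, Map<K, V> latest) {
        Set<K> cachedKeys = cached.keySet();
        Set<K> latestKeys = latest.keySet();

        Map<K, V> added = new HashMap<>();
        Map<K, V> removed = new HashMap<>();
        Map<K, V> modified = new HashMap<>();

        // Keys only in the new snapshot are additions.
        CollectionUtils.subtract(latestKeys, cachedKeys).forEach(k -> added.put(k, latest.get(k)));
        // Keys only in the cache are removals.
        CollectionUtils.subtract(cachedKeys, latestKeys).forEach(k -> removed.put(k, cached.get(k)));
        // Keys in both snapshots count as modified only if the value changed.
        CollectionUtils.intersection(latestKeys, cachedKeys).forEach(k -> {
            if (!latest.get(k).equals(cached.get(k))) {
                modified.put(k, latest.get(k));
            }
        });

        Map<String, Map<K, V>> result = new HashMap<>();
        result.put("added", added);
        result.put("removed", removed);
        result.put("modified", modified);
        return result;
    }
}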
From source file:org.apache.syncope.core.provisioning.java.data.AbstractAnyDataBinder.java
@SuppressWarnings({ "unchecked", "rawtypes" }) protected void processAttrPatch(final Any any, final AttrPatch patch, final PlainSchema schema, final PlainAttr<?> attr, final AnyUtils anyUtils, final Collection<ExternalResource> resources, final PropagationByResource propByRes, final SyncopeClientException invalidValues) { switch (patch.getOperation()) { case ADD_REPLACE: // 1.1 remove values if (attr.getSchema().isUniqueConstraint()) { if (attr.getUniqueValue() != null && !patch.getAttrTO().getValues().isEmpty() && !patch.getAttrTO().getValues().get(0).equals(attr.getUniqueValue().getValueAsString())) { plainAttrValueDAO.delete(attr.getUniqueValue().getKey(), anyUtils.plainAttrUniqueValueClass()); }/*w ww. j ava 2s . c o m*/ } else { Collection<String> valuesToBeRemoved = attr.getValues().stream().map(value -> value.getKey()) .collect(Collectors.toSet()); valuesToBeRemoved.forEach(attrValueKey -> { plainAttrValueDAO.delete(attrValueKey, anyUtils.plainAttrValueClass()); }); } // 1.2 add values List<String> valuesToBeAdded = patch.getAttrTO().getValues(); if (!valuesToBeAdded.isEmpty() && (!schema.isUniqueConstraint() || attr.getUniqueValue() == null || !valuesToBeAdded.iterator().next().equals(attr.getUniqueValue().getValueAsString()))) { fillAttr(valuesToBeAdded, anyUtils, schema, attr, invalidValues); } // if no values are in, the attribute can be safely removed if (attr.getValuesAsStrings().isEmpty()) { plainAttrDAO.delete(attr); } break; case DELETE: default: any.remove(attr); plainAttrDAO.delete(attr.getKey(), anyUtils.plainAttrClass()); } resources.stream().filter(resource -> (resource.getProvision(any.getType()).isPresent()) && resource.getProvision(any.getType()).get().getMapping() != null).forEach(resource -> { MappingUtils .getPropagationItems(resource.getProvision(any.getType()).get().getMapping().getItems()) .stream().filter(item -> (schema.getKey().equals(item.getIntAttrName()))) .forEach(item -> { propByRes.add(ResourceOperation.UPDATE, resource.getKey()); if (item.isConnObjectKey() && !attr.getValuesAsStrings().isEmpty()) { propByRes.addOldConnObjectKey(resource.getKey(), attr.getValuesAsStrings().get(0)); } }); }); }
From source file:com.evolveum.midpoint.model.impl.ModelObjectResolver.java
@Override
public void resolveAllReferences(Collection<PrismContainerValue> pcvs, Object taskObject,
        OperationResult result) {
    Session session = openResolutionSession(null);
    Task task = (Task) taskObject;
    Visitor visitor = (o) -> {
        if (o instanceof PrismReferenceValue) {
            resolveReference((PrismReferenceValue) o, "resolving object reference", session, task, result);
        }
    };
    pcvs.forEach(pcv -> pcv.accept(visitor));
}