Example usage for java.util Map forEach

List of usage examples for java.util Map forEach

Introduction

On this page you can find example usage for java.util Map forEach.

Prototype

default void forEach(BiConsumer<? super K, ? super V> action) 

Document

Performs the given action for each entry in this map until all entries have been processed or the action throws an exception.
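
Before the project examples below, here is a minimal, self-contained sketch of the method in action (the class name and map contents are made up for illustration):

import java.util.HashMap;
import java.util.Map;

public class MapForEachExample {
    public static void main(String[] args) {
        Map<String, Integer> ages = new HashMap<>();
        ages.put("alice", 34);
        ages.put("bob", 27);

        // The BiConsumer receives each key and value in turn.
        ages.forEach((name, age) -> System.out.println(name + " is " + age));
    }
}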

Usage

From source file:org.dcache.chimera.FsSqlDriver.java

void createTags(FsInode inode, int uid, int gid, int mode, Map<String, byte[]> tags) {
    if (!tags.isEmpty()) {
        Map<String, Long> ids = new HashMap<>();
        tags.forEach((key, value) -> ids.put(key, createTagInode(uid, gid, mode, value)));
        _jdbc.batchUpdate("INSERT INTO t_tags (inumber,itagid,isorign,itagname) VALUES(?,?,1,?)",
                ids.entrySet(), ids.size(), (ps, tag) -> {
                    ps.setLong(1, inode.ino());
                    ps.setLong(2, tag.getValue());
                    ps.setString(3, tag.getKey());
                });
    }
}

From source file:de.acosix.alfresco.mtsupport.repo.auth.ldap.EnhancedLDAPUserRegistry.java

protected Pair<String[], Set<QName>> initKeys(final Map<String, String> attributeMapping,
        final String... extraAttibutes) {
    // Compile a complete array of LDAP attribute names, including operational attributes
    final Set<String> attributeSet = new TreeSet<>();
    final Set<QName> qnames = new HashSet<>(attributeMapping.size() * 2);

    attributeSet.addAll(Arrays.asList(extraAttibutes));
    attributeSet.add(this.modifyTimestampAttributeName);

    attributeMapping.forEach((key, value) -> {
        if (value != null) {
            attributeSet.add(value);
        }

        final QName qname = QName.resolveToQName(this.namespaceService, key);
        qnames.add(qname);
    });

    LOGGER.debug(
            "Derived attribute names {} and property qnames {} from configured mappings {} and extra attributes {}",
            attributeSet, qnames, attributeMapping, Arrays.toString(extraAttibutes));

    return new Pair<>(attributeSet.toArray(new String[0]), qnames);
}

From source file:org.apache.samza.test.framework.TestRunner.java

/**
 * Creates an in-memory stream with {@link InMemorySystemFactory} and feeds its partitions with streams of messages
 * @param partitionData map whose keys are the partition ids and whose values are the messages for each partition
 * @param descriptor describes a stream to initialize with the in memory system
 */
private <StreamMessageType> void initializeInMemoryInputStream(InMemoryInputDescriptor<?> descriptor,
        Map<Integer, Iterable<StreamMessageType>> partitionData) {
    String systemName = descriptor.getSystemName();
    String streamName = (String) descriptor.getPhysicalName().orElse(descriptor.getStreamId());
    if (configs.containsKey(TaskConfig.INPUT_STREAMS())) {
        configs.put(TaskConfig.INPUT_STREAMS(),
                configs.get(TaskConfig.INPUT_STREAMS()).concat("," + systemName + "." + streamName));
    } else {
        configs.put(TaskConfig.INPUT_STREAMS(), systemName + "." + streamName);
    }
    InMemorySystemDescriptor imsd = (InMemorySystemDescriptor) descriptor.getSystemDescriptor();
    imsd.withInMemoryScope(this.inMemoryScope);
    addConfig(descriptor.toConfig());
    addConfig(descriptor.getSystemDescriptor().toConfig());
    addSerdeConfigs(descriptor);
    StreamSpec spec = new StreamSpec(descriptor.getStreamId(), streamName, systemName, partitionData.size());
    SystemFactory factory = new InMemorySystemFactory();
    Config config = new MapConfig(descriptor.toConfig(), descriptor.getSystemDescriptor().toConfig());
    factory.getAdmin(systemName, config).createStream(spec);
    InMemorySystemProducer producer = (InMemorySystemProducer) factory.getProducer(systemName, config, null);
    SystemStream sysStream = new SystemStream(systemName, streamName);
    partitionData.forEach((partitionId, partition) -> {
        partition.forEach(e -> {
            Object key = e instanceof KV ? ((KV) e).getKey() : null;
            Object value = e instanceof KV ? ((KV) e).getValue() : e;
            if (value instanceof IncomingMessageEnvelope) {
                producer.send((IncomingMessageEnvelope) value);
            } else {
                producer.send(systemName,
                        new OutgoingMessageEnvelope(sysStream, Integer.valueOf(partitionId), key, value));
            }
        });
        producer.send(systemName, new OutgoingMessageEnvelope(sysStream, Integer.valueOf(partitionId), null,
                new EndOfStreamMessage(null)));
    });
}

From source file:alfio.manager.TicketReservationManager.java

@Transactional
void cleanupExpiredReservations(Date expirationDate) {
    List<String> expiredReservationIds = ticketReservationRepository.findExpiredReservation(expirationDate);
    if (expiredReservationIds.isEmpty()) {
        return;
    }

    specialPriceRepository.resetToFreeAndCleanupForReservation(expiredReservationIds);
    ticketRepository.resetCategoryIdForUnboundedCategories(expiredReservationIds);
    ticketFieldRepository.deleteAllValuesForReservations(expiredReservationIds);
    ticketRepository.freeFromReservation(expiredReservationIds);
    waitingQueueManager.cleanExpiredReservations(expiredReservationIds);

    //
    Map<Integer, List<ReservationIdAndEventId>> reservationIdsByEvent = ticketReservationRepository
            .getReservationIdAndEventId(expiredReservationIds).stream()
            .collect(Collectors.groupingBy(ReservationIdAndEventId::getEventId));
    reservationIdsByEvent.forEach((eventId, reservations) -> {
        Event event = eventRepository.findById(eventId);
        extensionManager.handleReservationsExpiredForEvent(event,
                reservations.stream().map(ReservationIdAndEventId::getId).collect(Collectors.toList()));
    });
    //
    ticketReservationRepository.remove(expiredReservationIds);
}

From source file:alfio.manager.TicketReservationManager.java

/**
 * Finds all the reservations that are "stuck" in payment status.
 * This could happen when there is an internal error after a successful credit card charge.
 *
 * @param expirationDate expiration date
 */
public void markExpiredInPaymentReservationAsStuck(Date expirationDate) {
    List<String> stuckReservations = ticketReservationRepository.findStuckReservations(expirationDate);
    if (!stuckReservations.isEmpty()) {
        ticketReservationRepository.updateReservationsStatus(stuckReservations,
                TicketReservationStatus.STUCK.name());

        Map<Integer, List<ReservationIdAndEventId>> reservationsGroupedByEvent = ticketReservationRepository
                .getReservationIdAndEventId(stuckReservations).stream()
                .collect(Collectors.groupingBy(ReservationIdAndEventId::getEventId));

        reservationsGroupedByEvent.forEach((eventId, reservationIds) -> {
            Event event = eventRepository.findById(eventId);
            Organization organization = organizationRepository.getById(event.getOrganizationId());
            notificationManager.sendSimpleEmail(event, organization.getEmail(), STUCK_TICKETS_SUBJECT,
                    () -> String.format(STUCK_TICKETS_MSG, event.getShortName()));

            extensionManager.handleStuckReservations(event,
                    reservationIds.stream().map(ReservationIdAndEventId::getId).collect(toList()));
        });
    }
}

From source file:org.openecomp.sdc.be.model.operations.impl.RequirementOperation.java

public Map<String, List<RequirementDefinition>> convertRequirementMap(
        Map<String, RequirementDefinition> requirementMap, String ownerId, String ownerName) {

    Map<String, List<RequirementDefinition>> typeToRequirementMap = new HashMap<String, List<RequirementDefinition>>();
    requirementMap.forEach((reqName, requirement) -> {
        // requirement.setOwnerId(ownerId);
        // requirement.setOwnerName(ownerName);
        if (typeToRequirementMap.containsKey(requirement.getCapability())) {
            typeToRequirementMap.get(requirement.getCapability()).add(requirement);
        } else {
            List<RequirementDefinition> list = new ArrayList<RequirementDefinition>();
            list.add(requirement);
            typeToRequirementMap.put(requirement.getCapability(), list);
        }
    });
    return typeToRequirementMap;
}

From source file:com.okta.swagger.codegen.AbstractOktaJavaClientCodegen.java

public void addListModels(Swagger swagger) {

    Map<String, Model> listModels = new LinkedHashMap<>();

    // lists in paths
    for (Path path : swagger.getPaths().values()) {

        List<Property> properties = new ArrayList<>();
        properties.add(getArrayPropertyFromOperation(path.getGet()));
        properties.add(getArrayPropertyFromOperation(path.getPost()));
        properties.add(getArrayPropertyFromOperation(path.getPatch()));
        properties.add(getArrayPropertyFromOperation(path.getPut()));

        listModels.putAll(processListsFromProperties(properties, null, swagger));
    }

    swagger.getDefinitions().entrySet().stream().filter(entry -> topLevelResources.contains(entry.getKey()))
            .forEach(entry -> {
                Model model = entry.getValue();
                if (model != null && model.getProperties() != null) {
                    listModels
                            .putAll(processListsFromProperties(model.getProperties().values(), model, swagger));
                }
            });

    listModels.forEach(swagger::addDefinition);

}

From source file:de.qaware.chronix.importer.csv.FileImporter.java

/**
 * Reads the given file / folder and calls the bi consumer with the extracted points
 *
 * @param points
 * @param folder
 * @param databases
 * @return
 */
public Pair<Integer, Integer> importPoints(Map<Attributes, Pair<Instant, Instant>> points, File folder,
        BiConsumer<List<ImportPoint>, Attributes>... databases) {

    final AtomicInteger pointCounter = new AtomicInteger(0);
    final AtomicInteger tsCounter = new AtomicInteger(0);
    final File metricsFile = new File(METRICS_FILE_PATH);

    LOGGER.info("Writing imported metrics to {}", metricsFile);
    LOGGER.info("Import supports csv files as well as gz compressed csv files.");

    try {
        final FileWriter metricsFileWriter = new FileWriter(metricsFile);

        Collection<File> files = new ArrayList<>();
        if (folder.isFile()) {
            files.add(folder);
        } else {
            files.addAll(FileUtils.listFiles(folder, new String[] { "gz", "csv" }, true));
        }

        AtomicInteger counter = new AtomicInteger(0);

        files.parallelStream().forEach(file -> {
            SimpleDateFormat sdf = new SimpleDateFormat(dateFormat);
            NumberFormat nf = DecimalFormat.getInstance(numberLocal);

            InputStream inputStream = null;
            BufferedReader reader = null;
            try {
                inputStream = new FileInputStream(file);

                if (file.getName().endsWith("gz")) {
                    inputStream = new GZIPInputStream(inputStream);
                }
                reader = new BufferedReader(new InputStreamReader(inputStream));

                //Read the first line
                String headerLine = reader.readLine();

                if (headerLine == null || headerLine.isEmpty()) {
                    boolean deleted = deleteFile(file, inputStream, reader);
                    LOGGER.debug("File is empty {}. File {} removed {}", file.getName(), deleted);
                    return;
                }

                //Extract the attributes from the file name
                //E.g. first_second_third_attribute.csv
                String[] fileNameMetaData = file.getName().split("_");

                String[] metrics = headerLine.split(csvDelimiter);

                Map<Integer, Attributes> attributesPerTimeSeries = new HashMap<>(metrics.length);

                for (int i = 1; i < metrics.length; i++) {
                    String metric = metrics[i];
                    // Normalize to NFD so accented characters decompose, then strip any remaining non-ASCII characters
                    String metricOnlyAscii = Normalizer.normalize(metric, Normalizer.Form.NFD);
                    metricOnlyAscii = metricOnlyAscii.replaceAll("[^\\x00-\\x7F]", "");
                    Attributes attributes = new Attributes(metricOnlyAscii, fileNameMetaData);

                    //Check if meta data is completely set
                    if (isEmpty(attributes)) {
                        boolean deleted = deleteFile(file, inputStream, reader);
                        LOGGER.info("Attributes contains empty values {}. File {} deleted {}", attributes,
                                file.getName(), deleted);
                        continue;
                    }

                    if (attributes.getMetric().equals(".*")) {
                        boolean deleted = deleteFile(file, inputStream, reader);
                        LOGGER.info("Attributes metric{}. File {} deleted {}", attributes.getMetric(),
                                file.getName(), deleted);
                        continue;
                    }
                    attributesPerTimeSeries.put(i, attributes);
                    tsCounter.incrementAndGet();

                }

                Map<Integer, List<ImportPoint>> dataPoints = new HashMap<>();

                String line;
                while ((line = reader.readLine()) != null) {
                    String[] splits = line.split(csvDelimiter);
                    String date = splits[0];

                    Instant dateObject;
                    if (instantDate) {
                        dateObject = Instant.parse(date);
                    } else if (sdfDate) {
                        dateObject = sdf.parse(date).toInstant();
                    } else {
                        dateObject = Instant.ofEpochMilli(Long.valueOf(date));
                    }

                    for (int column = 1; column < splits.length; column++) {

                        String value = splits[column];
                        double numericValue = nf.parse(value).doubleValue();

                        ImportPoint point = new ImportPoint(dateObject, numericValue);

                        if (!dataPoints.containsKey(column)) {
                            dataPoints.put(column, new ArrayList<>());
                        }
                        dataPoints.get(column).add(point);
                        pointCounter.incrementAndGet();
                    }

                }

                dataPoints.values().forEach(Collections::sort);

                IOUtils.closeQuietly(reader);
                IOUtils.closeQuietly(inputStream);

                dataPoints.forEach((key, importPoints) -> {
                    for (BiConsumer<List<ImportPoint>, Attributes> database : databases) {
                        database.accept(importPoints, attributesPerTimeSeries.get(key));
                    }
                    points.put(attributesPerTimeSeries.get(key), Pair.of(importPoints.get(0).getDate(),
                            importPoints.get(importPoints.size() - 1).getDate()));
                    //write the stats to the file
                    Instant start = importPoints.get(0).getDate();
                    Instant end = importPoints.get(importPoints.size() - 1).getDate();

                    try {
                        writeStatsLine(metricsFileWriter, attributesPerTimeSeries.get(key), start, end);
                    } catch (IOException e) {
                        LOGGER.error("Could not write stats line", e);
                    }
                    LOGGER.info("{} of {} time series imported", counter.incrementAndGet(), tsCounter.get());
                });

            } catch (Exception e) {
                LOGGER.info("Exception while reading points.", e);
            } finally {
                //close all streams
                IOUtils.closeQuietly(reader);
                IOUtils.closeQuietly(inputStream);
            }

        });
    } catch (Exception e) {
        LOGGER.error("Exception occurred during reading points.");
    }
    return Pair.of(tsCounter.get(), pointCounter.get());
}

From source file:energy.usef.core.workflow.step.WorkflowStepLoader.java

private void loadPbcConfig() {
    try {
        Map<String, Class<WorkflowStep>> newlyMappedWorkflowSteps = new HashMap<>();
        File pbcCatalogInConfig = new File(AbstractConfig.getConfigurationFolder() + FILE_PBC_CATALOG);
        InputStream inputStream;
        if (pbcCatalogInConfig.exists() && !pbcCatalogInConfig.isDirectory()) {
            LOGGER.info("Using PBC catalog file {}.", pbcCatalogInConfig.getAbsolutePath());
            inputStream = new FileInputStream(pbcCatalogInConfig);
        } else {
            LOGGER.warn("PBC catalog  file {} not found, using default.", pbcCatalogInConfig.getAbsolutePath());
            inputStream = this.getClass().getClassLoader().getResourceAsStream(FILE_PBC_CATALOG);
        }
        // load the properties
        Properties properties = new Properties();
        properties.load(inputStream);
        for (Map.Entry<Object, Object> entry : properties.entrySet()) {
            if (StringUtils.isEmpty((String) entry.getValue())) {
                LOGGER.error("Invalid configuration for WorkflowStep {}, class name can not be empty.",
                        entry.getKey());
            }
            Class<WorkflowStep> clazz = (Class<WorkflowStep>) Class.forName((String) entry.getValue());

            if (!hasWorkflowStepInterface(clazz)) {
                LOGGER.error(
                        "Keeping old configuration, error occurred: Class [{}] does not implement WorkflowStep interface.",
                        clazz);
                return;
            }
            newlyMappedWorkflowSteps.put((String) entry.getKey(), clazz);
        }
        // no exceptions occurred, override current map
        this.workflowStepsMap = newlyMappedWorkflowSteps;
        newlyMappedWorkflowSteps.forEach(
                (name, clazz) -> LOGGER.info("Successfully Loaded Step [{}] with Class [{}]", name, clazz));
    } catch (ClassNotFoundException | IOException e) {
        LOGGER.error("Keeping old configuration, exception occurred: " + e.getMessage(), e);
    }
}

From source file:com.streamsets.pipeline.stage.bigquery.destination.BigQueryTarget.java

@Override
public void write(Batch batch) throws StageException {
    Map<TableId, List<Record>> tableIdToRecords = new LinkedHashMap<>();
    Map<Long, Record> requestIndexToRecords = new LinkedHashMap<>();

    if (batch.getRecords().hasNext()) {
        ELVars elVars = getContext().createELVars();
        batch.getRecords().forEachRemaining(record -> {
            RecordEL.setRecordInContext(elVars, record);
            try {
                String datasetName = dataSetEval.eval(elVars, conf.datasetEL, String.class);
                String tableName = tableNameELEval.eval(elVars, conf.tableNameEL, String.class);
                TableId tableId = TableId.of(datasetName, tableName);
                if (tableIdExistsCache.get(tableId)) {
                    List<Record> tableIdRecords = tableIdToRecords.computeIfAbsent(tableId,
                            t -> new ArrayList<>());
                    tableIdRecords.add(record);
                } else {
                    getContext().toError(record, Errors.BIGQUERY_17, datasetName, tableName,
                            conf.credentials.projectId);
                }
            } catch (ELEvalException e) {
                LOG.error("Error evaluating DataSet/TableName EL", e);
                getContext().toError(record, Errors.BIGQUERY_10, e);
            } catch (ExecutionException e) {
                LOG.error("Error when checking exists for tableId, Reason : {}", e);
                Throwable rootCause = Throwables.getRootCause(e);
                getContext().toError(record, Errors.BIGQUERY_13, rootCause);
            }
        });

        tableIdToRecords.forEach((tableId, records) -> {
            final AtomicLong index = new AtomicLong(0);
            final AtomicBoolean areThereRecordsToWrite = new AtomicBoolean(false);
            InsertAllRequest.Builder insertAllRequestBuilder = InsertAllRequest.newBuilder(tableId);
            records.forEach(record -> {
                try {
                    String insertId = getInsertIdForRecord(elVars, record);
                    Map<String, ?> rowContent = convertToRowObjectFromRecord(record);
                    if (rowContent.isEmpty()) {
                        throw new OnRecordErrorException(record, Errors.BIGQUERY_14);
                    }
                    insertAllRequestBuilder.addRow(insertId, rowContent);
                    areThereRecordsToWrite.set(true);
                    requestIndexToRecords.put(index.getAndIncrement(), record);
                } catch (OnRecordErrorException e) {
                    LOG.error("Error when converting record {} to row, Reason : {} ",
                            record.getHeader().getSourceId(), e.getMessage());
                    getContext().toError(record, e.getErrorCode(), e.getParams());
                }
            });

            if (areThereRecordsToWrite.get()) {
                insertAllRequestBuilder.setIgnoreUnknownValues(conf.ignoreInvalidColumn);
                insertAllRequestBuilder.setSkipInvalidRows(false);

                InsertAllRequest request = insertAllRequestBuilder.build();

                if (!request.getRows().isEmpty()) {
                    try {
                        InsertAllResponse response = bigQuery.insertAll(request);
                        if (response.hasErrors()) {
                            response.getInsertErrors().forEach((requestIdx, errors) -> {
                                Record record = requestIndexToRecords.get(requestIdx);
                                String messages = COMMA_JOINER.join(errors.stream()
                                        .map(BigQueryError::getMessage).collect(Collectors.toList()));
                                String reasons = COMMA_JOINER.join(errors.stream().map(BigQueryError::getReason)
                                        .collect(Collectors.toList()));
                                LOG.error("Error when inserting record {}, Reasons : {}, Messages : {}",
                                        record.getHeader().getSourceId(), reasons, messages);
                                getContext().toError(record, Errors.BIGQUERY_11, reasons, messages);
                            });
                        }
                    } catch (BigQueryException e) {
                        LOG.error(Errors.BIGQUERY_13.getMessage(), e);
                        //Put all records to error.
                        for (long i = 0; i < request.getRows().size(); i++) {
                            Record record = requestIndexToRecords.get(i);
                            getContext().toError(record, Errors.BIGQUERY_13, e);
                        }
                    }
                }
            }
        });
    }
}