Example usage for java.util Map forEach

List of usage examples for java.util Map forEach

Introduction

On this page you can find example usage for java.util.Map forEach.

Prototype

default void forEach(BiConsumer<? super K, ? super V> action) 

Document

Performs the given action for each entry in this map until all entries have been processed or the action throws an exception.
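
For quick reference, here is a minimal, self-contained sketch of typical forEach usage; the class name, map contents, and printed output are illustrative only and not taken from the projects below.

import java.util.HashMap;
import java.util.Map;

public class MapForEachExample {
    public static void main(String[] args) {
        Map<String, Integer> wordCounts = new HashMap<>();
        wordCounts.put("alpha", 1);
        wordCounts.put("beta", 2);
        wordCounts.put("gamma", 3);

        // The BiConsumer receives each key and value; entries are visited in the map's iteration order.
        wordCounts.forEach((word, count) -> System.out.println(word + " -> " + count));

        // forEach is also convenient for copying or transforming entries into another map.
        Map<String, Integer> doubled = new HashMap<>();
        wordCounts.forEach((word, count) -> doubled.put(word, count * 2));
        System.out.println(doubled);
    }
}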

Usage

From source file:org.openhab.binding.modbus.internal.handler.ModbusDataThingHandler.java

private void updateExpiredChannels(Map<ChannelUID, State> states) {
    synchronized (this) {
        updateStatusIfChanged(ThingStatus.ONLINE);
        long now = System.currentTimeMillis();
        // Update channels that have not been updated in a while, or when their values have changed
        states.forEach((uid, state) -> updateExpiredChannel(now, uid, state));
        channelLastState = states;
    }
}

From source file:org.fao.geonet.api.registries.DirectoryApi.java

@ApiOperation(value = "Import spatial directory entries", nickname = "importSpatialEntries", notes = "Directory entry (AKA subtemplates) are XML fragments that can be "
        + "inserted in metadata records. Use this service to import geographic extent entries "
        + "from an ESRI Shapefile format.")
@RequestMapping(value = "/import/spatial", method = RequestMethod.POST, consumes = {
        MediaType.MULTIPART_FORM_DATA_VALUE }, produces = { MediaType.APPLICATION_JSON_VALUE })
@ResponseStatus(HttpStatus.CREATED)
@ApiResponses(value = { @ApiResponse(code = 201, message = "Directory entries imported."),
        @ApiResponse(code = 403, message = ApiParams.API_RESPONSE_NOT_ALLOWED_ONLY_REVIEWER) })
@PreAuthorize("hasRole('Reviewer')")
@ResponseBody
public SimpleMetadataProcessingReport importSpatialEntries(
        @ApiParam(value = "The ZIP file to upload containing the Shapefile.", required = true) @RequestParam("file") MultipartFile file,
        @ApiParam(value = "Attribute to use for UUID. If none, random UUID are generated.", required = false) @RequestParam(required = false) String uuidAttribute,
        @ApiParam(value = "Pattern to build UUID from. Default is '{{uuid}}'.", required = false) @RequestParam(defaultValue = "{{uuid}}", required = false) String uuidPattern,
        @ApiParam(value = "Attribute to use for extent description. "
                + "If none, no extent description defined. TODO: Add per language desc ?", required = false) @RequestParam(required = false) String descriptionAttribute,
        @ApiParam(value = "geomProjectionTo", defaultValue = "", required = false) @RequestParam(required = false) String geomProjectionTo,
        @ApiParam(value = "lenient", defaultValue = "false", required = false) @RequestParam(required = false) boolean lenient,
        @ApiParam(value = "Create only bounding box for each spatial objects.", required = false) @RequestParam(required = false, defaultValue = "true") boolean onlyBoundingBox,
        @ApiParam(value = "Process", defaultValue = "build-extent-subtemplate", required = false) @RequestParam(required = false) String process,
        @ApiParam(value = "Schema identifier", defaultValue = "iso19139", required = false) @RequestParam(required = false) String schema,
        @ApiParam(value = API_PARAM_RECORD_UUID_PROCESSING, required = false, defaultValue = "NOTHING") @RequestParam(required = false, defaultValue = "NOTHING") final MEFLib.UuidAction uuidProcessing,
        @ApiParam(value = API_PARAP_RECORD_GROUP, required = false) @RequestParam(required = false) final Integer group,
        @ApiIgnore MultipartHttpServletRequest request) throws Exception {

    ServiceContext context = ApiUtils.createServiceContext(request);
    ApplicationContext applicationContext = ApplicationContextHolder.get();
    DataManager dm = applicationContext.getBean(DataManager.class);
    SettingManager settingManager = applicationContext.getBean(SettingManager.class);

    MetadataSchema metadataSchema = dm.getSchema(schema);
    Path xslProcessing = metadataSchema.getSchemaDir().resolve("process").resolve(process + ".xsl");

    File[] shapeFiles = unzipAndFilterShp(file);

    CollectResults collectResults = new CollectResults();

    SimpleMetadataProcessingReport report = new SimpleMetadataProcessingReport();

    for (File shapeFile : shapeFiles) {

        SimpleFeatureCollection collection = shapeFileToFeatureCollection(shapeFile);

        try (FeatureIterator<SimpleFeature> features = collection.features()) {

            while (features.hasNext()) {
                SimpleFeature feature = features.next();

                String uuid = computeUuid(uuidAttribute, uuidPattern, feature);
                String description = computeDescription(descriptionAttribute, feature);
                Envelope wgsEnvelope = computeEnvelope(feature);
                Geometry featureGeometry = reprojGeom(geomProjectionTo, lenient, feature);
                String xmlGeometry = geometryToXml(featureGeometry, collection.getSchema());

                Map<String, Object> parameters = new HashMap<>();

                parameters.put("uuid", uuid);
                parameters.put("description", description);
                parameters.put("east", wgsEnvelope.getMaxX());
                parameters.put("north", wgsEnvelope.getMaxY());
                parameters.put("west", wgsEnvelope.getMinX());
                parameters.put("south", wgsEnvelope.getMinY());
                parameters.put("onlyBoundingBox", onlyBoundingBox);
                parameters.put("geometry", xmlGeometry);

                Element subtemplate = new Element("root");
                Element snippet = Xml.transform(subtemplate, xslProcessing, parameters);

                collectResults.getEntries().put(uuid, uuid, snippet);
            }
        }

        report.addInfos(String.format("%d entries extracted from shapefile '%s'.", collection.size(),
                shapeFile.getName()));
    }

    report.setTotalRecords(collectResults.getEntries().size());

    // Save the snippets and index
    if (collectResults.getEntries().size() > 0) {
        // Create an empty record providing schema information
        // about collected subtemplates
        Metadata record = new Metadata();
        record.getDataInfo().setSchemaId(schema);
        collectResults.setRecord(record);

        int user = context.getUserSession().getUserIdAsInt();
        String siteId = settingManager.getSiteId();

        Map<String, Exception> errors = DirectoryUtils.saveEntries(context, collectResults, siteId, user, group,
                false);

        dm.flush();

        Set<Integer> listOfRecordInternalId = new HashSet<>();
        listOfRecordInternalId.addAll(collectResults.getEntryIdentifiers().values());

        report.addInfos(String.format("%d entries saved.", listOfRecordInternalId.size()));

        BatchOpsMetadataReindexer r = new BatchOpsMetadataReindexer(dm, listOfRecordInternalId);
        r.process();

        errors.forEach((k, v) -> report.addError(v));

        report.close();
    } else {
        report.addInfos(String.format("No entry found in ZIP file '%s'", file.getOriginalFilename()));
        report.close();
    }
    return report;
}

From source file:org.apache.solr.client.solrj.impl.SolrClientCloudManager.java

@Override
public byte[] httpRequest(String url, SolrRequest.METHOD method, Map<String, String> headers, String payload,
        int timeout, boolean followRedirects) throws IOException {
    HttpClient client = solrClient.getHttpClient();
    final HttpRequestBase req;
    HttpEntity entity = null;
    if (payload != null) {
        entity = new StringEntity(payload, "UTF-8");
    }
    switch (method) {
    case GET:
        req = new HttpGet(url);
        break;
    case POST:
        req = new HttpPost(url);
        if (entity != null) {
            ((HttpPost) req).setEntity(entity);
        }
        break;
    case PUT:
        req = new HttpPut(url);
        if (entity != null) {
            ((HttpPut) req).setEntity(entity);
        }
        break;
    case DELETE:
        req = new HttpDelete(url);
        break;
    default:
        throw new IOException("Unsupported method " + method);
    }
    if (headers != null) {
        headers.forEach((k, v) -> req.addHeader(k, v));
    }
    RequestConfig.Builder requestConfigBuilder = HttpClientUtil.createDefaultRequestConfigBuilder();
    if (timeout > 0) {
        requestConfigBuilder.setSocketTimeout(timeout);
        requestConfigBuilder.setConnectTimeout(timeout);
    }
    requestConfigBuilder.setRedirectsEnabled(followRedirects);
    req.setConfig(requestConfigBuilder.build());
    HttpClientContext httpClientRequestContext = HttpClientUtil.createNewHttpClientRequestContext();
    HttpResponse rsp = client.execute(req, httpClientRequestContext);
    int statusCode = rsp.getStatusLine().getStatusCode();
    if (statusCode != 200) {
        throw new IOException("Error sending request to " + url + ", HTTP response: " + rsp.toString());
    }
    HttpEntity responseEntity = rsp.getEntity();
    if (responseEntity != null && responseEntity.getContent() != null) {
        return EntityUtils.toByteArray(responseEntity);
    } else {
        return EMPTY;
    }
}

From source file:org.apache.hadoop.hbase.tool.LoadIncrementalHFiles.java

/**
 * Populate the Queue with given HFiles
 */
private void populateLoadQueue(Deque<LoadQueueItem> ret, Map<byte[], List<Path>> map) {
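    // For each key, wrap every associated HFile path in a LoadQueueItem and append it to the queue in order.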
    map.forEach((k, v) -> v.stream().map(p -> new LoadQueueItem(k, p)).forEachOrdered(ret::add));
}

From source file:org.talend.dataprep.dataset.service.DataSetService.java

/**
 * Returns the <b>full</b> data set content for given id.
 *
 * @param metadata If <code>true</code>, includes data set metadata information.
 * @param dataSetId A data set id.
 * @return The full data set.
 */
@RequestMapping(value = "/datasets/{id}/content", method = RequestMethod.GET, produces = APPLICATION_JSON_VALUE)
@ApiOperation(value = "Get a data set by id", notes = "Get a data set content based on provided id. Id should be a UUID returned by the list operation. Not valid or non existing data set id returns empty content.")
@Timed
@ResponseBody
public Callable<DataSet> get(
        @RequestParam(defaultValue = "true") @ApiParam(name = "metadata", value = "Include metadata information in the response") boolean metadata, //
        @RequestParam(defaultValue = "false") @ApiParam(name = "includeInternalContent", value = "Include internal content in the response") boolean includeInternalContent, //
        @PathVariable(value = "id") @ApiParam(name = "id", value = "Id of the requested data set") String dataSetId) {
    return () -> {
        final Marker marker = Markers.dataset(dataSetId);
        LOG.debug(marker, "Get data set #{}", dataSetId);
        try {
            DataSetMetadata dataSetMetadata = dataSetMetadataRepository.get(dataSetId);
            assertDataSetMetadata(dataSetMetadata, dataSetId);
            // Build the result
            DataSet dataSet = new DataSet();
            if (metadata) {
                dataSet.setMetadata(conversionService.convert(dataSetMetadata, UserDataSetMetadata.class));
            }
            Stream<DataSetRow> stream = contentStore.stream(dataSetMetadata, -1); // Disable line limit
            if (!includeInternalContent) {
                LOG.debug("Skip internal content when serving data set #{} content.", dataSetId);
                stream = stream.map(r -> {
                    final Map<String, Object> values = r.values();
                    final Map<String, Object> filteredValues = new HashMap<>(values);
                    values.forEach((k, v) -> {
                        // Removes technical properties from returned values.
                        if (k != null && k.startsWith(FlagNames.INTERNAL_PROPERTY_PREFIX)) {
                            filteredValues.remove(k);
                        }
                    });
                    filteredValues.put(FlagNames.TDP_ID, r.getTdpId()); // Include TDP_ID anyway
                    return new DataSetRow(r.getRowMetadata(), filteredValues);
                });
            }
            dataSet.setRecords(stream);
            return dataSet;
        } finally {
            LOG.debug(marker, "Get done.");
        }
    };
}

From source file:org.codice.ddf.configuration.migration.ImportMigrationDirectoryEntryImpl.java

@Override
public boolean restore(boolean required) {
    if (restored == null) {
        super.restored = false; // until proven otherwise
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("Importing {}...", toDebugString());
        }
        // a directory is always exported by the framework; as such, we can safely extend our
        // privileges
        AccessUtils.doPrivileged(() -> {
            final PathUtils pathUtils = getContext().getPathUtils();
            final Path apath = getAbsolutePath();
            // find all existing files, mapped from their path relative to ddf.home to their absolute path
            final Map<Path, Path> existingFiles = FileUtils
                    .listFiles(apath.toFile(), TrueFileFilter.INSTANCE, TrueFileFilter.INSTANCE).stream()
                    .map(File::toPath).collect(Collectors.toMap(pathUtils::relativizeFromDDFHome, p -> p));

            // it is safe to ignore the 'required' parameter since if we get here, we have a
            // directory
            // exported to start with and all files underneath are always optional so pass false to
            // restore()
            if (fileEntries.stream().peek(me -> existingFiles.remove(me.getPath())).map(me -> me.restore(false))
                    .reduce(true, Boolean::logicalAnd)) {
                if (!filtered) {
                    // all files from the original system were exported under this directory, so remove
                    // all files that were not on the original system but are found on the current one
                    final MigrationReport report = getReport();

                    existingFiles.forEach((p, ap) -> PathUtils.cleanQuietly(ap, report));
                    // cleanup all empty directories left underneath this entry's path
                    PathUtils.cleanQuietly(apath, report);
                }
                SecurityLogger.audit("Imported directory {}", apath);
                super.restored = true;
            } else {
                SecurityLogger.audit("Error importing directory {}", apath);
                getReport().record(new MigrationException(Messages.IMPORT_PATH_COPY_ERROR, getPath(),
                        pathUtils.getDDFHome(), "some directory entries failed"));
            }
        });
    }
    return restored;
}

From source file:io.swagger.v3.parser.converter.SwaggerConverter.java

private Content convertExamples(final Map examples, final Content content) {
    if (examples != null) {
        examples.forEach((k, v) -> {
            MediaType mT = content.get(k);
            if (mT == null) {
                mT = new MediaType();
                content.addMediaType(k.toString(), mT);
            }
            mT.setExample(v);
        });
    }
    return content;
}

From source file:io.swagger.v3.parser.converter.SwaggerConverter.java

private Map<String, Header> convertHeaders(Map<String, Property> headers) {
    Map<String, Header> result = new HashMap<>();

    headers.forEach((k, v) -> {
        result.put(k, convertHeader(v));
    });

    return result;
}

From source file:com.baidu.rigel.biplatform.tesseract.isservice.search.service.impl.CallbackSearchServiceImpl.java

/**
 * @param context the query context
 * @param query the query request
 * @return the search result set
 * @throws IndexAndSearchException if an error occurs while querying
 */
public SearchIndexResultSet query(QueryContext context, QueryRequest query) throws IndexAndSearchException {
    LOGGER.info(String.format(LogInfoConstants.INFO_PATTERN_FUNCTION_BEGIN, "callbackquery",
            "[callbackquery:" + query + "]"));
    if (query == null || context == null || StringUtils.isEmpty(query.getCubeId())) {
        LOGGER.error(String.format(LogInfoConstants.INFO_PATTERN_FUNCTION_EXCEPTION, "callbackquery",
                "[callbackquery:" + query + "]"));
        throw new IndexAndSearchException(
                TesseractExceptionUtils.getExceptionMessage(IndexAndSearchException.QUERYEXCEPTION_MESSAGE,
                        IndexAndSearchExceptionType.ILLEGALARGUMENT_EXCEPTION),
                IndexAndSearchExceptionType.ILLEGALARGUMENT_EXCEPTION);
    }
    // TODO
    if (query.getGroupBy() == null || query.getSelect() == null) {
        return null;
    }
    Map<String, String> requestParams = ((QueryContextAdapter) context).getQuestionModel().getRequestParams();
    // Build query target map
    Map<String, List<MiniCubeMeasure>> callbackMeasures = context.getQueryMeasures().stream()
            .filter(m -> m.getType().equals(MeasureType.CALLBACK)).map(m -> {
                CallbackMeasure tmp = (CallbackMeasure) m;
                for (Map.Entry<String, String> entry : tmp.getCallbackParams().entrySet()) {
                    if (requestParams.containsKey(entry.getKey())) {
                        tmp.getCallbackParams().put(entry.getKey(), requestParams.get(entry.getKey()));
                    }
                }
                return m;
            }).collect(Collectors.groupingBy(c -> ((CallbackMeasure) c).getCallbackUrl(), Collectors.toList()));
    if (callbackMeasures == null || callbackMeasures.isEmpty()) {
        LOGGER.error(String.format(LogInfoConstants.INFO_PATTERN_FUNCTION_EXCEPTION, "Empty callback measure",
                "[callbackquery:" + query + "]"));
        throw new IndexAndSearchException(
                TesseractExceptionUtils.getExceptionMessage(IndexAndSearchException.QUERYEXCEPTION_MESSAGE,
                        IndexAndSearchExceptionType.ILLEGALARGUMENT_EXCEPTION),
                IndexAndSearchExceptionType.ILLEGALARGUMENT_EXCEPTION);
    }
    LOGGER.info("Find callback targets " + callbackMeasures);

    // Keep group-by sequence.
    List<String> groupby = new ArrayList<String>(query.getGroupBy().getGroups());
    LinkedHashMap<String, List<String>> groupbyParams = new LinkedHashMap<String, List<String>>(groupby.size());
    for (String g : groupby) {
        groupbyParams.put(g, new ArrayList<String>());
    }

    LinkedHashMap<String, List<String>> whereParams = new LinkedHashMap<String, List<String>>();
    for (Expression e : query.getWhere().getAndList()) {
        List<String> l = e.getQueryValues().stream().filter(v -> !StringUtils.isEmpty(v.getValue()))
                .map(v -> v.getValue()).collect(Collectors.toList());
        if (groupbyParams.containsKey(e.getProperties())) {
            // if the list does not contain SUMMARY_KEY, add it to the group-by list
            if (!l.contains(TesseractConstant.SUMMARY_KEY)) {
                l.add(TesseractConstant.SUMMARY_KEY);
            }
            // Put it into group by field
            groupbyParams.get(e.getProperties()).addAll(l);
        } else {
            // Put it into filter field
            if (CollectionUtils.isEmpty(l)) {
                List<Set<String>> tmp = e.getQueryValues().stream().map(v -> v.getLeafValues())
                        .collect(Collectors.toList());
                List<String> values = Lists.newArrayList();
                tmp.forEach(t -> values.addAll(t));
                whereParams.put(e.getProperties(), values);
            } else {
                whereParams.put(e.getProperties(), new ArrayList<String>(l));
            }
        }
    }

    // Prepare query tools
    //        CountDownLatch latch = new CountDownLatch(response.size());
    //        List<Future<CallbackResponse>> results = Lists.newArrayList();
    Map<CallbackExecutor, Future<CallbackResponse>> results = Maps.newHashMap();
    ExecutorCompletionService<CallbackResponse> service = new ExecutorCompletionService<CallbackResponse>(
            taskExecutor);
    StringBuilder callbackMeasureNames = new StringBuilder();
    for (Entry<String, List<MiniCubeMeasure>> e : callbackMeasures.entrySet()) {
        CallbackExecutor ce = new CallbackExecutor(e, groupbyParams, whereParams);
        results.put(ce, service.submit(ce));
        e.getValue().forEach(m -> {
            callbackMeasureNames.append(" " + m.getCaption() + " ");
        });
    }
    //        }
    Map<CallbackExecutor, CallbackResponse> response = new ConcurrentHashMap<CallbackExecutor, CallbackResponse>(
            callbackMeasures.size());
    StringBuffer sb = new StringBuffer();
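    // Wait for each submitted callback to finish and collect its response; record failures for reporting.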
    results.forEach((k, v) -> {
        try {
            response.put(k, v.get());
        } catch (Exception e1) {
            LOGGER.error(e1.getMessage(), e1);
            sb.append(": " + callbackMeasureNames.toString()
                    + " ??, ?");
        }
    });
    if (!StringUtils.isEmpty(sb.toString())) {
        if (ThreadLocalPlaceholder.getProperty(ThreadLocalPlaceholder.ERROR_MSG_KEY) != null) {
            ThreadLocalPlaceholder.unbindProperty(ThreadLocalPlaceholder.ERROR_MSG_KEY);
        }
        ThreadLocalPlaceholder.bindProperty(ThreadLocalPlaceholder.ERROR_MSG_KEY, sb.toString());
    }
    // Package result
    SqlQuery sqlQuery = QueryRequestUtil.transQueryRequest2SqlQuery(query);
    SearchIndexResultSet result = null;
    if (!response.isEmpty()) {
        result = packageResultRecords(query, sqlQuery, response);
    } else {
        result = new SearchIndexResultSet(new Meta(query.getGroupBy().getGroups().toArray(new String[0])), 0);
    }

    LOGGER.info(String.format(LogInfoConstants.INFO_PATTERN_FUNCTION_END, "query", "[query:" + query + "]"));
    return result;
}

From source file:org.openhab.binding.modbus.internal.handler.ModbusDataThingHandler.java

@Override
public synchronized void onError(ModbusReadRequestBlueprint request, Exception error) {
    if (hasConfigurationError()) {
        return;
    } else if (!isReadEnabled) {
        return;
    }
    if (error instanceof ModbusConnectionException) {
        logger.error("Thing {} '{}' had {} error on read: {}", getThing().getUID(), getThing().getLabel(),
                error.getClass().getSimpleName(), error.toString());
    } else if (error instanceof ModbusTransportException) {
        logger.error("Thing {} '{}' had {} error on read: {}", getThing().getUID(), getThing().getLabel(),
                error.getClass().getSimpleName(), error.toString());
    } else {
        logger.error(
                "Thing {} '{}' had {} error on read: {} (message: {}). Stack trace follows since this is unexpected error.",
                getThing().getUID(), getThing().getLabel(), error.getClass().getName(), error.toString(),
                error.getMessage(), error);
    }
    Map<@NonNull ChannelUID, @NonNull State> states = new HashMap<>();
    ChannelUID lastReadErrorUID = getChannelUID(ModbusBindingConstantsInternal.CHANNEL_LAST_READ_ERROR);
    if (isLinked(lastReadErrorUID)) {
        states.put(lastReadErrorUID, new DateTimeType());
    }

    synchronized (this) {
        // Update channels
        states.forEach((uid, state) -> {
            tryUpdateState(uid, state);
        });

        updateStatusIfChanged(ThingStatus.OFFLINE, ThingStatusDetail.COMMUNICATION_ERROR,
                String.format("Error (%s) with read. Request: %s. Description: %s. Message: %s",
                        error.getClass().getSimpleName(), request, error.toString(), error.getMessage()));
    }
}