List of usage examples for java.util.Map.forEach
default void forEach(BiConsumer<? super K, ? super V> action)
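The method applies the given BiConsumer to every entry in the map, in the map's iteration order where one is defined, until all entries have been processed or the action throws an exception. Before the project-level examples below, here is a minimal, self-contained sketch; the class name and sample data are illustrative and do not come from any of the source files listed.

import java.util.LinkedHashMap;
import java.util.Map;

public class MapForEachDemo {
    public static void main(String[] args) {
        // Illustrative data; any Map implementation exposes the same default forEach.
        Map<String, Integer> wordCounts = new LinkedHashMap<>();
        wordCounts.put("alpha", 3);
        wordCounts.put("beta", 5);
        wordCounts.put("gamma", 1);

        // The BiConsumer receives each key and its mapped value.
        wordCounts.forEach((word, count) -> System.out.println(word + " -> " + count));
    }
}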
From source file: com.qwazr.server.InFileSessionPersistenceManager.java

@Override
public void persistSessions(final String deploymentName, final Map<String, PersistentSession> sessionData) {
    if (sessionData == null)
        return;
    final Path deploymentDir = sessionDir.resolve(deploymentName);
    try {
        if (!Files.exists(deploymentDir))
            Files.createDirectory(deploymentDir);
    } catch (IOException e) {
        LOGGER.log(Level.WARNING, e,
                () -> "Cannot create the session directory " + deploymentDir + ": persistence aborted.");
        return;
    }
    sessionData.forEach(
            (sessionId, persistentSession) -> writeSession(deploymentDir, sessionId, persistentSession));
}
From source file: com.adobe.ags.curly.controller.ActionRunner.java

private void applyVariablesToMap(Map<String, String> variables, Map<String, String> target) {
    Set<String> variableTokens = ActionUtils.getVariableNames(action);
    Set removeSet = new HashSet<>();
    Map<String, String> newValues = new HashMap<>();
    target.forEach((paramName, paramValue) -> {
        StringProperty paramNameProperty = new SimpleStringProperty(paramName);
        variableTokens.forEach((String originalName) -> {
            String[] variableNameParts = originalName.split("\\|");
            String variableName = variableNameParts[0];
            String variableNameMatchPattern = Pattern.quote("${" + originalName + "}");
            String val = variables.get(variableName);
            if (val == null) {
                val = "";
            }
            String variableValue = Matcher.quoteReplacement(val);
            //----
            String newParamValue = newValues.containsKey(paramNameProperty.get())
                    ? newValues.get(paramNameProperty.get()) : paramValue;
            String newParamName = paramNameProperty.get().replaceAll(variableNameMatchPattern, variableValue);
            paramNameProperty.set(newParamName);
            newParamValue = newParamValue.replaceAll(variableNameMatchPattern, variableValue);
            if (!newParamName.equals(paramName) || !newParamValue.equals(paramValue)) {
                removeSet.add(paramNameProperty.get());
                removeSet.add(paramName);
                newValues.put(newParamName, newParamValue);
            }
        });
    });
    target.keySet().removeAll(removeSet);
    target.putAll(newValues);
}
From source file: com.nike.cerberus.auth.connector.onelogin.OneLoginAuthConnector.java

/**
 * Executes the HTTP request based on the input parameters.
 *
 * @param url         The URL to execute the request against
 * @param method      The HTTP method for the request
 * @param requestBody The request body of the HTTP request
 * @return Response from the server
 */
protected Response execute(final HttpUrl url, final String method, final Map<String, String> headers,
        final Object requestBody) {
    try {
        Request.Builder requestBuilder = new Request.Builder().url(url)
                .addHeader("Accept", DEFAULT_MEDIA_TYPE.toString());

        if (headers != null) {
            headers.forEach(requestBuilder::addHeader);
        }

        if (requestBody != null) {
            requestBuilder.addHeader("Content-Type", DEFAULT_MEDIA_TYPE.toString())
                    .method(method, RequestBody.create(DEFAULT_MEDIA_TYPE,
                            objectMapper.writeValueAsString(requestBody).getBytes(Charset.forName("UTF-8"))));
        } else {
            requestBuilder.method(method, null);
        }

        return httpClient.newCall(requestBuilder.build()).execute();
    } catch (IOException e) {
        if (e instanceof SSLException && e.getMessage() != null
                && e.getMessage().contains("Unrecognized SSL message, plaintext connection?")) {
            throw ApiException.newBuilder().withApiErrors(DefaultApiError.SERVICE_UNAVAILABLE)
                    .withExceptionCause(e)
                    .withExceptionMessage(
                            "I/O error while communicating with OneLogin. Unrecognized SSL message may be due to a web proxy e.g. AnyConnect")
                    .build();
        } else {
            throw ApiException.newBuilder().withApiErrors(DefaultApiError.SERVICE_UNAVAILABLE)
                    .withExceptionCause(e)
                    .withExceptionMessage("I/O error while communicating with OneLogin.")
                    .build();
        }
    }
}
From source file: org.onosproject.bmv2.demo.app.wcmp.WcmpFabricApp.java

private Pair<ExtensionTreatment, List<FlowRule>> provisionWcmpTreatment(DeviceId deviceId,
        Map<PortNumber, Double> weightedFabricPorts) throws FlowRuleGeneratorException {
    // Install WCMP group table entries that map from hash values to fabric ports.
    int groupId = groupIdOf(deviceId, weightedFabricPorts);
    List<PortNumber> portNumbers = Lists.newArrayList();
    List<Double> weights = Lists.newArrayList();
    weightedFabricPorts.forEach((p, w) -> {
        portNumbers.add(p);
        weights.add(w);
    });
    List<Integer> prefixLengths = toPrefixLengths(weights);
    List<FlowRule> rules = Lists.newArrayList();
    for (int i = 0; i < portNumbers.size(); i++) {
        ExtensionSelector extSelector = buildWcmpSelector(groupId, prefixLengths.get(i));
        FlowRule rule = flowRuleBuilder(deviceId, WCMP_GROUP_TABLE)
                .withSelector(DefaultTrafficSelector.builder().extension(extSelector, deviceId).build())
                .withTreatment(DefaultTrafficTreatment.builder().setOutput(portNumbers.get(i)).build())
                .build();
        rules.add(rule);
    }
    ExtensionTreatment extTreatment = buildWcmpTreatment(groupId);
    return Pair.of(extTreatment, rules);
}
From source file: org.apache.nifi.lookup.CSVRecordLookupService.java

private void loadCache() throws IllegalStateException, IOException {
    if (lock.tryLock()) {
        try {
            final ComponentLog logger = getLogger();
            if (logger.isDebugEnabled()) {
                logger.debug("Loading lookup table from file: " + csvFile);
            }
            final FileReader reader = new FileReader(csvFile);
            final CSVParser records = csvFormat.withFirstRecordAsHeader().parse(reader);
            ConcurrentHashMap<String, Record> cache = new ConcurrentHashMap<>();
            RecordSchema lookupRecordSchema = null;
            for (final CSVRecord record : records) {
                final String key = record.get(lookupKeyColumn);
                if (StringUtils.isBlank(key)) {
                    throw new IllegalStateException("Empty lookup key encountered in: " + csvFile);
                } else if (!ignoreDuplicates && cache.containsKey(key)) {
                    throw new IllegalStateException(
                            "Duplicate lookup key encountered: " + key + " in " + csvFile);
                } else if (ignoreDuplicates && cache.containsKey(key)) {
                    logger.warn("Duplicate lookup key encountered: {} in {}", new Object[] { key, csvFile });
                }

                // Put each key/value pair (except the lookup) into the properties
                final Map<String, Object> properties = new HashMap<>();
                record.toMap().forEach((k, v) -> {
                    if (!lookupKeyColumn.equals(k)) {
                        properties.put(k, v);
                    }
                });

                if (lookupRecordSchema == null) {
                    List<RecordField> recordFields = new ArrayList<>(properties.size());
                    properties.forEach((k, v) -> recordFields
                            .add(new RecordField(k, RecordFieldType.STRING.getDataType())));
                    lookupRecordSchema = new SimpleRecordSchema(recordFields);
                }

                cache.put(key, new MapRecord(lookupRecordSchema, properties));
            }

            this.cache = cache;

            if (cache.isEmpty()) {
                logger.warn("Lookup table is empty after reading file: " + csvFile);
            }
        } finally {
            lock.unlock();
        }
    }
}
From source file: io.gravitee.management.service.impl.ApiKeyServiceImpl.java

@Override
public Map<String, List<ApiKeyEntity>> findByApplication(String applicationId) {
    try {
        LOGGER.debug("Find all API keys for application {}", applicationId);
        Set<ApiKey> keys = apiKeyRepository.findByApplication(applicationId);
        Map<String, Set<ApiKey>> keysByApi = new HashMap<>();
        keys.forEach(apiKey -> {
            Set<ApiKey> values = keysByApi.getOrDefault(apiKey.getApi(), new HashSet<>());
            values.add(apiKey);
            keysByApi.put(apiKey.getApi(), values);
        });
        Map<String, List<ApiKeyEntity>> keysByApiResult = new HashMap<>(keysByApi.size());
        keysByApi.forEach((api, apiKeys) -> keysByApiResult.put(api,
                apiKeys.stream()
                        .sorted((key1, key2) -> key2.getCreatedAt().compareTo(key1.getCreatedAt()))
                        .map(ApiKeyServiceImpl::convert)
                        .collect(Collectors.toList())));
        return keysByApiResult;
    } catch (TechnicalException ex) {
        LOGGER.error("An error occurs while getting all API keys for application {}", applicationId, ex);
        throw new TechnicalManagementException(
                "An error occurs while getting all API keys for application " + applicationId, ex);
    }
}
From source file: com.streamsets.pipeline.stage.origin.jdbc.cdc.postgres.PostgresCDCSource.java

@Override
public String produce(String lastSourceOffset, int maxBatchSize, final BatchMaker batchMaker)
        throws StageException {
    Long offsetAsLong = Long.valueOf(0);
    if (dummyRecord == null) {
        dummyRecord = getContext().createRecord("DUMMY");
    }
    final int batchSize = Math.min(configBean.baseConfigBean.maxBatchSize, maxBatchSize);
    int recordGenerationAttempts = 0;
    boolean recordsProduced = false;
    if (lastSourceOffset != null) {
        setOffset(StringUtils.trimToEmpty(lastSourceOffset));
    }
    if (getOffset() != null) {
        offsetAsLong = LogSequenceNumber.valueOf(getOffset()).asLong();
    }
    PostgresWalRecord postgresWalRecord = null;
    if (!runnerCreated) {
        createRunner();
        runnerCreated = true;
    }
    if ((!generationStarted) && runnerCreated) {
        startGeneration();
        generationStarted = true;
    }
    while (generationStarted && !getContext().isStopped() && !recordsProduced
            && recordGenerationAttempts++ < MAX_RECORD_GENERATION_ATTEMPTS) {
        postgresWalRecord = cdcQueue.poll();
        if ((postgresWalRecord != null) && (postgresWalRecord.getLsn().asLong() > offsetAsLong)) {
            final Record record = processWalRecord(postgresWalRecord);
            if (record != null) {
                Map<String, String> attributes = new HashMap<>();
                attributes.put(LSN, postgresWalRecord.getLsn().asString());
                attributes.put(XID, postgresWalRecord.getXid());
                attributes.put(TIMESTAMP_HEADER, postgresWalRecord.getTimestamp());
                attributes.forEach((k, v) -> record.getHeader().setAttribute(k, v));
                batchMaker.addRecord(record);
                walReceiver.setLsnFlushed(postgresWalRecord.getLsn());
                this.setOffset(postgresWalRecord.getLsn().asString());
            }
        }
        try {
            Thread.sleep(100);
        } catch (InterruptedException e) {
            LOG.debug("Interrupted wait");
        }
    }
    return getOffset();
}
From source file: org.onosproject.store.ecmap.EventuallyConsistentMapImpl.java

@Override
public void putAll(Map<? extends K, ? extends V> m) {
    checkState(!destroyed, destroyedMessage);
    m.forEach(this::put);
}
From source file: edu.ucsd.sbrg.escher.model.EscherMap.java

@JsonProperty("nodes")
public void setNodes(Map<String, Node> nodes) {
    nodes.forEach((k, v) -> v.setId(k));
    this.nodes = nodes;
}
From source file: org.talend.dataprep.dataset.StatisticsAdapter.java

private void injectNumberHistogram(final ColumnMetadata column, final Analyzers.Result result) {
    if (NUMERIC.isAssignableFrom(column.getType()) && result.exist(StreamNumberHistogramStatistics.class)) {
        final Statistics statistics = column.getStatistics();
        final Map<org.talend.dataquality.statistics.numeric.histogram.Range, Long> histogramStatistics = result
                .get(StreamNumberHistogramStatistics.class).getHistogram();
        final NumberFormat format = DecimalFormat.getInstance(ENGLISH);

        // Set histogram ranges
        final Histogram histogram = new NumberHistogram();
        histogramStatistics.forEach((rangeValues, occurrence) -> {
            final HistogramRange range = new HistogramRange();
            try {
                range.getRange().setMax(new Double(format.format(rangeValues.getUpper())));
                range.getRange().setMin(new Double(format.format(rangeValues.getLower())));
            } catch (NumberFormatException e) {
                // Fallback to non formatted numbers (unable to parse numbers).
                range.getRange().setMax(rangeValues.getUpper());
                range.getRange().setMin(rangeValues.getLower());
            }
            range.setOccurrences(occurrence);
            histogram.getItems().add(range);
        });
        statistics.setHistogram(histogram);
    }
}