List of usage examples for java.util.Map.forEach
default void forEach(BiConsumer<? super K, ? super V> action)
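Before the project-sourced examples below, here is a minimal, self-contained sketch of the method itself: forEach hands each entry's key and value to the supplied BiConsumer, in the map's iteration order. This snippet is illustrative only and is not drawn from any of the projects listed below.

import java.util.LinkedHashMap;
import java.util.Map;

public class MapForEachDemo {
    public static void main(String[] args) {
        Map<String, Integer> ports = new LinkedHashMap<>();
        ports.put("http", 80);
        ports.put("https", 443);

        // The BiConsumer receives each key and its mapped value in turn.
        ports.forEach((name, port) -> System.out.println(name + " -> " + port));
    }
}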
From source file: org.wso2.carbon.uuf.internal.deployment.AppCreator.java
private void addI18nResources(Map<String, Properties> componentI18nResources, I18nResources i18nResources) {
    if ((componentI18nResources == null) || componentI18nResources.isEmpty()) {
        return;
    }
    componentI18nResources.forEach((localString, properties) -> {
        Locale locale = Locale.forLanguageTag(localString.replace("_", "-"));
        if (locale.getLanguage().isEmpty()) {
            throw new UUFException(
                    "Locale is not found for the given language code. Hence language file will not"
                            + " be deployed for " + localString);
        } else {
            i18nResources.addI18nResource(locale, properties);
        }
    });
}
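A detail the example above relies on: per the java.util.Map documentation, exceptions thrown by the action are relayed to the caller of forEach, so the UUFException aborts the whole traversal. A minimal sketch of that behavior (the map contents here are hypothetical):

import java.util.HashMap;
import java.util.Map;

public class ForEachThrowsDemo {
    public static void main(String[] args) {
        Map<String, String> i18n = new HashMap<>();
        i18n.put("xx-INVALID", "bad.properties");

        try {
            // An exception thrown inside the BiConsumer stops the traversal
            // and propagates to the caller of forEach.
            i18n.forEach((tag, file) -> {
                throw new IllegalArgumentException("No locale for " + tag);
            });
        } catch (IllegalArgumentException e) {
            System.out.println("Caught: " + e.getMessage());
        }
    }
}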
From source file: com.grepcurl.random.ObjectGenerator.java
@Deprecated
public <T> T generate(Class<T> klass, Map<String, Callable> setterOverrides, Object... constructorArgs) {
    SetterOverrides o = new SetterOverrides();
    if (setterOverrides != null) {
        setterOverrides.forEach((methodName, callable) -> o.add(methodName, callable));
    }
    return generate(klass, o, constructorArgs);
}
From source file: org.apache.alert.coordinator.NodataMetadataGeneratorTest.java
@Test
public void testNormal() throws Exception {
    StreamDefinition sd = createStreamDefinitionWithNodataAlert();
    Map<String, StreamDefinition> streamDefinitionsMap = new HashMap<String, StreamDefinition>();
    streamDefinitionsMap.put(sd.getStreamId(), sd);
    Map<String, Kafka2TupleMetadata> kafkaSources = new HashMap<String, Kafka2TupleMetadata>();
    Map<String, PolicyDefinition> policies = new HashMap<String, PolicyDefinition>();
    Map<String, Publishment> publishments = new HashMap<String, Publishment>();
    generator.execute(config, streamDefinitionsMap, kafkaSources, policies, publishments);
    Assert.assertEquals(2, kafkaSources.size());
    kafkaSources.forEach((key, value) -> {
        LOG.info("KafkaSources > {}: {}", key, ToStringBuilder.reflectionToString(value));
    });
    Assert.assertEquals(2, policies.size());
    policies.forEach((key, value) -> {
        LOG.info("Policies > {}: {}", key, ToStringBuilder.reflectionToString(value));
    });
    Assert.assertEquals(4, publishments.size());
    publishments.forEach((key, value) -> {
        LOG.info("Publishments > {}: {}", key, ToStringBuilder.reflectionToString(value));
    });
}
From source file: org.wso2.carbon.transport.remotefilesystem.client.connector.contractimpl.VFSClientConnectorImpl.java
public VFSClientConnectorImpl(Map<String, String> connectorConfig,
        RemoteFileSystemListener remoteFileSystemListener) {
    this.connectorConfig = connectorConfig;
    this.remoteFileSystemListener = remoteFileSystemListener;
    if (Constants.PROTOCOL_FTP.equals(connectorConfig.get(Constants.PROTOCOL))) {
        connectorConfig.forEach((property, value) -> {
            // TODO: Add support for other FTP related configurations
            if (Constants.FTP_PASSIVE_MODE.equals(property)) {
                FtpFileSystemConfigBuilder.getInstance().setPassiveMode(opts, Boolean.parseBoolean(value));
            }
        });
    }
}
From source file: org.springframework.context.support.DefaultLifecycleProcessor.java
private void stopBeans() {
    Map<String, Lifecycle> lifecycleBeans = getLifecycleBeans();
    Map<Integer, LifecycleGroup> phases = new HashMap<>();
    lifecycleBeans.forEach((beanName, bean) -> {
        int shutdownOrder = getPhase(bean);
        LifecycleGroup group = phases.get(shutdownOrder);
        if (group == null) {
            group = new LifecycleGroup(shutdownOrder, this.timeoutPerShutdownPhase, lifecycleBeans, false);
            phases.put(shutdownOrder, group);
        }
        group.add(beanName, bean);
    });
    if (!phases.isEmpty()) {
        List<Integer> keys = new ArrayList<>(phases.keySet());
        keys.sort(Collections.reverseOrder());
        for (Integer key : keys) {
            phases.get(key).stop();
        }
    }
}
From source file: org.springframework.context.support.DefaultLifecycleProcessor.java
private void startBeans(boolean autoStartupOnly) {
    Map<String, Lifecycle> lifecycleBeans = getLifecycleBeans();
    Map<Integer, LifecycleGroup> phases = new HashMap<>();
    lifecycleBeans.forEach((beanName, bean) -> {
        if (!autoStartupOnly || (bean instanceof SmartLifecycle && ((SmartLifecycle) bean).isAutoStartup())) {
            int phase = getPhase(bean);
            LifecycleGroup group = phases.get(phase);
            if (group == null) {
                group = new LifecycleGroup(phase, this.timeoutPerShutdownPhase, lifecycleBeans, autoStartupOnly);
                phases.put(phase, group);
            }
            group.add(beanName, bean);
        }
    });
    if (!phases.isEmpty()) {
        List<Integer> keys = new ArrayList<>(phases.keySet());
        Collections.sort(keys);
        for (Integer key : keys) {
            phases.get(key).start();
        }
    }
}
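In both DefaultLifecycleProcessor methods above, the get / null-check / put sequence inside the forEach callback is the classic group-by pattern, which Map.computeIfAbsent expresses more compactly. A minimal sketch of the same grouping idea, using made-up bean names and phases (Spring's actual code keeps the explicit form):

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class GroupByPhase {
    public static void main(String[] args) {
        Map<String, Integer> beanPhases = new HashMap<>();
        beanPhases.put("dataSource", 0);
        beanPhases.put("scheduler", 1);
        beanPhases.put("cacheManager", 1);

        // computeIfAbsent creates the per-phase bucket only when first needed,
        // replacing the explicit get / null-check / put sequence.
        Map<Integer, List<String>> phases = new HashMap<>();
        beanPhases.forEach((beanName, phase) ->
                phases.computeIfAbsent(phase, p -> new ArrayList<>()).add(beanName));

        phases.forEach((phase, names) -> System.out.println(phase + " -> " + names));
    }
}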
From source file: org.haiku.haikudepotserver.pkg.job.PkgScreenshotImportArchiveJobRunner.java
/**
 * <p>Go through the database and collect information about the screenshots that are persisted.</p>
 */
private void collectPersistedScreenshotMetadata(Map<String, ScreenshotImportMetadatas> data) {
    data.forEach((k, v) -> {
        if (!v.isNotFound()) {
            ObjectContext context = serverRuntime.newContext();
            Pkg pkg = Pkg.getByName(context, k);
            pkg.getPkgScreenshots().forEach((ps) -> v.add(createPersistedScreenshotMetadata(ps)));
        }
    });
}
From source file: org.onosproject.net.resource.impl.LabelAllocator.java
/**
 * Allocates labels and associates them to source
 * and destination ports of a link.
 *
 * @param links the links on which labels will be reserved
 * @param id the intent Id
 * @param type the encapsulation type
 * @return the list of ports and associated labels
 */
public Map<ConnectPoint, Identifier<?>> assignLabelToPorts(Set<Link> links, IntentId id, EncapsulationType type) {
    Map<LinkKey, Identifier<?>> allocation = this.assignLabelToLinks(links, id, type);
    if (allocation.isEmpty()) {
        return Collections.emptyMap();
    }
    Map<ConnectPoint, Identifier<?>> finalAllocation = Maps.newHashMap();
    allocation.forEach((key, value) -> {
        finalAllocation.putIfAbsent(key.src(), value);
        finalAllocation.putIfAbsent(key.dst(), value);
    });
    return ImmutableMap.copyOf(finalAllocation);
}
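Worth noting in the example above: putIfAbsent keeps the first label written for a port, so when the same ConnectPoint is the endpoint of several links, later labels for it are ignored. A minimal sketch of that first-wins behavior (the port names and label values are made up):

import java.util.HashMap;
import java.util.Map;

public class PutIfAbsentDemo {
    public static void main(String[] args) {
        Map<String, Integer> labelsByPort = new HashMap<>();
        // The same port can be the src of one link and the dst of another;
        // putIfAbsent keeps the label that was assigned first.
        labelsByPort.putIfAbsent("switch1/port2", 100);
        labelsByPort.putIfAbsent("switch1/port2", 200); // ignored, key already mapped
        System.out.println(labelsByPort); // {switch1/port2=100}
    }
}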
From source file: fi.vm.kapa.identification.proxy.background.SessionCleanup.java
public void runCleanup() {
    try {
        // Using time reference in minutes is good enough for a time frame for expiration
        long failedTTLThreshold = System.currentTimeMillis() - failedSessionsTTL * 60000;
        long activeTTLThreshold = System.currentTimeMillis() - activeSessionsTTL * 60000;
        Map<String, Map<AuthMethod, SessionDTO>> sessionsInCache = sessionHandlingUtils.getSessionsCache();
        List<Pair<String, AuthMethod>> toBeRemoved = new ArrayList<>();
        long originalSize = 0;
        for (Map<AuthMethod, SessionDTO> sessionDTOMap : sessionsInCache.values()) {
            originalSize += sessionDTOMap.size();
        }
        logger.info("Currently there are {} sessions in cache", originalSize);
        sessionsInCache.forEach((key, sessionDTOMap) -> sessionDTOMap.forEach((authMethod, sessionDTO) -> {
            if ((sessionDTO.isValidated() && sessionDTO.getTimestamp() < activeTTLThreshold)
                    || (!sessionDTO.isValidated() && sessionDTO.getTimestamp() < failedTTLThreshold)) {
                toBeRemoved.add(new Pair<>(key, authMethod));
            }
        }));
        toBeRemoved.forEach((pair) -> sessionHandlingUtils.removeFromSessionCache(pair.getKey(), pair.getValue()));
        long finalSize = 0;
        for (Map<AuthMethod, SessionDTO> sessionDTOMap : sessionsInCache.values()) {
            finalSize += sessionDTOMap.size();
        }
        logger.info("Removed {} expired sessions from cache", originalSize - finalSize);
    } catch (Exception e) {
        logger.error("Error running session cleanup", e);
    }
}
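The cleanup above deliberately collects expired entries first and removes them in a second pass: structurally modifying a HashMap while forEach is iterating it would throw ConcurrentModificationException. A minimal sketch of the same two-pass pattern, with a made-up expiry rule:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class TwoPassCleanup {
    public static void main(String[] args) {
        Map<String, Long> sessions = new HashMap<>();
        sessions.put("a", 10L);
        sessions.put("b", 99L);

        long threshold = 50L;
        // Pass 1: collect keys to remove; mutating the map inside forEach
        // would throw ConcurrentModificationException.
        List<String> expired = new ArrayList<>();
        sessions.forEach((id, timestamp) -> {
            if (timestamp < threshold) {
                expired.add(id);
            }
        });
        // Pass 2: remove outside the iteration.
        expired.forEach(sessions::remove);
        System.out.println(sessions); // {b=99}
    }
}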
From source file: com.redhat.red.build.koji.it.ImportBuildConnectionStressIT.java
private void runImport(final KojiClient client) throws Exception {
    KojiSessionInfo session = client.login();
    String tagName = getClass().getSimpleName() + "-" + selectWords("-", 3);
    CreateTagRequest req = new CreateTagRequest();
    req.setTagName(tagName);
    client.createTag(req, session);
    ProjectVersionRef topGav = generateGAV();
    KojiImport.Builder importBuilder = initImport(topGav);
    boolean packageAdded = client.addPackageToTag(tagName, topGav, session);
    assertThat(packageAdded, equalTo(true));
    List<Supplier<ImportFile>> fileSuppliers = new ArrayList<>(
            Arrays.asList(addPom(topGav, importBuilder), addJar(topGav, importBuilder)));
    for (int i = 1; i < MODULE_COUNT; i++) {
        ProjectVersionRef moduleGav = generateGAV();
        fileSuppliers.add(addPom(moduleGav, importBuilder));
        fileSuppliers.add(addJar(moduleGav, importBuilder));
    }
    KojiImport importMetadata = importBuilder.build();
    System.out.printf("Starting CGImport using client: %s\n metadata: %s\n fileSuppliers: %s\n session: %s",
            client, importMetadata, fileSuppliers, session);
    KojiImportResult result = client.importBuild(importMetadata, fileSuppliers, session);
    Map<String, KojijiErrorInfo> uploadErrors = result.getUploadErrors();
    if (uploadErrors != null && !uploadErrors.isEmpty()) {
        StringBuilder sb = new StringBuilder();
        sb.append("The following upload errors occurred:\n\n");
        uploadErrors.forEach((k, v) -> sb.append(k).append(":\n\n ").append(v).append("\n\n"));
        fail(sb.toString());
    }
    KojiBuildInfo buildInfo = result.getBuildInfo();
    assertThat(buildInfo, notNullValue());
    Integer taskId = client.tagBuild(tagName, buildInfo.getNvr(), session);
    assertThat(taskId, notNullValue());
    KojiTaskInfo taskInfo = client.getTaskInfo(taskId, session);
    System.out.println("State of tag operation: " + taskInfo.getState());
}