Example usage for java.util Set forEach

List of usage examples for java.util Set forEach

Introduction

On this page you can find example usage for java.util Set forEach.

Prototype

default void forEach(Consumer<? super T> action) 

Document

Performs the given action for each element of the Iterable until all elements have been processed or the action throws an exception.
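
Before the project examples below, here is a minimal, self-contained sketch of the call; the class name and sample values are illustrative only and are not taken from any of the projects listed under Usage.

import java.util.HashSet;
import java.util.Set;

public class SetForEachExample {
    public static void main(String[] args) {
        // Build a small set of sample values (illustrative data).
        Set<String> names = new HashSet<>();
        names.add("alice");
        names.add("bob");

        // forEach accepts a Consumer<? super String> and invokes it once per element.
        // Iteration order of a HashSet is unspecified.
        names.forEach(name -> System.out.println("Hello, " + name));
    }
}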

Usage

From source file:com.netflix.metacat.usermetadata.mysql.MysqlUserMetadataService.java

@Override
public List<QualifiedName> searchByOwners(final Set<String> owners) {
    final List<QualifiedName> result = Lists.newArrayList();
    final StringBuilder query = new StringBuilder(SQL.SEARCH_DEFINITION_METADATA_NAMES);
    final List<Object> paramList = Lists.newArrayList();
    query.append(" where 1=0");
    owners.forEach(s -> {
        query.append(" or data like ?");
        paramList.add("%\"userId\":\"" + s.trim() + "\"%");
    });
    final Object[] params = new Object[paramList.size()];
    final Connection connection = DBUtil.getReadConnection(poolingDataSource);
    try {
        // Handler for reading the result set
        final ResultSetHandler<Void> handler = rs -> {
            while (rs.next()) {
                final String definitionName = rs.getString("name");
                result.add(QualifiedName.fromString(definitionName, false));
            }
            return null;
        };
        new QueryRunner().query(connection, query.toString(), handler, paramList.toArray(params));
    } catch (SQLException e) {
        log.error("Sql exception", e);
        throw new UserMetadataServiceException("Failed to get definition data", e);
    } finally {
        DBUtil.closeReadConnection(connection);
    }
    return result;
}

From source file:org.commonjava.indy.promote.data.PromotionManager.java

/**
 * NOTE: Adding sourcePaths parameter here to cut down on number of paths for clearing from NFC.
 *
 * @param sourcePaths The set of paths that need to be cleared from the NFC.
 * @param store The store whose affected groups should have their NFC entries cleared
 * @throws IndyDataException
 */
private void clearStoreNFC(final Set<String> sourcePaths, ArtifactStore store) throws IndyDataException {
    Set<String> paths = sourcePaths.stream()
            .map(sp -> sp.startsWith("/") && sp.length() > 1 ? sp.substring(1) : sp)
            .collect(Collectors.toSet());

    paths.forEach(path -> {
        ConcreteResource resource = new ConcreteResource(LocationUtils.toLocation(store), path);

        logger.debug("Clearing NFC path: {} from: {}\n\tResource: {}", path, store.getKey(), resource);
        nfc.clearMissing(resource);
    });

    Set<Group> groups = storeManager.query().getGroupsAffectedBy(store.getKey());
    if (groups != null) {
        groups.forEach(group -> paths.forEach(path -> {
            ConcreteResource resource = new ConcreteResource(LocationUtils.toLocation(group), path);

            logger.debug("Clearing NFC path: {} from: {}\n\tResource: {}", path, group.getKey(), resource);
            nfc.clearMissing(resource);
        }));
    }
}

From source file:com.spotify.styx.docker.KubernetesDockerRunner.java

void examineRunningWFISandAssociatedPods(PodList podList) {
    final Set<WorkflowInstance> runningWorkflowInstances = stateManager.activeStates().values().stream()
            .filter(runState -> runState.state().equals(RUNNING)).map(RunState::workflowInstance)
            .collect(toSet());

    final Set<WorkflowInstance> workflowInstancesForPods = podList.getItems().stream()
            .filter(pod -> pod.getMetadata().getAnnotations().containsKey(STYX_WORKFLOW_INSTANCE_ANNOTATION))
            .map(pod -> WorkflowInstance
                    .parseKey(pod.getMetadata().getAnnotations().get(STYX_WORKFLOW_INSTANCE_ANNOTATION)))
            .collect(toSet());

    runningWorkflowInstances.removeAll(workflowInstancesForPods);
    runningWorkflowInstances.forEach(workflowInstance -> stateManager
            .receiveIgnoreClosed(Event.runError(workflowInstance, "No pod associated with this instance")));
}

From source file:org.onosproject.net.intent.impl.IntentInstaller.java

/**
 * Applies the specified intent updates to the environment by uninstalling
 * and installing the intents and updating the store references appropriately.
 *
 * @param toUninstall optional intent to uninstall
 * @param toInstall   optional intent to install
 */
void apply(Optional<IntentData> toUninstall, Optional<IntentData> toInstall) {
    // Hook for handling success at intent installation level.
    Consumer<IntentInstallationContext> successConsumer = (ctx) -> {
        if (toInstall.isPresent()) {
            IntentData installData = toInstall.get();
            log.debug("Completed installing: {}", installData.key());
            installData.setState(INSTALLED);
            store.write(installData);
        } else if (toUninstall.isPresent()) {
            IntentData uninstallData = toUninstall.get();
            log.debug("Completed withdrawing: {}", uninstallData.key());
            switch (uninstallData.request()) {
            case INSTALL_REQ:
                uninstallData.setState(FAILED);
                break;
            case WITHDRAW_REQ:
            default: //TODO "default" case should not happen
                uninstallData.setState(WITHDRAWN);
                break;
            }
            // Intent has been withdrawn; we can clear the installables
            store.write(new IntentData(uninstallData, Collections.emptyList()));
        }
    };

    // Hook for handling errors at intent installation level
    Consumer<IntentInstallationContext> errorConsumer = (ctx) -> {
        // if toInstall was cause of error, then recompile (manage/increment counter, when exceeded -> CORRUPT)
        if (toInstall.isPresent()) {
            IntentData installData = toInstall.get();
            installData.setState(CORRUPT);
            installData.incrementErrorCount();
            store.write(installData);
        }
        // if toUninstall was cause of error, then CORRUPT (another job will clean this up)
        if (toUninstall.isPresent()) {
            IntentData uninstallData = toUninstall.get();
            uninstallData.setState(CORRUPT);
            uninstallData.incrementErrorCount();
            store.write(uninstallData);
        }
    };

    // Hooks at operation level
    Consumer<OperationContext> successOperationConsumer = (ctx) -> {
        ctx.intentContext.finishContext(ctx);
    };
    Consumer<OperationContext> errorOperationConsumer = (ctx) -> {
        if (ctx.toInstall.isPresent()) {
            IntentData installData = toInstall.get();
            log.warn("Failed installation operation for: {} {} due to {}", installData.key(),
                    installData.intent(), ctx.error());
        }
        if (ctx.toUninstall.isPresent()) {
            IntentData uninstallData = toUninstall.get();
            log.warn("Failed withdrawal operation for: {} {} due to {}", uninstallData.key(),
                    uninstallData.intent(), ctx.error());
        }
        ctx.intentContext.handleError(ctx);
    };

    // Create a context for tracking the backing operations for applying
    // the intents to the environment.
    IntentInstallationContext intentContext = new IntentInstallationContext(successConsumer, errorConsumer);
    Set<OperationContext> contexts = createContext(intentContext, toUninstall, toInstall);
    intentContext.pendingContexts = contexts;
    contexts.forEach(ctx -> {
        ctx.prepare(toUninstall, toInstall, successOperationConsumer, errorOperationConsumer);
        ctx.apply();
    });
}

From source file:com.sandbox.recipe.service.RecipeController.java

@CrossOrigin
@RequestMapping("/recipeSearch")
public Set<Recipe> recipeSearch(@RequestBody SearchCriteria criteria) {
    System.out.println("recipeSearch() criteria.searchText = " + criteria.getSearchText()
            + ", criteria.tags.size = " + (criteria.getTags() != null ? criteria.getTags().size() : "null"));
    Set<Recipe> results = new HashSet<>();

    if (StringUtils.isNotBlank(criteria.getSearchText())) {
        List<Recipe> temp1 = _recipeRepository.findByRecipeNameIgnoreCaseContainingOrNotesIgnoreCaseContaining(
                criteria.getSearchText(), criteria.getSearchText());

        if (CollectionUtils.isNotEmpty(temp1)) {
            results.addAll(temp1);
        }
    }

    if (CollectionUtils.isNotEmpty(criteria.getTags())) {
        List<Recipe> temp2 = _recipeRepository.findByRecipeTagsIn(criteria.getTags());
        results.addAll(temp2);
    }

    if (CollectionUtils.isNotEmpty(criteria.getCookbookIds())) {
        List<Recipe> temp3 = _recipeRepository.findByCookbookIdIn(criteria.getCookbookIds());
        results.addAll(temp3);
    }

    System.out.println("recipeSearch() results.size() = " + results.size());
    results.forEach((recipe) -> {
        System.out.println("recipeSearch() recipe = " + recipe.getId() + ": " + recipe.getRecipeName());
    });
    return results;
}

From source file:org.apache.cassandra.index.SecondaryIndexManager.java

/**
 * When building an index against existing data in sstables, add the given partition to the index
 */
public void indexPartition(UnfilteredRowIterator partition, OpOrder.Group opGroup, Set<Index> indexes,
        int nowInSec) {
    if (!indexes.isEmpty()) {
        DecoratedKey key = partition.partitionKey();
        Set<Index.Indexer> indexers = indexes.stream().map(index -> index.indexerFor(key, partition.columns(),
                nowInSec, opGroup, IndexTransaction.Type.UPDATE)).filter(Objects::nonNull)
                .collect(Collectors.toSet());

        indexers.forEach(Index.Indexer::begin);

        try (RowIterator filtered = UnfilteredRowIterators.filter(partition, nowInSec)) {
            if (!filtered.staticRow().isEmpty())
                indexers.forEach(indexer -> indexer.insertRow(filtered.staticRow()));

            while (filtered.hasNext()) {
                Row row = filtered.next();
                indexers.forEach(indexer -> indexer.insertRow(row));
            }
        }

        indexers.forEach(Index.Indexer::finish);
    }
}

From source file:nl.knaw.huc.di.tag.tagml.importer.TAGMLListener.java

@Override
public void exitMilestoneTag(MilestoneTagContext ctx) {
    if (!state.rootMarkupIsSet()) {
        errorListener.addBreakingError("%s The root markup cannot be a milestone tag.", errorPrefix(ctx));
    }
    if (tagNameIsValid(ctx)) {
        String markupName = ctx.name().getText();
        //      LOG.debug("milestone.markupName=<{}>", markupName);
        ctx.annotation().forEach(annotation -> LOG.debug("milestone.annotation={{}}", annotation.getText()));
        Set<String> layers = extractLayerInfo(ctx.layerInfo());
        TAGTextNode tn = store.createTextNode("");
        addAndConnectToMarkup(tn);
        //      logTextNode(tn);
        TAGMarkup markup = addMarkup(ctx.name().getText(), ctx.annotation(), ctx);
        markup.addAllLayers(layers);
        layers.forEach(layerName -> {
            linkTextToMarkupForLayer(tn, markup, layerName);
            document.openMarkupInLayer(markup, layerName);
            document.closeMarkupInLayer(markup, layerName);
        });
        store.persist(markup.getDTO());
    }
}

From source file:org.springframework.scheduling.annotation.ScheduledAnnotationBeanPostProcessor.java

@Override
public Object postProcessAfterInitialization(final Object bean, String beanName) {
    Class<?> targetClass = AopProxyUtils.ultimateTargetClass(bean);
    if (!this.nonAnnotatedClasses.contains(targetClass)) {
        Map<Method, Set<Scheduled>> annotatedMethods = MethodIntrospector.selectMethods(targetClass,
                (MethodIntrospector.MetadataLookup<Set<Scheduled>>) method -> {
                    Set<Scheduled> scheduledMethods = AnnotatedElementUtils
                            .getMergedRepeatableAnnotations(method, Scheduled.class, Schedules.class);
                    return (!scheduledMethods.isEmpty() ? scheduledMethods : null);
                });
        if (annotatedMethods.isEmpty()) {
            this.nonAnnotatedClasses.add(targetClass);
            if (logger.isTraceEnabled()) {
                logger.trace("No @Scheduled annotations found on bean class: " + bean.getClass());
            }
        } else {
            // Non-empty set of methods
            annotatedMethods.forEach((method, scheduledMethods) -> scheduledMethods
                    .forEach(scheduled -> processScheduled(scheduled, method, bean)));
            if (logger.isDebugEnabled()) {
                logger.debug(annotatedMethods.size() + " @Scheduled methods processed on bean '" + beanName
                        + "': " + annotatedMethods);
            }
        }
    }
    return bean;
}

From source file:org.nanoframework.orm.jedis.AbstractRedisClient.java

@Override
public <T> Set<T> hkeys(final String key, final TypeReference<T> type) {
    final Set<String> keys = hkeys(key);
    if (!CollectionUtils.isEmpty(keys)) {
        final Set<T> sets = Sets.newHashSet();
        keys.forEach(item -> sets.add(parseObject(item, type)));
        return sets;
    }

    return Collections.emptySet();
}

From source file:pt.ist.fenixedu.integration.ui.struts.action.koha.ExportUserInfoForKoha.java

public ActionForward getTeachersAndResearchers(final ActionMapping mapping, final ActionForm actionForm,
        final HttpServletRequest request, final HttpServletResponse response) throws Exception {
    final Spreadsheet spreadsheet = new Spreadsheet("TeachersAndResearchers");
    spreadsheet.setHeader("IST-ID").setHeader("*departamento").setHeader("nome").setHeader("email")
            .setHeader("telefone").setHeader("cgdCode");

    Set<Person> teachersAndResearchers = new HashSet<>();
    for (Teacher teacher : Bennu.getInstance().getTeachersSet()) {
        if (teacher.isActiveContractedTeacher()) {
            teachersAndResearchers.add(teacher.getPerson());
        }
    }
    for (Employee employee : Bennu.getInstance().getEmployeesSet()) {
        PersonContractSituation currentResearcherContractSituation = employee.getPerson()
                .getPersonProfessionalData() != null
                        ? employee.getPerson().getPersonProfessionalData()
                                .getCurrentPersonContractSituationByCategoryType(CategoryType.RESEARCHER)
                        : null;
        if (currentResearcherContractSituation != null) {
            teachersAndResearchers.add(employee.getPerson());
        }
    }
    teachersAndResearchers.forEach(p -> addEmployeeInformation(spreadsheet, p));

    return sendXls(response, spreadsheet);
}