Example usage for java.util.stream Collectors toSet

Introduction

On this page you can find example usage of java.util.stream.Collectors.toSet.

Prototype

public static <T> Collector<T, ?, Set<T>> toSet() 

Documentation

Returns a Collector that accumulates the input elements into a new Set.
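
Before the project examples below, here is a minimal, self-contained sketch of the typical pattern (the class name and sample data are illustrative):

import java.util.Arrays;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

public class ToSetExample {
    public static void main(String[] args) {
        List<String> names = Arrays.asList("alice", "bob", "alice", "carol");

        // Duplicates are dropped; the implementation type, mutability and
        // iteration order of the returned Set are unspecified.
        Set<String> unique = names.stream()
                .map(String::toUpperCase)
                .collect(Collectors.toSet());

        System.out.println(unique); // e.g. [BOB, ALICE, CAROL]
    }
}

If a specific Set implementation is required, Collectors.toCollection(TreeSet::new) is the usual alternative.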

Usage

From source file:org.fenixedu.bennu.spring.BennuSpringConfiguration.java

private Set<String> getBaseNames(ApplicationContext context) {
    final Set<String> baseNames = new HashSet<>();
    baseNames.add(getBundleBasename("BennuSpringResources"));
    final String[] beanNames = context.getBeanNamesForAnnotation(BennuSpringModule.class);
    for (String beanName : beanNames) {
        BennuSpringModule bennuSpringModuleAnnotation = context.findAnnotationOnBean(beanName,
                BennuSpringModule.class);
        if (bennuSpringModuleAnnotation != null) {
            baseNames.addAll(Arrays.stream(bennuSpringModuleAnnotation.bundles()).map(this::getBundleBasename)
                    .collect(Collectors.toSet()));
        }
    }
    return baseNames;
}

From source file:com.chadekin.jadys.syntax.from.impl.FromClauseBuilderImpl.java

private Set<String> retrieveBasicActiveAliases() {
    boolean isLazy = getParent() != null && getParent() instanceof SelectClauseBuilder
            && ((SelectClauseBuilder) getParent()).isLazy();
    Set<String> parentAliases = getParent() == null ? Sets.newHashSet() : getParent().getAlias();
    Set<String> childAliases = Sets.newHashSet();
    JadysSqlQueryBuilder childBuilder = getChild();
    while (childBuilder != null) {
        childAliases.addAll(childBuilder.getAlias());
        childBuilder = childBuilder.getChild();
    }
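    // Keep the join statements whose alias is referenced by the parent or a
    // child builder (or all of them when the select is lazy), then collect the
    // aliases they refer to that are themselves join keys.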
    return joinStatements.entrySet().stream()
            .filter(entry -> isLazy || parentAliases.contains(entry.getKey())
                    || childAliases.contains(entry.getKey()))
            .map(entry -> extractAlias(entry.getValue())).flatMap(entry -> entry.stream())
            .filter(val -> joinStatements.keySet().contains(val)).collect(Collectors.toSet());
}

From source file:net.morimekta.idltool.cmd.RemoteStatus.java

/**
 * Show standard status for a remote.
 *
 * @param first           If this is the first remote to print diffs.
 * @param remoteName      The name of the remote.
 * @param sourceDirectory The directory holding the remote files.
 * @param sourceSha1sums  The remote file to sha1sum map.
 * @param targetDirectory The directory holding the local files.
 * @param targetSha1sums  The local file to sha1sum map.
 * @return If the next remote is the first to print diffs.
 * @throws IOException If reading the files for diffing fails.
 */
private boolean showStatus(boolean first, @Nonnull String remoteName, @Nonnull File sourceDirectory,
        @Nonnull Map<String, String> sourceSha1sums, @Nonnull File targetDirectory,
        @Nonnull Map<String, String> targetSha1sums) throws IOException {
    Set<String> removedFiles = new TreeSet<>(targetSha1sums.keySet());
    removedFiles.removeAll(sourceSha1sums.keySet());

    Set<String> addedFiles = new TreeSet<>(sourceSha1sums.keySet());
    addedFiles.removeAll(targetSha1sums.keySet());

    Set<String> updatedFiles = new TreeSet<>(sourceSha1sums.keySet());
    updatedFiles.removeAll(addedFiles);

    updatedFiles = updatedFiles.stream().filter(f -> !targetSha1sums.get(f).equals(sourceSha1sums.get(f)))
            .collect(Collectors.toSet());
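    // Note: the Set returned by Collectors.toSet() above is unordered; sorted
    // iteration is restored below when the names are copied into the TreeSet allFiles.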

    Set<String> allFiles = new TreeSet<>();
    allFiles.addAll(removedFiles);
    allFiles.addAll(addedFiles);
    allFiles.addAll(updatedFiles);

    if (allFiles.size() == 0) {
        return first;
    }

    int longestName = allFiles.stream().mapToInt(String::length).max().orElse(0);
    int diffSeparatorLength = Math.max(72, longestName + 6 + remoteName.length());

    if (!first) {
        System.out.println();
    }
    System.out.println(String.format("%sUpdates on %s%s", Color.BOLD, remoteName, Color.CLEAR));
    for (String file : allFiles) {
        String paddedFile = StringUtils.rightPad(file, longestName);
        File sourceFile = new File(sourceDirectory, file);
        File targetFile = new File(targetDirectory, file);

        if (diff) {
            System.out.println();
            System.out.println(Color.DIM + Strings.times("#", diffSeparatorLength) + Color.CLEAR);
            paddedFile = remoteName + "/" + paddedFile;
        }

        if (removedFiles.contains(file)) {
            System.out.println(String.format("  %s%s%s (%sD%s)%s", Color.YELLOW, paddedFile, Color.CLEAR,
                    Color.RED, Color.CLEAR, getDiffStats(sourceFile, targetFile)));
        } else if (addedFiles.contains(file)) {
            System.out.println(String.format("  %s%s%s (%sA%s)%s", Color.YELLOW, paddedFile, Color.CLEAR,
                    Color.GREEN, Color.CLEAR, getDiffStats(sourceFile, targetFile)));
        } else {
            System.out.println(String.format("  %s%s%s    %s", Color.YELLOW, paddedFile, Color.CLEAR,
                    getDiffStats(sourceFile, targetFile)));
        }
        if (diff) {
            System.out.println(Color.DIM + Strings.times("-", diffSeparatorLength) + Color.CLEAR);
            printDiffLines(sourceFile, targetFile);
            System.out.println(Color.DIM + Strings.times("#", diffSeparatorLength) + Color.CLEAR);
        }
    }

    return false;
}

From source file:alfio.manager.user.UserManager.java

private boolean checkRole(User user, Set<Role> expectedRoles) {
    Set<String> roleNames = expectedRoles.stream().map(Role::getRoleName).collect(Collectors.toSet());
    return authorityRepository.checkRole(user.getUsername(), roleNames);
}

From source file:com.vsct.dt.hesperides.indexation.search.ApplicationSearch.java

/**
 * Find all platforms using a given module.
 *
 * @param moduleName    The name of the module.
 * @param moduleVersion The version of the module.
 * @param isWorkingCopy Whether the module is a working copy (any value containing "release" means it is not).
 * @return a set of platforms matching the request
 */
public Set<PlatformApplicationSearchResponse> getAllPlatformsUsingModules(final String moduleName,
        final String moduleVersion, final String isWorkingCopy) {
    String url = String.format("/platforms/_search?size=%1$s", SEARCH_SIZE);

    boolean boolIsWorkingCopy = true;
    if (isWorkingCopy.contains("release")) {
        boolIsWorkingCopy = false;
    }

    String body = TemplateContentGenerator.from(mustacheSearchAllPlatformUsingModules)
            .put("moduleName", moduleName).put("moduleVersion", moduleVersion)
            .put("isWorkingCopy", boolIsWorkingCopy).generate();

    ElasticSearchResponse<PlatformApplicationSearchResponse> esResponse = elasticSearchClient
            .withResponseReader(elasticSearchVsctPlatformApplicationReader).post(url, body);

    return esResponse.streamOfData().collect(Collectors.toSet());
}

From source file:org.ow2.proactive.procci.service.occi.MixinService.java

/**
 * Add a mixin to the database and update the references.
 *
 * @param mixin The mixin to add.
 */
public void addMixin(Mixin mixin) {
    //add the new entity references
    cloudAutomationVariablesClient.post(mixin.getTitle(), mapObject(mixin.getRendering()));

    //add mixin to entity references
    for (String entityId : mixin.getEntities().stream().map(entity -> entity.getId())
            .collect(Collectors.toSet())) {
        addEntityReferences(entityId, mixin.getTitle());
    }
}

From source file:com.netflix.conductor.core.execution.WorkflowExecutor.java

public String startWorkflow(String name, int version, Map<String, Object> input, String correlationId,
        String parentWorkflowId, String parentWorkflowTaskId, String event, Map<String, String> taskToDomain)
        throws Exception {

    try {

        if (input == null) {
            throw new ApplicationException(Code.INVALID_INPUT, "NULL input passed when starting workflow");
        }

        WorkflowDef exists = metadata.get(name, version);
        if (exists == null) {
            throw new ApplicationException(Code.NOT_FOUND,
                    "No such workflow defined. name=" + name + ", version=" + version);
        }
        Set<String> missingTaskDefs = exists.all().stream()
                .filter(wft -> wft.getType().equals(WorkflowTask.Type.SIMPLE.name()))
                .map(wft2 -> wft2.getName()).filter(task -> metadata.getTaskDef(task) == null)
                .collect(Collectors.toSet());
        if (!missingTaskDefs.isEmpty()) {
            throw new ApplicationException(Code.INVALID_INPUT,
                    "Cannot find the task definitions for the following tasks used in workflow: "
                            + missingTaskDefs);
        }
        String workflowId = IDGenerator.generate();

        // Persist the Workflow
        Workflow wf = new Workflow();
        wf.setWorkflowId(workflowId);
        wf.setCorrelationId(correlationId);
        wf.setWorkflowType(name);
        wf.setVersion(version);
        wf.setInput(input);
        wf.setStatus(WorkflowStatus.RUNNING);
        wf.setParentWorkflowId(parentWorkflowId);
        wf.setParentWorkflowTaskId(parentWorkflowTaskId);
        wf.setOwnerApp(WorkflowContext.get().getClientApp());
        wf.setCreateTime(System.currentTimeMillis());
        wf.setUpdatedBy(null);
        wf.setUpdateTime(null);
        wf.setEvent(event);
        wf.setTaskToDomain(taskToDomain);
        edao.createWorkflow(wf);
        decide(workflowId);
        return workflowId;

    } catch (Exception e) {
        Monitors.recordWorkflowStartError(name);
        throw e;
    }
}

From source file:com.yahoo.bard.webservice.data.metric.TemplateDruidQuery.java

/**
 * Gather duplicate names across the collection of Aggregations and PostAggregations.
 *
 * @param aggregations  Set of Aggregations to inspect
 * @param postAggregations  Set of PostAggregations to inspect
 *
 * @return Set of collided names (if any)
 */
private Set<String> getNameCollisions(Collection<Aggregation> aggregations,
        Collection<PostAggregation> postAggregations) {
    Set<String> allNames = new HashSet<>();
    return Stream.concat(aggregations.stream(), postAggregations.stream()).map(MetricField::getName)
            .filter(not(allNames::add)) // Select names that already had been added to allNames
            .collect(Collectors.toSet());
}
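
This idiom works because Set.add returns false when the element is already present, so filtering on the negated method reference keeps exactly the names that occur more than once (the not used here is a predicate-negating helper, equivalent to java.util.function.Predicate.not in Java 11 and later).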

From source file:nu.yona.server.goals.service.ActivityCategoryServiceTestConfiguration.java

@Test
public void updateActivityCategorySet_modifiedActivityCategories_setIsUpdatedCorrectly() {
    assertGetAllActivityCategoriesResult("gambling", "news");

    // modify
    Set<ActivityCategoryDto> importActivityCategories = new HashSet<>();
    ActivityCategoryDto newsModified = new ActivityCategoryDto(news.getId(), usString("news"), false,
            new HashSet<>(Arrays.asList("refdag", "bbc", "atom feeds")), new HashSet<String>(),
            usString("Descr"));
    importActivityCategories.add(newsModified);
    ActivityCategoryDto gaming = new ActivityCategoryDto(UUID.randomUUID(), usString("gaming"), false,
            new HashSet<>(Arrays.asList("games")), new HashSet<String>(), usString("Descr"));
    importActivityCategories.add(gaming);

    service.updateActivityCategorySet(importActivityCategories);

    ArgumentCaptor<ActivityCategory> matchActivityCategory = ArgumentCaptor.forClass(ActivityCategory.class);
    // 1 added and 1 updated
    verify(mockRepository, times(2)).save(matchActivityCategory.capture());
    assertThat(
            matchActivityCategory.getAllValues().stream()
                    .map(x -> x.getLocalizableName().get(Translator.EN_US_LOCALE)).collect(Collectors.toSet()),
            containsInAnyOrder("news", "gaming"));
    // 1 deleted
    verify(mockRepository, times(1)).delete(matchActivityCategory.capture());
    assertThat(matchActivityCategory.getValue().getLocalizableName().get(Translator.EN_US_LOCALE),
            equalTo("gambling"));
}

From source file:com.romeikat.datamessie.core.base.dao.impl.DocumentDao.java

public Map<RawContent, Document> getForRawContents(final SharedSessionContract ssc,
        final Collection<RawContent> rawContents) {
    // Query for documents
    final Set<Long> documentIds = rawContents.stream().map(c -> c.getDocumentId()).collect(Collectors.toSet());
    final Map<Long, Document> documentsById = getIdsWithEntities(ssc, documentIds);

    // Map rawContents -> documents
    final Map<RawContent, Document> result = Maps.newHashMapWithExpectedSize(rawContents.size());
    for (final RawContent rawContent : rawContents) {
        final Document document = documentsById.get(rawContent.getDocumentId());
        result.put(rawContent, document);
    }
    return result;
}