Example usage for java.util.stream Collectors.toSet()

A list of usage examples for java.util.stream Collectors.toSet()

Introduction

On this page you can find usage examples for java.util.stream Collectors.toSet().

Prototype

public static <T> Collector<T, ?, Set<T>> toSet() 

Document

Returns a Collector that accumulates the input elements into a new Set.
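
Before the real-world examples, here is a minimal, self-contained sketch (the class and variable names are made up for illustration, not taken from the sources below). It shows the typical pattern: stream a collection, optionally transform the elements, and collect them into a Set, which silently drops duplicates. Note that toSet() makes no guarantee about the type, mutability, or iteration order of the returned Set; use Collectors.toCollection(TreeSet::new) or similar when a specific implementation is required.

import java.util.Arrays;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

public class CollectorsToSetDemo {
    public static void main(String[] args) {
        List<String> names = Arrays.asList("alice", "bob", "alice", "carol");

        // Duplicate elements (by equals/hashCode) collapse into a single entry.
        Set<String> unique = names.stream()
                .map(String::toUpperCase)
                .collect(Collectors.toSet());

        // The Set implementation and iteration order are unspecified.
        System.out.println(unique); // e.g. [BOB, ALICE, CAROL]
    }
}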

Usage

From source file:demo.enumj.EnumeratorDemo.java

private static void demoCollect(String pre) {
    final Enumerator<Integer> en = Enumerator.of(_123);
    final Set<Integer> s = en.collect(Collectors.toSet());
    System.out.println(pre + "Elements of collected set:");
    for (Integer i : s) {
        System.out.println(pre + pre + i);
    }
}

From source file:arxiv.xml.XMLParser.java

/**
 * Parse a single record of article metadata.
 * @throws ParseException if there is a parsing error
 */
private ArticleMetadata parseRecord(RecordType xmlRecord, ZonedDateTime retrievalDateTime) {
    ArticleMetadata.ArticleMetadataBuilder articleBuilder = ArticleMetadata.builder();
    articleBuilder.retrievalDateTime(retrievalDateTime);

    HeaderType header = xmlRecord.getHeader();
    articleBuilder.identifier(normalizeSpace(header.getIdentifier()))
            .datestamp(parseDatestamp(normalizeSpace(header.getDatestamp())))
            .sets(header.getSetSpec().stream().map(StringUtils::normalizeSpace).collect(Collectors.toSet()))
            .deleted(header.getStatus() != null && header.getStatus() == StatusType.DELETED);

    @SuppressWarnings("unchecked")
    JAXBElement<ArXivRawType> jaxbElement = (JAXBElement<ArXivRawType>) xmlRecord.getMetadata().getAny();

    ArXivRawType metadata = jaxbElement.getValue();
    articleBuilder.id(normalizeSpace(metadata.getId())).submitter(normalizeSpace(metadata.getSubmitter()))
            .versions(metadata.getVersion().stream()
                    .map(versionType -> ArticleVersion.builder()
                            .versionNumber(parseVersionNumber(normalizeSpace(versionType.getVersion())))
                            .submissionTime(parseSubmissionTime(normalizeSpace(versionType.getDate())))
                            .size(normalizeSpace(versionType.getSize()))
                            .sourceType(normalizeSpace(versionType.getSourceType())).build())
                    .collect(Collectors.toSet()))
            .title(normalizeSpace(metadata.getTitle())).authors(normalizeSpace(metadata.getAuthors()))
            .categories(parseCategories(normalizeSpace(metadata.getCategories())))
            .comments(normalizeSpace(metadata.getComments())).proxy(normalizeSpace(metadata.getProxy()))
            .reportNo(normalizeSpace(metadata.getReportNo())).acmClass(normalizeSpace(metadata.getAcmClass()))
            .mscClass(normalizeSpace(metadata.getMscClass()))
            .journalRef(normalizeSpace(metadata.getJournalRef())).doi(normalizeSpace(metadata.getDoi()))
            .license(normalizeSpace(metadata.getLicense()))
            .articleAbstract(normalizeSpace(metadata.getAbstract()));

    return articleBuilder.build();
}

From source file:com.netflix.conductor.dao.dynomite.RedisExecutionDAOTest.java

@Test
public void testTaskCreateDups() throws Exception {
    List<Task> tasks = new LinkedList<>();
    String workflowId = UUID.randomUUID().toString();

    for (int i = 0; i < 3; i++) {
        Task task = new Task();
        task.setScheduledTime(1L);
        task.setSeq(1);
        task.setTaskId(workflowId + "_t" + i);
        task.setReferenceTaskName("t" + i);
        task.setRetryCount(0);
        task.setWorkflowInstanceId(workflowId);
        task.setTaskDefName("task" + i);
        task.setStatus(Task.Status.IN_PROGRESS);
        tasks.add(task);
    }

    //Let's insert a retried task
    Task task = new Task();
    task.setScheduledTime(1L);
    task.setSeq(1);
    task.setTaskId(workflowId + "_t" + 2);
    task.setReferenceTaskName("t" + 2);
    task.setRetryCount(1);
    task.setWorkflowInstanceId(workflowId);
    task.setTaskDefName("task" + 2);
    task.setStatus(Task.Status.IN_PROGRESS);
    tasks.add(task);

    //Duplicate task!
    task = new Task();
    task.setScheduledTime(1L);
    task.setSeq(1);
    task.setTaskId(workflowId + "_t" + 1);
    task.setReferenceTaskName("t" + 1);
    task.setRetryCount(0);
    task.setWorkflowInstanceId(workflowId);
    task.setTaskDefName("task" + 1);
    task.setStatus(Task.Status.IN_PROGRESS);
    tasks.add(task);

    List<Task> created = dao.createTasks(tasks);
    assertEquals(tasks.size() - 1, created.size()); //1 less

    Set<String> srcIds = tasks.stream().map(t -> t.getReferenceTaskName() + "." + t.getRetryCount())
            .collect(Collectors.toSet());
    Set<String> createdIds = created.stream().map(t -> t.getReferenceTaskName() + "." + t.getRetryCount())
            .collect(Collectors.toSet());

    assertEquals(srcIds, createdIds);

    List<Task> pending = dao.getPendingTasksByWorkflow("task0", workflowId);
    assertNotNull(pending);
    assertEquals(1, pending.size());
    assertTrue(EqualsBuilder.reflectionEquals(tasks.get(0), pending.get(0)));

    List<Task> found = dao.getTasks(tasks.get(0).getTaskDefName(), null, 1);
    assertNotNull(found);
    assertEquals(1, found.size());
    assertTrue(EqualsBuilder.reflectionEquals(tasks.get(0), found.get(0)));
}

From source file:com.ikanow.aleph2.core.shared.services.MultiDataService.java

/** User c'tor - standard case
 * @param bucket
 * @param context
 * @param maybe_get_buffer_name
 */
protected MultiDataService(final DataBucketBean bucket, final IServiceContext context,
        final Optional<Function<IGenericDataService, Optional<String>>> maybe_get_storage_type,
        final Optional<Function<IGenericDataService, Optional<String>>> maybe_get_buffer_name) {
    // Insert or overwrite mode:
    _doc_write_mode = getWriteMode(bucket);

    _services = DataServiceUtils.selectDataServices(bucket.data_schema(), context);

    _services.asMap().entrySet().stream().forEach(kv -> {
        final Set<String> vals = kv.getValue().stream().collect(Collectors.toSet());
        // (the order doesn't really matter here, so just to "look" sensible:)
        if (vals.contains(DataSchemaBean.SearchIndexSchemaBean.name)) {
            Tuple2<IDataWriteService<JsonNode>, IDataWriteService.IBatchSubservice<JsonNode>> t2 = getWriters(
                    bucket, kv.getKey(), maybe_get_storage_type, maybe_get_buffer_name);
            _crud_index_service = t2._1();
            _batch_index_service = t2._2();
            storeWriters(t2, vals);
        } else if (vals.contains(DataSchemaBean.DocumentSchemaBean.name)) {
            Tuple2<IDataWriteService<JsonNode>, IDataWriteService.IBatchSubservice<JsonNode>> t2 = getWriters(
                    bucket, kv.getKey(), maybe_get_storage_type, maybe_get_buffer_name);
            _crud_doc_service = t2._1();
            _batch_doc_service = t2._2();
            storeWriters(t2, vals);
        } else if (vals.contains(DataSchemaBean.DataWarehouseSchemaBean.name)) {
            Tuple2<IDataWriteService<JsonNode>, IDataWriteService.IBatchSubservice<JsonNode>> t2 = getWriters(
                    bucket, kv.getKey(), maybe_get_storage_type, maybe_get_buffer_name);
            _crud_data_warehouse_service = t2._1();
            _batch_data_warehouse_service = t2._2();
            storeWriters(t2, vals);
        } else if (vals.contains(DataSchemaBean.GraphSchemaBean.name)) {
            Tuple2<IDataWriteService<JsonNode>, IDataWriteService.IBatchSubservice<JsonNode>> t2 = getWriters(
                    bucket, kv.getKey(), maybe_get_storage_type, maybe_get_buffer_name);
            _crud_graph_service = t2._1();
            _batch_graph_service = t2._2();
            storeWriters(t2, vals);
        } else if (vals.contains(DataSchemaBean.ColumnarSchemaBean.name)) {
            Tuple2<IDataWriteService<JsonNode>, IDataWriteService.IBatchSubservice<JsonNode>> t2 = getWriters(
                    bucket, kv.getKey(), maybe_get_storage_type, maybe_get_buffer_name);
            _crud_columnar_service = t2._1();
            _batch_columnar_service = t2._2();
            storeWriters(t2, vals);
        } else if (vals.contains(DataSchemaBean.TemporalSchemaBean.name)) {
            Tuple2<IDataWriteService<JsonNode>, IDataWriteService.IBatchSubservice<JsonNode>> t2 = getWriters(
                    bucket, kv.getKey(), maybe_get_storage_type, maybe_get_buffer_name);
            _crud_temporal_service = t2._1();
            _batch_temporal_service = t2._2();
            storeWriters(t2, vals);
        }
        if (vals.contains(DataSchemaBean.StorageSchemaBean.name)) { // (note storage is a bit different, fix the "processed mode")
            Tuple2<IDataWriteService<JsonNode>, IDataWriteService.IBatchSubservice<JsonNode>> t2 = getWriters(
                    bucket, kv.getKey(), maybe_get_storage_type.map(Optional::of).orElseGet(() -> {
                        return Optional
                                .of(__ -> Optional.of(IStorageService.StorageStage.processed.toString()));
                    }), maybe_get_buffer_name);
            _crud_storage_service = t2._1();
            _batch_storage_service = t2._2();
            storeWriters(t2, vals);
        }
    });
}

From source file:io.gravitee.repository.mongodb.management.MongoApiRepository.java

private Set<Api> mapApis(Collection<ApiMongo> apis) {
    return apis.stream().map(this::mapApi).collect(Collectors.toSet());
}

From source file:ddf.catalog.metacard.duplication.DuplicationValidator.java

private ValidationViolation reportDuplicates(final Metacard metacard, String[] attributeNames,
        ValidationViolation.Severity severity) {

    Set<String> duplicates = new HashSet<>();
    ValidationViolation violation = null;

    final Set<String> uniqueAttributeNames = Stream.of(attributeNames)
            .filter(attribute -> metacard.getAttribute(attribute) != null).collect(Collectors.toSet());
    final Set<Attribute> uniqueAttributes = uniqueAttributeNames.stream()
            .map(attribute -> metacard.getAttribute(attribute)).collect(Collectors.toSet());
    if (!uniqueAttributes.isEmpty()) {
        LOGGER.debug("Checking for duplicates for id {} against attributes [{}]", metacard.getId(),
                collectionToString(uniqueAttributeNames));

        SourceResponse response = query(uniqueAttributes);
        if (response != null) {
            response.getResults().forEach(result -> duplicates.add(result.getMetacard().getId()));
        }
        if (!duplicates.isEmpty()) {

            violation = createViolation(uniqueAttributeNames, duplicates, severity);
            LOGGER.debug(violation.getMessage());
        }
    }
    return violation;
}

From source file:com.devicehive.websockets.handlers.NotificationHandlers.java

/**
 * Implementation of the <a href="http://www.devicehive.com/restful#WsReference/Client/notificationunsubscribe">
 * WebSocket API: Client: notification/unsubscribe</a> Unsubscribes from device notifications.
 *
 * @param session Current session
 * @return Json object with the following structure <code> { "action": {string}, "status": {string}, "requestId":
 * {object} } </code>
 */
@PreAuthorize("isAuthenticated() and hasPermission(null, 'GET_DEVICE_NOTIFICATION')")
public WebSocketResponse processNotificationUnsubscribe(JsonObject request, WebSocketSession session) {
    HivePrincipal principal = (HivePrincipal) SecurityContextHolder.getContext().getAuthentication()
            .getPrincipal();
    Optional<String> subId = Optional.ofNullable(request.get(SUBSCRIPTION_ID)).map(s -> {
        try {
            return s.getAsString();
        } catch (UnsupportedOperationException e) {
            logger.error("Subscription Id is null");
            return StringUtils.EMPTY;
        }
    });
    Set<String> deviceGuids = gson.fromJson(request.get(DEVICE_GUIDS), JsonTypes.STRING_SET_TYPE);
    logger.debug("notification/unsubscribe action. Session {} ", session.getId());
    if (!subId.isPresent() && deviceGuids == null) {
        List<DeviceVO> actualDevices = deviceService
                .list(null, null, null, null, null, null, null, true, null, null, principal).join();
        deviceGuids = actualDevices.stream().map(DeviceVO::getGuid).collect(Collectors.toSet());
        notificationService.unsubscribe(null, deviceGuids);
    } else if (subId.isPresent()) {
        notificationService.unsubscribe(subId.get(), deviceGuids);
    } else {
        notificationService.unsubscribe(null, deviceGuids);
    }
    logger.debug("notification/unsubscribe completed for session {}", session.getId());

    ((CopyOnWriteArraySet) session.getAttributes().get(SUBSCSRIPTION_SET_NAME)).remove(subId);
    return new WebSocketResponse();
}

From source file:com.evolveum.midpoint.model.impl.lens.EvaluationOrderImpl.java

@Override
public Collection<QName> getExtraRelations() {
    return orderMap.entrySet().stream()
            .filter(e -> !ObjectTypeUtil.isMembershipRelation(e.getKey())
                    && !ObjectTypeUtil.isDelegationRelation(e.getKey()) && e.getValue() > 0)
            .map(e -> e.getKey()).collect(Collectors.toSet());
}

From source file:com.epam.dlab.backendapi.service.impl.LibraryServiceImpl.java

@SuppressWarnings("unchecked")
private Document getLibsOfActiveComputationalResources(Document document) {
    Document computationalLibs = (Document) document.get(ExploratoryLibDAO.COMPUTATIONAL_LIBS);

    if (document.get(ExploratoryDAO.COMPUTATIONAL_RESOURCES) != null) {
        List<Document> computationalResources = (List<Document>) document
                .get(ExploratoryDAO.COMPUTATIONAL_RESOURCES);

        Set<String> terminated = computationalResources.stream().filter(
                doc -> doc.getString(BaseDAO.STATUS).equalsIgnoreCase(UserInstanceStatus.TERMINATED.toString()))
                .map(doc -> doc.getString("computational_name")).collect(Collectors.toSet());

        terminated.forEach(computationalLibs::remove);
    }

    return computationalLibs;
}

From source file:net.dv8tion.jda.core.managers.GuildController.java

/**
 * Creates a new {@link net.dv8tion.jda.core.entities.Emote Emote} in this Guild.
 * <br>If one or more Roles are specified the new Emote will only be available to Members with any of the specified Roles (see {@link Member#canInteract(Emote)})
 * <br>For this to be successful, the logged in account has to have the {@link net.dv8tion.jda.core.Permission#MANAGE_EMOTES MANAGE_EMOTES} Permission.
 *
 * <p><b><u>Unicode emojis are not included as {@link net.dv8tion.jda.core.entities.Emote Emote}!</u></b>
 * <br>Roles may only be available for whitelisted accounts.
 *
 * <p>Possible {@link net.dv8tion.jda.core.requests.ErrorResponse ErrorResponses} caused by
 * the returned {@link net.dv8tion.jda.core.requests.RestAction RestAction} include the following:
 * <ul>
 *     <li>{@link net.dv8tion.jda.core.requests.ErrorResponse#MISSING_PERMISSIONS MISSING_PERMISSIONS}
 *     <br>The emote could not be created due to a permission discrepancy</li>
 *
 *     <li>{@link net.dv8tion.jda.core.requests.ErrorResponse#MISSING_ACCESS MISSING_ACCESS}
 *     <br>We were removed from the Guild before finishing the task</li>
 * </ul>
 *
 * @param  name
 *         The name for the new Emote
 * @param  icon
 *         The {@link net.dv8tion.jda.core.entities.Icon} for the new Emote
 * @param  roles
 *         The {@link net.dv8tion.jda.core.entities.Role Roles} the new Emote should be restricted to
 *         <br>If no roles are provided the Emote will be available to all Members of this Guild
 *
 * @throws net.dv8tion.jda.core.exceptions.PermissionException
 *         If the logged in account does not have the {@link net.dv8tion.jda.core.Permission#MANAGE_EMOTES MANAGE_EMOTES} Permission
 * @throws net.dv8tion.jda.core.exceptions.GuildUnavailableException
 *         If the guild is temporarily not {@link net.dv8tion.jda.core.entities.Guild#isAvailable() available}
 * @throws net.dv8tion.jda.core.exceptions.AccountTypeException
 *         If the logged in account is not from {@link net.dv8tion.jda.core.AccountType#CLIENT AccountType.CLIENT}
 *
 * @return {@link net.dv8tion.jda.core.requests.restaction.AuditableRestAction AuditableRestAction} - Type: {@link net.dv8tion.jda.core.entities.Emote Emote}
 *         <br>The newly created Emote
 */
@CheckReturnValue
public AuditableRestAction<Emote> createEmote(String name, Icon icon, Role... roles) {
    checkAvailable();
    checkPermission(Permission.MANAGE_EMOTES);
    Checks.notNull(name, "emote name");
    Checks.notNull(icon, "emote icon");

    if (getJDA().getAccountType() != AccountType.CLIENT)
        throw new AccountTypeException(AccountType.CLIENT);

    JSONObject body = new JSONObject();
    body.put("name", name);
    body.put("image", icon.getEncoding());
    if (roles.length > 0) // making sure none of the provided roles are null before mapping them to the snowflake id
        body.put("roles",
                Stream.of(roles).filter(Objects::nonNull).map(ISnowflake::getId).collect(Collectors.toSet()));

    Route.CompiledRoute route = Route.Emotes.CREATE_EMOTE.compile(guild.getId());
    return new AuditableRestAction<Emote>(getJDA(), route, body) {
        @Override
        protected void handleResponse(Response response, Request<Emote> request) {
            if (response.isOk()) {
                JSONObject obj = response.getObject();
                final long id = obj.getLong("id");
                String name = obj.getString("name");
                EmoteImpl emote = new EmoteImpl(id, guild).setName(name);
                // managed is false by default, should always be false for emotes created by client accounts.

                JSONArray rolesArr = obj.getJSONArray("roles");
                Set<Role> roleSet = emote.getRoleSet();
                for (int i = 0; i < rolesArr.length(); i++) {
                    roleSet.add(guild.getRoleById(rolesArr.getString(i)));
                }

                // put emote into cache
                ((GuildImpl) guild).getEmoteMap().put(id, emote);

                request.onSuccess(emote);
            } else
                request.onFailure(response);
        }
    };
}