Example usage for java.util Map forEach

List of usage examples for java.util Map forEach

Introduction

On this page you can find usage examples for java.util Map forEach.

Prototype

default void forEach(BiConsumer<? super K, ? super V> action) 

Document

Performs the given action for each entry in this map until all entries have been processed or the action throws an exception.
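
Before looking at the real-world usages below, here is a minimal, self-contained sketch of the method. The class name and map contents are made up for illustration:

import java.util.LinkedHashMap;
import java.util.Map;

public class MapForEachDemo {
    public static void main(String[] args) {
        Map<String, Integer> ages = new LinkedHashMap<>();
        ages.put("alice", 34);
        ages.put("bob", 27);

        // The BiConsumer receives each key and value, in the map's
        // iteration order (insertion order for a LinkedHashMap).
        ages.forEach((name, age) -> System.out.println(name + " is " + age));
    }
}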

Usage

From source file:org.dataconservancy.packaging.tool.integration.PackageGenerationTest.java

@Test
public void transformTest() throws Exception {

    PackageState state = initializer.initialize(DCS_PROFILE);
    OpenedPackage opened = packager.createPackage(state, folder.getRoot());

    DomainProfileService profileService = profileServiceFactory
            .getProfileService(opened.getPackageState().getDomainObjectRDF());

    Map<Node, NodeTransform> toTransform = new HashMap<>();

    opened.getPackageTree().walk(node -> {

        /* Transform all Metadata to DI + File */
        if (node.getNodeType().getDomainTypes().contains(URI.create(NS_DCS_ONTOLOGY_BOM + "Metadata"))) {
            profileService.getNodeTransforms(node).stream()
                    .filter(xform -> xform.getResultNodeType().getDomainTypes()
                            .contains(URI.create(NS_DCS_ONTOLOGY_BOM + "File")))
                    .forEach(xform -> toTransform.put(node, xform));
        }
    });

    toTransform.forEach(profileService::transformNode);

    opened.getPackageState().setPackageTree(ipm2rdf.transformToRDF(opened.getPackageTree()));

    OpenedPackage openedAfterTransform = packager.createPackage(opened.getPackageState(), folder.getRoot());

    assertEquals(domainObjectSizes(opened.getPackageState()),
            domainObjectSizes(openedAfterTransform.getPackageState()));

}
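
The call toTransform.forEach(profileService::transformNode) above works because any two-argument method whose parameters match the key and value types can stand in for the BiConsumer. A minimal sketch of the same pattern, with a made-up print method:

import java.util.Map;

public class MethodRefDemo {

    // Any method taking (K, V) can back Map.forEach's BiConsumer,
    // just like profileService::transformNode above.
    static void print(String key, Integer value) {
        System.out.println(key + " -> " + value);
    }

    public static void main(String[] args) {
        Map<String, Integer> m = Map.of("a", 1, "b", 2);
        m.forEach(MethodRefDemo::print);
    }
}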

From source file:org.silverpeas.core.calendar.icalendar.ICal4JExchangeImportTest.java

/**
 * Centralization of verification.
 * <p>
 * The mechanism is the following: the first parameter is the name of the file that
 * contains the events to import, and the second one is the list of expected calendar
 * events. Each line starting with the '#' character is ignored.
 * </p>
 * @param fileNameOfImport the name of the file that contains the events to import.
 * @param expectedEvents the list of expected calendar events.
 * @param assertConsumer the consumer that asserts an actual result against the expected one.
 */
@SuppressWarnings({ "unchecked", "Duplicates" })
private void importAndVerifyResult(String fileNameOfImport, List<CalendarEvent> expectedEvents,
        BiConsumer<Pair<CalendarEvent, List<CalendarEventOccurrence>>, Pair<CalendarEvent, List<CalendarEventOccurrence>>> assertConsumer)
        throws ImportException {

    Map<String, Pair<CalendarEvent, List<CalendarEventOccurrence>>> result = new HashedMap<>();

    iCalendarImporter.imports(
            ImportDescriptor.withInputStream(new ByteArrayInputStream(
                    getFileContent(fileNameOfImport).getBytes(StandardCharsets.UTF_8))),
            events -> result.putAll(
                    events.collect(Collectors.toMap(p -> p.getLeft().getExternalId(), Function.identity()))));

    Map<String, Pair<CalendarEvent, List<CalendarEventOccurrence>>> expected = expectedEvents.stream().collect(
            Collectors.toMap(CalendarEvent::getExternalId, e -> Pair.of(e, e.getPersistedOccurrences())));

    assertThat("The expected list contains several event with same external id", expected.size(),
            is(expectedEvents.size()));

    assertThat(result.keySet(), containsInAnyOrder(expected.keySet().toArray()));

    result.forEach((i, actualResult) -> {
        Pair<CalendarEvent, List<CalendarEventOccurrence>> expectedResult = expected.get(i);
        assertConsumer.accept(actualResult, expectedResult);
    });
}

From source file:org.silverpeas.core.calendar.ical4j.ICal4JImporter.java

@Override
public void imports(final ImportDescriptor descriptor,
        final Consumer<Stream<Pair<CalendarEvent, List<CalendarEventOccurrence>>>> consumer)
        throws ImportException {
    try {
        PropertyFactoryRegistry propertyFactoryRegistry = new PropertyFactoryRegistry();
        propertyFactoryRegistry.register(HtmlProperty.PROPERTY_NAME, HtmlProperty.FACTORY);
        CalendarBuilder builder = new CalendarBuilder(CalendarParserFactory.getInstance().createParser(),
                propertyFactoryRegistry, new ParameterFactoryRegistry(),
                TimeZoneRegistryFactory.getInstance().createRegistry());
        Calendar calendar = builder.build(getCalendarInputStream(descriptor));
        if (calendar.getComponents().isEmpty()) {
            consumer.accept(Stream.empty());
            return;
        }
        calendar.validate();

        Mutable<ZoneId> zoneId = Mutable.of(ZoneOffset.systemDefault());

        Map<String, List<VEvent>> readEvents = new LinkedHashMap<>();
        calendar.getComponents().forEach(component -> {
            if (component instanceof VEvent) {
                VEvent vEvent = (VEvent) component;
                String vEventId = vEvent.getUid().getValue();
                List<VEvent> vEvents = readEvents.computeIfAbsent(vEventId, k -> new ArrayList<>());
                if (vEvent.getRecurrenceId() != null) {
                    vEvents.add(vEvent);
                } else {
                    vEvents.add(0, vEvent);
                }
            } else if (component instanceof VTimeZone) {
                VTimeZone vTimeZone = (VTimeZone) component;
                zoneId.set(toZoneId(vTimeZone.getTimeZoneId().getValue()));
            } else {
                SilverLogger.getLogger(this).debug("iCalendar component ''{0}'' is not handled",
                        component.getName());
            }
        });
        List<Pair<CalendarEvent, List<CalendarEventOccurrence>>> events = new ArrayList<>(readEvents.size());
        readEvents.forEach((vEventId, vEvents) -> {

            // For now, the following are not handled:
            // - the attendees
            // - triggers
            VEvent vEvent = vEvents.remove(0);
            CalendarEvent event = eventFromICalEvent(zoneId, vEventId, vEvent);

            // Occurrences
            List<CalendarEventOccurrence> occurrences = new ArrayList<>(vEvents.size());
            vEvents.forEach(v -> {
                CalendarEventOccurrence occurrence = occurrenceFromICalEvent(zoneId, event, v);
                occurrences.add(occurrence);
            });

            if (!event.isRecurrent() && !occurrences.isEmpty()) {
                SilverLogger.getLogger(this)
                        .warn("event with uuid {0} has no recurrence set whereas {1,choice, 1#one linked "
                                + "occurrence exists| 1<{1} linked occurrences exist}... Setting a default "
                                + "recurrence (RRULE:FREQ=DAILY;COUNT=1) to get correct data for Silverpeas",
                                event.getExternalId(), occurrences.size());
                event.recur(Recurrence.every(1, TimeUnit.DAY).until(1));
            }

            // New event to perform
            events.add(Pair.of(event, occurrences));
        });

        // The events will be performed by the caller
        consumer.accept(events.stream());

    } catch (IOException | ParserException e) {
        throw new ImportException(e);
    }
}
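
The importer above groups VEvents by UID with computeIfAbsent and then drains the groups with Map.forEach. Here is the same idiom in isolation, on made-up data:

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class GroupingDemo {
    public static void main(String[] args) {
        List<String> words = List.of("apple", "avocado", "banana", "blueberry");

        // computeIfAbsent creates the bucket on first sight of each key,
        // like readEvents.computeIfAbsent(vEventId, k -> new ArrayList<>()).
        Map<Character, List<String>> byLetter = new LinkedHashMap<>();
        for (String w : words) {
            byLetter.computeIfAbsent(w.charAt(0), k -> new ArrayList<>()).add(w);
        }

        // Then consume each (key, bucket) pair, as readEvents.forEach(...) does.
        byLetter.forEach((letter, group) -> System.out.println(letter + ": " + group));
    }
}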

From source file:dhbw.clippinggorilla.objects.user.UserUtils.java

/**
 * Creates a new user profile and saves it to the DB
 *
 * @param u The user who owns the new interest profile
 * @param name The name of the interest profile
 * @param sources The selected sources
 * @param tags The selected tags
 * @param categories The selected categories
 * @return the created profile, or null if something went wrong
 */
public static InterestProfile createNewProfile(User u, String name, Map<Source, Boolean> sources,
        Set<String> tags, Map<Category, Boolean> categories) {
    //TODO: profile validation here!
    //Validation whether the profile already exists
    if (!InterestProfileUtils.checkNameUnique(u, name)) {
        return null;
    }
    try {
        //Insert profile data to user_profile table 
        String sql = "INSERT INTO " + Tables.USER_PROFILE + " (" + Columns.USER_ID + ", " + Columns.NAME
                + ") VALUES (?, ?)";
        PreparedStatement statement = Database.getConnection().prepareStatement(sql);
        statement.setInt(1, u.getId());
        statement.setString(2, name);
        statement.executeUpdate();

        //Get profile ID from table
        sql = "SELECT " + Columns.ID + " FROM " + Tables.USER_PROFILE + " WHERE " + Columns.USER_ID
                + " = ? AND " + Columns.NAME + " = ?";
        statement = Database.getConnection().prepareStatement(sql);
        statement.setInt(1, u.getId());
        statement.setString(2, name);
        ResultSet result = statement.executeQuery();
        result.next();
        int profileId = result.getInt(Columns.ID);

        //Insert sources
        sources.forEach((lambda_source, bool) -> {
            if (bool) {
                String sourceId = lambda_source.getId();
                try {
                    String lambda_sql = "INSERT INTO " + Tables.USER_PROFILE_SOURCE + " (" + Columns.PROFILE_ID
                            + ", " + Columns.SOURCE + ") VALUES (?, ?)";
                    PreparedStatement lambda_statement = Database.getConnection().prepareStatement(lambda_sql);
                    lambda_statement.setInt(1, profileId);
                    lambda_statement.setString(2, sourceId);
                    lambda_statement.executeUpdate();
                } catch (SQLException ex) {
                    Log.warning("Profile source insertion failed", ex);
                }
            }
        });

        //Insert Tags
        tags.forEach((lambda_tags) -> {
            try {
                String lambda_sql = "INSERT INTO " + Tables.USER_PROFILE_TAG + " (" + Columns.PROFILE_ID + ", "
                        + Columns.TAG + ") VALUES (?, ?)";
                PreparedStatement lambda_statement = Database.getConnection().prepareStatement(lambda_sql);
                lambda_statement.setInt(1, profileId);
                lambda_statement.setString(2, lambda_tags);
                lambda_statement.executeUpdate();
            } catch (SQLException ex) {
                Log.warning("Profile source insertion failed", ex);
            }
        });

        //Insert categories
        categories.forEach((category, bool) -> {
            if (bool) {
                String categoryId = category.getId();
                try {
                    String lambda_sql = "INSERT INTO " + Tables.USER_PROFILE_CATEGORY + " ("
                            + Columns.PROFILE_ID + ", " + Columns.CATEGORY + ") VALUES (?, ?)";
                    PreparedStatement lambda_statement = Database.getConnection().prepareStatement(lambda_sql);
                    lambda_statement.setInt(1, profileId);
                    lambda_statement.setString(2, categoryId);
                    lambda_statement.executeUpdate();
                } catch (SQLException ex) {
                    Log.warning("Profile source insertion failed", ex);
                }
            }
        });
        return InterestProfileUtils.getInterestProfile(profileId);
    } catch (SQLException ex) {
        Log.warning("Insertion of new profile failed", ex);
        return null;
    }
}

From source file:net.dv8tion.jda.managers.RoleManager.java

/**
 * Moves this Role up or down in the list of Roles (changing the position attribute).
 * This change takes effect immediately!
 *
 * @param newPosition
 *      the new position of the Role in the list
 * @return
 *      this
 */
public RoleManager move(int newPosition) {
    checkPermission(Permission.MANAGE_ROLES);
    checkPosition();
    int maxRolePosition = role.getGuild().getRolesForUser(role.getJDA().getSelfInfo()).get(0).getPosition();
    if (newPosition >= maxRolePosition)
        throw new PermissionException(
                "Cannot move to a position equal to or higher than the highest role that you have access to.");

    if (newPosition < 0 || newPosition == role.getPosition())
        return this;

    Map<Integer, Role> newPositions = new HashMap<>();
    Map<Integer, Role> currentPositions = role.getGuild().getRoles().stream()
            .collect(Collectors.toMap(role -> role.getPosition(), role -> role));

    //Remove the @everyone role from our working set.
    currentPositions.remove(-1);
    int searchIndex = newPosition;
    int index = 0;
    for (Role r : currentPositions.values()) {
        if (r == role)
            continue;
        if (index == searchIndex) {
            newPositions.put(index, role);
            index++;
        }
        newPositions.put(index, r);
        index++;
    }
    //If the role was moved to the very top, this will make sure it is properly handled.
    if (!newPositions.containsValue(role))
        newPositions.put(newPosition, role);

    for (int i = 0; i < newPositions.size(); i++) {
        if (currentPositions.get(i) == newPositions.get(i))
            newPositions.remove(i);
    }

    JSONArray rolePositions = new JSONArray();
    newPositions.forEach((pos, r) -> {
        rolePositions.put(new JSONObject().put("id", r.getId()).put("position", pos + 1));
    });
    ((JDAImpl) role.getJDA()).getRequester().patch(
            Requester.DISCORD_API_PREFIX + "guilds/" + role.getGuild().getId() + "/roles", rolePositions);
    return this;
}

From source file:com.netflix.genie.agent.execution.services.impl.LaunchJobServiceImpl.java

/**
 * {@inheritDoc}
 */
@Override
public void launchProcess(final File jobDirectory, final Map<String, String> environmentVariablesMap,
        final List<String> commandLine, final boolean interactive) throws JobLaunchException {

    if (!launched.compareAndSet(false, true)) {
        throw new IllegalStateException("Job already launched");
    }

    final ProcessBuilder processBuilder = new ProcessBuilder();

    // Validate job running directory
    if (jobDirectory == null) {
        throw new JobLaunchException("Job directory is null");
    } else if (!jobDirectory.exists()) {
        throw new JobLaunchException("Job directory does not exist: " + jobDirectory);
    } else if (!jobDirectory.isDirectory()) {
        throw new JobLaunchException("Job directory is not a directory: " + jobDirectory);
    } else if (!jobDirectory.canWrite()) {
        throw new JobLaunchException("Job directory is not writable: " + jobDirectory);
    }

    final Map<String, String> currentEnvironmentVariables = processBuilder.environment();

    if (environmentVariablesMap == null) {
        throw new JobLaunchException("Job environment variables map is null");
    }

    // Merge job environment variables into process inherited environment
    environmentVariablesMap.forEach((key, value) -> {
        final String replacedValue = currentEnvironmentVariables.put(key, value);
        if (StringUtils.isBlank(replacedValue)) {
            log.debug("Added job environment variable: {}={}", key, value);
        } else if (!replacedValue.equals(value)) {
            log.debug("Set job environment variable: {}={} (previous value: {})", key, value, replacedValue);
        }
    });

    // Validate arguments
    if (commandLine == null) {
        throw new JobLaunchException("Job command-line arguments is null");
    } else if (commandLine.isEmpty()) {
        throw new JobLaunchException("Job command-line arguments are empty");
    }

    // Configure arguments
    log.info("Job command-line: {}", Arrays.toString(commandLine.toArray()));

    final List<String> expandedCommandLine;
    try {
        expandedCommandLine = expandCommandLineVariables(commandLine,
                Collections.unmodifiableMap(currentEnvironmentVariables));
    } catch (final EnvUtils.VariableSubstitutionException e) {
        throw new JobLaunchException("Job command-line arguments variables could not be expanded");
    }

    if (!commandLine.equals(expandedCommandLine)) {
        log.info("Job command-line with variables expanded: {}",
                Arrays.toString(expandedCommandLine.toArray()));
    }

    processBuilder.command(expandedCommandLine);

    if (interactive) {
        processBuilder.inheritIO();
    } else {
        processBuilder.redirectError(PathUtils.jobStdErrPath(jobDirectory).toFile());
        processBuilder.redirectOutput(PathUtils.jobStdOutPath(jobDirectory).toFile());
    }

    if (killed.get()) {
        log.info("Job aborted, skipping launch");
    } else {
        log.info("Launching job");
        try {
            processReference.set(processBuilder.start());
        } catch (final IOException | SecurityException e) {
            throw new JobLaunchException("Failed to launch job: ", e);
        }
        log.info("Process launched (pid: {})", getPid(processReference.get()));
    }
}
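
Note how the forEach body above uses the return value of Map.put, which is the previous mapping for the key, to tell additions apart from overwrites. A minimal sketch of that trick, with made-up variable names:

import java.util.HashMap;
import java.util.Map;

public class MergeDemo {
    public static void main(String[] args) {
        Map<String, String> target = new HashMap<>();
        target.put("PATH", "/usr/bin");

        Map<String, String> overrides = Map.of("PATH", "/opt/bin", "HOME", "/home/job");

        overrides.forEach((key, value) -> {
            // put returns the previous value, or null if the key was absent.
            String previous = target.put(key, value);
            if (previous == null) {
                System.out.println("Added " + key + "=" + value);
            } else if (!previous.equals(value)) {
                System.out.println("Replaced " + key + ": " + previous + " -> " + value);
            }
        });
    }
}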

From source file:org.nanoframework.orm.jedis.cluster.RedisClusterClientImpl.java

@Override
public boolean hmset(final String key, final Map<String, Object> map) {
    Assert.hasText(key);
    Assert.notEmpty(map);
    try {
        final Map<String, String> newMap = Maps.newHashMap();
        map.forEach((field, value) -> newMap.put(field, toJSONString(value)));
        return isOK(cluster.hmset(key, newMap));
    } catch (final Throwable e) {
        throw new RedisClientException(e.getMessage(), e);
    }
}
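
The forEach-plus-put idiom above copies a map while transforming its values. An equivalent stream-based sketch; the serialize method here is a made-up stand-in for toJSONString:

import java.util.Map;
import java.util.stream.Collectors;

public class TransformValuesDemo {
    public static void main(String[] args) {
        Map<String, Object> map = Map.of("a", 1, "b", true);

        // Same result as map.forEach((field, value) -> newMap.put(field, serialize(value)))
        Map<String, String> newMap = map.entrySet().stream()
                .collect(Collectors.toMap(Map.Entry::getKey, e -> serialize(e.getValue())));

        System.out.println(newMap);
    }

    // Hypothetical stand-in for a JSON serializer such as toJSONString.
    static String serialize(Object value) {
        return String.valueOf(value);
    }
}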

From source file:org.apache.samza.storage.ContainerStorageManager.java

/**
 * For each standby task, we remove its changeLogSSPs from the changelogSSP map and add them to the task's taskSideInputSSPs.
 * The task's sideInputManager will consume and restore these as well.
 *
 * @param containerModel the container's model
 * @param changelogSystemStreams the passed in set of changelogSystemStreams
 * @return A map of changeLogSSP to storeName across all tasks, assuming no two stores have the same changelogSSP
 */
private Map<String, SystemStream> getChangelogSystemStreams(ContainerModel containerModel,
        Map<String, SystemStream> changelogSystemStreams) {

    if (MapUtils.invertMap(changelogSystemStreams).size() != changelogSystemStreams.size()) {
        throw new SamzaException("Two stores cannot have the same changelog system-stream");
    }

    Map<SystemStreamPartition, String> changelogSSPToStore = new HashMap<>();
    changelogSystemStreams
            .forEach((storeName, systemStream) -> containerModel.getTasks().forEach((taskName, taskModel) -> {
                changelogSSPToStore.put(
                        new SystemStreamPartition(systemStream, taskModel.getChangelogPartition()), storeName);
            }));

    getTasks(containerModel, TaskMode.Standby).forEach((taskName, taskModel) -> {
        changelogSystemStreams.forEach((storeName, systemStream) -> {
            SystemStreamPartition ssp = new SystemStreamPartition(systemStream,
                    taskModel.getChangelogPartition());
            changelogSSPToStore.remove(ssp);
            this.taskSideInputSSPs.putIfAbsent(taskName, new HashMap<>());
            this.sideInputSystemStreams.put(storeName, Collections.singleton(ssp.getSystemStream()));
            this.taskSideInputSSPs.get(taskName).put(storeName, Collections.singleton(ssp));
        });
    });

    // changelogSystemStreams correspond only to active tasks (since those of standby-tasks moved to side inputs above)
    return MapUtils.invertMap(changelogSSPToStore).entrySet().stream()
            .collect(Collectors.toMap(Map.Entry::getKey, x -> x.getValue().getSystemStream()));
}

From source file:org.hawkular.apm.server.elasticsearch.AnalyticsServiceElasticsearch.java

@Override
public List<TransactionInfo> getTransactionInfo(String tenantId, Criteria criteria) {
    String index = client.getIndex(tenantId);
    if (!refresh(index)) {
        return null;
    }

    if (criteria.getTransaction() != null) {
        // Copy criteria and clear the transaction field to ensure all
        // transactions are returned related to the other filter criteria
        criteria = new Criteria(criteria);
        criteria.setTransaction(null);
    }

    TermsBuilder cardinalityBuilder = AggregationBuilders.terms("cardinality")
            .field(ElasticsearchUtil.TRANSACTION_FIELD).order(Order.aggregation("_count", false))
            .size(criteria.getMaxResponseSize());

    BoolQueryBuilder query = buildQuery(criteria, ElasticsearchUtil.TRANSACTION_FIELD, Trace.class);
    SearchRequestBuilder request = getBaseSearchRequestBuilder(TRACE_TYPE, index, criteria, query, 0)
            .addAggregation(cardinalityBuilder);

    SearchResponse response = getSearchResponse(request);

    Terms terms = response.getAggregations().get("cardinality");
    List<TransactionInfo> txnInfo = terms.getBuckets().stream()
            .map(AnalyticsServiceElasticsearch::toTransactionInfo).collect(Collectors.toList());

    // If config service available, check if there is a transaction config for the list of
    // transactions being returned
    if (configService != null) {
        Map<String, TransactionConfig> btcs = configService.getTransactions(tenantId, 0);

        txnInfo.forEach(ti -> {
            TransactionConfig btc = btcs.get(ti.getName());
            if (btc != null) {
                ti.setLevel(btc.getLevel());
                ti.setStaticConfig(true);
                btcs.remove(ti.getName());
            }
        });

        // Add entry for remaining transaction configs
        btcs.forEach((k, v) -> txnInfo
                .add(new TransactionInfo().setName(k).setLevel(v.getLevel()).setStaticConfig(true)));
    }

    // Sort the list by transaction name
    txnInfo.sort(Comparator.comparing(TransactionInfo::getName));

    return txnInfo;
}

From source file:org.nanoframework.orm.jedis.cluster.RedisClusterClientImpl.java

@Override
public long zadd(final String key, final Map<Object, Double> values) {
    Assert.hasText(key);
    Assert.notEmpty(values);
    try {
        final Map<String, Double> scoreMembers = Maps.newHashMap();
        values.forEach((member, score) -> scoreMembers.put(toJSONString(member), score));
        return cluster.zadd(key, scoreMembers);
    } catch (final Throwable e) {
        throw new RedisClientException(e.getMessage(), e);
    }
}