Example usage for java.util Set forEach

List of usage examples for java.util Set forEach

Introduction

In this page you can find the example usage for java.util Set forEach.

Prototype

default void forEach(Consumer<? super T> action) 

Source Link

Document

Performs the given action for each element of the Iterable until all elements have been processed or the action throws an exception.

Usage

From source file:org.fcrepo.kernel.modeshape.services.BatchServiceImpl.java

/**
 * Every REAP_INTERVAL milliseconds, check for expired sessions. If the
 * tx is expired, roll it back and remove it from the registry.
 *///from   ww  w.  j a  v a  2  s. c  o m
@Override
@Scheduled(fixedRate = REAP_INTERVAL)
public void removeExpired() {
    final Set<String> reapable = sessions.entrySet().stream().filter(e -> e.getValue().getExpires().isPresent())
            .filter(e -> e.getValue().getExpires().get().isBefore(now())).map(Map.Entry::getKey)
            .collect(toSet());
    reapable.forEach(key -> {
        final FedoraSession s = sessions.get(key);
        if (s != null) {
            try {
                s.expire();
            } catch (final RepositoryRuntimeException e) {
                LOGGER.error("Got exception rolling back expired session {}: {}", s, e.getMessage());
            }
        }
        sessions.remove(key);
    });
}

From source file:org.apache.bookkeeper.common.conf.ConfigDef.java

private void save(PrintStream stream) {
    for (ConfigKeyGroup group : groups) {
        writeConfigKeyGroup(stream, group);
        stream.println();//from   ww w.  j  ava 2s .  c o  m
        Set<ConfigKey> groupKeys = settings.getOrDefault(group.name(), Collections.emptySet());
        groupKeys.forEach(key -> {
            writeConfigKey(stream, key);
            stream.println();
        });
    }
}

From source file:org.apache.jena.fuseki.build.FusekiConfig.java

/** Convenience operation to populate a {@link DataService} with the conventional default services. */
public static void populateStdServices(DataService dataService, boolean allowUpdate) {
    Set<Endpoint> endpoints = new HashSet<>();

    accEndpoint(endpoints, Operation.Query, "query");
    accEndpoint(endpoints, Operation.Query, "sparql");
    if (!allowUpdate) {
        accEndpoint(endpoints, Operation.GSP_R, "data");
    } else {/*from   w  ww .  j a v  a  2s.co  m*/
        accEndpoint(endpoints, Operation.GSP_RW, "data");
        accEndpoint(endpoints, Operation.GSP_R, "get");
        accEndpoint(endpoints, Operation.Update, "update");
        accEndpoint(endpoints, Operation.Upload, "upload");
    }
    // Dataset
    accEndpoint(endpoints, Operation.Query);
    accEndpoint(endpoints, Operation.GSP_R);
    if (allowUpdate) {
        accEndpoint(endpoints, Operation.Update);
        accEndpoint(endpoints, Operation.GSP_RW);
    }

    // Add to DataService.
    endpoints.forEach(dataService::addEndpoint);
}

From source file:org.apache.samza.system.hdfs.HdfsSystemConsumer.java

/**
 * {@inheritDoc}//from   w  ww.  j a  v  a 2 s .  c om
 */
@Override
public Map<SystemStreamPartition, List<IncomingMessageEnvelope>> poll(
        Set<SystemStreamPartition> systemStreamPartitions, long timeout) throws InterruptedException {
    systemStreamPartitions.forEach(systemStreamPartition -> {
        Future status = readerRunnableStatus.get(systemStreamPartition);
        if (status.isDone()) {
            try {
                status.get();
            } catch (ExecutionException | InterruptedException e) {
                MultiFileHdfsReader reader = readers.get(systemStreamPartition);
                LOG.warn(String.format("Detect failure in ReaderRunnable for ssp: %s. Try to reconnect now.",
                        systemStreamPartition), e);
                reader.reconnect();
                readerRunnableStatus.put(systemStreamPartition,
                        executorService.submit(new ReaderRunnable(reader)));
            }
        }
    });
    return super.poll(systemStreamPartitions, timeout);
}

From source file:co.com.carpco.altablero.hibernate.bll.ClassRoomBll.java

private void initializeCache() {
    CachingProvider cachingProvider = Caching.getCachingProvider();
    CacheManager cacheManager = cachingProvider.getCacheManager();

    MutableConfiguration<Integer, Set> config = new MutableConfiguration<Integer, Set>()
            .setTypes(Integer.class, Set.class).setExpiryPolicyFactory(AccessedExpiryPolicy.factoryOf(ONE_DAY))
            .setWriteThrough(true).setReadThrough(true)
            .setCacheLoaderFactory(new Factory<CacheLoader<Integer, Set>>() {

                @Override/*  w ww .  ja  va2 s  .  c  om*/
                public CacheLoader<Integer, Set> create() {
                    return new CacheLoader<Integer, Set>() {

                        @Override
                        public Set load(Integer key) throws CacheLoaderException {
                            final Set<ClassRoomBO> classRoomBOSet = new HashSet<>();
                            Set<BzClassRoom> bzClassRoomSet = classRoomDao.getClassRoomSet(key);
                            if (bzClassRoomSet != null && bzClassRoomSet.size() > 0) {
                                bzClassRoomSet.forEach((BzClassRoom bzClassRoom) -> classRoomBOSet
                                        .add(new ClassRoomBO(bzClassRoom)));
                            }
                            return classRoomBOSet;
                        }

                        @Override
                        public Map<Integer, Set> loadAll(Iterable<? extends Integer> itrbl)
                                throws CacheLoaderException {
                            Map<Integer, Set> answer = new HashMap<>();
                            itrbl.forEach((Integer k) -> answer.put(k, load(k)));

                            return answer;
                        }
                    };
                }
            }).setCacheWriterFactory(new Factory<CacheWriter<Integer, Set>>() {
                @Override
                public CacheWriter<Integer, Set> create() {
                    CacheWriter<Integer, Set> writer = new CacheWriter<Integer, Set>() {

                        @Override
                        public void write(Cache.Entry<? extends Integer, ? extends Set> entry)
                                throws CacheWriterException {

                        }

                        @Override
                        public void writeAll(Collection<Cache.Entry<? extends Integer, ? extends Set>> clctn)
                                throws CacheWriterException {

                        }

                        @Override
                        public void delete(Object o) throws CacheWriterException {

                        }

                        @Override
                        public void deleteAll(Collection<?> clctn) throws CacheWriterException {

                        }

                    };
                    return writer;
                }
            }).setStatisticsEnabled(true);

    cache = cacheManager.createCache("classRoomXYearCache", config);
    config.isReadThrough();
}

From source file:org.apache.metron.stellar.dsl.functions.HashFunctionsTest.java

@Test
public void allAlgorithmsForMessageDigestShouldBeAbleToHash() throws Exception {
    final String valueToHash = "My value to hash";
    final Set<String> algorithms = Security.getAlgorithms("MessageDigest");

    algorithms.forEach(algorithm -> {
        try {// w  ww  .j  av  a2 s.  c om
            final MessageDigest expected = MessageDigest.getInstance(algorithm);
            expected.update(valueToHash.getBytes(StandardCharsets.UTF_8));

            assertEquals(expectedHexString(expected), hash.apply(Arrays.asList(valueToHash, algorithm)));
        } catch (NoSuchAlgorithmException e) {
            throw new RuntimeException(e);
        }
    });
}

From source file:org.apache.metron.stellar.dsl.functions.HashFunctionsTest.java

@Test
public void allAlgorithmsForMessageDigestShouldBeAbleToHashDirectStellarCall() throws Exception {
    final String valueToHash = "My value to hash";
    final Set<String> algorithms = Security.getAlgorithms("MessageDigest");

    algorithms.forEach(algorithm -> {
        try {//  w w  w . j av  a2 s .c o  m
            final Object actual = run("HASH('" + valueToHash + "', '" + algorithm + "')",
                    Collections.emptyMap());

            final MessageDigest expected = MessageDigest.getInstance(algorithm);
            expected.update(valueToHash.getBytes(StandardCharsets.UTF_8));

            assertEquals(expectedHexString(expected), actual);
        } catch (NoSuchAlgorithmException e) {
            throw new RuntimeException(e);
        }
    });
}

From source file:org.wso2.extension.siddhi.execution.var.models.parametric.ParametricVaRCalculator.java

/**
 * @param portfolio/*from w  ww. j a  v  a 2s .co  m*/
 * @return Get weightage matrix for a given portfolio
 */
private double[][] getWeightageMatrix(Portfolio portfolio) {
    Set<String> keys = portfolio.getAssetListKeySet();
    int numberOfAssets = keys.size();
    double[][] weightageMatrix = new double[1][numberOfAssets];

    final int[] i = { 0 };
    keys.forEach((symbol) -> {
        Asset asset = getAssetPool().get(symbol);
        weightageMatrix[0][i[0]] = asset.getCurrentStockPrice() * portfolio.getCurrentAssetQuantities(symbol)
                / portfolio.getTotalPortfolioValue();
        i[0]++;
    });

    return weightageMatrix;
}

From source file:org.artifactory.ui.rest.service.builds.buildsinfo.tabs.licenses.BuildLicensesService.java

@Override
public void execute(ArtifactoryRestRequest request, RestResponse response) {
    try {//from w w w  .j a v a2  s  .c  om
        String name = request.getPathParamByKey("name");
        String buildNumber = request.getPathParamByKey("number");
        String buildStarted = DateUtils.formatBuildDate(Long.parseLong(request.getPathParamByKey("date")));
        Boolean authFind = Boolean.valueOf(request.getQueryParamByKey("autoFind"));
        Build build = getBuild(name, buildNumber, buildStarted, response);
        // fetch license
        Multimap<RepoPath, ModuleLicenseModel> repoPathLicenseModuleModel = getRepoPathLicenseModuleModelMultimap(
                build, authFind);
        if (repoPathLicenseModuleModel != null && !repoPathLicenseModuleModel.isEmpty()) {
            Collection<ModuleLicenseModel> values = repoPathLicenseModuleModel.values();
            // fetch published modules
            Set<ModuleLicenseModel> publishedModules = getPublishedModulesFromModelList(values,
                    build.getModules());
            // filter published modules from licenses
            publishedModules.forEach(published -> values.remove(published));
            // fetch build license summary
            Set<String> scopes = getScopeMapping(values);
            BuildLicenseModel buildLicenseModel = new BuildLicenseModel(values, publishedModules, scopes);
            response.iModel(buildLicenseModel);
            // get scopes
        }
    } catch (ParseException e) {
        log.error(e.toString());
        response.error("error with retrieving build licenses");
        return;
    }
}

From source file:io.kodokojo.endpoint.UserSparkEndpoint.java

private UserDto getUserDto(User user) {
    UserDto res = new UserDto(user);
    Set<String> projectConfigIds = projectStore.getProjectConfigIdsByUserIdentifier(user.getIdentifier());
    List<UserProjectConfigIdDto> userProjectConfigIdDtos = new ArrayList<>();
    projectConfigIds.forEach(id -> {
        String projectId = projectStore.getProjectIdByProjectConfigurationId(id);
        UserProjectConfigIdDto userProjectConfigIdDto = new UserProjectConfigIdDto(id);
        userProjectConfigIdDto.setProjectId(projectId);
        userProjectConfigIdDtos.add(userProjectConfigIdDto);
    });//from   w  w  w  .ja v a2s. co m
    res.setProjectConfigurationIds(userProjectConfigIdDtos);
    return res;
}