Example usage for java.util Set forEach

List of usage examples for java.util Set forEach

Introduction

On this page you can find example usage for java.util Set forEach.

Prototype

default void forEach(Consumer<? super T> action) 

Source Link

Document

Performs the given action for each element of the Iterable until all elements have been processed or the action throws an exception.
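A minimal, self-contained sketch of Set.forEach for orientation; the element values and the println action are illustrative assumptions, not taken from the examples below.

import java.util.HashSet;
import java.util.Set;

public class SetForEachDemo {
    public static void main(String[] args) {
        // A small sample set (illustrative values).
        Set<String> names = new HashSet<>();
        names.add("alpha");
        names.add("beta");
        names.add("gamma");

        // Performs the action once per element; HashSet iteration order is unspecified.
        names.forEach(name -> System.out.println("element: " + name));
    }
}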

Usage

From source file:se.uu.it.cs.recsys.service.preference.ConstraintSolverPreferenceBuilder.java

private static boolean inPlanYear(Course course, Set<CourseSchedule> scheduleInfo) {
    Set<Boolean> bingo = new HashSet<>();

    scheduleInfo.forEach(schedule -> {
        if (Short.compare(course.getTaughtYear(), schedule.getTaughtYear()) == 0
                && Short.compare(course.getStartPeriod(), schedule.getStartPeriod()) == 0) {
            bingo.add(Boolean.TRUE);
        }
    });

    return !bingo.isEmpty();

}

From source file:org.jboss.tools.windup.ui.internal.issues.IssueDetailsView.java

public static void addPrism(Document doc) {
    try {
        Bundle bundle = WindupUIPlugin.getDefault().getBundle();
        Elements codeElements = doc.getElementsByTag("code");
        codeElements.forEach(element -> {
            Set<String> classNames = element.classNames();
            Set<String> newNames = Sets.newHashSet();
            classNames.forEach(className -> {
                // Prism.js requires the language- prefix; I'm not sure of an easier workaround.
                newNames.add("language-" + className);
            });
            element.classNames(newNames);
        });

        DocumentType type = new DocumentType("html", "", "", "");
        doc.insertChildren(0, Lists.newArrayList(type));

        Element head = doc.head();
        Element css = doc.createElement("link");

        URL fileURL = FileLocator.find(bundle, new Path("html/prism.css"), null);
        String srcPath = FileLocator.resolve(fileURL).getPath();

        css.attr("href", srcPath);
        css.attr("rel", "stylesheet");
        head.appendChild(css);

        Element body = doc.body();
        Element script = doc.createElement("script");

        fileURL = FileLocator.find(bundle, new Path("html/prism.js"), null);
        srcPath = FileLocator.resolve(fileURL).getPath();

        script.attr("src", srcPath);
        body.appendChild(script);
    } catch (Exception e) {
        WindupUIPlugin.log(e);
    }
}

From source file:org.roda.wui.api.controllers.UserManagementHelper.java

private static String getUserExtra(UserExtraBundle extra) throws GenericException {
    Handlebars handlebars = new Handlebars();
    Map<String, String> data = new HashMap<>();
    handlebars.registerHelper("field", (o, options) -> {
        return options.fn();
    });

    try (InputStream templateStream = RodaCoreFactory.getConfigurationFileAsStream(
            RodaConstants.USERS_TEMPLATE_FOLDER + "/" + RodaConstants.USER_EXTRA_METADATA_FILE)) {
        String rawTemplate = IOUtils.toString(templateStream, RodaConstants.DEFAULT_ENCODING);
        Template tmpl = handlebars.compileInline(rawTemplate);

        if (extra != null) {
            Set<MetadataValue> values = extra.getValues();
            if (values != null) {
                values.forEach(metadataValue -> {
                    String val = metadataValue.get("value");
                    if (val != null) {
                        val = val.replaceAll("\\s", "");
                        if (!"".equals(val)) {
                            data.put(metadataValue.get("name"), metadataValue.get("value"));
                        }
                    }
                });
            }
        }

        // result = RodaUtils.indentXML(result);
        return tmpl.apply(data);
    } catch (IOException e) {
        LOGGER.error("Error getting template from stream");
    }

    return "";
}

From source file:org.apdplat.superword.extract.PartOfSpeechExtractor.java

public static void compensate(Set<Word> words) {
    Set<Word> minus = WordSources.minus(WordSources.getSyllabusVocabulary(), words);
    LOGGER.debug("?" + minus.size());
    minus.forEach(w -> {
        LOGGER.debug(w.getWord());
        Word word = parseWord(w.getWord());
        if (word != null && !word.getPartOfSpeeches().isEmpty()) {
            words.add(word);
        }
    });
}

From source file:index.IndexManager.java

public static Triple<SolrInputDocument, Collection<String>, Collection<String>> index(Document document) {
    final SolrInputDocument index = new SolrInputDocument();
    index.setField("id", document.location());
    index.setField("time", String.valueOf(System.currentTimeMillis()));
    index.setField("title", document.title());

    final Set<String> links = document.select("a[href]").stream().map(e -> e.attr("abs:href"))
            .collect(Collectors.toSet());
    final Set<String> media = document.select("[src]").stream().map(e -> e.attr("abs:src"))
            .collect(Collectors.toSet());

    links.forEach(link -> index.addField("link", link));
    media.forEach(link -> index.addField("media", link));

    formatText(document.getElementsByTag("h1").stream()).forEach(e -> index.addField("h1", e));

    formatText(document.getElementsByTag("h2").stream()).forEach(e -> index.addField("h2", e));

    formatText(document.getElementsByTag("h3").stream()).forEach(e -> index.addField("h3", e));

    formatText(document.getElementsByTag("strong").stream()).forEach(e -> index.addField("strong", e));

    formatText(document.getElementsByTag("em").stream()).forEach(e -> index.addField("em", e));

    formatText(document.getElementsByTag("b").stream()).forEach(e -> index.addField("b", e));

    formatText(document.getElementsByTag("u").stream()).forEach(e -> index.addField("u", e));

    formatText(document.getElementsByTag("i").stream()).forEach(e -> index.addField("i", e));

    int i = 0;
    Collection<String> text = chunkToLength(document.text());
    for (String chunk : text)
        index.addField(++i + "_text", chunk);

    return Triple.of(index, links, media);
}

From source file:org.apdplat.superword.tools.WordClassifier.java

public static void classify(Set<Word> words) {
    LOGGER.debug("??" + words.size());
    AtomicInteger i = new AtomicInteger();
    Map<String, List<String>> data = new HashMap<>();
    words.forEach(word -> {
        if (i.get() % 1000 == 999) {
            save(data);
        }
        showStatus(data, i.incrementAndGet(), words.size(), word.getWord());
        String html = getContent(word.getWord());
        //LOGGER.debug("?HTML" +html);
        while (html.contains("??ip?")) {
            // blocked: switch to a new IP and retry
            DynamicIp.toNewIp();
            html = getContent(word.getWord());
        }
        if (StringUtils.isNotBlank(html)) {
            parse(word.getWord(), html, data);
            if (!NOT_FOUND_WORDS.contains(word.getWord())) {
                ORIGIN_HTML.add(word.getWord() + "??" + html);
            }
        } else {
            NOT_FOUND_WORDS.add(word.getWord());
        }

    });
    // save any remaining data
    save(data);
    LOGGER.debug("??" + words.size());
}

From source file:pcgen.system.Main.java

private static void logSystemProps() {
    Properties props = System.getProperties();
    StringWriter writer = new StringWriter();
    PrintWriter pwriter = new PrintWriter(writer);
    pwriter.println();
    pwriter.println("-- listing properties --"); //$NON-NLS-1$
    // Manually output the property values to avoid them being cut off at 40 characters
    Set<String> keys = props.stringPropertyNames();
    keys.forEach(key -> {
        pwriter.println(key + '=' + props.getProperty(key));
    });
    Logging.log(Level.CONFIG, writer.toString());
}

From source file:org.jamocha.dn.compiler.ecblocks.MaximalColumns.java

private static <O extends ECOccurrenceNode, B extends BindingNode, C extends Column<O, B>> void disperse(
        final Set<Edge<O, B>> edges, final Function<Set<Edge<O, B>>, C> ctor,
        final HashMap<Edge<O, B>, C> target) {
    final C column = ctor.apply(edges);
    edges.forEach(edge -> target.put(edge, column));
}

From source file:org.alfresco.module.org_alfresco_module_rm.api.PublicAPITestUtil.java

/**
 * Check the consistency of the public API exposed from the given package. For each class in the package that is
 * annotated {@link AlfrescoPublicApi}, check that no exposed methods (or fields, constructors, etc.) use
 * non-public-API classes from Alfresco.
 *
 * @param basePackageName The package to check classes within.
 * @param knownBadReferences Any references that would cause this test to fail, but which we don't want to change.
 *            The keys should be public API classes within our code and the values should be the non-public-API
 *            class that is being referenced.
 */
public static void testPublicAPIConsistency(String basePackageName,
        SetMultimap<Class<?>, Class<?>> knownBadReferences) {
    Reflections reflections = new Reflections(basePackageName);
    Set<Class<?>> publicAPIClasses = reflections.getTypesAnnotatedWith(AlfrescoPublicApi.class, true);

    SetMultimap<Class<?>, Class<?>> referencedFrom = HashMultimap.create();
    Set<Class<?>> referencedClasses = new HashSet<>();
    for (Class<?> publicAPIClass : publicAPIClasses) {
        Set<Class<?>> referencedClassesFromClass = getReferencedClassesFromClass(publicAPIClass,
                new HashSet<>());
        referencedClassesFromClass.forEach(clazz -> referencedFrom.put(clazz, publicAPIClass));

        // Remove any references in knownBadReferences and error if an expected reference wasn't found.
        if (knownBadReferences.containsKey(publicAPIClass)) {
            for (Class<?> clazz : knownBadReferences.get(publicAPIClass)) {
                assertTrue(
                        "Supplied knownBadReferences expects " + clazz + " to be referenced by "
                                + publicAPIClass + ", but no such error was found",
                        referencedClassesFromClass.remove(clazz));
            }
        }

        referencedClasses.addAll(referencedClassesFromClass);
    }

    List<String> errorMessages = new ArrayList<>();
    for (Class<?> referencedClass : referencedClasses) {
        if (isInAlfresco(referencedClass) && !isPartOfPublicApi(referencedClass)) {
            Set<String> referencerNames = referencedFrom.get(referencedClass).stream().map(c -> c.getName())
                    .collect(Collectors.toSet());
            errorMessages.add(referencedClass.getName() + " <- " + StringUtils.join(referencerNames, ", "));
        }
    }

    if (!errorMessages.isEmpty()) {
        System.out.println("Errors found:");
        System.out.println(StringUtils.join(errorMessages, "\n"));
    }

    assertEquals("Found references to non-public API classes from public API classes.", Collections.emptyList(),
            errorMessages);
}

From source file:org.apache.hadoop.hive.kafka.TransactionalKafkaWriter.java

/**
 * Given a query working directory of the form table_directory/hive_query_id/, fetches the open transaction states.
 * Table directory is {@link org.apache.hadoop.hive.metastore.api.Table#getSd()#getLocation()}.
 * Hive Query ID is inferred from the JobConf see {@link KafkaStorageHandler#getQueryId()}.
 *
 * The path to a transaction state is as follows.
 * .../{@code queryWorkingDir}/{@code TRANSACTION_DIR}/{@code writerId}/{@code producerEpoch}
 *
 * The actual state is stored in the file {@code producerEpoch}.
 * The file contains a {@link Long} as internal producer Id and a {@link Short} as the producer epoch.
 * According to the Kafka API, the highest epoch corresponds to the active Producer; therefore, if there are
 * multiple {@code producerEpoch} files, the maximum is picked based on {@link Short#compareTo}.
 *
 * @param fs File system handler.
 * @param queryWorkingDir Query working Directory, see:
 *                        {@link KafkaStorageHandler#getQueryWorkingDir(org.apache.hadoop.hive.metastore.api.Table)}.
 * @return Map of Transaction Ids to Pair of Kafka Producer internal ID (Long) and producer epoch (short)
 * @throws IOException if any of the IO operations fail.
 */
static Map<String, Pair<Long, Short>> getTransactionsState(FileSystem fs, Path queryWorkingDir)
        throws IOException {
    // list all current transaction directories
    final Path transactionWorkingDir = new Path(queryWorkingDir, TRANSACTION_DIR);
    final FileStatus[] files = fs.listStatus(transactionWorkingDir);
    final Set<FileStatus> transactionSet = Arrays.stream(files).filter(FileStatus::isDirectory)
            .collect(Collectors.toSet());
    Set<Path> setOfTxPath = transactionSet.stream().map(FileStatus::getPath).collect(Collectors.toSet());
    ImmutableMap.Builder<String, Pair<Long, Short>> builder = ImmutableMap.builder();
    setOfTxPath.forEach(path -> {
        final String txId = path.getName();
        try {
            FileStatus[] epochFiles = fs.listStatus(path);
            // List all epoch files, if any, and select the max.
            // According to the Kafka API, recent versions of a Producer with the same TxID will have a greater epoch and the same PID.
            Optional<Short> maxEpoch = Arrays.stream(epochFiles).filter(FileStatus::isFile)
                    .map(fileStatus -> Short.valueOf(fileStatus.getPath().getName())).max(Short::compareTo);
            short epoch = maxEpoch.orElseThrow(() -> new RuntimeException(
                    "Missing sub directory epoch from directory [" + path.toString() + "]"));
            Path openTxFileName = new Path(path, String.valueOf(epoch));
            long internalId;
            try (FSDataInputStream inStream = fs.open(openTxFileName)) {
                internalId = inStream.readLong();
                short fileEpoch = inStream.readShort();
                if (epoch != fileEpoch) {
                    throw new RuntimeException(String.format("Was expecting [%s] but got [%s] from path [%s]",
                            epoch, fileEpoch, path.toString()));
                }
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
            builder.put(txId, Pair.of(internalId, epoch));
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    });
    return builder.build();
}