Example usage for java.util Set removeAll

List of usage examples for java.util Set removeAll

Introduction

On this page you can find example usage for java.util Set removeAll.

Prototype

boolean removeAll(Collection<?> c);

Document

Removes from this set all of its elements that are contained in the specified collection (optional operation). Returns true if this set changed as a result of the call.
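
Before the project examples below, here is a minimal, self-contained sketch of the method's contract (class and variable names are illustrative, not taken from any of the projects):

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class SetRemoveAllDemo {
    public static void main(String[] args) {
        Set<String> colors = new HashSet<String>(Arrays.asList("red", "green", "blue", "yellow"));

        // removeAll drops every element that also appears in the argument
        // collection; elements the set does not contain ("black") are ignored.
        boolean changed = colors.removeAll(Arrays.asList("green", "yellow", "black"));
        System.out.println(changed); // true - the set was modified
        System.out.println(colors);  // [red, blue] (HashSet iteration order is unspecified)

        // Copy-then-remove idiom for a non-destructive set difference,
        // as used by several of the examples below.
        Set<String> difference = new HashSet<String>(colors);
        difference.removeAll(Arrays.asList("blue"));
        System.out.println(difference); // [red]
    }
}

Note that removeAll mutates the receiver and may throw UnsupportedOperationException on an immutable set (it is an optional operation), so copy the set first when the original must be preserved.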

Usage

From source file:com.uber.hoodie.TestCleaner.java

/**
 * Test Helper for Cleaning by versions logic from HoodieWriteClient API perspective
 *
 * @param insertFn Insert API to be tested
 * @param upsertFn Upsert API to be tested
 * @param isPreppedAPI Flag to indicate if a prepped-version is used. If true, a wrapper function will be used during
 * record generation to also tag the records (de-dupe is implicit as we use unique record-gen APIs)
 * @throws Exception in case of errors
 */
private void testInsertAndCleanByCommits(
        Function3<JavaRDD<WriteStatus>, HoodieWriteClient, JavaRDD<HoodieRecord>, String> insertFn,
        Function3<JavaRDD<WriteStatus>, HoodieWriteClient, JavaRDD<HoodieRecord>, String> upsertFn,
        boolean isPreppedAPI) throws Exception {
    int maxCommits = 3; // keep up to 3 commits from the past
    HoodieWriteConfig cfg = getConfigBuilder().withCompactionConfig(HoodieCompactionConfig.newBuilder()
            .withCleanerPolicy(HoodieCleaningPolicy.KEEP_LATEST_FILE_VERSIONS).retainCommits(maxCommits)
            .build()).withParallelism(1, 1).withBulkInsertParallelism(1).build();
    HoodieWriteClient client = new HoodieWriteClient(jsc, cfg);

    final Function2<List<HoodieRecord>, String, Integer> recordInsertGenWrappedFunction = generateWrapRecordsFn(
            isPreppedAPI, cfg, dataGen::generateInserts);

    final Function2<List<HoodieRecord>, String, Integer> recordUpsertGenWrappedFunction = generateWrapRecordsFn(
            isPreppedAPI, cfg, dataGen::generateUniqueUpdates);

    insertFirstBigBatchForClientCleanerTest(cfg, client, recordInsertGenWrappedFunction, insertFn);

    // Keep doing some writes and clean inline. Make sure we have expected number of files remaining.
    HoodieTestUtils.monotonicIncreasingCommitTimestamps(8, 1).forEach(newCommitTime -> {
        try {
            client.startCommitWithTime(newCommitTime);
            List<HoodieRecord> records = recordUpsertGenWrappedFunction.apply(newCommitTime, 100);

            List<WriteStatus> statuses = upsertFn.apply(client, jsc.parallelize(records, 1), newCommitTime)
                    .collect();
            // Verify there are no errors
            assertNoWriteErrors(statuses);

            HoodieTableMetaClient metadata = new HoodieTableMetaClient(jsc.hadoopConfiguration(), basePath);
            HoodieTable table1 = HoodieTable.getHoodieTable(metadata, cfg, jsc);
            HoodieTimeline activeTimeline = table1.getCompletedCommitTimeline();
            Optional<HoodieInstant> earliestRetainedCommit = activeTimeline.nthFromLastInstant(maxCommits - 1);
            Set<HoodieInstant> acceptableCommits = activeTimeline.getInstants().collect(Collectors.toSet());
            if (earliestRetainedCommit.isPresent()) {
                acceptableCommits.removeAll(
                        activeTimeline.findInstantsInRange("000", earliestRetainedCommit.get().getTimestamp())
                                .getInstants().collect(Collectors.toSet()));
                acceptableCommits.add(earliestRetainedCommit.get());
            }

            TableFileSystemView fsView = table1.getFileSystemView();
            // Ensure each file group contains only files from the acceptable (retained) commits
            for (String partitionPath : dataGen.getPartitionPaths()) {
                List<HoodieFileGroup> fileGroups = fsView.getAllFileGroups(partitionPath)
                        .collect(Collectors.toList());
                for (HoodieFileGroup fileGroup : fileGroups) {
                    Set<String> commitTimes = new HashSet<>();
                    fileGroup.getAllDataFiles().forEach(value -> {
                        logger.debug("Data File - " + value);
                        commitTimes.add(value.getCommitTime());
                    });
                    assertEquals(
                            "Only contain acceptable versions of file should be present", acceptableCommits
                                    .stream().map(HoodieInstant::getTimestamp).collect(Collectors.toSet()),
                            commitTimes);
                }
            }
        } catch (IOException ioe) {
            throw new RuntimeException(ioe);
        }
    });
}

From source file:com.github.fge.jsonschema.servlets.SyntaxValidateServlet.java

@Override
public void doPost(final HttpServletRequest req, final HttpServletResponse resp)
        throws ServletException, IOException {
    final Set<String> params = Sets.newHashSet();

    /*
     * First, check our parameters
     */
    /*
     * Why, in 2013, doesn't servlet-api provide an Iterator<String>?
     *
     * Well, at least, Jetty's implementation has a generified Enumeration.
     * Still, that sucks.
     */
    final Enumeration<String> enumeration = req.getParameterNames();

    // FIXME: no duplicates, it seems, but I cannot find the spec which
    // guarantees that
    while (enumeration.hasMoreElements())
        params.add(enumeration.nextElement());

    // Check that all required parameters are present
    if (!params.containsAll(Request.required())) {
        log.warn("Missing parameters! Someone using me as a web service?");
        resp.sendError(HttpServletResponse.SC_BAD_REQUEST, "Missing parameters");
        return;
    }

    // We don't want extraneous parameters
    params.removeAll(Request.valid());

    if (!params.isEmpty()) {
        log.warn("Invalid parameters! Someone using me as a web service?");
        resp.sendError(HttpServletResponse.SC_BAD_REQUEST, "Invalid parameters");
        return;
    }

    final String rawSchema = req.getParameter(Request.SCHEMA);

    // Set correct content type
    resp.setContentType(MediaType.JSON_UTF_8.toString());

    final JsonNode ret;
    try {
        ret = buildResult(rawSchema);
    } catch (ProcessingException e) {
        // Should not happen!
        log.error("Uh, syntax validation failed!", e);
        resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
        return;
    }

    final OutputStream out = resp.getOutputStream();

    try {
        out.write(ret.toString().getBytes(Charset.forName("UTF-8")));
        out.flush();
    } finally {
        Closeables.closeQuietly(out);
    }
}

From source file:org.carewebframework.ui.spring.FrameworkAppContext.java

/**
 * Constructor for creating an application context. Disallows bean overrides by default.
 *
 * @param desktop The desktop associated with this application context. Will be null for the
 *            root application context.
 * @param testConfig If true, use test profiles.
 * @param locations Optional list of configuration file locations. If not specified, defaults to
 *            the default configuration locations ({@link #getDefaultConfigLocations}).
 */
public FrameworkAppContext(Desktop desktop, boolean testConfig, String... locations) {
    super();
    setAllowBeanDefinitionOverriding(false);
    this.desktop = desktop;
    ConfigurableEnvironment env = getEnvironment();
    Set<String> aps = new LinkedHashSet<String>();
    Collections.addAll(aps, env.getActiveProfiles());

    if (desktop != null) {
        desktop.setAttribute(APP_CONTEXT_ATTRIB, this);
        final Session session = desktop.getSession();
        final ServletContext sc = session.getWebApp().getServletContext();
        final WebApplicationContext rootContext = WebApplicationContextUtils
                .getRequiredWebApplicationContext(sc);
        setDisplayName("Child XmlWebApplicationContext " + desktop);
        setParent(rootContext);
        setServletContext(sc);
        this.ctxListener = new ContextClosedListener();
        getParent().getBean(APPLICATION_EVENT_MULTICASTER_BEAN_NAME, ApplicationEventMulticaster.class)
                .addApplicationListener(this.ctxListener);
        // Set up profiles (remove root profiles merged from parent)
        aps.removeAll(Arrays.asList(Constants.PROFILES_ROOT));
        Collections.addAll(aps, testConfig ? Constants.PROFILES_DESKTOP_TEST : Constants.PROFILES_DESKTOP_PROD);
    } else {
        AppContextFinder.rootContext = this;
        Collections.addAll(aps, testConfig ? Constants.PROFILES_ROOT_TEST : Constants.PROFILES_ROOT_PROD);
        env.getPropertySources().addLast(new LabelPropertySource(this));
        env.getPropertySources().addLast(new DomainPropertySource(this));
    }

    env.setActiveProfiles(aps.toArray(new String[aps.size()]));
    setConfigLocations(locations == null || locations.length == 0 ? null : locations);
}

From source file:com.aurel.track.item.ItemBL.java

/**
 * Gets all descendants which are not included in the original query result
 * @param baseWorkItemIDsArr the workItem IDs whose descendants are gathered
 * @param direction the parent/child expression controlling which children are included
 * @param archived filter for archived items
 * @param deleted filter for deleted items
 * @param itemTypesList the item types to include
 * @return the IDs of all descendants not present in the original result
 */
public static Set<Integer> getChildHierarchy(int[] baseWorkItemIDsArr, Integer direction, Integer archived,
        Integer deleted, List<Integer> itemTypesList) {
    Set<Integer> allDescendentIDsSet = new HashSet<Integer>();
    Set<Integer> directChildrenIDsSet;
    int[] directChildrenArr = baseWorkItemIDsArr;
    Set<Integer> toRemoveSet = GeneralUtils.createSetFromIntArr(baseWorkItemIDsArr);
    do {
        //get the next level of children workItems
        List<TWorkItemBean> directChildrenWorkItems = getChildren(directChildrenArr,
                PARENT_CHILD_EXPRESSION.ALL_NOT_CLOSED_CHILDREN == direction, archived, deleted, itemTypesList);
        //get the ID set of children
        directChildrenIDsSet = GeneralUtils.createIntegerSetFromBeanList(directChildrenWorkItems);
        //remove the children which are already present in the gathered list (theoretically remove might occur only first time)
        directChildrenIDsSet.removeAll(toRemoveSet);

        //gather the not yet present children to the list
        allDescendentIDsSet.addAll(directChildrenIDsSet);
        toRemoveSet.addAll(directChildrenIDsSet);
        directChildrenArr = GeneralUtils.createIntArrFromSet(directChildrenIDsSet);
    } while (directChildrenIDsSet != null && !directChildrenIDsSet.isEmpty());
    return allDescendentIDsSet;
}

From source file:com.evolveum.midpoint.repo.sql.closure.AbstractOrgClosureTest.java

private void checkChildrenSets(Set<String> oidsToCheck) {
    SimpleDirectedGraph<String, DefaultEdge> tc = (SimpleDirectedGraph) orgGraph.clone();
    TransitiveClosure.INSTANCE.closeSimpleDirectedGraph(tc);
    for (String subroot : oidsToCheck) {
        LOGGER.info("Checking descendants of {}", subroot);
        Set<String> expectedChildren = new HashSet<>();
        for (DefaultEdge edge : tc.incomingEdgesOf(subroot)) {
            expectedChildren.add(tc.getEdgeSource(edge));
        }
        expectedChildren.add(subroot);
        LOGGER.trace("Expected children: {}", expectedChildren);
        Set<String> actualChildren = getActualChildrenOf(subroot);
        LOGGER.trace("Actual children: {}", actualChildren);

        Set<String> expectedMinusActual = new HashSet<>(expectedChildren);
        expectedMinusActual.removeAll(actualChildren);
        if (!expectedMinusActual.isEmpty()) {
            System.out.println("Expected-Actual = " + expectedMinusActual);
        }
        Set<String> actualMinusExpected = new HashSet<>(actualChildren);
        actualMinusExpected.removeAll(expectedChildren);
        if (!actualMinusExpected.isEmpty()) {
            System.out.println("Actual-Expected = " + actualMinusExpected);
        }
        assertEquals("Incorrect children for " + subroot, expectedChildren, actualChildren);
    }
}

From source file:com.aurel.track.fieldType.fieldChange.apply.MultipleTreeFieldChangeApply.java

/**
 * Sets the workItemBean's attribute
 * @param workItemContext
 * @param workItemBean
 * @param parameterCode
 * @param value
 * @return a list of ErrorData if an error is found
 */
@Override
public List<ErrorData> setWorkItemAttribute(WorkItemContext workItemContext, TWorkItemBean workItemBean,
        Integer parameterCode, Object value) {
    if (getSetter() == FieldChangeSetters.SET_NULL || getSetter() == FieldChangeSetters.SET_REQUIRED) {
        return super.setWorkItemAttribute(workItemContext, workItemBean, parameterCode, value);
    }
    Integer[] selectedValues = (Integer[]) value;
    if (getSetter() == FieldChangeSetters.SET_TO) {
        workItemBean.setAttribute(activityType, selectedValues);
        return null;
    }
    Object originalValue = workItemBean.getAttribute(activityType, parameterCode);
    Object[] originalSelections = null;
    if (originalValue != null) {
        try {
            //multiple values are loaded in the workItem as Object[], not as Integer[] !!! 
            originalSelections = (Object[]) originalValue;
        } catch (Exception e) {
            LOGGER.debug(
                    "Getting the original object array value for " + value + " failed with " + e.getMessage());
            LOGGER.debug(ExceptionUtils.getStackTrace(e));
        }
    }
    Set<Integer> originalSet = new HashSet<Integer>();
    if (originalSelections != null && originalSelections.length > 0) {
        for (int i = 0; i < originalSelections.length; i++) {
            try {
                originalSet.add((Integer) originalSelections[i]);
            } catch (Exception e) {
                LOGGER.info("Transforming the original object value " + originalSelections[i]
                        + " to Integer failed with " + e.getMessage());
                LOGGER.info(ExceptionUtils.getStackTrace(e));
            }
        }
    }
    Set<Integer> bulkSelectionsSet = GeneralUtils.createSetFromIntegerArr(selectedValues);
    switch (getSetter()) {
    case FieldChangeSetters.ADD_ITEMS:
        originalSet.addAll(bulkSelectionsSet);
        workItemBean.setAttribute(activityType, parameterCode,
                GeneralUtils.createIntegerArrFromCollection(originalSet));
        break;
    case FieldChangeSetters.REMOVE_ITEMS:
        originalSet.removeAll(bulkSelectionsSet);
        workItemBean.setAttribute(activityType, parameterCode,
                GeneralUtils.createIntegerArrFromCollection(originalSet));
        break;
    default:
        break;
    }
    return null;
}

From source file:grails.plugin.searchable.internal.compass.config.DefaultGrailsDomainClassMappingSearchableCompassConfigurator.java

/**
 * Configure Compass ready for it to be built
 *
 * @param compassConfiguration runtime configuration instance
 * @param configurationContext a context allowing flexible parameter passing
 */
public void configure(CompassConfiguration compassConfiguration, Map configurationContext) {
    Assert.notNull(grailsApplication, "grailsApplication cannot be null");
    Assert.notNull(classMappingConfigurators, "classMappingConfigurators cannot be null");

    CompositeSearchableGrailsDomainClassCompassClassMapper classMapper = null;

    // determine which classes are mapped by which strategy
    Map classesByStrategy = new HashMap();
    Collection grailsDomainClasses = SearchableUtils.getGrailsDomainClasses(grailsApplication);
    Set mappableClasses = new HashSet();
    Set notMapped = new HashSet(grailsDomainClasses);
    for (int i = 0; i < classMappingConfigurators.length; i++) {
        SearchableGrailsDomainClassMappingConfigurator configurator = classMappingConfigurators[i];

        // Total hack. This seems to be the easiest way to initialise this
        // particular property mapping configurator.
        if (configurator instanceof SearchableClassPropertySearchableGrailsDomainClassMappingConfigurator) {
            classMapper = ((SearchableClassPropertySearchableGrailsDomainClassMappingConfigurator) configurator)
                    .getMappingDescriptionProviderManager();
            classMapper.init(compassConfiguration, (Map) configurationContext.get("customConverters"),
                    defaultExcludes, defaultFormats);
        }

        Collection classes = configurator.getMappedBy(notMapped);
        if (classes != null) {
            notMapped.removeAll(classes);
            if (LOG.isDebugEnabled()) {
                for (Iterator iter = classes.iterator(); iter.hasNext();) {
                    GrailsDomainClass grailsDomainClass = (GrailsDomainClass) iter.next();
                    LOG.debug("Mapping class [" + grailsDomainClass.getClazz().getName() + "] with strategy ["
                            + configurator.getName() + "]");
                }
            }
            classesByStrategy.put(classMappingConfigurators[i], classes);
            mappableClasses.addAll(classes);
        }
    }

    // Deal with any domain classes configured through the application's runtime
    // config. This is treated differently to the other configuration options
    // because it can override existing mapping information. Also, it requires
    // access to the application config object.
    AppConfigClassMapper overrideClassMapper = new AppConfigClassMapper(grailsApplication.getConfig());
    overrideClassMapper.init(compassConfiguration, (Map) configurationContext.get("customConverters"),
            defaultExcludes, defaultFormats);

    AppConfigMappingConfigurator appConfigConfigurator = new AppConfigMappingConfigurator(
            grailsApplication.getConfig());
    appConfigConfigurator.setMappingDescriptionProviderManager(overrideClassMapper);
    appConfigConfigurator.setCompassClassMappingXmlBuilder(classMappingXmlBuilder);

    Collection appConfigMapped = appConfigConfigurator.getMappedBy(grailsDomainClasses);
    mappableClasses.addAll(appConfigMapped);

    // Check whether search has been explicitly removed from any domain classes.
    Collection unmapped = appConfigConfigurator.getUnmapped(grailsDomainClasses);
    mappableClasses.removeAll(unmapped);
    notMapped.addAll(unmapped);

    if (LOG.isDebugEnabled() && !notMapped.isEmpty()) {
        for (Iterator iter = notMapped.iterator(); iter.hasNext();) {
            GrailsDomainClass grailsDomainClass = (GrailsDomainClass) iter.next();
            LOG.debug("No mapping strategy found for class [" + grailsDomainClass.getClazz().getName()
                    + "]: assuming this class is not searchable");
        }
    }

    // map classes in the order defined by the classMappingConfigurators
    for (int i = 0; i < classMappingConfigurators.length; i++) {
        SearchableGrailsDomainClassMappingConfigurator classMappingConfigurator = classMappingConfigurators[i];
        Collection classes = (Collection) classesByStrategy.get(classMappingConfigurator);
        if (classes != null && !classes.isEmpty()) {
            classMappingConfigurator.configureMappings(compassConfiguration, configurationContext, classes,
                    mappableClasses);
        }
    }

    // Finally, execute the config-based configurator so that it can add and
    // override mappings.
    if (appConfigMapped != null && !appConfigMapped.isEmpty()) {
        appConfigConfigurator.configureMappings(compassConfiguration, configurationContext, appConfigMapped,
                mappableClasses);
    }
}

From source file:edu.snu.leader.spatial.Agent.java

/**
 * Logs all the observed group membership events
 */
private void logObservedGroupMembershipEvents() {
    // Find all the agents we aren't observing any more
    if (!AgentCommunicationType.GLOBAL.equals(_communicationType)) {
        // Get the previously observed agents (copy the key set, since calling
        // removeAll on the keySet() view would remove those entries from
        // _observedAgentMemberships and break the iteration below)
        Set<Agent> previouslyObserved = new HashSet<Agent>(_observedAgentMemberships.keySet());

        // Remove all the ones we currently observe
        previouslyObserved.removeAll(_currentNearestNeighbors);

        // Remove all the remaining ones from the maps
        for (Agent agent : previouslyObserved) {
            // Get their last observed group
            Group lastObservedGroup = _observedAgentMemberships.get(agent);

            // Remove it from the observed group history
            List<Agent> groupHistory = _observedGroupHistories.get(lastObservedGroup);
            if (null != groupHistory) {
                groupHistory.remove(agent);
            } else {
                // Something is foobar
                _LOG.error("Group history for id=[" + lastObservedGroup.getID()
                        + "] is missing, but we previously observed an agent in that group");
            }

            // Remove it from the agent membership
            _observedAgentMemberships.remove(agent);
        }
    }

    // Iterate through all our neighbors to process their group membership
    for (Agent agent : _currentNearestNeighbors) {
        // Have we observed them before?
        if (AgentCommunicationType.GLOBAL.equals(_communicationType)
                || _observedAgentMemberships.containsKey(agent)) {
            // Yup.  Check to see if their group membership changed
            Group lastObservedGroup = _observedAgentMemberships.get(agent);
            Group currentObservedGroup = agent.getGroup();
            if ((null == lastObservedGroup) || !lastObservedGroup.equals(currentObservedGroup)) {
                // Yup.
                // Remove them from the old group history
                if (null != lastObservedGroup) {
                    List<Agent> oldGroupHistory = _observedGroupHistories.get(lastObservedGroup);
                    if (null != oldGroupHistory) {
                        oldGroupHistory.remove(agent);
                    }
                }

                // Add them to the new one
                List<Agent> newGroupHistory = _observedGroupHistories.get(currentObservedGroup);
                if (null == newGroupHistory) {
                    // Create it
                    newGroupHistory = new LinkedList<Agent>();
                    _observedGroupHistories.put(currentObservedGroup, newGroupHistory);
                }
                newGroupHistory.add(agent);

                // Change their group
                _observedAgentMemberships.put(agent, currentObservedGroup);

                // (verbose debug logging of the group change elided)
            }
        } else {
            // Nope, they are new.  Add them to the maps.
            Group observedGroup = agent.getGroup();
            _observedAgentMemberships.put(agent, observedGroup);
            List<Agent> groupHistory = _observedGroupHistories.get(observedGroup);
            if (null == groupHistory) {
                groupHistory = new LinkedList<Agent>();
                _observedGroupHistories.put(observedGroup, groupHistory);
            }
            groupHistory.add(agent);

            // (debug logging of the newly observed agent elided)
        }
    }

}

From source file:com.ggvaidya.scinames.model.Dataset.java

/**
 * Returns a Stream of all distinct names recognized at the end of this checklist.
 *
 * For a checklist, this is every name in every row, plus names added by explicit
 * changes (which overrule the dataset), minus names removed by explicit changes.
 * 
 * For a dataset, it's (prevDataset's recognized names) + 
 * (names added by explicit and implicit changes) - (names removed by explicit
 * and implicit changes).
 * 
 * @param proj Required for filtering changes
 * @return A Stream of recognized names as at the end of this checklist.
 */
public Stream<Name> getRecognizedNames(Project proj) {
    // Start with names we explicitly add.
    Set<Name> addedNames = getChanges(proj).flatMap(ch -> ch.getToStream()).collect(Collectors.toSet());
    Set<Name> initialNames = new HashSet<>(addedNames);

    // If this is not a checklist, then pass through previously recognized names.
    if (prevDataset != null)
        initialNames.addAll(proj.getRecognizedNames(prevDataset));

    // Delete names we explicitly delete.
    Set<Name> deletedNames = getChanges(proj).flatMap(ch -> ch.getFromStream()).collect(Collectors.toSet());

    Set<Name> finalList = initialNames.stream().filter(n -> {
        // Filter out names that have been deleted, EXCEPT those that
        // have been explicitly added (such as in a lump or split).
        if (deletedNames.contains(n)) {
            if (addedNames.contains(n))
                return true; // don't filter
            else
                return false; // do filter
        } else
            return true; // don't filter
    }).collect(Collectors.toSet());

    // This should be the same as the names in a checklist!
    // Double-check!
    if (isChecklist() && !finalList.equals(getNamesInAllRows())) {
        // TODO: OKAY, so this is caused by the following scenario:
        //   - We explicitly rename "Osteocephalus vilmae" to "Hylomantis buckleyi" within a dataset
        //    - We do that because AmphibiaWeb *says* they are duplicates.
        //   - However, this dataset has rows for *both* vilmae and buckleyi.
        //   - So how?
        //      - We fix the discrepancy by recognizing all the names in the rows -- whether
        //        or not they're reflected in the changes.

        Set<Name> finalListButNotInRows = new HashSet<>(finalList);
        finalListButNotInRows.removeAll(getNamesInAllRows());

        Set<Name> rowNamesButNotFinalList = new HashSet<>(getNamesInAllRows());
        rowNamesButNotFinalList.removeAll(finalList);

        LOGGER.warning("Discrepency in calculating recognized names for " + this + ":\n"
                + "\t - Final list but not in rows: " + finalListButNotInRows + "\n"
                + "\t - Rows but not in final list: " + rowNamesButNotFinalList + "\n" + "\t - Name count: "
                + initialNames.size() + " + " + addedNames.size() + " - " + deletedNames.size() + " = "
                + (initialNames.size() + addedNames.size() - deletedNames.size()) + " (but should be "
                + finalList.size() + ")\n"
                + "Species in the rows but not in final count will be added to the list of recognized names.");

        finalList.addAll(rowNamesButNotFinalList);
    }

    return finalList.stream();
}

From source file:lyonlancer5.karasu.block.BlockRedstoneWire.java

/**
 * Recalculates all surrounding wires and causes all needed updates
 *
 * @author panda
 * 
 * @param worldIn   World
 * @param pos      Position that needs updating
 */
private void updateSurroundingRedstone(World worldIn, BlockPos pos) {
    // Recalculate the connected wires
    calculateCurrentChanges(worldIn, pos);

    // Set to collect all the updates, to only execute them once. Ordering required.
    Set<BlockPos> blocksNeedingUpdate = Sets.newLinkedHashSet();

    // Add the needed updates
    for (BlockPos posi : updatedRedstoneWire) {
        addBlocksNeedingUpdate(worldIn, posi, blocksNeedingUpdate);
    }
    // Add all other updates to keep known behaviors
    // They are added in a backwards order because it preserves a commonly used behavior with the update order
    Iterator<BlockPos> it = Lists.<BlockPos>newLinkedList(updatedRedstoneWire).descendingIterator();
    while (it.hasNext()) {
        addAllSurroundingBlocks(it.next(), blocksNeedingUpdate);
    }
    // Remove updates on the wires as they just were updated
    blocksNeedingUpdate.removeAll(updatedRedstoneWire);
    /*
     * Avoid unnecessary updates on the just-updated wires.
     * A huge scale test showed about 40% more ticks per second;
     * it's probably less in normal usage but likely still worth it.
     */
    updatedRedstoneWire.clear();

    // Execute updates
    for (BlockPos posi : blocksNeedingUpdate) {
        //worldIn.notifyBlockOfStateChange(posi, this);
        Helper.notifyBlockChange(worldIn, posi, this);
    }
}