Example usage for java.util TreeSet removeAll

List of usage examples for java.util TreeSet removeAll

Introduction

On this page you can find example usage of java.util.TreeSet.removeAll.

Prototype

boolean removeAll(Collection<?> c);

Document

Removes from this set all of its elements that are contained in the specified collection (optional operation).
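
As a quick illustration of the documented behavior, here is a minimal, self-contained sketch; the class name and element values are arbitrary and chosen only for the example:

import java.util.Arrays;
import java.util.TreeSet;

public class TreeSetRemoveAllDemo {
    public static void main(String[] args) {
        // Start with a sorted set of names.
        TreeSet<String> names = new TreeSet<>(Arrays.asList("alice", "bob", "carol", "dave"));

        // Remove every element that is also present in the given collection;
        // elements in the argument that are not in the set ("erin") are ignored.
        names.removeAll(Arrays.asList("bob", "dave", "erin"));

        // Remaining elements keep their natural (sorted) order: [alice, carol]
        System.out.println(names);
    }
}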

Usage

From source file:eu.ggnet.dwoss.report.assist.ReportUtil.java

/**
 * Returns a set containing only non-reportable lines that are not of the RETURNS type.
 * It's not allowed to have a null value in the collection.
 * <p>
 * @param allLines
 * @param reportAble
 * @return
 */
public static NavigableSet<ReportLine> filterActiveInfo(Collection<ReportLine> allLines,
        Collection<ReportLine> reportAble) {
    TreeSet<ReportLine> treeSet = new TreeSet<>(allLines);
    treeSet.removeAll(reportAble);
    for (Iterator<ReportLine> it = treeSet.iterator(); it.hasNext();) {
        ReportLine reportLine = it.next();
        if (reportLine.getDocumentType() == DocumentType.RETURNS)
            it.remove();
    }
    return treeSet;

}

From source file:ImageIOTest.java

/**
 * Gets a set of "preferred" format names of all image writers. The preferred format name is the
 * first format name that a writer specifies.
 * @return the format name set
 */
public static Set<String> getWriterFormats() {
    TreeSet<String> writerFormats = new TreeSet<String>();
    TreeSet<String> formatNames = new TreeSet<String>(Arrays.asList(ImageIO.getWriterFormatNames()));
    while (formatNames.size() > 0) {
        String name = formatNames.iterator().next();
        Iterator<ImageWriter> iter = ImageIO.getImageWritersByFormatName(name);
        ImageWriter writer = iter.next();
        String[] names = writer.getOriginatingProvider().getFormatNames();
        String format = names[0];
        if (format.equals(format.toLowerCase()))
            format = format.toUpperCase();
        writerFormats.add(format);
        formatNames.removeAll(Arrays.asList(names));
    }
    return writerFormats;
}

From source file:org.apache.geode.BundledJarsJUnitTest.java

@Test
public void verifyBundledJarsHaveNotChanged() throws IOException {
    TreeMap<String, String> sortedJars = getBundledJars();
    Stream<String> lines = sortedJars.entrySet().stream().map(entry -> removeVersion(entry.getKey()));
    Set<String> bundledJarNames = new TreeSet<>(lines.collect(Collectors.toSet()));

    Files.write(Paths.get("bundled_jars.txt"), bundledJarNames);

    TreeSet<String> newJars = new TreeSet<>(bundledJarNames);
    newJars.removeAll(expectedJars);
    TreeSet<String> missingJars = new TreeSet<>(expectedJars);
    missingJars.removeAll(bundledJarNames);

    String message = "The bundled jars have changed. Please make sure you update the licence and notice"
            + "\nas described in https://cwiki.apache.org/confluence/display/GEODE/License+Guide+for+Contributors"
            + "\nWhen fixed, copy geode-assembly/build/test/bundled_jars.txt"
            + "\nto src/test/resources/expected_jars.txt" + "\nRemoved Jars\n--------------\n"
            + String.join("\n", missingJars) + "\n\nAdded Jars\n--------------\n" + String.join("\n", newJars)
            + "\n\n";

    assertTrue(message, expectedJars.equals(bundledJarNames));

}

From source file:org.opensextant.solrtexttagger.AbstractTaggerTest.java

/** Asserts the sorted arrays are equals, with a helpful error message when not.
 * @param message
 * @param expecteds
 * @param actuals
 */
public void assertSortedArrayEquals(String message, Object[] expecteds, Object[] actuals) {
    AssertionError error = null;
    try {
        assertArrayEquals(null, expecteds, actuals);
    } catch (AssertionError e) {
        error = e;
    }
    if (error == null)
        return;
    TreeSet<Object> expectedRemaining = new TreeSet<>(Arrays.asList(expecteds));
    expectedRemaining.removeAll(Arrays.asList(actuals));
    if (!expectedRemaining.isEmpty())
        fail(message + ": didn't find expected " + expectedRemaining.first() + " (of "
                + expectedRemaining.size() + "); " + error);
    TreeSet<Object> actualsRemaining = new TreeSet<>(Arrays.asList(actuals));
    actualsRemaining.removeAll(Arrays.asList(expecteds));
    fail(message + ": didn't expect " + actualsRemaining.first() + " (of " + actualsRemaining.size() + "); "
            + error);
}

From source file:org.opendatakit.security.server.SecurityServiceUtil.java

/**
 * Method to enforce an access configuration constraining only registered users, authenticated
 * users and anonymous access.
 * 
 * Add additional checks of the incoming parameters and patch things up if the incoming list of
 * users omits the super-user.
 * 
 * @param users
 * @param anonGrants
 * @param allGroups
 * @param cc
 * @throws DatastoreFailureException
 * @throws AccessDeniedException
 */
public static final void setStandardSiteAccessConfiguration(ArrayList<UserSecurityInfo> users,
        ArrayList<GrantedAuthorityName> allGroups, CallingContext cc)
        throws DatastoreFailureException, AccessDeniedException {

    // remove anonymousUser from the set of users and collect its
    // permissions (anonGrantStrings) which will be placed in
    // the granted authority hierarchy table.
    List<String> anonGrantStrings = new ArrayList<String>();
    {
        UserSecurityInfo anonUser = null;
        for (UserSecurityInfo i : users) {
            if (i.getType() == UserType.ANONYMOUS) {
                anonUser = i;
                // clean up grants for anonymousUser --
                // ignore anonAuth (the grant under which we will place things)
                // and forbid Site Admin
                for (GrantedAuthorityName a : i.getAssignedUserGroups()) {
                    if (anonAuth.getAuthority().equals(a.name()))
                        continue; // avoid circularity...
                    // only allow ROLE_ATTACHMENT_VIEWER and GROUP_ assignments.
                    if (!a.name().startsWith(GrantedAuthorityName.GROUP_PREFIX)) {
                        continue;
                    }
                    // do not allow Site Admin assignments for Anonymous --
                    // or Tables super-user or Tables Administrator.
                    // those all give access to the full set of users on the system
                    // and giving that information to Anonymous is a security
                    // risk.
                    if (GrantedAuthorityName.GROUP_SITE_ADMINS.equals(a)
                            || GrantedAuthorityName.GROUP_ADMINISTER_TABLES.equals(a)
                            || GrantedAuthorityName.GROUP_SUPER_USER_TABLES.equals(a)) {
                        continue;
                    }
                    anonGrantStrings.add(a.name());
                }
                break;
            }
        }
        if (anonUser != null) {
            users.remove(anonUser);
        }
    }

    // scan through the users and remove any entries under assigned user groups
    // that do not begin with GROUP_.
    //
    // Additionally, if the user is an e-mail, remove the GROUP_DATA_COLLECTORS
    // permission since ODK Collect does not support oauth2 authentication.
    {
        TreeSet<GrantedAuthorityName> toRemove = new TreeSet<GrantedAuthorityName>();
        for (UserSecurityInfo i : users) {
            // only working with registered users
            if (i.getType() != UserType.REGISTERED) {
                continue;
            }
            // get the list of assigned groups
            // -- this is not a copy -- we can directly manipulate this.
            TreeSet<GrantedAuthorityName> assignedGroups = i.getAssignedUserGroups();

            // scan the set of assigned groups and remove any that don't begin with GROUP_
            toRemove.clear();
            for (GrantedAuthorityName name : assignedGroups) {
                if (!name.name().startsWith(GrantedAuthorityName.GROUP_PREFIX)) {
                    toRemove.add(name);
                }
            }
            if (!toRemove.isEmpty()) {
                assignedGroups.removeAll(toRemove);
            }
            // for e-mail accounts, remove the Data Collector permission since ODK Collect
            // does not support an oauth2 authentication mechanism.
            if (i.getEmail() != null) {
                assignedGroups.remove(GrantedAuthorityName.GROUP_DATA_COLLECTORS);
            }
        }
    }

    // find the entry(entries) for the designated super-user(s)
    String superUserUsername = cc.getUserService().getSuperUserUsername();
    int expectedSize = ((superUserUsername != null) ? 1 : 0);
    ArrayList<UserSecurityInfo> superUsers = new ArrayList<UserSecurityInfo>();
    for (UserSecurityInfo i : users) {
        if (i.getType() == UserType.REGISTERED) {
            if (i.getUsername() != null && superUserUsername != null
                    && i.getUsername().equals(superUserUsername)) {
                superUsers.add(i);
            }
        }
    }

    if (superUsers.size() != expectedSize) {
        // we are missing one or both super-users.
        // remove any we have and recreate them from scratch.
        users.removeAll(superUsers);
        superUsers.clear();

        // Synthesize a UserSecurityInfo object for the super-user(s)
        // and add it(them) to the list.

        try {
            List<RegisteredUsersTable> tList = RegisteredUsersTable.assertSuperUsers(cc);

            for (RegisteredUsersTable t : tList) {
                UserSecurityInfo i = new UserSecurityInfo(t.getUsername(), t.getFullName(), t.getEmail(),
                        UserSecurityInfo.UserType.REGISTERED);
                superUsers.add(i);
                users.add(i);
            }

        } catch (ODKDatastoreException e) {
            e.printStackTrace();
            throw new DatastoreFailureException("Incomplete update");
        }
    }

    // reset super-user privileges to have (just) site admin privileges
    // even if caller attempts to change, add, or remove them.
    for (UserSecurityInfo i : superUsers) {
        TreeSet<GrantedAuthorityName> grants = new TreeSet<GrantedAuthorityName>();
        grants.add(GrantedAuthorityName.GROUP_SITE_ADMINS);
        grants.add(GrantedAuthorityName.ROLE_SITE_ACCESS_ADMIN);
        // override whatever the user gave us.
        i.setAssignedUserGroups(grants);
    }

    try {
        // enforce our fixed set of groups and their inclusion hierarchy.
        // this is generally a no-op during normal operations.
        GrantedAuthorityHierarchyTable.assertGrantedAuthorityHierarchy(siteAuth,
                SecurityServiceUtil.siteAdministratorGrants, cc);
        GrantedAuthorityHierarchyTable.assertGrantedAuthorityHierarchy(administerTablesAuth,
                SecurityServiceUtil.administerTablesGrants, cc);
        GrantedAuthorityHierarchyTable.assertGrantedAuthorityHierarchy(superUserTablesAuth,
                SecurityServiceUtil.superUserTablesGrants, cc);
        GrantedAuthorityHierarchyTable.assertGrantedAuthorityHierarchy(synchronizeTablesAuth,
                SecurityServiceUtil.synchronizeTablesGrants, cc);
        GrantedAuthorityHierarchyTable.assertGrantedAuthorityHierarchy(dataOwnerAuth,
                SecurityServiceUtil.dataOwnerGrants, cc);
        GrantedAuthorityHierarchyTable.assertGrantedAuthorityHierarchy(dataViewerAuth,
                SecurityServiceUtil.dataViewerGrants, cc);
        GrantedAuthorityHierarchyTable.assertGrantedAuthorityHierarchy(dataCollectorAuth,
                SecurityServiceUtil.dataCollectorGrants, cc);

        // place the anonymous user's permissions in the granted authority table.
        GrantedAuthorityHierarchyTable.assertGrantedAuthorityHierarchy(anonAuth, anonGrantStrings, cc);

        // get all granted authority names
        TreeSet<String> authorities = GrantedAuthorityHierarchyTable
                .getAllPermissionsAssignableGrantedAuthorities(cc.getDatastore(), cc.getCurrentUser());
        // remove the groups that have structure (i.e., those defined above).
        authorities.remove(siteAuth.getAuthority());
        authorities.remove(administerTablesAuth.getAuthority());
        authorities.remove(superUserTablesAuth.getAuthority());
        authorities.remove(synchronizeTablesAuth.getAuthority());
        authorities.remove(dataOwnerAuth.getAuthority());
        authorities.remove(dataViewerAuth.getAuthority());
        authorities.remove(dataCollectorAuth.getAuthority());
        authorities.remove(anonAuth.getAuthority());

        // delete all hierarchy structures under anything else.
        // i.e., if somehow USER_IS_REGISTERED had been granted GROUP_FORM_MANAGER
        // then this loop would leave USER_IS_REGISTERED without any grants.
        // (it repairs the database to conform to our privilege hierarchy expectations).
        List<String> empty = Collections.emptyList();
        for (String s : authorities) {
            GrantedAuthorityHierarchyTable.assertGrantedAuthorityHierarchy(new SimpleGrantedAuthority(s), empty,
                    cc);
        }

        // declare all the users (and remove users that are not in this set)
        Map<UserSecurityInfo, String> pkMap = setUsers(users, cc);

        // now, for each GROUP_..., update the user granted authority
        // table with the users that have that GROUP_... assignment.
        setUsersOfGrantedAuthority(pkMap, siteAuth, cc);
        setUsersOfGrantedAuthority(pkMap, administerTablesAuth, cc);
        setUsersOfGrantedAuthority(pkMap, superUserTablesAuth, cc);
        setUsersOfGrantedAuthority(pkMap, synchronizeTablesAuth, cc);
        setUsersOfGrantedAuthority(pkMap, dataOwnerAuth, cc);
        setUsersOfGrantedAuthority(pkMap, dataViewerAuth, cc);
        setUsersOfGrantedAuthority(pkMap, dataCollectorAuth, cc);
        // all super-users would already have their site admin role and
        // we leave that unchanged. The key is to ensure that the
        // super users are in the users list so they don't get
        // accidentally removed and that they have siteAuth group
        // membership. I.e., we don't need to manage ROLE_SITE_ACCESS_ADMIN
        // here. it is done elsewhere.

    } catch (ODKDatastoreException e) {
        e.printStackTrace();
        throw new DatastoreFailureException("Incomplete update");
    } finally {
        Datastore ds = cc.getDatastore();
        User user = cc.getCurrentUser();
        try {
            SecurityRevisionsTable.setLastRegisteredUsersRevisionDate(ds, user);
        } catch (ODKDatastoreException e) {
            // if it fails, use RELOAD_INTERVAL to force reload.
            e.printStackTrace();
        }
        try {
            SecurityRevisionsTable.setLastRoleHierarchyRevisionDate(ds, user);
        } catch (ODKDatastoreException e) {
            // if it fails, use RELOAD_INTERVAL to force reload.
            e.printStackTrace();
        }
    }
}

From source file:org.egov.works.master.service.ContractorService.java

public String[] getContractorMasterMandatoryFields() {
    final TreeSet<String> set = new TreeSet<>(Arrays.asList(getcontractorMasterSetMandatoryFields()));
    set.removeAll(Arrays.asList(getcontractorMasterSetHiddenFields()));
    return set.toArray(new String[set.size()]);
}

From source file:org.openqa.selenium.TakesScreenshotTest.java

private void compareColors(Set<String> expectedColors, Set<String> actualColors) {
    TreeSet<String> c = new TreeSet<String>(expectedColors);
    c.removeAll(actualColors);
    if (!c.isEmpty()) {
        fail("Unknown expected color is generated: " + c.toString() + ", \n" + " actual colors are: "
                + actualColors.toString());
    }

    if (actualColors.containsAll(expectedColors)) {
        // all is ok
    } else {
        actualColors.removeAll(expectedColors);
        fail("Unknown colors are presented at screenshot: " + actualColors.toString() + " \n"
                + " expected colors are excluded: " + expectedColors.toString());
    }
}

From source file:org.apache.jackrabbit.oak.plugins.blob.FileLineDifferenceIteratorTest.java

@Test
public void testRandomized() throws Exception {
    Random r = new Random(0);
    for (int i = 0; i < 10000; i++) {
        TreeSet<String> marked = new TreeSet<String>();
        TreeSet<String> all = new TreeSet<String>();
        TreeSet<String> diff = new TreeSet<String>();
        int size = r.nextInt(5);
        for (int a = 0; a < size; a++) {
            marked.add("" + r.nextInt(10));
        }
        size = r.nextInt(5);
        for (int a = 0; a < size; a++) {
            all.add("" + r.nextInt(10));
        }
        diff.addAll(all);
        diff.removeAll(marked);
        String m = marked.toString().replaceAll("[ \\[\\]]", "");
        String a = all.toString().replaceAll("[ \\[\\]]", "");
        assertDiff(m, a, new ArrayList<String>(diff));
    }
}

From source file:edu.mbl.jif.imaging.mmtiff.FileSet.java

/**
 * Completes the current time point of an aborted acquisition with blank images, so that it can
 * be opened correctly by ImageJ/BioFormats
 */
private void completeFrameWithBlankImages(int frame) throws JSONException, MMScriptException {

    int numFrames = MDUtils.getNumFrames(mpTiff_.summaryMetadata_);
    int numSlices = MDUtils.getNumSlices(mpTiff_.summaryMetadata_);
    int numChannels = MDUtils.getNumChannels(mpTiff_.summaryMetadata_);
    if (numFrames > frame + 1) {
        TreeSet<String> writtenImages = new TreeSet<String>();
        for (MultipageTiffWriter w : tiffWriters_) {
            writtenImages.addAll(w.getIndexMap().keySet());
            w.setAbortedNumFrames(frame + 1);
        }
        int positionIndex = MDUtils.getIndices(writtenImages.first())[3];
        if (mpTiff_.omeTiff_) {
            mpTiff_.omeMetadata_.setNumFrames(positionIndex, frame + 1);
        }
        TreeSet<String> lastFrameLabels = new TreeSet<String>();
        for (int c = 0; c < numChannels; c++) {
            for (int z = 0; z < numSlices; z++) {
                lastFrameLabels.add(MDUtils.generateLabel(c, z, frame, positionIndex));
            }
        }
        lastFrameLabels.removeAll(writtenImages);
        try {
            for (String label : lastFrameLabels) {
                tiffWriters_.getLast().writeBlankImage(label);
                if (mpTiff_.omeTiff_) {
                    JSONObject dummyTags = new JSONObject();
                    int channel = Integer.parseInt(label.split("_")[0]);
                    int slice = Integer.parseInt(label.split("_")[1]);
                    MDUtils.setChannelIndex(dummyTags, channel);
                    MDUtils.setFrameIndex(dummyTags, frame);
                    MDUtils.setSliceIndex(dummyTags, slice);
                    mpTiff_.omeMetadata_.addImageTagsToOME(dummyTags, ifdCount_, baseFilename_,
                            currentTiffFilename_);
                }
            }
        } catch (IOException ex) {
            ReportingUtils.logError("problem writing dummy image");
        }
    }
}

From source file:org.commoncrawl.mapred.ec2.parser.EC2ParserTask.java

public EC2ParserTask(Configuration conf) throws Exception {

    super(conf);

    if (!conf.getBoolean(CONF_PARAM_TEST_MODE, false)) {
        conf.set(VALID_SEGMENTS_PATH_PROPERTY, VALID_SEGMENTS_PATH);
        conf.set(SEGMENT_PATH_PROPERTY, SEGMENTS_PATH);
        conf.set(JOB_LOGS_PATH_PROPERTY, JOB_LOGS_PATH);
        conf.set(CHECKPOIINTS_PATH_PROPERTY, CHECKPOINTS_PATH);

        jobThreadSemaphore = new Semaphore(-(MAX_SIMULTANEOUS_JOBS - 1));

    } else {
        conf.set(VALID_SEGMENTS_PATH_PROPERTY, TEST_VALID_SEGMENTS_PATH);
        conf.set(SEGMENT_PATH_PROPERTY, TEST_SEGMENTS_PATH);
        conf.set(JOB_LOGS_PATH_PROPERTY, TEST_JOB_LOGS_PATH);

        jobThreadSemaphore = new Semaphore(0);
        maxSimultaneousJobs = 1;
    }

    FileSystem fs = FileSystem.get(new URI("s3n://aws-publicdatasets"), conf);
    LOG.info(
            "FileSystem is:" + fs.getUri() + " Scanning for candidates at path:" + CRAWL_LOG_INTERMEDIATE_PATH);
    TreeSet<Path> candidateSet = buildCandidateList(fs, new Path(CRAWL_LOG_INTERMEDIATE_PATH));
    LOG.info("Scanning for completed segments");
    List<Path> processedLogs = scanForCompletedSegments(fs, conf);
    LOG.info("Found " + processedLogs.size() + " processed logs");
    // remove processed from candidate set ... 
    candidateSet.removeAll(processedLogs);
    // ok we are ready to go .. 
    LOG.info("There are: " + candidateSet.size() + " logs in need of parsing");
    while (candidateSet.size() != 0) {
        ImmutableList.Builder<Path> pathBuilder = new ImmutableList.Builder<Path>();
        Iterator<Path> iterator = Iterators.limit(candidateSet.iterator(), LOGS_PER_ITERATION);
        while (iterator.hasNext()) {
            pathBuilder.add(iterator.next());
            iterator.remove();
        }
        LOG.info("Queueing Parse");
        queue(fs, conf, pathBuilder.build());
        LOG.info("Queued Parse");

        // in test mode, queue only a single segment's worth of data 
        if (conf.getBoolean(CONF_PARAM_TEST_MODE, false)) {
            LOG.info("Test Mode - Queueing only a single Item");
            break;
        }
    }

    // queue shutdown items 
    for (int i = 0; i < maxSimultaneousJobs; ++i) {
        _queue.put(new QueueItem());
    }
}