Example usage for java.util Set equals

List of usage examples for java.util Set equals

Introduction

This page collects example usages of the java.util.Set.equals(Object) method, taken from several open-source projects.

Prototype

boolean equals(Object o);

Document

Compares the specified object with this set for equality.
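
As a quick illustration of this contract, here is a minimal, self-contained sketch (written for this page, not taken from any of the projects below). It shows that Set.equals is element-based: two sets are equal when they contain the same elements, regardless of the Set implementation or iteration order.

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import java.util.TreeSet;

public class SetEqualsDemo {
    public static void main(String[] args) {
        // Two different Set implementations holding the same elements.
        Set<String> hashSet = new HashSet<>(Arrays.asList("a", "b", "c"));
        Set<String> treeSet = new TreeSet<>(Arrays.asList("c", "b", "a"));

        // equals() compares elements only, so this prints true even though
        // the implementations and iteration orders differ.
        System.out.println(hashSet.equals(treeSet)); // true

        // Removing an element breaks the equality.
        treeSet.remove("c");
        System.out.println(hashSet.equals(treeSet)); // false
    }
}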

Usage

From source file: org.apache.geode.internal.cache.partitioned.PersistentPartitionedRegionTestBase.java

protected void waitForBucketRecovery(final VM vm2, final Set<Integer> lostBuckets, final String regionName) {
    vm2.invoke(new SerializableRunnable() {
        public void run() {
            Cache cache = getCache();
            PartitionedRegion region = (PartitionedRegion) cache.getRegion(regionName);
            final PartitionedRegionDataStore dataStore = region.getDataStore();
            Wait.waitForCriterion(new WaitCriterion() {

                public boolean done() {
                    Set<Integer> vm2Buckets = dataStore.getAllLocalBucketIds();
                    return lostBuckets.equals(vm2Buckets);
                }

                public String description() {
                    return "expected to recover " + lostBuckets + " buckets, now have "
                            + dataStore.getAllLocalBucketIds();
                }
            }, MAX_WAIT, 100, true);
        }
    });
}

From source file: com.google.gwt.emultest.java.util.TreeSetTest.java

/**
 * Test method for 'java.util.Set.equals(Object)'.
 *
 * @see java.util.Set#equals(Object)
 */
public void testEquals() {
    Set<E> set0 = createSet();
    Set<E> set1 = createSet();
    assertTrue(set0.equals(set1));
    set0.add(getKeys()[0]);
    set1.add(getKeys()[0]);
    assertTrue(set0.equals(set0));
    assertTrue(set0.equals(set1));
    set0.add(getKeys()[1]);
    assertFalse(set0.equals(set1));
}

From source file: org.bibsonomy.webapp.controller.actions.BatchEditController.java

@Override
public View workOn(final BatchEditCommand command) {
    final RequestWrapperContext context = command.getContext();

    /*
     * We store the referer in the command, to send the user back to the 
     * page he's coming from at the end of the posting process. 
     */
    if (!present(command.getReferer())) {
        command.setReferer(requestLogic.getReferer());
    }

    /*
     * check if user is logged in
     */
    if (!context.isUserLoggedIn()) {
        throw new AccessDeniedNoticeException("please log in", "error.general.login");
    }

    /*
     * check if ckey is valid
     */
    if (!context.isValidCkey()) {
        errors.reject("error.field.valid.ckey");
        return Views.ERROR;
    }

    /*
     * get user name
     */
    final String loginUserName = context.getLoginUser().getName();

    log.debug("batch edit for user " + loginUserName + " started");

    /* *******************************************************
     * FIRST: determine some flags which control the operation
     * *******************************************************/
    /*
     * the type of resource we're dealing with 
     */
    final Set<Class<? extends Resource>> resourceTypes = command.getResourcetype();
    boolean postsArePublications = false;
    Class<? extends Resource> resourceClass = null;
    if (resourceTypes.size() == 1) {
        postsArePublications = resourceTypes.contains(BibTex.class);
        resourceClass = resourceTypes.iterator().next();
    } else {
        // TODO: exception 
        throw new IllegalArgumentException("please provide a resource type");
    }

    /*
     * FIXME: rename/check setting of that flag in the command
     */
    final boolean flagMeansDelete = command.getDeleteCheckedPosts();
    /*
     * When the user can flag posts to be deleted, this means those
     * posts already exist. Thus, all other posts must be updated.
     * 
     * The other setting is, where the posts don't exist in the database
     * (only in the session) and where they must be stored.
     */
    final boolean updatePosts = flagMeansDelete;

    log.debug("resourceType: " + resourceTypes + ", delete: " + flagMeansDelete + ", update: " + updatePosts);

    /* *******************************************************
     * SECOND: get the data we're working on
     * *******************************************************/
    /*
     * posts that are flagged are either deleted or ignored 
     */
    final Map<String, Boolean> postFlags = command.getDelete();
    /*
     * put the posts from the session into a hash map (for faster access)
     */
    final Map<String, Post<? extends Resource>> postMap = getPostMap(updatePosts);
    /*
     * the tags that should be added to all posts
     */
    final Set<Tag> addTags = getAddTags(command.getTags());
    /*
     * for each post we have its old tags and its new tags
     */
    final Map<String, String> newTagsMap = command.getNewTags();
    final Map<String, String> oldTagsMap = command.getOldTags();

    log.debug("#postFlags: " + postFlags.size() + ", #postMap: " + postMap.size() + ", #addTags: "
            + addTags.size() + ", #newTags: " + newTagsMap.size() + ", #oldTags: " + oldTagsMap.size());

    /* *******************************************************
     * THIRD: initialize temporary variables (lists)
     * *******************************************************/
    /*
     * create lists for the different types of actions 
     */
    final List<String> postsToDelete = new LinkedList<String>(); // delete
    final List<Post<?>> postsToUpdate = new LinkedList<Post<?>>(); // update/store
    /*
     * All posts will get the same date.
     */
    final Date now = new Date();

    /* *******************************************************
     * FOURTH: prepare the posts
     * *******************************************************/
    /*
     * loop through all hashes and check for each post, what to do
     */
    for (final String intraHash : newTagsMap.keySet()) {
        log.debug("working on post " + intraHash);
        /*
         * short check if hash is correct
         */
        if (intraHash.length() != HASH_LENGTH) {
            continue;
        }
        /*
         * has this post been flagged by the user? 
         */
        if (postFlags.containsKey(intraHash) && postFlags.get(intraHash)) {
            log.debug("post has been flagged");
            /*
             * The post has been flagged by the user.
             * Depending on the meaning of this flag, we add the 
             * post to the list of posts to be deleted or just
             * ignore it.
             */
            if (flagMeansDelete) {
                /*
                 * flagged posts should be deleted, i.e., add them
                 * to the list of posts to be deleted and work on 
                 * the next post.
                 */
                postsToDelete.add(intraHash);
            }
            /*
             * flagMeansDelete = true:  delete the post
             * flagMeansDelete = false: ignore the post (neither save nor update it)
             */
            continue;
        }
        /*
         * We must store/update the post, thus we parse and check its tags
         */
        try {
            final Set<Tag> oldTags = TagUtils.parse(oldTagsMap.get(intraHash));
            final Set<Tag> newTags = TagUtils.parse(newTagsMap.get(intraHash));
            /*
             * we add all global tags to the set of new tags
             */
            newTags.addAll(getTagsCopy(addTags));
            /*
             * if we want to update the posts, we only need to update posts
             * where the tags have changed
             */
            if (updatePosts && oldTags.equals(newTags)) {
                /*
                 * tags haven't changed, nothing to do
                 */
                continue;
            }
            /*
             * For the create/update methods we need a post -> 
             * create/get one.
             */
            final Post<?> post;
            if (updatePosts) {
                /*
                 * we only need a "mock" post containing the hash, the date
                 * and the tags, since only the post's tags are updated 
                 */
                final Post<Resource> postR = new Post<Resource>();
                postR.setResource(RESOURCE_FACTORY.createResource(resourceClass));
                postR.getResource().setIntraHash(intraHash);
                post = postR;
            } else {
                /*
                 * we get the complete post from the session, and store
                 * it in the database
                 */
                post = postMap.get(intraHash);
            }
            /*
             * Finally, add the post to the list of posts that should 
             * be stored or updated.
             */
            if (!present(post)) {
                log.warn("post with hash " + intraHash + " not found for user " + loginUserName
                        + " while updating tags");
            } else {
                /*
                 * set the date and the tags for this post 
                 * (everything else should already be set or not be changed)
                 */
                post.setDate(now);
                post.setTags(newTags);
                postsToUpdate.add(post);
            }

        } catch (final RecognitionException ex) {
            log.debug("can't parse tags of resource " + intraHash + " for user " + loginUserName, ex);
        }
    }

    /* *******************************************************
     * FIFTH: update the database
     * *******************************************************/
    /*
     * delete posts
     */
    if (present(postsToDelete)) {
        log.debug("deleting " + postsToDelete.size() + " posts for user " + loginUserName);
        try {
            this.logic.deletePosts(loginUserName, postsToDelete);
        } catch (final IllegalStateException e) {
            // ignore - posts were already deleted
        }
    }

    /*
     * after update/store, this list contains all posts with errors, so that they can be shown to the user for correction
     */
    final List<Post<? extends Resource>> postsWithErrors = new LinkedList<Post<? extends Resource>>();
    /*
     * We need to add the list command already here, otherwise we get an 
     * org.springframework.beans.InvalidPropertyException
     */
    addPostListToCommand(command, postsArePublications, postsWithErrors);

    /*
     * update/store posts
     */
    if (updatePosts) {
        log.debug("updating " + postsToUpdate.size() + " posts for user " + loginUserName);
        updatePosts(postsToUpdate, resourceClass, postMap, postsWithErrors, PostUpdateOperation.UPDATE_TAGS,
                loginUserName);
    } else {
        log.debug("storing " + postsToUpdate.size() + " posts for user " + loginUserName);
        storePosts(postsToUpdate, resourceClass, postMap, postsWithErrors, command.isOverwrite(),
                loginUserName);
    }

    log.debug("finished batch edit for user " + loginUserName);

    /* *******************************************************
     * SIXTH: return to view
     * *******************************************************/
    /*
     * handle AJAX requests
     */
    if ("ajax".equals(command.getFormat())) {
        return Views.AJAX_EDITTAGS;
    }

    /*
     * return to batch edit view on errors
     */
    if (errors.hasErrors()) {
        if (postsArePublications) {
            return Views.BATCHEDITBIB;
        }
        return Views.BATCHEDITURL;
    }

    /*
     * return to the page the user was initially coming from
     */
    return this.getFinalRedirect(command.getReferer(), loginUserName);
}

From source file: com.clican.pluto.common.support.spring.BeanPropertyRowMapper.java

/**
 * Extract the values for all columns in the current row.
 * <p>
 * Utilizes public setters and result set metadata.
 * 
 * @see java.sql.ResultSetMetaData
 */
public Object mapRow(ResultSet rs, int rowNumber) throws SQLException {
    Assert.state(this.mappedClass != null, "Mapped class was not specified");
    Object mappedObject = BeanUtils.instantiateClass(this.mappedClass);
    BeanWrapper bw = PropertyAccessorFactory.forBeanPropertyAccess(mappedObject);
    initBeanWrapper(bw);

    ResultSetMetaData rsmd = rs.getMetaData();
    int columnCount = rsmd.getColumnCount();
    Set<String> populatedProperties = (isCheckFullyPopulated() ? new HashSet<String>() : null);

    for (int index = 1; index <= columnCount; index++) {
        String column = JdbcUtils.lookupColumnName(rsmd, index).toLowerCase();
        PropertyDescriptor pd = (PropertyDescriptor) this.mappedFields.get(column);
        if (pd != null) {
            try {
                Object value = getColumnValue(rs, index, pd);
                if (logger.isDebugEnabled() && rowNumber == 0) {
                    logger.debug("Mapping column '" + column + "' to property '" + pd.getName() + "' of type "
                            + pd.getPropertyType());
                }
                bw.setPropertyValue(pd.getName(), value);
                if (populatedProperties != null) {
                    populatedProperties.add(pd.getName());
                }
            } catch (NotWritablePropertyException ex) {
                throw new DataRetrievalFailureException(
                        "Unable to map column " + column + " to property " + pd.getName(), ex);
            }
        }
    }

    if (populatedProperties != null && !populatedProperties.equals(this.mappedProperties)) {
        throw new InvalidDataAccessApiUsageException("Given ResultSet does not contain all fields "
                + "necessary to populate object of class [" + this.mappedClass + "]: " + this.mappedProperties);
    }

    return mappedObject;
}

From source file: ponzu.impl.test.Verify.java

public static void assertSetsEqual(String setName, Set<?> expectedSet, Set<?> actualSet) {
    try {
        if (null == expectedSet) {
            Assert.assertNull(setName + " should be null", actualSet);
            return;
        }

        assertObjectNotNull(setName, actualSet);
        assertSize(setName, expectedSet.size(), actualSet);

        if (!actualSet.equals(expectedSet)) {
            MutableSet<?> inExpectedOnlySet = UnifiedSet.newSet(expectedSet);
            inExpectedOnlySet.removeAll(actualSet);

            int numberDifferences = inExpectedOnlySet.size();
            String message = setName + ": " + numberDifferences + " elements different.";

            if (numberDifferences > MAX_DIFFERENCES) {
                Assert.fail(message);
            }

            MutableSet<?> inActualOnlySet = UnifiedSet.newSet(actualSet);
            inActualOnlySet.removeAll(expectedSet);

            junit.framework.Assert.failNotEquals(message, inExpectedOnlySet, inActualOnlySet);
        }
    } catch (AssertionError e) {
        throwMangledException(e);
    }
}

From source file: cz.cas.lib.proarc.webapp.server.rest.DigitalObjectResource.java

private void checkSearchedMembers(Set<String> pids, Map<String, Item> memberSearchMap) throws RestException {
    if (!pids.equals(memberSearchMap.keySet())) {
        HashSet<String> notMembers = new HashSet<String>(pids);
        notMembers.removeAll(memberSearchMap.keySet());
        HashSet<String> missingPids = new HashSet<String>(memberSearchMap.keySet());
        missingPids.removeAll(pids);
        throw RestException.plainNotFound(DigitalObjectResourceApi.MEMBERS_ITEM_PID,
                "Not member PIDs: " + notMembers.toString() + "\nMissing PIDs: " + missingPids.toString());
    }
}

From source file: fr.inria.soctrace.framesoc.ui.piechart.view.StatisticsPieChartView.java

private boolean areListsEqual(List<Object> l1, List<Object> l2) {
    if (l1 == null) {
        return l2 == null;
    }
    if (l2 == null) {
        return l1 == null;
    }
    Set<Object> s1 = new HashSet<>(l1);
    Set<Object> s2 = new HashSet<>(l2);
    return s1.equals(s2);
}

From source file: org.apache.hadoop.hdfs.server.namenode.TestStandbySafeModeImpl.java

@Test
public void testRandomReports() throws Exception {
    setUp("testRandomReports");
    int totalNodes = 10;
    List<DatanodeID> datanodes = new ArrayList<DatanodeID>();
    for (int i = 0; i < totalNodes; i++) {
        DatanodeID node = generateRandomDatanodeID();
        datanodes.add(node);
    }

    assertFalse(safeMode.canLeave());
    safeMode.setSafeModeStateForTesting(SafeModeState.FAILOVER_IN_PROGRESS);

    Set<DatanodeID> expectedR = new HashSet<DatanodeID>();
    Set<DatanodeID> expectedH = new HashSet<DatanodeID>();
    for (DatanodeID node : datanodes) {
        // Add live node.
        if (random.nextBoolean()) {
            safeMode.addLiveNodeForTesting(node);
            expectedH.add(node);
        }

        // Report heartbeat.
        if (random.nextBoolean()) {
            int times = 1; // random.nextInt(3);
            for (int i = 0; i < times; i++) {
                safeMode.reportHeartBeat(node);
                expectedR.add(node);
                expectedH.remove(node);
            }
        }
        // Report primaryClear.
        if (random.nextBoolean()) {
            int times = 1;// random.nextInt(3);
            for (int i = 0; i < times; i++) {
                safeMode.reportPrimaryCleared(node);
                expectedR.remove(node);
            }
        }
    }

    LOG.info("expected : " + expectedR.size() + " actual : " + safeMode.getOutStandingReports().size());
    LOG.info("expected : " + expectedH.size() + " actual : " + safeMode.getOutStandingHeartbeats().size());
    assertTrue(expectedR.equals(safeMode.getOutStandingReports()));
    assertTrue(expectedH.equals(safeMode.getOutStandingHeartbeats()));
    if (expectedR.size() == 0 && expectedH.size() == 0) {
        assertTrue(safeMode.canLeave());
    } else {
        assertFalse(safeMode.canLeave());
    }
}

From source file: co.cask.cdap.internal.app.services.http.handlers.ProgramLifecycleHttpHandlerTest.java

@Test
public void testServiceSpecification() throws Exception {
    deploy(AppWithServices.class);
    HttpResponse response = doGet("/v3/namespaces/default/apps/AppWithServices/services/NoOpService");
    Assert.assertEquals(200, response.getStatusLine().getStatusCode());

    Set<ServiceHttpEndpoint> expectedEndpoints = ImmutableSet.of(new ServiceHttpEndpoint("GET", "/ping"),
            new ServiceHttpEndpoint("POST", "/multi"), new ServiceHttpEndpoint("GET", "/multi"),
            new ServiceHttpEndpoint("GET", "/multi/ping"));

    GsonBuilder gsonBuilder = new GsonBuilder();
    gsonBuilder.registerTypeAdapter(ServiceSpecification.class, new ServiceSpecificationCodec());
    Gson gson = gsonBuilder.create();
    ServiceSpecification specification = readResponse(response, ServiceSpecification.class, gson);

    Set<ServiceHttpEndpoint> returnedEndpoints = new HashSet<>();
    for (HttpServiceHandlerSpecification httpServiceHandlerSpecification : specification.getHandlers()
            .values()) {
        returnedEndpoints.addAll(httpServiceHandlerSpecification.getEndpoints());
    }

    Assert.assertEquals("NoOpService", specification.getName());
    Assert.assertTrue(returnedEndpoints.equals(expectedEndpoints));
}

From source file: org.apache.bookkeeper.bookie.CookieTest.java

/**
 * Test that if a directory is added to an existing bookie and the
 * allowStorageExpansion option is true, the bookie should come online.
 */
@Test(timeout = 60000)
public void testStorageExpansionOption() throws Exception {
    String ledgerDir0 = newDirectory();
    String indexDir0 = newDirectory();
    String journalDir = newDirectory();
    ServerConfiguration conf = TestBKConfiguration.newServerConfiguration()
            .setZkServers(zkUtil.getZooKeeperConnectString()).setJournalDirName(journalDir)
            .setLedgerDirNames(new String[] { ledgerDir0 }).setIndexDirName(new String[] { indexDir0 })
            .setBookiePort(bookiePort).setAllowStorageExpansion(true);

    Bookie b = new Bookie(conf); // should work fine
    b.start();
    b.shutdown();
    b = null;

    // add a few additional ledger dirs
    String[] lPaths = new String[] { ledgerDir0, newDirectory(), newDirectory() };
    Set<String> configuredLedgerDirs = Sets.newHashSet(lPaths);
    conf.setLedgerDirNames(lPaths);

    // add an extra index dir
    String[] iPaths = new String[] { indexDir0, newDirectory() };
    Set<String> configuredIndexDirs = Sets.newHashSet(iPaths);
    conf.setIndexDirName(iPaths);

    try {
        b = new Bookie(conf);
    } catch (BookieException.InvalidCookieException ice) {
        fail("Should have been able to start the bookie");
    }

    List<File> l = b.getLedgerDirsManager().getAllLedgerDirs();
    HashSet<String> bookieLedgerDirs = Sets.newHashSet();
    for (File f : l) {
        // Using the parent path because the bookie creates a 'current'
        // dir under the ledger dir user provides
        bookieLedgerDirs.add(f.getParent());
    }
    assertTrue("Configured ledger dirs: " + configuredLedgerDirs + " doesn't match bookie's ledger dirs: "
            + bookieLedgerDirs, configuredLedgerDirs.equals(bookieLedgerDirs));

    l = b.getIndexDirsManager().getAllLedgerDirs();
    HashSet<String> bookieIndexDirs = Sets.newHashSet();
    for (File f : l) {
        bookieIndexDirs.add(f.getParent());
    }
    assertTrue("Configured Index dirs: " + configuredIndexDirs + " doesn't match bookie's index dirs: "
            + bookieIndexDirs, configuredIndexDirs.equals(bookieIndexDirs));

    b.shutdown();

    // Make sure that substituting an older ledger directory
    // is not allowed.
    String[] lPaths2 = new String[] { lPaths[0], lPaths[1], newDirectory() };
    conf.setLedgerDirNames(lPaths2);
    try {
        b = new Bookie(conf);
        fail("Should not have been able to start the bookie");
    } catch (BookieException.InvalidCookieException ice) {
        // correct behavior
    }

    // Finally make sure that not including the older ledger directories
    // is not allowed. Remove one of the older ledger dirs
    lPaths2 = new String[] { lPaths[0], lPaths[1] };
    conf.setLedgerDirNames(lPaths2);
    try {
        b = new Bookie(conf);
        fail("Should not have been able to start the bookie");
    } catch (BookieException.InvalidCookieException ice) {
        // correct behavior
    }
}