Example usage for java.util TreeMap keySet

List of usage examples for java.util TreeMap keySet

Introduction

On this page you can find an example usage for java.util TreeMap keySet.

Prototype

public Set<K> keySet() 

Source Link

Document

Returns a Set view of the keys contained in this map.

Usage

From source file:com.sfs.whichdoctor.dao.VoteDAOImpl.java

/**
 * Loads an array of groups based on the submitted voteNumber and year.
 *
 * @param voteNumber the vote number/*from www  .  j  av  a  2 s . c  o m*/
 * @param year the year
 *
 * @return the collection< group bean>
 *
 * @throws WhichDoctorDaoException the which doctor dao exception
 */
@SuppressWarnings("unchecked")
public final Collection<GroupBean> findElections(final int voteNumber, final int year)
        throws WhichDoctorDaoException {

    if (year == 0) {
        throw new WhichDoctorDaoException("Sorry a valid year is required");
    }

    Collection<GroupBean> elections = new ArrayList<GroupBean>();

    dataLogger.info("Loading elections for: " + voteNumber + "/" + year);

    /* Identify the referenceGUID from the vote number and year */
    int referenceGUID = PersonBean.getVoterGUID(voteNumber, year);

    TreeMap<Integer, Integer> electionList = new TreeMap<Integer, Integer>();

    try {
        Collection<Integer> guids = this.getJdbcTemplateReader().query(
                this.getSQL().getValue("vote/findPossibleElections"), new Object[] { year, referenceGUID },
                new RowMapper() {
                    public Object mapRow(final ResultSet rs, final int rowNum) throws SQLException {
                        return rs.getInt("GUID");
                    }
                });

        for (Integer guid : guids) {
            electionList.put(guid, guid);
        }
    } catch (IncorrectResultSizeDataAccessException ie) {
        dataLogger.debug("No results found for this search: " + ie.getMessage());
    }

    try {
        Collection<Integer> guids = this.getJdbcTemplateReader().query(
                this.getSQL().getValue("vote/findVotedElections"), new Object[] { year, referenceGUID },
                new RowMapper() {
                    public Object mapRow(final ResultSet rs, final int rowNum) throws SQLException {
                        return rs.getInt("GUID");
                    }
                });

        for (Integer guid : guids) {
            if (electionList.containsKey(guid)) {
                // This election has been voted for already, remove it from the list
                electionList.remove(guid);
            }
        }
    } catch (IncorrectResultSizeDataAccessException ie) {
        dataLogger.debug("No results found for this search: " + ie.getMessage());
    }

    if (electionList.size() > 0) {
        // An unvoted election exists in the map, perform a group search to load it
        Collection<Object> guidCollection = new ArrayList<Object>();
        for (Integer groupGUID : electionList.keySet()) {
            guidCollection.add(groupGUID);
        }
        try {
            SearchBean search = this.searchDAO.initiate("group", null);
            search.setLimit(0);
            search.setOrderColumn("groups.Weighting");
            search.setOrderColumn2("groups.Name");
            search.setSearchArray(guidCollection, "Unvoted list of elections");

            BuilderBean loadDetails = new BuilderBean();
            loadDetails.setParameter("CANDIDATES", true);

            SearchResultsBean results = this.searchDAO.search(search, loadDetails);

            if (results != null) {
                // Add each group to the election array
                for (Object result : results.getSearchResults()) {
                    GroupBean election = (GroupBean) result;
                    elections.add(election);
                }
            }
        } catch (Exception e) {
            dataLogger.error("Error performing election search: " + e.getMessage());
            throw new WhichDoctorDaoException("Error performing election search: " + e.getMessage());
        }
    }
    return elections;
}

From source file:semlink.apps.uvig.Generator.java

/**
 * Prints an HTML TD element containing a selected word list.  The word lists
 * are constructed during the HTML generation phase (see {@link uvi.Sweeper} class).
 *
 * @param title the title for the column
 * @param list the word list to display
 * @param whichColor 1 selects the primary 'RefTable' style, any other value
 *        the alternate 'RefTable2' style
 * @see uvi.Generator#generateReferencePage()
 */
private static void printReferenceColumn(String title, String list, int whichColor) {
    // Map the list identifier to the reference sub-page its entries link to;
    // unknown identifiers fall through with an empty (relative) page.
    String referencePage = "";
    if (list.equals("GenThemRole") || list.equals("NPThemRole"))
        referencePage = "../themroles/";
    else if (list.equals("SynRestr"))
        referencePage = "../synrestrs/";
    else if (list.equals("SelRestr"))
        referencePage = "../selrestrs/";
    else if (list.equals("Predicate"))
        referencePage = "../predicates/";
    else if (list.equals("VerbFeatures"))
        referencePage = "../verbfeatures/";

    String color = (whichColor == 1) ? "RefTable" : "RefTable2";
    Q.oh(4, "<TD width='25%' class='" + color + "'>");
    Q.oh(5, "<TABLE width='100%' cellspacing=0>");
    Q.oh(6, "<TR><TD class='RefGroup'>" + title + "</TD></TR>");
    Q.oh(6, "<TR><TD class='RefCounts' style='border-bottom: 1px black dashed;'><NOBR>"
            + refMap.getMemberCount(list) + " unique values / " + refMap.getMemberships(list)
            + " total uses</NOBR></TD></TR>");
    Q.oh(6, "<TR><TD>");
    Q.oh(7, "<BR>");
    Q.oh(7, "<UL>");

    TreeMap<String, Integer> members = refMap.getMembers(list);

    if (members == null) {
        Q.oh(8, "<LI><I>no occurrences</I></LI>");
    } else {

        // Iterate entries directly rather than keySet() + a get() per key.
        boolean first = true;
        for (java.util.Map.Entry<String, Integer> entry : members.entrySet()) {
            String member = entry.getKey();
            // Only the first item spells out the " uses" unit label.
            String extra = first ? " uses" : "";
            first = false;
            String countStr = " <B>(" + entry.getValue() + extra + ")</B>";
            Q.oh(8, "<LI><a href=\"" + referencePage + member.replace("-", "_") + ".php\" class=VerbLinks>"
                    + member + countStr + "</a></LI>");
        }

    }

    Q.oh(7, "</UL>");
    Q.oh(7, "<BR>");
    Q.oh(6, "</TD></TR>");
    Q.oh(5, "</TABLE>");
    Q.oh(4, "</TD>");
}

From source file:hydrograph.ui.dataviewer.filter.FilterConditionsDialog.java

/**
 * Redraws the table in order to add or delete the grouping columns.
 *
 * @param tableViewer the table viewer whose columns are rebuilt
 * @param conditionsList the filter conditions backing the rows
 * @param btnAddGrp the add-group button; disabled once the redraw completes
 * @param groupSelectionMap map of group index to grouped row ranges; one
 *        narrow marker column is created per entry
 * @param isRemote whether the filter targets the remote data source
 */
public void redrawAllColumns(TableViewer tableViewer, List<Condition> conditionsList, Button btnAddGrp,
        TreeMap<Integer, List<List<Integer>>> groupSelectionMap, boolean isRemote) {

    // Leading action columns: add-row, remove-row and group-check buttons.
    TableViewerColumn addColumn = createTableColumns(tableViewer, "", 28);
    addColumn.setLabelProvider(getAddButtonCellProvider(tableViewer, conditionsList, groupSelectionMap));

    TableViewerColumn removeColumn = createTableColumns(tableViewer, "", 28);
    removeColumn.setLabelProvider(
            getRemoveButtonCellProvider(tableViewer, conditionsList, btnAddGrp, groupSelectionMap, isRemote));

    TableViewerColumn groupCheckColumn = createTableColumns(tableViewer, "", 40);
    groupCheckColumn.setLabelProvider(getGroupCheckCellProvider(tableViewer, conditionsList, btnAddGrp));

    // One marker column per existing group (ascending group index).
    for (int groupIndex : groupSelectionMap.keySet()) {

        TableViewerColumn markerColumn = createTableColumns(tableViewer, "", 20);
        markerColumn
                .setLabelProvider(getDummyColumn(tableViewer, conditionsList, groupIndex, groupSelectionMap));
    }

    // Data-entry columns, one per part of a filter condition.
    TableViewerColumn relationalColumn = createTableColumns(tableViewer, Messages.RELATIONAL_OPERATOR,
            120);
    relationalColumn.setLabelProvider(getRelationalCellProvider(tableViewer, conditionsList, isRemote));

    TableViewerColumn fieldNameColumn = createTableColumns(tableViewer, Messages.FIELD_NAME, 150);
    fieldNameColumn.setLabelProvider(getFieldNameCellProvider(tableViewer, conditionsList, isRemote));

    TableViewerColumn conditionalColumn = createTableColumns(tableViewer, Messages.CONDITIONAL_OPERATOR,
            130);
    conditionalColumn
            .setLabelProvider(getConditionalCellProvider(tableViewer, conditionsList, isRemote));

    TableViewerColumn value1Column = createTableColumns(tableViewer, Messages.VALUE1, 150);
    value1Column.setLabelProvider(getValue1CellProvider(tableViewer, conditionsList, isRemote));

    TableViewerColumn value2Column = createTableColumns(tableViewer, Messages.VALUE2, 150);
    value2Column.setLabelProvider(getValue2CellProvider(tableViewer, conditionsList, isRemote));

    // A redraw invalidates any in-progress grouping selection.
    btnAddGrp.setEnabled(false);

    tableViewer.refresh();
}

From source file:com.espertech.esper.rowregex.EventRowRegexNFAView.java

// Ranks a partition's end states by the sequence number of the event where each
// match began, honoring the configured all-matches and skip settings.
// Returns the input list unchanged for 0 or 1 entries.
private List<RegexNFAStateEntry> rankEndStatesWithinPartitionByStart(List<RegexNFAStateEntry> endStates) {
    if (endStates.isEmpty()) {
        return endStates;
    }
    if (endStates.size() == 1) {
        return endStates;
    }

    // Group end states by match-begin sequence number. A value is either a
    // single RegexNFAStateEntry or a List of them — avoids allocating a list
    // for the common one-entry-per-begin-event case.
    TreeMap<Integer, Object> endStatesPerBeginEvent = new TreeMap<Integer, Object>();
    for (RegexNFAStateEntry entry : endStates) {
        Integer endNum = entry.getMatchBeginEventSeqNo();
        Object value = endStatesPerBeginEvent.get(endNum);
        if (value == null) {
            endStatesPerBeginEvent.put(endNum, entry);
        } else if (value instanceof List) {
            List<RegexNFAStateEntry> entries = (List<RegexNFAStateEntry>) value;
            entries.add(entry);
        } else {
            // Second entry for this begin event: promote the single entry to a list.
            List<RegexNFAStateEntry> entries = new ArrayList<RegexNFAStateEntry>();
            entries.add((RegexNFAStateEntry) value);
            entries.add(entry);
            endStatesPerBeginEvent.put(endNum, entries);
        }
    }

    // All matches share one begin event. Since endStates.size() > 1, the single
    // bucket must hold a List (the single-entry case was promoted above).
    if (endStatesPerBeginEvent.size() == 1) {
        List<RegexNFAStateEntry> endStatesUnranked = (List<RegexNFAStateEntry>) endStatesPerBeginEvent.values()
                .iterator().next();
        if (matchRecognizeSpec.isAllMatches()) {
            return endStatesUnranked;
        }
        RegexNFAStateEntry chosen = rankEndStates(endStatesUnranked);
        return Collections.singletonList(chosen);
    }

    List<RegexNFAStateEntry> endStatesRanked = new ArrayList<RegexNFAStateEntry>();
    // Snapshot the keys before iterating: the skip handling below removes
    // entries from the map while we walk the begin events in ascending order.
    Set<Integer> keyset = endStatesPerBeginEvent.keySet();
    Integer[] keys = keyset.toArray(new Integer[keyset.size()]);
    for (Integer key : keys) {
        Object value = endStatesPerBeginEvent.remove(key);
        if (value == null) {
            // Entry was already removed by an earlier skip-past step.
            continue;
        }

        RegexNFAStateEntry entryTaken;
        if (value instanceof List) {
            List<RegexNFAStateEntry> endStatesUnranked = (List<RegexNFAStateEntry>) value;
            if (endStatesUnranked.isEmpty()) {
                continue;
            }
            entryTaken = rankEndStates(endStatesUnranked);

            if (matchRecognizeSpec.isAllMatches()) {
                endStatesRanked.addAll(endStatesUnranked); // we take all matches and don't rank except to determine skip-past
            } else {
                endStatesRanked.add(entryTaken);
            }
        } else {
            entryTaken = (RegexNFAStateEntry) value;
            endStatesRanked.add(entryTaken);
        }
        // could be null as removals take place

        // Apply the skip policy: drop later begin events the chosen match covers.
        if (entryTaken != null) {
            if (matchRecognizeSpec.getSkip().getSkip() == MatchRecognizeSkipEnum.PAST_LAST_ROW) {
                int skipPastRow = entryTaken.getMatchEndEventSeqNo();
                removeSkippedEndStates(endStatesPerBeginEvent, skipPastRow);
            } else if (matchRecognizeSpec.getSkip().getSkip() == MatchRecognizeSkipEnum.TO_NEXT_ROW) {
                int skipPastRow = entryTaken.getMatchBeginEventSeqNo();
                removeSkippedEndStates(endStatesPerBeginEvent, skipPastRow);
            }
        }
    }

    return endStatesRanked;
}

From source file:org.esa.nest.gpf.SliceAssemblyOp.java

/**
 * Orders the source products by slice number and validates that together
 * they form a complete, consecutive set of slices.
 *
 * @return the slice products sorted by ascending slice number
 * @throws Exception if fewer than two products are supplied, a product is
 *         not a slice product, two products claim the same slice number,
 *         or the slice numbers are not consecutive
 */
private Product[] determineSliceProducts() throws Exception {
    if (sourceProducts.length < 2) {
        throw new Exception("Slice assembly requires at least two consecutive slice products");
    }

    final TreeMap<Integer, Product> productSet = new TreeMap<>();
    for (Product srcProduct : sourceProducts) {
        final MetadataElement origMetaRoot = AbstractMetadata.getOriginalProductMetadata(srcProduct);
        final MetadataElement generalProductInformation = getGeneralProductInformation(origMetaRoot);
        if (!isSliceProduct(generalProductInformation)) {
            throw new Exception(srcProduct.getName() + " is not a slice product");
        }

        // totalSlices is not used below; the read is kept for its implicit
        // attribute-presence check — TODO confirm getAttributeInt errors on
        // a missing attribute.
        generalProductInformation.getAttributeInt("totalSlices");
        final int sliceNumber = generalProductInformation.getAttributeInt("sliceNumber");

        // A duplicate slice number would silently replace a product in the
        // map and the consecutiveness check below could then pass wrongly.
        if (productSet.put(sliceNumber, srcProduct) != null) {
            throw new Exception("Two products claim the same slice number " + sliceNumber);
        }
    }

    // With unique keys, the slice numbers are consecutive exactly when the
    // key range equals the number of products.
    if (productSet.lastKey() - productSet.firstKey() + 1 != productSet.size()) {
        throw new Exception("Products are not consecutive slices");
    }

    // Note that "If productSet makes any guarantees as to what order its elements
    // are returned by its iterator, toArray() must return the elements in
    // the same order" — TreeMap.values() iterates in ascending key order.
    return productSet.values().toArray(new Product[productSet.size()]);
}

From source file:org.sakaiproject.tool.assessment.ui.bean.author.AssessmentSettingsBean.java

/**
 * Returns all groups for site/*from  w w  w  . java2 s.  c  om*/
 * @return
 */
public SelectItem[] getGroupsForSite() {
    SelectItem[] groupSelectItems = new SelectItem[0];
    TreeMap sortedSelectItems = new TreeMap();
    Site site = null;
    try {
        site = SiteService.getSite(ToolManager.getCurrentPlacement().getContext());
        Collection groups = site.getGroups();
        if (groups != null && groups.size() > 0) {
            groupSelectItems = new SelectItem[groups.size()];
            Iterator groupIter = groups.iterator();
            while (groupIter.hasNext()) {
                Group group = (Group) groupIter.next();
                String title = group.getTitle();
                String groupId = group.getId();
                String uniqueTitle = title + groupId;
                sortedSelectItems.put(uniqueTitle.toUpperCase(), new SelectItem(group.getId(), title));
            }
            Set keySet = sortedSelectItems.keySet();
            groupIter = keySet.iterator();
            int i = 0;
            while (groupIter.hasNext()) {
                groupSelectItems[i++] = (SelectItem) sortedSelectItems.get(groupIter.next());
            }
        }
    } catch (IdUnusedException ex) {
        // No site available
    }
    return groupSelectItems;
}

From source file:org.commoncrawl.service.listcrawler.CrawlList.java

// Self-test for CrawlList persistence: builds a small URL fixture, marks a
// subset of items complete through a stubbed CrawlHistoryStorage, and asserts
// that the list state is identical whether initialized from the URL list or
// reloaded from the on-disk data.
private static void validateListCode(final File dataDirectory, long listId) throws IOException {

    // Fixed fixture of six URLs across three hosts.
    final String urlList[] = new String[] { "http://www.yahoo.com/1", "http://www.google.com/1",
            "http://www.cnn.com/1", "http://www.yahoo.com/2", "http://www.google.com/2",
            "http://www.cnn.com/2" };

    File tempFile = File.createTempFile("CrawlList", "validateListInit");
    File localTempFile = new File(dataDirectory, tempFile.getName());

    generateTestURLFile(localTempFile, urlList);

    // Bidirectional URL <-> fingerprint lookup maps.
    final TreeMap<String, URLFP> urlToFPMap = new TreeMap<String, URLFP>();
    final TreeMap<URLFP, String> urlFPToString = new TreeMap<URLFP, String>();

    for (String url : urlList) {
        URLFP fp = URLUtils.getURLFPFromURL(url, true);
        urlToFPMap.put(url, fp);
        urlFPToString.put(fp, url);
    }

    // History items the stub storage will report as already crawled.
    final TreeMap<URLFP, ProxyCrawlHistoryItem> itemsToMarkComplete = new TreeMap<URLFP, ProxyCrawlHistoryItem>();

    // item1: crawl failed (robots excluded).
    ProxyCrawlHistoryItem item1 = new ProxyCrawlHistoryItem();

    item1.setCrawlStatus(CrawlURL.FailureReason.RobotsExcluded);
    item1.setOriginalURL(urlList[1]);

    // item2: 301 redirect that was followed successfully (200).
    ProxyCrawlHistoryItem item2 = new ProxyCrawlHistoryItem();

    item2.setCrawlStatus(0);
    item2.setOriginalURL(urlList[3]);
    item2.setHttpResultCode(301);
    item2.setRedirectURL("http://www.yahoo.com/3");
    item2.setRedirectStatus(0);
    item2.setRedirectHttpResult(200);

    // item3: 301 redirect whose follow-up fetch failed with an IOException.
    ProxyCrawlHistoryItem item3 = new ProxyCrawlHistoryItem();

    item3.setCrawlStatus(0);
    item3.setOriginalURL(urlList[4]);
    item3.setHttpResultCode(301);
    item3.setRedirectURL("http://www.google.com/3");
    item3.setRedirectStatus(CrawlURL.FailureReason.IOException);

    itemsToMarkComplete.put(urlToFPMap.get(item1.getOriginalURL()), item1);
    itemsToMarkComplete.put(urlToFPMap.get(item2.getOriginalURL()), item2);
    itemsToMarkComplete.put(urlToFPMap.get(item3.getOriginalURL()), item3);

    // Complement set: fingerprints expected to remain un-crawled.
    final Set<URLFP> itemsToMarkCompleteFPSet = itemsToMarkComplete.keySet();
    final Set<URLFP> itemsNotMarked = new TreeSet<URLFP>(urlToFPMap.values());
    itemsNotMarked.removeAll(itemsToMarkCompleteFPSet);

    // Stub storage that replays itemsToMarkComplete during syncList.
    CrawlHistoryStorage storage = new CrawlHistoryStorage() {

        @Override
        public void syncList(long listId, TreeSet<URLFP> matchCriteria, ItemUpdater targetList)
                throws IOException {

            for (URLFP matchItem : matchCriteria) {
                if (itemsToMarkCompleteFPSet.contains(matchItem)) {
                    targetList.updateItemState(matchItem, itemsToMarkComplete.get(matchItem));
                }
            }
        }

        @Override
        public File getLocalDataDir() {
            return dataDirectory;
        }
    };

    CrawlList list1 = new CrawlList(storage, listId, localTempFile, 0);

    // Pass 0 validates the list freshly built from the URL file; pass 1
    // validates the same state reloaded from the on-disk representation.
    for (int pass = 0; pass < 2; ++pass) {

        CrawlList list = null;

        if (pass == 0) {
            System.out.println("Pass 0 - Initialize from URLList");
            list = list1;
        } else {
            System.out.println("Pass 1 - Initialize from OnDisk Data");
            list = new CrawlList(storage, listId);
        }

        // iterate fingerprints 
        for (URLFP fingerprint : urlToFPMap.values()) {
            ProxyCrawlHistoryItem itemRetrieved = list.getHistoryItemFromURLFP(fingerprint);
            if (itemsToMarkCompleteFPSet.contains(fingerprint)) {
                // Marked-complete items must round-trip exactly.
                ProxyCrawlHistoryItem itemExpected = itemsToMarkComplete.get(fingerprint);
                Assert.assertTrue(itemExpected.equals(itemRetrieved));
            } else {
                // Unmarked items keep their URL and have no crawl fields set.
                Assert.assertTrue(itemRetrieved.getOriginalURL().equals(urlFPToString.get(fingerprint))
                        && !itemRetrieved.isFieldDirty(ProxyCrawlHistoryItem.Field_CRAWLSTATUS)
                        && !itemRetrieved.isFieldDirty(ProxyCrawlHistoryItem.Field_HTTPRESULTCODE)
                        && !itemRetrieved.isFieldDirty(ProxyCrawlHistoryItem.Field_REDIRECTHTTPRESULT)
                        && !itemRetrieved.isFieldDirty(ProxyCrawlHistoryItem.Field_REDIRECTSTATUS)
                        && !itemRetrieved.isFieldDirty(ProxyCrawlHistoryItem.Field_REDIRECTURL));
            }
        }
    }
    // validate string code does not update when strings have not changed 
    item3.setRedirectStatus(0);
    item3.setRedirectHttpResult(200);

    long variableDataLength = list1._variableDataFile.length();
    long fixedDataLength = list1._fixedDataFile.length();

    list1.updateItemState(urlToFPMap.get(item3.getOriginalURL()), item3);

    // File sizes must be unchanged: only non-string fields were modified.
    Assert.assertTrue(fixedDataLength == list1._fixedDataFile.length());
    Assert.assertTrue(variableDataLength == list1._variableDataFile.length());

    // Only the never-marked items should be offered for (re-)crawling.
    list1.queueUnCrawledItems(new CrawlQueueLoader() {

        @Override
        public void queueURL(URLFP urlfp, String url) {
            Assert.assertTrue(itemsNotMarked.contains(urlfp));
            Assert.assertTrue(urlFPToString.get(urlfp).equals(url));
        }

        @Override
        public void flush() {
            // No buffering in this test loader; nothing to flush.

        }

    });

}

From source file:org.apache.hadoop.mapred.CoronaJobInProgress.java

/**
 * Terminate the job and all its component tasks.
 * Calling this will lead to marking the job as failed/killed. Cleanup
 * tip will be launched. If the job has not inited, it will directly call
 * terminateJob as there is no need to launch cleanup tip.
 * This method is reentrant.
 * @param jobTerminationState job termination state (JobStatus.FAILED or KILLED)
 */
private void terminateUnprotected(int jobTerminationState) {
    this.terminated = jobTerminationState;
    if (!tasksInited) {
        //init could not be done, we just terminate directly.
        terminateJob(jobTerminationState);
        return;
    }

    if ((status.getRunState() == JobStatus.RUNNING) || (status.getRunState() == JobStatus.PREP)) {
        LOG.info("Killing job '" + this.status.getJobID() + "'");
        if (jobTerminationState == JobStatus.FAILED) {
            if (jobFailed) {//reentrant guard: failure already recorded
                return;
            }
            jobFailed = true;
        } else if (jobTerminationState == JobStatus.KILLED) {
            if (jobKilled) {//reentrant guard: kill already recorded
                return;
            }
            jobKilled = true;
        }
        // clear all unclean tasks
        clearUncleanTasksUnprotected();
        //
        // kill all TIPs.
        //
        for (int i = 0; i < setup.length; i++) {
            setup[i].kill();
        }
        for (int i = 0; i < maps.length; i++) {
            maps[i].kill();
            // Iterate over a copy of the active tasks — presumably because
            // failedTask mutates the TIP's live attempt set; TODO confirm.
            TreeMap<TaskAttemptID, String> activeTasks = maps[i].getActiveTasksCopy();
            for (TaskAttemptID attempt : activeTasks.keySet()) {
                // NOTE(review): local 'status' shadows the job-level status field.
                TaskStatus status = maps[i].getTaskStatus(attempt);
                if (status != null) {
                    failedTask(maps[i], attempt, JOB_KILLED_REASON, status.getPhase(), false,
                            status.getTaskTracker(), null);
                } else {
                    // No recorded status: fail with placeholder phase/tracker.
                    failedTask(maps[i], attempt, JOB_KILLED_REASON, Phase.MAP, false, EMPTY_TRACKER_NAME, null);
                }
            }
        }
        for (int i = 0; i < reduces.length; i++) {
            reduces[i].kill();
            // Same copy-then-fail pattern as the map tasks above.
            TreeMap<TaskAttemptID, String> activeTasks = reduces[i].getActiveTasksCopy();
            for (TaskAttemptID attempt : activeTasks.keySet()) {
                TaskStatus status = reduces[i].getTaskStatus(attempt);
                if (status != null) {
                    failedTask(reduces[i], attempt, JOB_KILLED_REASON, status.getPhase(), false,
                            status.getTaskTracker(), null);
                } else {
                    failedTask(reduces[i], attempt, JOB_KILLED_REASON, Phase.REDUCE, false, EMPTY_TRACKER_NAME,
                            null);
                }
            }
        }

        // Moved job to a terminal state if no job cleanup is needed. In case the
        // job is killed, we do not perform cleanup. This is because cleanup
        // cannot be guaranteed - the process running Corona JT could just be killed.
        if (!jobSetupCleanupNeeded || jobTerminationState == JobStatus.KILLED) {
            terminateJobUnprotected(jobTerminationState);
        }
    }
}

From source file:com.sfs.whichdoctor.dao.IsbEntityDAOImpl.java

/**
 * Check mandatory group membership./*from  w  ww  .j  ava2  s. c o m*/
 *
 * @param entityGUID the entity guid
 * @param delete the entity reference if present (otherwise create if not)
 * @param privileges the privileges
 */
private void checkMandatoryGroups(final int entityGUID, final boolean delete, final PrivilegesBean privileges) {

    if (this.mandatoryGroups != null) {

        UserBean systemUser = getSystemUser("ISB", "System");

        dataLogger.debug("Mandatory groups exist");

        for (Integer groupGUID : this.mandatoryGroups.keySet()) {

            TreeMap<String, ItemBean> items = new TreeMap<String, ItemBean>();
            String isbMapping = "";
            try {
                final GroupBean group = this.groupDAO.loadGUID(groupGUID);
                isbMapping = group.getGroupDN();
            } catch (WhichDoctorDaoException wde) {
                dataLogger.error("Error loading the parent group: " + wde.getMessage());
            }

            try {
                items = this.itemDAO.load(groupGUID, false, "Group", "Member", entityGUID, null, null);
            } catch (WhichDoctorDaoException wde) {
                dataLogger.error("Error performing search for items: " + wde.getMessage());
            }

            if (items != null && items.size() == 0 && !delete) {

                dataLogger.debug("Items do not exist and create selected");

                // No items exist and create is requested.
                final ItemBean item = new ItemBean();
                item.setObject1GUID(groupGUID);
                item.setObject2GUID(entityGUID);
                item.setWeighting(WEIGHTING);
                item.setPermission(PERMISSION);
                item.setItemType(ITEMTYPE);

                try {
                    this.itemDAO.create(item, systemUser, privileges, isbMapping);
                } catch (WhichDoctorDaoException wde) {
                    dataLogger.error("Error creating the new item: " + wde.getMessage());
                }
            }
            if (items != null && items.size() > 0 && delete) {

                // Items exist and delete is requested.
                dataLogger.debug("Items exist and delete selected");

                for (String key : items.keySet()) {
                    final ItemBean item = items.get(key);

                    try {
                        this.itemDAO.delete(item, systemUser, privileges, isbMapping);
                    } catch (WhichDoctorDaoException wde) {
                        dataLogger.error("Error deleting the existing item: " + wde.getMessage());
                    }
                }
            }
        }
    }
}

From source file:com.eucalyptus.tests.awssdk.S3ListMpuTests.java

@Test
public void delimiterAndPrefix() throws Exception {
    testInfo(this.getClass().getSimpleName() + " - delimiterAndPrefix");
    try {
        int numPrefixes = 3 + random.nextInt(3); // 3-5 prefixes
        int numKeys = 3 + random.nextInt(3); // 3-5 keys
        int numUploads = 3 + random.nextInt(3); // 3-5 uploads
        String delimiter = "/";
        // prefix -> (key -> upload ids); TreeMap so iteration order matches
        // the lexicographic order S3 uses for common-prefix listings.
        TreeMap<String, TreeMap<String, List<String>>> prefixKeyUploadIdMap = new TreeMap<String, TreeMap<String, List<String>>>();

        print("Number of prefixes: " + numPrefixes);
        print("Number of keys per prefix: " + numKeys);
        print("Number of uploads per key: " + numUploads);

        // Generate some keys and uploads for each prefix
        for (int i = 0; i < numPrefixes; i++) {
            String prefix = UUID.randomUUID().toString().replaceAll("-", "") + delimiter;

            // Generate some mpus for keys starting with prefix
            TreeMap<String, List<String>> keyUploadIdMap = initiateMpusForMultipleKeys(s3ClientA, accountA,
                    numKeys, numUploads - 1, prefix);

            // Generate some mpus for a key that is just the prefix
            keyUploadIdMap.put(prefix, initiateMpusForKey(s3ClientA, accountA, prefix, bucketName, numUploads));

            // Put the prefix and key-uploadId into the map
            prefixKeyUploadIdMap.put(prefix, keyUploadIdMap);
        }

        // Using the delimiter verify the mpu listing for common prefixes
        MultipartUploadListing listing = listMpu(s3ClientA, accountA, bucketName, null, null, null, delimiter,
                null, false);
        assertTrue("Expected no multipart uploads but got some",
                listing.getMultipartUploads() == null || listing.getMultipartUploads().isEmpty());
        assertTrue("Expected " + numPrefixes + " common prefixes but got " + listing.getCommonPrefixes().size(),
                listing.getCommonPrefixes().size() == numPrefixes);

        Iterator<String> commonPrefixIterator = listing.getCommonPrefixes().iterator();

        // Both the TreeMap keys and the listing are in lexicographic order,
        // so they should pair up one-to-one.
        for (String prefix : prefixKeyUploadIdMap.keySet()) {
            String commonPrefix = commonPrefixIterator.next();
            assertTrue("Expected common prefix to be " + prefix + ", but got " + commonPrefix,
                    StringUtils.equals(prefix, commonPrefix));
        }

        assertTrue("Expected common prefixes iterator to be empty", !commonPrefixIterator.hasNext());

        // Using both prefix and delimiter, verify that mpu listing contains only one common prefix
        for (String prefix : prefixKeyUploadIdMap.keySet()) {
            // Strip the trailing delimiter before listing. Plain replace() is
            // sufficient; the original's replaceAll treated "/" as a regex.
            listing = listMpu(s3ClientA, accountA, bucketName, null, null,
                    prefix.replace(delimiter, ""), delimiter, null, false);
            assertTrue("Expected 1 common prefix but got " + listing.getCommonPrefixes().size(),
                    listing.getCommonPrefixes().size() == 1);
            assertTrue(
                    "Expected common prefix to be " + prefix + ", but got "
                            + listing.getCommonPrefixes().get(0),
                    StringUtils.equals(listing.getCommonPrefixes().get(0), prefix));
            assertTrue("Expected no multipart uploads but got some",
                    listing.getMultipartUploads() == null || listing.getMultipartUploads().isEmpty());
        }
    } catch (AmazonServiceException ase) {
        printException(ase);
        assertThat(false, "Failed to run delimiterAndPrefix");
    }
}