Example usage for java.util Set retainAll

List of usage examples for java.util Set retainAll

Introduction

On this page you can find example usages of java.util.Set.retainAll.

Prototype

boolean retainAll(Collection<?> c);

Source Link

Document

Retains only the elements in this set that are contained in the specified collection (optional operation).

Usage

From source file:com.glaf.base.modules.branch.springmvc.BranchUserController.java

/**
 * /*  w w w . j  a  v  a  2s .  c o m*/
 * 
 * @param request
 * @param modelMap
 * @return
 */
@RequestMapping(params = "method=saveRole")
public ModelAndView saveRole(HttpServletRequest request, ModelMap modelMap) {
    logger.debug(RequestUtils.getParameterMap(request));
    ViewMessages messages = new ViewMessages();
    long userId = ParamUtil.getIntParameter(request, "user_id", 0);
    SysUser user = sysUserService.findById(userId);// 
    if (user != null && user.getDeptId() > 0) {// 
        String actorId = RequestUtils.getActorId(request);
        List<Long> nodeIds = complexUserService.getUserManageBranchNodeIds(actorId);

        SysDepartment department = sysDepartmentService.findById(user.getDeptId());
        /**
         * ???
         */
        if (department != null && department.getNodeId() > 0) {
            SysTree tree = sysTreeService.findById(department.getNodeId());
            if (tree != null && nodeIds.contains(tree.getId())) {
                long[] id = ParamUtil.getLongParameterValues(request, "id");// ???
                if (id != null) {
                    Set<SysDeptRole> delRoles = new HashSet<SysDeptRole>();
                    Set<SysDeptRole> oldRoles = user.getRoles();
                    Set<SysDeptRole> newRoles = new HashSet<SysDeptRole>();
                    for (int i = 0; i < id.length; i++) {
                        logger.debug("id[" + i + "]=" + id[i]);
                        SysDeptRole role = sysDeptRoleService.findById(id[i]);// 
                        if (role != null) {
                            newRoles.add(role);// 
                        }
                    }

                    oldRoles.retainAll(newRoles);// ??
                    delRoles.removeAll(newRoles);// ??
                    newRoles.removeAll(oldRoles);// ??
                    user.setUpdateBy(RequestUtils.getActorId(request));

                    if (sysUserService.updateRole(user, delRoles, newRoles)) {// ??
                        messages.add(ViewMessages.GLOBAL_MESSAGE, new ViewMessage("user.role_success"));
                    } else {// ?
                        messages.add(ViewMessages.GLOBAL_MESSAGE, new ViewMessage("user.role_failure"));
                    }
                }
            }
        }
    }
    MessageUtils.addMessages(request, messages);
    return new ModelAndView("show_msg", modelMap);
}

From source file:org.chromium.chrome.browser.payments.PaymentRequestImpl.java

/** @return The first modifier that matches the given instrument, or null. */
@Nullable
private PaymentDetailsModifier getModifier(@Nullable PaymentInstrument instrument) {
    if (mModifiers == null || instrument == null)
        return null;
    // BUG FIX: operate on a defensive copy. retainAll previously mutated the
    // set returned by the instrument itself, permanently stripping method
    // names from the instrument's state on every call.
    Set<String> methodNames = new HashSet<>(instrument.getInstrumentMethodNames());
    methodNames.retainAll(mModifiers.keySet());
    // Any single surviving method name identifies a matching modifier.
    return methodNames.isEmpty() ? null : mModifiers.get(methodNames.iterator().next());
}

From source file:edu.jhuapl.openessence.datasource.jdbc.entry.JdbcOeDataEntrySource.java

/**
 * Executes an INSERT SQL statement using Spring's JdbcTemplate.
 *
 * @param tableName        table to insert values into
 * @param ignoreSpecialSql the flag to ignore specialSql definitions in the groovy def file. In general, set false
 *                         during add* and set true during update*
 * @param dimIds           DimensionIds that we will insert data for //todo ?? why is this needed?
 * @param editDims         editable DimensionIds that we will insert data for
 * @param values           values that correspond to the editable DimensionIds. These values get written into the
 *                         database
 * @return Map of the primary keys and values for the inserted record -- only for the Parent Record - children return
 *         null
 * @throws OeDataSourceAccessException if error occurs at database level
 * @throws OeDataSourceException       if error occurs during processing
 */
private Map editableInsertQuery(String tableName, boolean ignoreSpecialSql, List<String> dimIds,
        Map<String, Dimension> editDims, Map<String, Object> values)
        throws OeDataSourceAccessException, OeDataSourceException {

    List<String> generatedKeys = new ArrayList<String>();
    Set<String> tablePkIds;

    // insert on parent table
    if (tableName.equals(parentTableDetails.getTableName())) {

        // setup KeyHolder from pk_dimIds
        tablePkIds = parentTableDetails.getPks();

        // Build a dimension-id <-> sql-column bidi map restricted to primary
        // keys. NOTE: superEditKeys is the live keySet VIEW of superEditCopy,
        // so retainAll also drops the non-pk entries from superEditCopy itself
        // before the loop below runs — the order of these statements matters.
        Map<String, Object> superEditCopy = new LinkedHashMap<String, Object>(superEditMap);
        Set<String> superEditKeys = superEditCopy.keySet();
        DualHashBidiMap bidimap = new DualHashBidiMap();
        superEditKeys.retainAll(tablePkIds);
        for (Map.Entry<String, Object> e : superEditCopy.entrySet()) {
            e.setValue(((DimensionBean) e.getValue()).getSqlCol());
            bidimap.put(e.getKey(), e.getValue());
        }

        // setup KeyHolder from pk_dimIds
        generatedKeys.addAll(tablePkIds); // NOTE: jdbc driver clears this and puts in the autoincs it finds.
        Map<String, Object> generatedKeyMap = new HashMap<String, Object>();
        for (String eachKey : generatedKeys) {
            generatedKeyMap.put(eachKey, null);
        }
        List<Map<String, Object>> keyMapList = new ArrayList<Map<String, Object>>();
        keyMapList.add(generatedKeyMap);
        KeyHolder keyHolder = new GeneratedKeyHolder(keyMapList);

        jdbcTemplate.update(new MultiTableInsertPreparedStatementCreator(tableName, ignoreSpecialSql, dimIds,
                editDims, values), keyHolder);

        Map<String, Object> keyMap = keyHolder.getKeys();

        // TODO: current implementation of getGeneratedKeys for PGSQL 8.4 returns ALL column/vals...we just want the pk's we know about
        // TODO: CHECK FOR WHAT HAPPENS WITH LOWER/UPPER CASE
        //http://archives.postgresql.org/pgsql-jdbc/2010-04/msg00061.php
        boolean isPostgreSql = isPostgreSqlDBMS();
        if (isPostgreSql) {
            // postgres' implementation of keyholder lowercases the key column
            DbKeyValMap dbkvm = new DbKeyValMap(bidimap);
            Set<String> kyids = dbkvm.keySet();
            for (String ky : kyids) {
                dbkvm.put(ky, keyMap.get(bidimap.get(ky)));
            }
            // kyids is dbkvm's keySet view: keep only the pk ids we know about
            kyids.retainAll(tablePkIds);
            keyMap = dbkvm;
        }

        // -OR-
        // if table had no auto-gen keys but the INSERT succeeds, it means the pks taken from 'values' worked.
        // therefore, safe to use these as the "generated" PKs. retains the values that are designated "PK" dimensions
        //
        else if (keyMap == null || keyMap.size() == 0) {
            DbKeyValMap dbkvm = new DbKeyValMap(values);
            Set<String> kyids = dbkvm.keySet();
            kyids.retainAll(tablePkIds);
            keyMap = dbkvm;
        }

        // make sure got *ALL* pkIds/values configured in the ds def.
        List<Map> allkeys = getAllGeneratedKeys(tableName, tablePkIds, new DbKeyValMap(keyMap));

        return (allkeys.size() > 0 ? allkeys.get(0) : null);

    } else { // insert on child table.
        // don't need to know the returned PK ids & vals for children. just do typical INSERT
        jdbcTemplate.update(new MultiTableInsertPreparedStatementCreator(tableName, ignoreSpecialSql, dimIds,
                editDims, values));
        return null;
    }
}

From source file:ubic.gemma.web.services.rest.AnnotationsWebService.java

/**
 * Performs a dataset search for each given value, then intersects the results to create a final set of dataset IDs.
 *
 * @param values the values that the datasets should match.
 * @return set of IDs that satisfy all given search values.
 */
private Collection<Long> searchEEs(List<String> values) {
    Set<Long> intersection = new HashSet<>();
    boolean first = true;

    for (String term : values) {
        // Restrict the search to expression experiments only.
        SearchSettings settings = new SearchSettingsImpl();
        settings.setQuery(term);
        settings.setSearchGenes(false);
        settings.setSearchPlatforms(false);
        settings.setSearchExperimentSets(false);
        settings.setSearchPhenotypes(false);
        settings.setSearchProbes(false);
        settings.setSearchGeneSets(false);
        settings.setSearchBioSequences(false);
        settings.setSearchBibrefs(false);

        Map<Class<?>, List<SearchResult>> results = searchService.search(settings, false, false);
        List<SearchResult> eeResults = results.get(ExpressionExperiment.class);

        // A term with no hits forces the overall intersection to be empty.
        if (eeResults == null) {
            return new HashSet<>();
        }

        // Collect just the IDs for this term.
        Set<Long> termIds = new HashSet<>();
        for (SearchResult hit : eeResults) {
            termIds.add(hit.getId());
        }

        if (first) {
            // First term seeds the running intersection.
            intersection = termIds;
            first = false;
        } else {
            // Subsequent terms narrow it.
            intersection.retainAll(termIds);
        }
    }
    return intersection;
}

From source file:org.codecover.eclipse.views.RedundancyGraphView.java

/**
 * Computes the pairwise redundancy of every ordered pair of test cases under
 * the given coverage criterion and stores the results in this.PairRedundancy.
 * For a pair (current, second) the value is |covered(current) ∩ covered(second)|
 * / |covered(current)|, restricted to the criterion's coverable-item set; NaN
 * when the current test case covers nothing. The "Condition" criterion is
 * handled separately by counting evaluated boolean assignments per item
 * instead of raw covered items.
 *
 * @param Criterion name of the coverage criterion to evaluate
 */
private final void calculatePairRedundancy(String Criterion) {
    Set<CoverableItem> coverableItemSet = createCoverableItemSet(Criterion);
    Set<CoverableItem> mainSet = new HashSet<CoverableItem>(coverableItemSet);
    if (Criterion.compareTo("Condition") != 0) {
        for (int i = 0; i < this.wholeTestCases.size(); i++) {
            TestCase currentTestCase = this.wholeTestCases.get(i);
            Set<CoverableItem> currentCoveredSet = new HashSet<CoverableItem>(
                    currentTestCase.getCoverageData().keySet());
            // restrict to items relevant for this criterion
            currentCoveredSet.retainAll(mainSet);
            double ts = currentCoveredSet.size();

            double rts;
            Double PairR = 0.0D;
            for (int a = 0; a < this.wholeTestCases.size(); a++) {
                if (a != i) {
                    TestCase secondTestCase = this.wholeTestCases.get(a);
                    Set<CoverableItem> secondSet = new HashSet<CoverableItem>(
                            secondTestCase.getCoverageData().keySet());
                    secondSet.retainAll(mainSet);
                    // intersection of both test cases' covered items
                    secondSet.retainAll(currentCoveredSet);
                    rts = secondSet.size();
                    PairR = 0.0D;
                    if (ts != 0.0D) {
                        PairR = rts / ts;
                    } else {
                        PairR = (0.0D / 0.0D); // NaN: ratio undefined when nothing is covered
                    }
                    TestCasePair tcp = new TestCasePair();
                    tcp.CurrentTest = currentTestCase.getName();
                    tcp.WithRespectTo = secondTestCase.getName();
                    if (this.selectedTestCases.contains(currentTestCase)) {
                        tcp.selected = true;
                    } else {
                        tcp.selected = false;
                    }
                    if (!PairR.isNaN()) {
                        // round to two decimals for display
                        BigDecimal bd = new BigDecimal(PairR);
                        bd = bd.setScale(2, BigDecimal.ROUND_HALF_UP);
                        PairR = bd.doubleValue();
                    }
                    this.PairRedundancy.put(tcp, PairR);
                }
            }
        }
    } else {
        // "Condition" criterion: weight each coverable item by the number of
        // evaluated boolean assignments rather than counting items directly.
        Map<TestCase, Integer> totalCoverableItemCount = new HashMap<TestCase, Integer>();
        for (int i = 0; i < this.wholeTestCases.size(); i++) {
            TestCase currentTestCase = this.wholeTestCases.get(i);
            Set<CoverableItem> currentCoveredSet = new HashSet<CoverableItem>(
                    currentTestCase.getAssignmentsMap().keySet());
            currentCoveredSet.retainAll(mainSet);
            int amountFirstTestCase = 0;
            for (CoverableItem item : currentCoveredSet) {
                BooleanAssignmentMap map = currentTestCase.getAssignmentsMap().get(item);
                amountFirstTestCase += map.getEvaluatedAssignments().size();
            }

            double ts = amountFirstTestCase;
            totalCoverableItemCount.put(currentTestCase, Integer.valueOf(amountFirstTestCase));
            for (int a = 0; a < this.wholeTestCases.size(); a++) {
                if (a != i) {
                    TestCase secondTestCase = this.wholeTestCases.get(a);
                    Set<CoverableItem> secondSet = new HashSet<CoverableItem>(
                            secondTestCase.getAssignmentsMap().keySet());
                    secondSet.retainAll(mainSet);
                    secondSet.retainAll(currentCoveredSet);
                    // count assignments evaluated by BOTH test cases on shared items
                    int amountIntersection = 0;
                    for (CoverableItem item : secondSet) {
                        BooleanAssignmentMap map1 = currentTestCase.getAssignmentsMap().get(item);
                        BooleanAssignmentMap map2 = secondTestCase.getAssignmentsMap().get(item);
                        Set<BooleanAssignment> sharedAssignments = new HashSet<BooleanAssignment>(
                                map1.getEvaluatedAssignments());
                        sharedAssignments.retainAll(map2.getEvaluatedAssignments());
                        amountIntersection += sharedAssignments.size();
                    }

                    double rts = amountIntersection;
                    Double PairR = 0.0D;
                    if (ts != 0.0D) {
                        PairR = rts / ts;
                    } else {
                        PairR = (0.0D / 0.0D); // NaN: ratio undefined when nothing is covered
                    }
                    TestCasePair tcp = new TestCasePair();
                    tcp.CurrentTest = currentTestCase.getName();
                    tcp.WithRespectTo = secondTestCase.getName();
                    if (!PairR.isNaN()) {
                        BigDecimal bd = new BigDecimal(PairR);
                        bd = bd.setScale(2, BigDecimal.ROUND_HALF_UP);
                        PairR = bd.doubleValue();
                    }
                    this.PairRedundancy.put(tcp, PairR);
                }
            }
        }
    }
}

From source file:org.codecover.eclipse.views.RedundancyGraphView.java

/**
 * Computes, for each test case, its redundancy with respect to the REST of
 * the suite (excluding test cases already marked redundant): the fraction of
 * its covered items (restricted to the criterion's coverable-item set) that
 * are also covered by at least one other non-redundant test case. Results go
 * into this.SuiteRedundancy keyed by test-case name; already-redundant test
 * cases are pinned to 1.0. The "Condition" criterion is handled separately by
 * counting evaluated boolean assignments per item.
 *
 * @param Criterion name of the coverage criterion to evaluate
 */
private final void calculateSuiteRedundancy(String Criterion) {
    Set<CoverableItem> coverableItemSet = createCoverableItemSet(Criterion);
    Set<CoverableItem> mainSet = new HashSet<CoverableItem>(coverableItemSet);
    Set<CoverableItem> otherTestSet = new HashSet<CoverableItem>();
    if (Criterion.compareTo("Condition") != 0) {
        for (int i = 0; i < this.wholeTestCases.size(); i++) {
            TestCase currentTestCase = this.wholeTestCases.get(i);
            Set<CoverableItem> currentCoveredSet = new HashSet<CoverableItem>(
                    currentTestCase.getCoverageData().keySet());
            currentCoveredSet.retainAll(mainSet);
            int ts = currentCoveredSet.size();
            // union of items covered by every OTHER non-redundant test case
            otherTestSet.clear();
            for (int a = 0; a < this.wholeTestCases.size(); a++) {
                if (a != i) {
                    TestCase secondTestCase = this.wholeTestCases.get(a);
                    if (!this.RedundantTestCases.contains(secondTestCase)) {
                        Set<CoverableItem> secondSet = new HashSet<CoverableItem>(
                                secondTestCase.getCoverageData().keySet());
                        secondSet.retainAll(mainSet);
                        otherTestSet.addAll(secondSet);
                    }
                }
            }

            // items of the current test case that the rest of the suite also covers
            currentCoveredSet.retainAll(otherTestSet);
            double rts = currentCoveredSet.size();
            Double SuiteR = 0.0D;
            if (ts != 0.0D) {
                SuiteR = rts / ts;
            } else {
                SuiteR = (0.0D / 0.0D); // NaN: ratio undefined when nothing is covered
            }

            if (!SuiteR.isNaN()) {
                // round to two decimals for display
                BigDecimal bd = new BigDecimal(SuiteR);
                bd = bd.setScale(2, BigDecimal.ROUND_HALF_UP);
                SuiteR = bd.doubleValue();
            }
            if (this.RedundantTestCases.contains(currentTestCase)) {
                this.SuiteRedundancy.put(currentTestCase.getName(), 1.0);
            } else {
                this.SuiteRedundancy.put(currentTestCase.getName(), SuiteR);
            }
        }
    } else {
        // "Condition" criterion: weight each item by its evaluated boolean
        // assignments rather than counting items directly.
        for (int i = 0; i < this.wholeTestCases.size(); i++) {
            TestCase currentTestCase = this.wholeTestCases.get(i);
            Set<CoverableItem> currentCoveredSet = new HashSet<CoverableItem>(
                    currentTestCase.getAssignmentsMap().keySet());
            currentCoveredSet.retainAll(mainSet);
            int amountFirstTestCase = 0;
            for (CoverableItem item : currentCoveredSet) {
                BooleanAssignmentMap map = currentTestCase.getAssignmentsMap().get(item);
                amountFirstTestCase += map.getEvaluatedAssignments().size();
            }

            int ts = amountFirstTestCase;
            int amountIntersection = 0;
            for (CoverableItem item : currentCoveredSet) {
                BooleanAssignmentMap map1 = currentTestCase.getAssignmentsMap().get(item);
                Set<BooleanAssignment> sharedAssignments = new HashSet<BooleanAssignment>(
                        map1.getEvaluatedAssignments());
                // assignments on this item evaluated by any other non-redundant test case
                Set<BooleanAssignment> otherAssignments = new HashSet<BooleanAssignment>();
                for (int a = 0; a < this.wholeTestCases.size(); a++) {
                    if (a != i) {
                        TestCase secondTestCase = this.wholeTestCases.get(a);
                        if (!this.RedundantTestCases.contains(secondTestCase)) {
                            Set<CoverableItem> secondSet = new HashSet<CoverableItem>(
                                    secondTestCase.getAssignmentsMap().keySet());
                            secondSet.retainAll(mainSet);
                            if (secondSet.contains(item)) {
                                BooleanAssignmentMap map2 = secondTestCase.getAssignmentsMap().get(item);
                                otherAssignments.addAll(map2.getEvaluatedAssignments());
                            }
                        }
                    }
                }

                sharedAssignments.retainAll(otherAssignments);
                amountIntersection += sharedAssignments.size();
            }

            double rts = amountIntersection;
            Double SuiteR = 0.0D;
            if (ts != 0.0D) {
                SuiteR = rts / ts;
            } else {
                SuiteR = (0.0D / 0.0D); // NaN: ratio undefined when nothing is covered
            }

            if (!SuiteR.isNaN()) {
                BigDecimal bd = new BigDecimal(SuiteR);
                bd = bd.setScale(2, BigDecimal.ROUND_HALF_UP);
                SuiteR = bd.doubleValue();
            }
            this.SuiteRedundancy.put(currentTestCase.getName(), SuiteR);
        }
    }
}

From source file:edu.upenn.cis.orchestra.workloadgenerator.Generator.java

/**
 * Map peer[i] to peer[j], mapping all common attributes to common
 * attributes. Remove peer j (the target) from the available targets.
 */
private void addMapping(int i, int j, List<Integer> availTargets, List<List<Object>> mappings) {
    // Attributes present in BOTH peers' logical schemas.
    Set<String> shared = new HashSet<String>(unnest(_logicalSchemas.get(i)));
    shared.retainAll(new HashSet<String>(unnest(_logicalSchemas.get(j))));

    // Record the mapping as [source peer, target peer, common attributes].
    List<Object> mapping = new ArrayList<Object>();
    mapping.add(i);
    mapping.add(j);
    mapping.add(new ArrayList<String>(shared));
    mappings.add(mapping);

    // Remove the Integer OBJECT j, not the element at index j — boxing
    // explicitly selects List.remove(Object) over List.remove(int).
    if (availTargets != null) {
        availTargets.remove(Integer.valueOf(j));
    }
}

From source file:edu.jhuapl.openessence.datasource.jdbc.entry.JdbcOeDataEntrySource.java

/**
 * Helper method for the ADD* related methods where the children records need fkeys that match the already inserted
 * parent record.
 *
 * @param tableName      table to get generated keys
 * @param keyIds         the complete known set of pk ids on the table
 * @param autoGenKeyVals any of the known pk values--i.e. ones generated by dbms, or provided by user calling
 *                       addCompleteRecord
 * @return all the pkeys and their values--supports "compound pkeys"
 * @throws OeDataSourceException if filter dimensions not configured properly
 */
private List<Map> getAllGeneratedKeys(String tableName, Collection<String> keyIds, DbKeyValMap autoGenKeyVals)
        throws OeDataSourceException, OeDataSourceAccessException {
    if (isMySqlDBMS()) {
        // FOR MYSQL ONLY -- THE COLUMN NAME GETS OVERWRITTEN AS "GENERATED_KEY" -- MYSQL ALLOWS ONLY 1 AUTO-INC COLUMN
        if (autoGenKeyVals.containsKey("GENERATED_KEY")) {
            Object autogenkeyval = autoGenKeyVals.get("GENERATED_KEY");
            autoGenKeyVals.clear();
            Set<String> pkids = new HashSet<String>(this.getParentTableDetails().getPks());
            Set<String> autogendids = this.getAutoGenMap().keySet();

            pkids.retainAll(autogendids); //just want the ONE auto-inc on the master table
            // todo - NPE CHECKING! pkids.iterator().next() throws if the intersection is empty
            autoGenKeyVals.put(pkids.iterator().next(), autogenkeyval);
        }
    } else if (isPostgreSqlDBMS()) {
        // do nothing. nothing altered about key column names.
    }

    // Build the WHERE clause from the known key values.
    boolean first = true;
    StringBuilder whereClause = new StringBuilder(" WHERE ");
    for (String keyId : autoGenKeyVals.keySet()) {
        if (first) {
            first = false;
        } else {
            whereClause.append(" AND ");
        }
        try {
            whereClause.append(getFilterDimension(keyId).getSqlCol()).append(" = ").append("'")
                    .append(DataTypeConversionHelper.convert2SqlType(autoGenKeyVals.get(keyId))).append("'");
        } catch (NullPointerException e) {
            throw new OeDataSourceException(
                    "This primary key designated dimension has not been configured as a FilterDimension: "
                            + keyId,
                    e);
        }
    }

    // BUILDING UP: "SELECT PK1, PK2, PK3, ... PKn FROM table WHERE GENKEY1 = VAL1, GENKEY2 = VAL2, ... GENKEYN = VALn"
    List<String> columnIdList = new ArrayList<String>();
    StringBuilder select = new StringBuilder("SELECT ");
    first = true;

    for (String keyId : keyIds) {
        if (first) {
            first = false;
        } else {
            select.append(",");
        }
        try {
            select.append(getEditDimension(keyId).getSqlCol());
            columnIdList.add(keyId);
        } catch (NullPointerException e) {
            throw new OeDataSourceException(
                    "This primary key designated dimension has not been configured as a EditDimension: "
                            + keyId,
                    e);
        }
    }

    select.append(" FROM ").append(tableName).append(" ");
    select.append(whereClause);

    log.debug("SELECT (string) FOR GETTING ALL GENERATED KEYS: \n" + select);
    return this.jdbcTemplate.query(select.toString(), new GeneratedKeysRowMapper(columnIdList));
}

From source file:com.offbynull.portmapper.pcp.PcpDiscovery.java

/**
 * Discovers PCP-capable gateways by sending a MAP request to each potential
 * gateway address on UDP port 5351 and collecting the addresses that respond
 * within a fixed 5-second window.
 *
 * @return the subset of potential gateway addresses that actually responded
 * @throws InterruptedException if the discovery wait is interrupted
 * @throws IOException if the UDP channel cannot be opened or bound
 */
private static Set<InetAddress> discoverGateways() throws InterruptedException, IOException {
    // synchronized set: populated from the communicator's listener thread
    final Set<InetAddress> foundGateways = Collections.synchronizedSet(new HashSet<InetAddress>());
    Set<InetAddress> potentialGateways = NetworkUtils.getPotentialGatewayAddresses(); // port 5351

    DatagramChannel unicastChannel = null;
    try {
        unicastChannel = DatagramChannel.open();
        unicastChannel.configureBlocking(false);
        unicastChannel.socket().bind(new InetSocketAddress(0)); // ephemeral local port
    } catch (IOException ioe) {
        // close the half-open channel before propagating
        IOUtils.closeQuietly(unicastChannel);
        throw ioe;
    }

    UdpCommunicator communicator = null;
    try {
        communicator = new UdpCommunicator(Collections.singletonList(unicastChannel));
        communicator.startAsync().awaitRunning();
        communicator.addListener(new UdpCommunicatorListener() {

            @Override
            public void incomingPacket(InetSocketAddress sourceAddress, DatagramChannel channel,
                    ByteBuffer packet) {
                // any response at all marks the sender as a gateway
                foundGateways.add(sourceAddress.getAddress());
            }
        });

        ByteBuffer outBuf = ByteBuffer.allocate(1100);
        MapPcpRequest mpr = new MapPcpRequest(ByteBuffer.allocate(12), 0, 0, 0, InetAddress.getByName("::"),
                0L);
        mpr.dump(outBuf, InetAddress.getByAddress(new byte[4])); // should get back an error for this, but this
                                                                 // should be fine because all we're looking for is a response, not
                                                                 // necessarily a correct response -- self address being sent is
                                                                 // 0.0.0.0 (IPV4)
                                                                 //
                                                                 // also, we need to pass in MAP because Apple's garbage routers
                                                                 // give back NATPMP responses when you pass in ANNOUNCE
        outBuf.flip();

        for (InetAddress potentialGateway : potentialGateways) {
            communicator.send(unicastChannel, new InetSocketAddress(potentialGateway, 5351),
                    outBuf.asReadOnlyBuffer());
        }

        // fixed response-collection window
        Thread.sleep(5000L);
    } finally {
        if (communicator != null) {
            communicator.stopAsync().awaitTerminated();
        }
    }

    foundGateways.retainAll(potentialGateways); // just in case we get back some unsolicited responses
    return new HashSet<>(foundGateways);
}

From source file:org.jactr.modules.pm.aural.audicon.map.LocationFeatureMap.java

/**
 * Collects identifiers whose onset matches the request's ONSET_SLOT
 * conditions (only = and != are supported) and adds them to the output
 * container. Multiple ONSET_SLOT conditions are intersected.
 *
 * @param request   pattern whose conditional slots are examined
 * @param container output set that receives the matching identifiers
 * @see org.jactr.modules.pm.common.memory.map.IFeatureMap#getCandidateRealObjects(ChunkTypeRequest, Set)
 */
public void getCandidateRealObjects(ChunkTypeRequest request, Set<IIdentifier> container) {
    Set<IIdentifier> identifiers = new HashSet<IIdentifier>();

    boolean firstIteration = true;
    for (IConditionalSlot cSlot : request.getConditionalSlots())
        if (cSlot.getName().equalsIgnoreCase(IAuralModule.ONSET_SLOT)) {
            if (LOGGER.isWarnEnabled())
                LOGGER.warn(getClass().getName() + " does not currently support internal location");

            Object value = cSlot.getValue();

            // identifiers matching this single slot condition
            Collection<IIdentifier> eval = new HashSet<IIdentifier>();
            if (value == null) {
                // "!= null" matches everything; "== null" matches nothing
                if (IConditionalSlot.NOT_EQUALS == cSlot.getCondition())
                    eval.addAll(all());
            } else
                switch (cSlot.getCondition()) {
                case IConditionalSlot.EQUALS:
                    eval.addAll(equal((IChunk) value));
                    break;
                case IConditionalSlot.NOT_EQUALS:
                    eval.addAll(not((IChunk) value));
                    break;
                default:
                    if (LOGGER.isWarnEnabled())
                        LOGGER.warn(getClass().getSimpleName() + " can only handle =,!=");
                    break;

                }

            // an empty per-slot result cannot grow by intersection; stop early
            if (eval.size() == 0)
                break;

            if (firstIteration) {
                identifiers.addAll(eval);
                firstIteration = false;
            } else
                identifiers.retainAll(eval);
        }

    // BUG FIX: the computed result was previously discarded — the method's
    // contract is to populate the supplied container with the candidates.
    container.addAll(identifiers);
}