Example usage for java.util.HashSet.size()

List of usage examples for java.util.HashSet.size()

Introduction

On this page you can find example usages of java.util.HashSet.size().

Prototype

public int size() 

Document

Returns the number of elements in this set (its cardinality).
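
As a quick orientation before the project examples below, here is a minimal standalone sketch (not taken from any project) showing how size() tracks the set's cardinality: a new set reports 0, duplicates are not counted, and removals shrink the count.

import java.util.HashSet;
import java.util.Set;

public class SizeBasicsSketch {
    public static void main(String[] args) {
        Set<String> set = new HashSet<>();
        System.out.println(set.size());   // 0 for a freshly created set

        set.add("a");
        set.add("b");
        set.add("a");                     // duplicate, not counted
        System.out.println(set.size());   // 2

        set.remove("a");
        System.out.println(set.size());   // 1
    }
}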

Usage

From source file:com.redhat.rhn.manager.kickstart.KickstartFormatter.java

private String renderKeys() {
    StringBuilder retval = new StringBuilder();

    HashSet sslKeys = new HashSet();
    HashSet gpgKeys = new HashSet();

    // setup keys for rendering
    if (this.ksdata.getCryptoKeys() != null) {
        for (Iterator itr = this.ksdata.getCryptoKeys().iterator(); itr.hasNext();) {
            CryptoKey tmpKey = (CryptoKey) itr.next();
            if (tmpKey.isGPG()) {
                gpgKeys.add(tmpKey);
            } else if (tmpKey.isSSL()) {
                sslKeys.add(tmpKey);
            }
        }
    }

    if (gpgKeys.size() > 0) {
        retval.append(renderGpgKeys(gpgKeys));
    }

    if (sslKeys.size() > 0) {
        retval.append(renderSslKeys(sslKeys));
    }
    return retval.toString();
}
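
Both checks in this example use size() > 0 as a non-empty test; !isEmpty() expresses the same condition and is the more common idiom. A minimal standalone sketch (the key value is made up):

import java.util.HashSet;
import java.util.Set;

public class NonEmptyCheckSketch {
    public static void main(String[] args) {
        Set<String> gpgKeys = new HashSet<>();
        gpgKeys.add("key-1");

        // size() > 0 and !isEmpty() are equivalent non-empty checks
        if (!gpgKeys.isEmpty()) {
            System.out.println("rendering " + gpgKeys.size() + " GPG key(s)");
        }
    }
}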

From source file:org.apache.flex.compiler.internal.projects.SourcePathManager.java

private void checkForDuplicateQNames() {
    Map<String, Set<QNameFile>> qNameMap = new HashMap<String, Set<QNameFile>>();
    for (HashSet<QNameFile> qNameFiles : sourcePaths.values()) {
        for (QNameFile qNameFile : qNameFiles) {
            Set<QNameFile> qNameFilesForQName = qNameMap.get(qNameFile.qName);
            if (qNameFilesForQName == null) {
                qNameFilesForQName = new HashSet<QNameFile>(1);
                qNameMap.put(qNameFile.qName, qNameFilesForQName);
            }
            qNameFilesForQName.add(qNameFile);
        }
    }

    ArrayList<ICompilerProblem> duplicateQNameProblems = new ArrayList<ICompilerProblem>();
    for (Map.Entry<String, Set<QNameFile>> qNameMapEntry : qNameMap.entrySet()) {
        Set<QNameFile> qNameFiles = qNameMapEntry.getValue();
        String qName = qNameMapEntry.getKey();
        if (qNameFiles.size() > 1) {
            StringBuilder listString = new StringBuilder();
            int found = 0;
            for (QNameFile qNameFile : qNameFiles) {
                if (ResourceBundleSourceFileHandler.EXTENSION
                        .equalsIgnoreCase(FilenameUtils.getExtension(qNameFile.file.getAbsolutePath()))) {
                    //TODO: https://bugs.adobe.com/jira/browse/CMP-923
                    //As of now, we ignore the properties files while
                    //checking the duplicate names until we find a sophisticated way
                    //to do this in the future.
                    continue;
                }

                if (found++ > 0)
                    listString.append(", ");

                assert qName.equals(qNameFile.qName);
                listString.append(qNameFile.file.getAbsolutePath());
            }

            if (found > 1) //if we found more than one duplicate qname then report a problem
            {
                ICompilerProblem problem = new DuplicateQNameInSourcePathProblem(listString.toString(), qName);
                duplicateQNameProblems.add(problem);
            }
        }
    }

    if (duplicateQNameProblems.size() > 0)
        this.duplicateQNameProblems = duplicateQNameProblems;
    else
        this.duplicateQNameProblems = null;
}
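
The method above groups files by qualified name into a Map of Sets and treats any set whose size() exceeds 1 as a duplicate. A stripped-down sketch of that grouping-and-counting pattern, using plain strings instead of QNameFile (class and path names here are hypothetical):

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

public class DuplicateNameSketch {
    public static void main(String[] args) {
        String[][] entries = {
                { "com.example.Foo", "/src/a/Foo.as" },
                { "com.example.Foo", "/src/b/Foo.as" },
                { "com.example.Bar", "/src/a/Bar.as" } };

        // group file paths by qualified name
        Map<String, Set<String>> filesByName = new HashMap<>();
        for (String[] entry : entries) {
            filesByName.computeIfAbsent(entry[0], k -> new HashSet<>()).add(entry[1]);
        }

        // any set with more than one member marks a duplicated name
        for (Map.Entry<String, Set<String>> e : filesByName.entrySet()) {
            if (e.getValue().size() > 1) {
                System.out.println("duplicate qname " + e.getKey() + ": " + e.getValue());
            }
        }
    }
}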

From source file:amie.keys.CSAKey.java

private HashMap<Rule, HashSet<String>> discoverConditionalKeysFirstLevel(HashMap<Rule, Graph> ruleToGraph,
        HashMap<Integer, Graph> instantiatedProperty2Graph, Set<Rule> output) {
    Rule rule = new Rule();
    for (int conditionProperty : instantiatedProperty2Graph.keySet()) {
        Graph graph = instantiatedProperty2Graph.get(conditionProperty);
        String prop = id2Property.get(conditionProperty);

        Iterable<Rule> conditions = Utilities.getConditions(rule, prop, (int) support, kb);
        for (Rule conditionRule : conditions) {
            Graph newGraph = new Graph();
            discoverConditionalKeysForCondition(newGraph, graph, graph.topGraphNodes(), conditionRule, output);
            ruleToGraph.put(conditionRule, newGraph);
        }
    }

    HashMap<Rule, HashSet<String>> newRuleToExtendWith = new HashMap<>();
    for (Rule conRule : ruleToGraph.keySet()) {
        Graph newGraph = ruleToGraph.get(conRule);
        HashSet<String> properties = new HashSet<>();
        for (Node node : newGraph.topGraphNodes()) {
            if (node.toExplore) {
                Iterator<Integer> it = node.set.iterator();
                int prop = it.next();
                String propertyStr = id2Property.get(prop);
                properties.add(propertyStr);
            }

        }
        if (properties.size() != 0) {
            newRuleToExtendWith.put(conRule, properties);
        }
    }
    return newRuleToExtendWith;
}

From source file:org.apache.phoenix.jdbc.SecureUserConnectionsIT.java

@Test
public void testAlternatingLogins() throws Exception {
    final HashSet<ConnectionInfo> connections = new HashSet<>();
    final String princ1 = getUserPrincipal(1);
    final File keytab1 = getUserKeytabFile(1);
    final String princ2 = getUserPrincipal(2);
    final File keytab2 = getUserKeytabFile(2);

    UserGroupInformation ugi1 = UserGroupInformation.loginUserFromKeytabAndReturnUGI(princ1, keytab1.getPath());
    UserGroupInformation ugi2 = UserGroupInformation.loginUserFromKeytabAndReturnUGI(princ2, keytab2.getPath());

    // Using the same UGI should result in two equivalent ConnectionInfo objects
    ugi1.doAs(new PrivilegedExceptionAction<Void>() {
        public Void run() throws Exception {
            String url = joinUserAuthentication(BASE_URL, princ1, keytab1);
            connections.add(ConnectionInfo.create(url).normalize(ReadOnlyProps.EMPTY_PROPS, EMPTY_PROPERTIES));
            return null;
        }
    });
    assertEquals(1, connections.size());
    // Sanity check
    verifyAllConnectionsAreKerberosBased(connections);

    ugi2.doAs(new PrivilegedExceptionAction<Void>() {
        public Void run() throws Exception {
            String url = joinUserAuthentication(BASE_URL, princ2, keytab2);
            connections.add(ConnectionInfo.create(url).normalize(ReadOnlyProps.EMPTY_PROPS, EMPTY_PROPERTIES));
            return null;
        }
    });
    assertEquals(2, connections.size());
    verifyAllConnectionsAreKerberosBased(connections);

    ugi1.doAs(new PrivilegedExceptionAction<Void>() {
        public Void run() throws Exception {
            String url = joinUserAuthentication(BASE_URL, princ1, keytab1);
            connections.add(ConnectionInfo.create(url).normalize(ReadOnlyProps.EMPTY_PROPS, EMPTY_PROPERTIES));
            return null;
        }
    });
    assertEquals(2, connections.size());
    verifyAllConnectionsAreKerberosBased(connections);
}
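
The test depends on the HashSet collapsing ConnectionInfo objects that compare equal, so connections.size() counts distinct logins rather than doAs calls. A reduced sketch of that assertion style with plain strings (JUnit 4 assumed, principal names made up):

import static org.junit.Assert.assertEquals;

import java.util.HashSet;
import java.util.Set;

import org.junit.Test;

public class DistinctLoginCountTest {
    @Test
    public void sizeCountsDistinctElements() {
        Set<String> connections = new HashSet<>();
        connections.add("user1@EXAMPLE.COM");
        connections.add("user2@EXAMPLE.COM");
        connections.add("user1@EXAMPLE.COM"); // equal to the first add, so the set does not grow

        assertEquals(2, connections.size());
    }
}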

From source file:dao.SearchDaoDb.java

/**
* LuceneSearchBlobs - lucene search blobs
* @param dirPath - directory path
* @param searchText - search text
* @return List - list of matches based on the text
*/
public List luceneSearchBlobs(String dirPath, String searchText) {

    logger.info("searchText indexDir() = " + dirPath);
    dirPath = dirPath + "/";
    logger.info("searchText searchText() = " + searchText);
    // index the directory
    try {
        luceneManager.indexDir(new File(dirPath));
    } catch (Exception e) {
        throw new BaseDaoException(
                "Exception in LuceneManager.indexDir(), dirPath = " + dirPath + " " + e.getMessage(), e);
    }

    // get the hits
    ArrayList arrayHits = null;
    if (!RegexStrUtil.isNull(searchText)) {
        // String modexpr = searchText.replaceAll("[[\\W]&&[\\S]]", "");
        String modexpr = RegexStrUtil.goodStr(searchText);
        StringTokenizer st = new StringTokenizer(modexpr, " ");
        while (st.hasMoreTokens()) {
            String token = st.nextToken();
            Hits hits = null;
            try {
                logger.info("token search docs:" + token);
                hits = luceneManager.searchDocs(token);
            } catch (Exception e) {
                throw new BaseDaoException(
                        "Exception in LuceneManager searchDocs(token), token=" + token + e.getMessage(), e);
            }

            if (hits == null) {
                logger.info("LuceneManager, hits is null");
                return null;
            } else {
                logger.info("hits.length() = " + hits.length());
                arrayHits = new ArrayList();
                for (int j = 0; j < hits.length(); j++) {
                    logger.info("hits.length() = " + j);
                    Document doc = null;
                    try {
                        doc = hits.doc(j);
                    } catch (Exception e) {
                        throw new BaseDaoException(
                                "Exception in LuceneManager, hits.doc(j),j=" + j + " errMsg=" + e.getMessage(),
                                e);
                    }

                    if (doc == null) {
                        logger.info("doc is null for j = " + j);
                        continue;
                    } else {
                        logger.info("doc = " + doc.toString());
                        if (!RegexStrUtil.isNull(doc.get("path"))) {
                            String fileName = null;
                            String docDir = null;
                            String dirName = null;
                            String str = doc.get("path");
                            // find the sanpath in the str and strip it off
                            int ind = -1;
                            if (str.startsWith(dirPath)) {
                                ind = dirPath.length();
                            }
                            if (ind != -1) {
                                docDir = str.substring(ind, str.length());
                                logger.info("docDir = " + docDir.toString());
                                if (!RegexStrUtil.isNull(docDir)) {
                                    // get filename, docDir (directories) separated with spaces
                                    int endInd = docDir.lastIndexOf(File.separator);
                                    if (endInd != -1) {
                                        if ((endInd + 1) <= docDir.length()) {
                                            fileName = docDir.substring(endInd + 1, docDir.length()).trim();
                                            logger.info("fileName = " + fileName.toString());
                                            docDir = docDir.substring(0, endInd).trim();
                                            endInd = docDir.lastIndexOf(File.separator);
                                            if (endInd != -1) {
                                                if (endInd + 1 <= docDir.length()) {
                                                    dirName = docDir.substring(endInd + 1, docDir.length())
                                                            .trim();
                                                    docDir = docDir.substring(0, endInd).trim();
                                                }
                                            }
                                            logger.info("fileName = " + fileName);
                                            logger.info("dirName = " + dirName);
                                            logger.info("docDir = " + docDir);
                                        }
                                        if (!RegexStrUtil.isNull(docDir)) {
                                            // write the query
                                            docDir = docDir.replaceAll(File.separator, " ");
                                            logger.info("docDir with spaces = " + docDir.toString());
                                        }
                                        // docDir can be null i.e dirPath can be null 
                                        HashSet hs = getDirectoryFile(docDir, fileName, dirName);
                                        // there should be only one match of a file for each sanpath
                                        if (hs != null && hs.size() == 1) {
                                            Iterator it1 = hs.iterator();
                                            while (it1.hasNext()) {
                                                Directory dir = (Directory) it1.next();
                                                if (dir == null)
                                                    continue;
                                                logger.info(" found filematch = " + dir.toString());
                                                arrayHits.add(dir);
                                            } // while
                                        } // if hs
                                    } // endInd != -1
                                } // docDir
                            } // ind != -1
                        } // if
                    } // else
                } // for         
            } // else
        } // while
    } // if
    return arrayHits;
}
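
Buried in the loop, hs.size() == 1 accepts a lookup result only when exactly one Directory matched the file. A tiny standalone sketch of that exactly-one check (the path value is made up):

import java.util.HashSet;
import java.util.Set;

public class SingleMatchSketch {
    public static void main(String[] args) {
        Set<String> matches = new HashSet<>();
        matches.add("photos/2020/beach.jpg");

        // accept the lookup only when it produced exactly one match
        if (matches.size() == 1) {
            System.out.println("unique match: " + matches.iterator().next());
        } else {
            System.out.println("expected one match, got " + matches.size());
        }
    }
}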

From source file:net.countercraft.movecraft.async.translation.TranslationTask.java

private void captureYield(MovecraftLocation[] blocksList, List<MovecraftLocation> harvestedBlocks) {
    if (harvestedBlocks.isEmpty()) {
        return;
    }
    ArrayList<Inventory> chests = new ArrayList<Inventory>();
    HashSet<ItemDropUpdateCommand> itemDropUpdateSet = new HashSet<ItemDropUpdateCommand>();
    HashMap<MovecraftLocation, ItemStack[]> harvestedMap = new HashMap<MovecraftLocation, ItemStack[]>();
    //find chests
    for (MovecraftLocation loc : getCraft().getBlockList()) {
        Block block = getCraft().getW().getBlockAt(loc.getX(), loc.getY(), loc.getZ());
        if (block.getType() == Material.CHEST || block.getType() == Material.TRAPPED_CHEST)
            chests.add(((InventoryHolder) (block.getState())).getInventory());
    }

    for (MovecraftLocation harvestedBlock : harvestedBlocks) {
        Block block = getCraft().getW().getBlockAt(harvestedBlock.getX(), harvestedBlock.getY(),
                harvestedBlock.getZ());
        ItemStack[] drops = block.getDrops().toArray(new ItemStack[block.getDrops().size()]);
        //generate seed drops
        if (block.getType() == Material.CROPS) {
            Random rand = new Random();
            int amount = rand.nextInt(4);
            if (amount > 0) {
                ItemStack seeds = new ItemStack(Material.SEEDS, amount);
                HashSet<ItemStack> d = new HashSet<ItemStack>(Arrays.asList(drops));
                d.add(seeds);
                drops = d.toArray(new ItemStack[d.size()]);
            }
        }
        //get contents of inventories before depositing
        if (block.getState() instanceof InventoryHolder) {
            if (block.getState() instanceof Chest) {
                //Inventory inv = ((DoubleChest) block.getState()).getRightSide().getInventory().getLocation().equals(block.getLocation()) ?((DoubleChest) block.getState()).getRightSide().getInventory(): ((DoubleChest) block.getState()).getLeftSide().getInventory();
                //HashSet<ItemStack> d = new HashSet<ItemStack>(Arrays.asList(inv.getContents()));
                HashSet<ItemStack> d = new HashSet<ItemStack>(
                        Arrays.asList(((Chest) block.getState()).getBlockInventory().getContents()));
                d.addAll(block.getDrops());
                drops = d.toArray(new ItemStack[d.size()]);
            } else {
                HashSet<ItemStack> d = new HashSet<ItemStack>(
                        Arrays.asList((((InventoryHolder) block.getState()).getInventory().getContents())));
                d.addAll(block.getDrops());
                drops = d.toArray(new ItemStack[d.size()]);
            }
        }
        for (ItemStack drop : drops) {
            ItemStack retStack = putInToChests(drop, chests);
            if (retStack != null)
                //drop items on position
                itemDropUpdateSet.add(new ItemDropUpdateCommand(new Location(getCraft().getW(),
                        harvestedBlock.getX(), harvestedBlock.getY(), harvestedBlock.getZ()), retStack));
        }
    }
    data.setItemDropUpdates(itemDropUpdateSet.toArray(new ItemDropUpdateCommand[1]));
}
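
Several lines above merge item arrays by copying them into a HashSet, adding extras, and converting back with toArray sized by size(); equal elements collapse into one, which is the point of using a set. A generic sketch of that merge step with strings in place of the Bukkit ItemStack type:

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class MergeDropsSketch {
    public static void main(String[] args) {
        String[] drops = { "wheat", "seeds" };

        // copy the array into a set, add the extras, then rebuild the array
        Set<String> merged = new HashSet<>(Arrays.asList(drops));
        merged.add("seeds"); // already present, so the set does not grow
        merged.add("bread");

        drops = merged.toArray(new String[merged.size()]);
        System.out.println(drops.length); // 3
    }
}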

From source file:org.apache.sysml.hops.codegen.template.PlanSelectionFuseCostBased.java

private void createAndAddMultiAggPlans(CPlanMemoTable memo, ArrayList<Hop> roots) {
    //collect full aggregations as initial set of candidates
    HashSet<Long> fullAggs = new HashSet<Long>();
    Hop.resetVisitStatus(roots);
    for (Hop hop : roots)
        rCollectFullAggregates(hop, fullAggs);
    Hop.resetVisitStatus(roots);

    //remove operators with assigned multi-agg plans
    fullAggs.removeIf(p -> memo.contains(p, TemplateType.MultiAggTpl));

    //check applicability for further analysis
    if (fullAggs.size() <= 1)
        return;

    if (LOG.isTraceEnabled()) {
        LOG.trace("Found across-partition ua(RC) aggregations: "
                + Arrays.toString(fullAggs.toArray(new Long[0])));
    }

    //collect information for all candidates 
    //(subsumed aggregations, and inputs to fused operators) 
    List<AggregateInfo> aggInfos = new ArrayList<AggregateInfo>();
    for (Long hopID : fullAggs) {
        Hop aggHop = memo._hopRefs.get(hopID);
        AggregateInfo tmp = new AggregateInfo(aggHop);
        for (int i = 0; i < aggHop.getInput().size(); i++) {
            Hop c = HopRewriteUtils.isMatrixMultiply(aggHop) && i == 0
                    ? aggHop.getInput().get(0).getInput().get(0)
                    : aggHop.getInput().get(i);
            rExtractAggregateInfo(memo, c, tmp, TemplateType.CellTpl);
        }
        if (tmp._fusedInputs.isEmpty()) {
            if (HopRewriteUtils.isMatrixMultiply(aggHop)) {
                tmp.addFusedInput(aggHop.getInput().get(0).getInput().get(0).getHopID());
                tmp.addFusedInput(aggHop.getInput().get(1).getHopID());
            } else
                tmp.addFusedInput(aggHop.getInput().get(0).getHopID());
        }
        aggInfos.add(tmp);
    }

    if (LOG.isTraceEnabled()) {
        LOG.trace("Extracted across-partition ua(RC) aggregation info: ");
        for (AggregateInfo info : aggInfos)
            LOG.trace(info);
    }

    //sort aggregations by num dependencies to simplify merging
    //clusters of aggregations with parallel dependencies
    aggInfos = aggInfos.stream().sorted(Comparator.comparing(a -> a._inputAggs.size()))
            .collect(Collectors.toList());

    //greedy grouping of multi-agg candidates
    boolean converged = false;
    while (!converged) {
        AggregateInfo merged = null;
        for (int i = 0; i < aggInfos.size(); i++) {
            AggregateInfo current = aggInfos.get(i);
            for (int j = i + 1; j < aggInfos.size(); j++) {
                AggregateInfo that = aggInfos.get(j);
                if (current.isMergable(that)) {
                    merged = current.merge(that);
                    aggInfos.remove(j);
                    j--;
                }
            }
        }
        converged = (merged == null);
    }

    if (LOG.isTraceEnabled()) {
        LOG.trace("Merged across-partition ua(RC) aggregation info: ");
        for (AggregateInfo info : aggInfos)
            LOG.trace(info);
    }

    //construct and add multiagg template plans (w/ max 3 aggregations)
    for (AggregateInfo info : aggInfos) {
        if (info._aggregates.size() <= 1)
            continue;
        Long[] aggs = info._aggregates.keySet().toArray(new Long[0]);
        MemoTableEntry me = new MemoTableEntry(TemplateType.MultiAggTpl, aggs[0], aggs[1],
                (aggs.length > 2) ? aggs[2] : -1);
        for (int i = 0; i < aggs.length; i++) {
            memo.add(memo._hopRefs.get(aggs[i]), me);
            addBestPlan(aggs[i], me);
            if (LOG.isTraceEnabled())
                LOG.trace("Added multiagg* plan: " + aggs[i] + " " + me);

        }
    }
}
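
The method prunes the candidate set with removeIf and then uses size() <= 1 as an early exit, since a multi-aggregate plan needs at least two candidates. A small sketch of that gate with plain Long IDs in place of the SystemML types (method and variable names are made up):

import java.util.HashSet;
import java.util.Set;

public class CandidateGateSketch {
    static void process(Set<Long> candidates, Set<Long> alreadyAssigned) {
        // drop candidates that already have a plan assigned
        candidates.removeIf(alreadyAssigned::contains);

        // nothing to group unless at least two candidates remain
        if (candidates.size() <= 1) {
            return;
        }
        System.out.println("would group " + candidates.size() + " candidates");
    }

    public static void main(String[] args) {
        process(new HashSet<>(Set.of(1L, 2L, 3L)), Set.of(2L));
    }
}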

From source file:org.pentaho.di.job.entries.getpop.MailConnection.java

/**
 * Returns all subfolders of the specified folder
 *
 * @param folder
 *          parent folder
 * @return sub folders
 */
public String[] returnAllFolders(Folder folder) throws KettleException {
    HashSet<String> list = new HashSet<String>();
    list = returnSubfolders(folder);
    return list.toArray(new String[list.size()]);
}
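
This is the common idiom of pre-sizing the target array with size() so toArray can fill it without allocating a second array. A minimal standalone sketch (folder names are made up):

import java.util.HashSet;
import java.util.Set;

public class SetToArraySketch {
    public static void main(String[] args) {
        Set<String> folders = new HashSet<>();
        folders.add("INBOX");
        folders.add("Sent");

        // the target array is pre-sized with size(), so toArray fills it in place
        String[] names = folders.toArray(new String[folders.size()]);
        System.out.println(names.length); // 2
    }
}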

From source file:org.compass.core.lucene.engine.store.AbstractLuceneSearchEngineStore.java

public void configure(LuceneSearchEngineFactory searchEngineFactory, CompassSettings settings,
        CompassMapping mapping) {
    template = new LuceneStoreTemplate(this);

    this.luceneSettings = searchEngineFactory.getLuceneSettings();

    HashSet<String> subIndexesSet = new HashSet<String>();
    for (ResourceMapping resourceMapping : mapping.getRootMappings()) {
        String alias = resourceMapping.getAlias();
        String[] tempSubIndexes = resourceMapping.getSubIndexHash().getSubIndexes();
        for (String subIndex : tempSubIndexes) {
            subIndexesSet.add(subIndex.intern());

            List<String> list = subIndexesByAlias.get(alias);
            if (list == null) {
                list = new ArrayList<String>();
                subIndexesByAlias.put(alias, list);
            }
            list.add(subIndex);

            list = aliasesBySubIndex.get(subIndex);
            if (aliasesBySubIndex.get(subIndex) == null) {
                list = new ArrayList<String>();
                aliasesBySubIndex.put(subIndex, list);
            }
            list.add(alias);
        }
    }
    subIndexes = subIndexesSet.toArray(new String[subIndexesSet.size()]);

    // set up directory wrapper providers
    Map<String, CompassSettings> dwSettingGroups = settings
            .getSettingGroups(LuceneEnvironment.DirectoryWrapper.PREFIX);
    if (dwSettingGroups.size() > 0) {
        ArrayList<DirectoryWrapperProvider> dws = new ArrayList<DirectoryWrapperProvider>();
        for (Map.Entry<String, CompassSettings> entry : dwSettingGroups.entrySet()) {
            String dwName = entry.getKey();
            if (log.isInfoEnabled()) {
                log.info("Building directory wrapper [" + dwName + "]");
            }
            CompassSettings dwSettings = entry.getValue();
            String dwType = dwSettings.getSetting(LuceneEnvironment.DirectoryWrapper.TYPE);
            if (dwType == null) {
                throw new ConfigurationException(
                        "Directory wrapper [" + dwName + "] has no type associated with it");
            }
            DirectoryWrapperProvider dw;
            try {
                dw = (DirectoryWrapperProvider) ClassUtils.forName(dwType, settings.getClassLoader())
                        .newInstance();
            } catch (Exception e) {
                throw new ConfigurationException("Failed to create directory wrapper [" + dwName + "]", e);
            }
            if (dw instanceof CompassConfigurable) {
                ((CompassConfigurable) dw).configure(dwSettings);
            }
            dws.add(dw);
        }
        directoryWrapperProviders = dws.toArray(new DirectoryWrapperProvider[dws.size()]);
    }

    this.localDirectoryCacheManager = new LocalDirectoryCacheManager(searchEngineFactory);
    localDirectoryCacheManager.configure(settings);
}

From source file:org.apache.roller.weblogger.business.WeblogEntryTest.java

/**
 * We want to make sure that the first time placed on the tag remains
 * through subsequent updates.
 * 
 * @throws Exception
 */
public void testUpdateTagTime() throws Exception {

    WeblogEntryManager mgr = WebloggerFactory.getWeblogger().getWeblogEntryManager();

    // setup some test entries to use
    testWeblog = TestUtils.getManagedWebsite(testWeblog);
    testUser = TestUtils.getManagedUser(testUser);
    WeblogEntry entry = TestUtils.setupWeblogEntry("entry1", testWeblog.getDefaultCategory(), testWeblog,
            testUser);
    String id = entry.getId();

    entry.addTag("testWillStayTag");
    entry.addTag("testTagWillBeRemoved");
    mgr.saveWeblogEntry(entry);
    TestUtils.endSession(true);

    entry = mgr.getWeblogEntry(id);
    assertEquals(2, entry.getTags().size());

    Timestamp original = null;

    for (Iterator it = entry.getTags().iterator(); it.hasNext();) {
        WeblogEntryTag tagData = (WeblogEntryTag) it.next();
        if (tagData.getName().equals("testwillstaytag"))
            original = tagData.getTime();
    }

    List updateTags = new ArrayList();
    updateTags.add("testwillstaytag");
    updateTags.add("testnewtag");
    updateTags.add("testnewtag3");
    entry.updateTags(updateTags);
    mgr.saveWeblogEntry(entry);
    TestUtils.endSession(true);

    entry = mgr.getWeblogEntry(id);
    HashSet tagNames = new HashSet();
    for (Iterator it = entry.getTags().iterator(); it.hasNext();) {
        WeblogEntryTag tagData = (WeblogEntryTag) it.next();
        tagNames.add(tagData.getName());
        if (tagData.getName().equals("testwillstaytag"))
            assertEquals(original, tagData.getTime());
    }

    assertEquals(3, entry.getTags().size());
    assertEquals(3, tagNames.size());
    assertEquals(true, tagNames.contains("testwillstaytag"));
    assertEquals(true, tagNames.contains("testnewtag"));
    assertEquals(true, tagNames.contains("testnewtag3"));

    // teardown our test entry
    TestUtils.teardownWeblogEntry(id);
    TestUtils.endSession(true);
}
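
Here the HashSet collects tag names and size() confirms that exactly three distinct names survived the update. A reduced sketch of that distinct-count check (tag names follow the test above, with one repeated to show the collapse):

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class DistinctTagCountSketch {
    public static void main(String[] args) {
        List<String> tags = Arrays.asList("testwillstaytag", "testnewtag", "testnewtag3", "testnewtag");

        Set<String> tagNames = new HashSet<>(tags);
        // the repeated "testnewtag" collapses, leaving three distinct names
        System.out.println(tagNames.size());                      // 3
        System.out.println(tagNames.contains("testwillstaytag")); // true
    }
}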