Example usage for java.util HashMap size

List of usage examples for java.util HashMap size

Introduction

In this page you can find the example usage for java.util HashMap size.

Prototype

public int size()

To view the source code for java.util HashMap size, use the Source Link below.

Click Source Link

Document

Returns the number of key-value mappings contained in this map.

Usage

From source file:edu.csupomona.nlp.tool.crawler.Facebook.java

/**
 * Get all the Likes for the Post./*from   w w  w  . ja  v a2  s  .c  om*/
 * Specific for getting likes of the post. As for comment, getLikeCount() 
 * could be used.
 * @param post              Post to be parsed
 * @return                  HashMap of Likes
 */
public HashMap<String, Like> getLikes(Post post) {
    HashMap<String, Like> fullLikes = new HashMap<>();
    PagableList<Like> likes = post.getLikes();
    Paging<Like> paging;

    // trace
    System.out.print("Getting Likes... ");

    do {
        for (Like like : likes)
            fullLikes.put(like.getId(), like);

        // get next page
        paging = likes.getPaging();

        // to reduce speed
        pause(1);

        // get next page
        if (paging != null)
            for (int n = 1; n <= maxRetries_; ++n) {
                try {
                    likes = fb_.fetchNext(paging);
                } catch (FacebookException ex) { // exception & retry
                    Logger.getLogger(Facebook.class.getName()).log(Level.SEVERE, null, ex);
                    pause(STEP_SEC_ * n);
                    System.out.println("Starting retry... " + n + "/" + maxRetries_);
                    continue;
                }
                break;
            }
    } while ((paging != null) && (likes != null));

    // trace
    System.out.print(fullLikes.size() + "\n");

    return fullLikes;
}

From source file:io.seldon.recommendation.RecentItemsRecommender.java

/**
 * Recommends the most recently added items for the given client and
 * dimensions. Items are scored by recency: the newest non-excluded item
 * gets score 1.0 and each subsequent item drops by 1/recList.size().
 *
 * Only exclusion context mode is supported; in inclusion mode an empty
 * result set is returned immediately.
 *
 * @param client                 client name
 * @param user                   user id (unused by this recommender)
 * @param dimensions             item dimensions to restrict to
 * @param maxRecsCount           maximum number of recommendations
 * @param ctxt                   recommendation context (mode + context items)
 * @param recentItemInteractions unused by this recommender
 * @return scored recent items, or an empty result set when none qualify
 */
@Override
public ItemRecommendationResultSet recommend(String client, Long user, Set<Integer> dimensions,
        int maxRecsCount, RecommendationContext ctxt, List<Long> recentItemInteractions) {
    HashMap<Long, Double> recommendations = new HashMap<>();
    Set<Long> exclusions;

    if (ctxt.getMode() == RecommendationContext.MODE.INCLUSION) {
        logger.warn("Can't run RecentItemsRecommender in inclusion context mode");
        return new ItemRecommendationResultSet(name);
    } else {
        exclusions = ctxt.getContextItems();
    }
    if (logger.isDebugEnabled())
        logger.debug("Running with dimension " + dimensions.toString());
    // Over-fetch by the exclusion count so that filtering still leaves up to
    // maxRecsCount candidates.
    Collection<Long> recList = itemStorage
            .retrieveRecentlyAddedItems(client, maxRecsCount + exclusions.size(), dimensions).getItems();
    if (recList.size() > 0) {
        double scoreIncr = 1.0 / (double) recList.size();
        int count = 0;
        for (Long item : recList) {
            if (count >= maxRecsCount)
                break;
            else if (!exclusions.contains(item))
                recommendations.put(item, 1.0 - (count++ * scoreIncr));
        }
        List<ItemRecommendationResultSet.ItemRecommendationResult> results = new ArrayList<>();
        for (Map.Entry<Long, Double> entry : recommendations.entrySet()) {
            results.add(new ItemRecommendationResultSet.ItemRecommendationResult(entry.getKey(),
                    entry.getValue().floatValue()));
        }
        if (logger.isDebugEnabled())
            logger.debug("Recent items algorithm returned " + recommendations.size() + " items");
        return new ItemRecommendationResultSet(results, name);
    } else {
        logger.warn("No items returned for recent items of dimension " + StringUtils.join(dimensions, ",")
                + " for " + client);
    }
    // FIX: was raw-typed Collections.EMPTY_LIST (unchecked warning); use the
    // type-safe emptyList() with an explicit type witness.
    return new ItemRecommendationResultSet(
            Collections.<ItemRecommendationResultSet.ItemRecommendationResult>emptyList(), name);
}

From source file:com.hangum.tadpole.rdb.core.editors.table.TableViewerEditPart.java

/**
 * Rebuilds the columns of the given result-table viewer: disposes every
 * existing column, adds a fixed leading info column, then creates one
 * sortable (and, in editor mode, editable) column per entry in mapColumns.
 *
 * @param tableViewer the viewer whose columns are recreated
 * @param mapColumns  0-based column index -> column header text
 * @param tableSorter sorter driven by clicks on the column headers
 */
public void createTableColumn(final TableViewer tableViewer, final HashMap<Integer, String> mapColumns,
        final SQLResultSorter tableSorter) {
    // Dispose all existing columns; disposing index 0 each time works
    // because the remaining columns shift left after each dispose.
    Table table = tableViewer.getTable();
    int columnCount = table.getColumnCount();
    for (int i = 0; i < columnCount; i++) {
        table.getColumn(0).dispose();
    }

    try {
        // Fixed leading info column: resizable but not movable or sortable.
        final TableViewerColumn tableColumnInfo = new TableViewerColumn(tableViewer, SWT.LEFT);
        tableColumnInfo.getColumn().setText(Messages.TableViewerEditPart_0);
        tableColumnInfo.getColumn().setResizable(true);
        tableColumnInfo.getColumn().setMoveable(false);
        tableColumnInfo.getColumn().setWidth(100);

        // Data columns: viewer column i shows mapColumns entry i-1 (viewer
        // index 0 is taken by the info column above).
        for (int i = 1; i < mapColumns.size() + 1; i++) {
            final int index = i;

            final TableViewerColumn tableColumn = new TableViewerColumn(tableViewer, SWT.LEFT);
            tableColumn.getColumn().setText(mapColumns.get(index - 1));
            tableColumn.getColumn().setResizable(true);
            tableColumn.getColumn().setMoveable(false);

            // Clicking a header sorts by that column, toggling the direction
            // when the same column is clicked twice in a row.
            tableColumn.getColumn().addSelectionListener(new SelectionAdapter() {
                @Override
                public void widgetSelected(SelectionEvent e) {
                    tableSorter.setColumn(index);
                    int dir = tableViewer.getTable().getSortDirection();
                    if (tableViewer.getTable().getSortColumn() == tableColumn.getColumn()) {
                        dir = dir == SWT.UP ? SWT.DOWN : SWT.UP;
                    } else {
                        dir = SWT.DOWN;
                    }

                    tableViewer.getTable().setSortDirection(dir);
                    tableViewer.getTable().setSortColumn(tableColumn.getColumn());
                    tableViewer.refresh();
                }
            });

            // In editor mode the cells of this column are editable in place.
            if (modifyType == TABLE_MOD_TYPE.EDITOR)
                tableColumn.setEditingSupport(new TextViewerEditingSupport(this, index, tableViewer));
        } // end for

    } catch (Exception e) {
        logger.error(Messages.TableEditPart_8, e);
    }

}

From source file:gov.llnl.lc.smt.command.SmtCommand.java

/**
 * Looks up the OSM_Node matching the supplied guid in the given node map.
 *
 * @param guid  the guid identifying the node (may be null)
 * @param nodes map of node key to OSM_Node (may be null or empty)
 * @return the matching node, or null when the guid/map is missing or no
 *         entry exists for the guid's key
 */
protected OSM_Node getOSM_Node(IB_Guid guid, HashMap<String, OSM_Node> nodes) {
    // Guard: nothing to look up without a guid and a non-empty map.
    if ((guid == null) || (nodes == null) || nodes.isEmpty())
        return null;
    return nodes.get(OSM_Fabric.getOSM_NodeKey(guid.getGuid()));
}

From source file:edu.ku.brc.web.ParsePaleo.java

/**
 * /*  w  w  w .ja va2 s. c  om*/
 */
private void buildTaxonTree() {
    try {
        Connection conn = dbConn.getConnection();

        int rootRecId = -1;
        taxonInsertStmt.setString(1, "Root");
        taxonInsertStmt.setString(2, null);
        taxonInsertStmt.setInt(3, 0); // ParentID
        taxonInsertStmt.setInt(4, 0); // RankID
        int rv = taxonInsertStmt.executeUpdate();
        if (rv == 1) {
            Integer recId = BasicSQLUtils.getInsertedId(taxonInsertStmt);
            if (recId != null) {
                rootRecId = recId;
            }
        }
        if (rootRecId == -1) {
            throw new RuntimeException("Bad Root Taxon Node.");
        }

        rootNode = new TreeNode(rootRecId, "Root", 0, 0, null);

        Statement stmt = conn.createStatement();

        HashMap<String, String> values = new HashMap<String, String>();
        String sql = "SELECT p.ID FROM wp_xuub_posts p where p.post_type LIKE '%_page'";
        Vector<Integer> ids = BasicSQLUtils.queryForInts(sql);
        for (int recId : ids) {
            values.clear();
            sql = String.format(
                    "SELECT pm.meta_key, pm.meta_value FROM wp_xuub_posts p INNER JOIN wp_xuub_postmeta pm ON p.ID = pm.post_id WHERE ID = %d AND (NOT (pm.meta_key LIKE '\\_%c'))",
                    recId, '%');
            Vector<Object[]> data = BasicSQLUtils.query(sql);
            for (Object[] row : data) {
                if (row[1] != null) {
                    values.put(row[0].toString(), row[1].toString());
                }
            }

            System.out.println(values);
            if (values.size() == 0) {
                System.out.println(sql);
                continue;
            }

            String phylum = values.get("phylum");
            String clazz = values.get("class");
            String family = values.get("family");
            String genus = values.get("genus");
            String species = values.get("species");
            String common = values.get("common_name");

            String[] names = { phylum, clazz, family, genus, species };
            int[] ranks = { 30, 60, 140, 180, 220 };

            int len = 0;
            while (len < names.length && names[len] != null) {
                len++;
            }
            buildTree(rootNode, names, ranks, 0, common, len);
        }
        stmt.close();

        dbS3Conn.commit();

        System.out.println("Done with taxon tree.");

    } catch (Exception ex) {
        ex.printStackTrace();
    }
}

From source file:gedi.riboseq.inference.orf.OrfFinder.java

/**
 * Estimates per-ORF activities for a set of overlapping ORFs via EM over
 * codon equivalence classes, then assigns each ORF a p-value from a
 * likelihood-ratio test comparing per-condition fits against a single
 * shared ("unique") fit.
 *
 * NOTE(review): numCond is taken from the last codon seen; assumes all
 * codons have the same activity-array length -- TODO confirm.
 */
private void inferOverlappingOrfActivitiesEM(List<OrfWithCodons> orfs) {

    // Map each codon to the set of ORFs that contain it.
    HashMap<Codon, HashSet<OrfWithCodons>> cod2Orf = new HashMap<Codon, HashSet<OrfWithCodons>>();
    int numCond = -1;
    for (OrfWithCodons orf : orfs)
        for (Codon c : orf.getCodons()) {
            cod2Orf.computeIfAbsent(c, x -> new HashSet<>()).add(orf);
            numCond = c.getActivity().length;
        }

    // now equivalence classes: gives you all codons that are consistent with a specific combination of orfs
    HashMap<HashSet<OrfWithCodons>, HashSet<Codon>> equi = new HashMap<HashSet<OrfWithCodons>, HashSet<Codon>>();
    for (Codon c : cod2Orf.keySet()) {
        equi.computeIfAbsent(cod2Orf.get(c), x -> new HashSet<>()).add(c);
    }

    // Flatten into parallel arrays: E[i] = the ORF combination of class i,
    // codons[i] = the codons belonging to class i.
    OrfWithCodons[][] E = new OrfWithCodons[equi.size()][];
    HashSet<Codon>[] codons = new HashSet[E.length];
    int ind = 0;
    for (HashSet<OrfWithCodons> e : equi.keySet()) {
        codons[ind] = equi.get(e);
        E[ind++] = e.toArray(new OrfWithCodons[0]);
    }

    // Degrees of freedom for the per-condition vs shared model comparison.
    int dfEach = (numCond - 1) * orfs.size();
    int dfUnique = (numCond - 1);
    double llEach = 0;
    double llUnique = 0;

    // alpha[i] = total activity (summed over all conditions) of class i.
    double[] alpha = new double[E.length];
    for (int i = 0; i < alpha.length; i++) {
        for (Codon codon : codons[i])
            alpha[i] += codon.getTotalActivity();
    }
    double sum = EI.wrap(alpha).sum();

    // TODO not quite right, divide by effective lengths, then go through all equiv classes and sum the weighted alphas
    llUnique = new EquivalenceClassCountEM<OrfWithCodons>(E, alpha, orf -> orf.getEffectiveLength())
            .compute(miniter, maxiter, (orf, pi) -> orf.setEstimatedTotalActivity(pi * sum, pi));

    // Repeat the EM fit once per condition, accumulating the log-likelihood.
    for (int c = 0; c < numCond; c++) {
        Arrays.fill(alpha, 0);
        for (int i = 0; i < alpha.length; i++)
            for (Codon codon : codons[i])
                alpha[i] += codon.getActivity()[c];
        int uc = c; // effectively-final copy for use in the lambda below

        double csum = EI.wrap(alpha).sum();
        double lla = new EquivalenceClassCountEM<OrfWithCodons>(E, alpha, orf -> orf.getEffectiveLength())
                .compute(miniter, maxiter, (orf, pi) -> orf.setEstimatedTotalActivity(uc, pi * csum, pi));
        if (!Double.isNaN(lla))
            llEach += lla;
    }

    // Likelihood-ratio test statistic 2*(llEach - llUnique), upper-tail
    // chi-square with (dfEach - dfUnique) degrees of freedom.
    double p = ChiSquare.cumulative(2 * llEach - 2 * llUnique, dfEach - dfUnique, false, false);

    for (OrfWithCodons o : orfs)
        o.setUniqueProportionPval(p);

}

From source file:com.nuance.expertassistant.ReadExcelFile.java

/**
 * Evaluates retrieval quality against the rating database for a project.
 * For every question in the project's QA list and every rated expected
 * response, runs retrieveAnswers() and records whether the expected result
 * was found, its rank, passage length and response count.
 *
 * @param ProjectID project whose QA list is fetched and evaluated
 * @return map of question -> list of per-expected-result analysis maps
 *         (keys: Query, ExpectedResult, UserRating, GoldResponses, Found,
 *         Rank, PassageLength, numResponses), or null on any exception
 */
public HashMap<String, ArrayList<HashMap<String, String>>> evaluateFromRatingDB(String ProjectID) {
    HashMap<String, ArrayList<HashMap<String, String>>> EvalMap = new HashMap<String, ArrayList<HashMap<String, String>>>();
    HashMap<String, HashMap<String, String>> QAList = new HashMap<String, HashMap<String, String>>();

    try {
        // question -> (expected response -> user rating)
        QAList = QueryDB.fetchQAList(ProjectID);
        Iterator it = QAList.entrySet().iterator();
        while (it.hasNext()) {
            Map.Entry pair1 = (Map.Entry) it.next();

            HashMap<String, String> ratedResponses = (HashMap<String, String>) pair1.getValue();

            String Question = pair1.getKey().toString();

            Iterator it2 = ratedResponses.entrySet().iterator();

            ArrayList<HashMap<String, String>> responseArray4Question = new ArrayList<HashMap<String, String>>();

            while (it2.hasNext()) {
                Map.Entry pair2 = (Map.Entry) it2.next();
                System.out.println(Question + " = " + pair2.getKey());

                /**
                 * ***********************************
                 */
                // Per-expected-result analysis record, pre-filled with
                // "not found" defaults that are overwritten on a hit.
                HashMap<String, String> queryAnalysis = new HashMap<String, String>();

                System.out.println(" The QueryList String is " + Question);
                System.out.println(" The ExpectedResultList String is " + pair2.getKey());

                queryAnalysis.put("Query", Question);
                queryAnalysis.put("ExpectedResult", pair2.getKey().toString());
                queryAnalysis.put("UserRating", pair2.getValue().toString());
                queryAnalysis.put("GoldResponses", String.valueOf(ratedResponses.size()));
                queryAnalysis.put("Found", "NO");
                queryAnalysis.put("Rank", "-1");
                queryAnalysis.put("PassageLength", "-1");
                queryAnalysis.put("numResponses", "-1");

                // resultArray layout: [0] found flag (1 = hit), [1] rank,
                // [2] passage length, [3] number of responses.
                ArrayList<Integer> resultArray = new ArrayList<Integer>();

                resultArray = retrieveAnswers(ProjectID, Question, pair2.getKey().toString());

                System.out.println("Result Array" + resultArray.toString());

                if (resultArray.get(0) == 1) {
                    queryAnalysis.put("Found", "YES");
                    queryAnalysis.put("Rank", resultArray.get(1).toString());
                    queryAnalysis.put("PassageLength", resultArray.get(2).toString());
                    queryAnalysis.put("numResponses", resultArray.get(3).toString());

                }

                responseArray4Question.add(queryAnalysis);

                /**
                 * ************************************
                 */
            }

            System.out.println("*************************************");
            System.out.println("**************EVALUTE****************");
            System.out.println("*************************************");

            // Trace: dump the analysis records for this question.
            for (int i = 0; i < responseArray4Question.size(); i++) {
                System.out.println("" + responseArray4Question.get(i).get("Query"));
                System.out.println("" + responseArray4Question.get(i).get("ExpectedResult"));
                System.out.println("" + responseArray4Question.get(i).get("UserRating"));
                System.out.println("" + responseArray4Question.get(i).get("GoldResponses"));
                System.out.println("" + responseArray4Question.get(i).get("Found"));
                System.out.println("" + responseArray4Question.get(i).get("Rank"));
                System.out.println("" + responseArray4Question.get(i).get("PassageLength"));

            }

            EvalMap.put(Question, responseArray4Question);

        }

    } catch (Exception e) {
        // NOTE(review): any failure discards all partial results and
        // returns null -- callers must null-check.
        e.printStackTrace();
        return null;
    }

    System.out.println("*************************************");
    System.out.println("**************Print EvalMap****************");
    System.out.println("*************************************");

    // Trace: dump the full evaluation map.
    Iterator it = EvalMap.entrySet().iterator();
    while (it.hasNext()) {
        Map.Entry pair = (Map.Entry) it.next();
        ArrayList<HashMap<String, String>> responseArray = (ArrayList<HashMap<String, String>>) pair.getValue();

        for (int i = 0; i < responseArray.size(); i++) {

            System.out.println("" + responseArray.get(i).get("Query"));
            System.out.println("" + responseArray.get(i).get("ExpectedResult"));
            System.out.println("" + responseArray.get(i).get("Found"));
            System.out.println("" + responseArray.get(i).get("Rank"));
            System.out.println("" + responseArray.get(i).get("PassageLength"));
        }
    }

    System.out.println("*************************************");
    System.out.println("**************Print[ed] EvalMap****************");
    System.out.println("*************************************");

    return EvalMap;
}

From source file:com.microsoft.tfs.client.common.ui.wit.dialogs.WorkItemPickerDialog.java

/**
 * Iterate all projects and determine the union of work item types across
 * all projects.
 *
 * @return Array of work-item types representing the union of all work item
 *         types across all projects.
 */
private WorkItemType[] getAllProjectWorkItemTypes() {
    // Keyed by type name so a type shared by several projects appears once.
    // FIX: was a raw HashMap; parameterized to drop the unchecked cast on
    // the return value.
    final HashMap<String, WorkItemType> map = new HashMap<String, WorkItemType>();
    for (int i = 0; i < projects.length; i++) {
        final Project project = projects[i];
        final WorkItemTypeCollection collection = project.getWorkItemTypes();

        for (final Iterator it = collection.iterator(); it.hasNext();) {
            final WorkItemType type = (WorkItemType) it.next();
            final String typeName = type.getName();

            // First project to contribute a type name wins.
            if (!map.containsKey(typeName)) {
                map.put(typeName, type);
            }
        }
    }

    return map.values().toArray(new WorkItemType[map.size()]);
}

From source file:SecurityGiver.java

/**
 * Builds a word-frequency dictionary from allDescriptions, excluding query
 * terms (d_queryString) and ISO country codes, and writes it as a JSON
 * object of "word":"count" pairs to dataForCloud.json.
 *
 * Only words with a count above 1 are emitted (plus, via the pre-existing
 * "quick and dirty" remaining==1 hack, possibly the second-to-last entry),
 * and words contained in the current search string are skipped.
 *
 * @throws JSONException declared for interface compatibility
 * @throws IOException   if the output file cannot be created or written
 */
public void processDictionary() throws JSONException, IOException {
    // word -> occurrence count
    HashMap<String, Integer> processedDictionary = new HashMap<String, Integer>();
    ArrayList<String> locales = new ArrayList<String>(Arrays.asList(Locale.getISOCountries()));
    ArrayList<String> queries = new ArrayList<String>(Arrays.asList(d_queryString));

    for (String line : allDescriptions) {
        for (String word : line.split(" ")) {
            if (!queries.contains(word) && (!locales.contains(word))) {
                // FIX: merge() replaces the get()/null-check/put counting idiom.
                processedDictionary.merge(word, 1, Integer::sum);
            }
        }
    }

    // FIX: try-with-resources replaces the manual finally { flush; close }
    // (close() flushes); the writer is now also closed if an early write fails.
    try (FileWriter file = new FileWriter("dataForCloud.json")) {
        file.write("{");
        // NOTE(review): fragile comma logic carried over from the original
        // "quick and dirty fix" -- if entries are skipped by the filter below,
        // the output can contain a trailing comma or drop the last separator.
        // Preserved as-is to keep behavior identical; TODO rework.
        int remaining = processedDictionary.size();
        for (String s : processedDictionary.keySet()) {
            remaining--;
            Integer val = processedDictionary.get(s);
            if ((val > 1 || remaining == 1) && (!search.toLowerCase().contains(s.toLowerCase()))) {
                file.write("\"" + s + "\"" + ":" + "\"" + val.toString() + "\"");
                if (remaining > 1)
                    file.write(",\n");
            }
        }
        file.write("}");
        System.out.println("Successfully printed");

    } catch (IOException e) {
        e.printStackTrace();
    }

}

From source file:org.powertac.common.TariffEvaluatorTest.java

/**
 * Revoking Jim's tariff should move all 5000 of his customers to Bob's
 * (competitive) tariff rather than the default: expects exactly two
 * subscribeToTariff calls, -5000 for Jim, +5000 for Bob, none for default.
 */
@Test
public void revokeToDefault() {
    TariffSpecification bobTS = new TariffSpecification(bob, PowerType.CONSUMPTION)
            .addRate(new Rate().withValue(-0.4));
    Tariff bobTariff = new Tariff(bobTS);
    initTariff(bobTariff);
    TariffSpecification jimTS = new TariffSpecification(jim, PowerType.CONSUMPTION)
            .withMinDuration(TimeService.DAY * 5).addRate(new Rate().withValue(-0.4));
    Tariff jimTariff = new Tariff(jimTS);
    initTariff(jimTariff);

    double[] profile = { 1.0, 2.0 };
    cma.capacityProfile = profile;
    cma.setChoiceSamples(0.4, 0.6);

    // distribute all customers across jim & bob
    subscribeTo(bobTariff, 5000);
    subscribeTo(jimTariff, 5000);

    // revoke Jim's tariff - should move everyone to Bob
    jimTariff.setState(Tariff.State.KILLED);
    // capture calls to tariffMarket: record tariff -> customer-count delta
    final HashMap<Tariff, Integer> calls = new HashMap<Tariff, Integer>();
    doAnswer(new Answer<Object>() {
        @Override
        public Object answer(InvocationOnMock invocation) {
            Object[] args = invocation.getArguments();
            assertEquals("correct customer", customer, args[1]);
            calls.put((Tariff) args[0], (Integer) args[2]);
            return null;
        }
    }).when(tariffMarket).subscribeToTariff(any(Tariff.class), any(CustomerInfo.class), anyInt());
    ArrayList<Tariff> tariffs = new ArrayList<Tariff>();
    tariffs.add(defaultConsumption);
    tariffs.add(bobTariff);
    when(tariffRepo.findRecentActiveTariffs(anyInt(), any(PowerType.class))).thenReturn(tariffs);

    evaluator.evaluateTariffs();
    assertEquals("two calls", 2, calls.size());
    // FIX: Integer.valueOf replaces the deprecated new Integer(...) boxing
    // constructor; equals-based assertEquals behavior is unchanged.
    assertEquals("-5000 for jim", Integer.valueOf(-5000), calls.get(jimTariff));
    assertEquals("none for default", null, calls.get(defaultConsumption));
    assertEquals("+5000 for bob", Integer.valueOf(5000), calls.get(bobTariff));
}