Example usage for java.util HashMap size

List of usage examples for java.util HashMap size

Introduction

This page lists usage examples for java.util.HashMap.size().

Prototype

public int size()

Document

Returns the number of key-value mappings in this map.
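
A minimal, self-contained sketch illustrating that contract (the class and variable names here are illustrative only, not from any of the projects below):

import java.util.HashMap;

public class SizeDemo {
    public static void main(String[] args) {
        HashMap<String, Integer> ages = new HashMap<String, Integer>();
        System.out.println(ages.size()); // 0: a new map contains no mappings
        ages.put("alice", 30);
        ages.put("bob", 25);
        System.out.println(ages.size()); // 2
        ages.put("alice", 31);           // overwriting an existing key adds no mapping
        System.out.println(ages.size()); // still 2
        ages.remove("bob");
        System.out.println(ages.size()); // 1
    }
}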

Usage

From source file:de.fhg.fokus.odp.middleware.ckan.CKANGatewayUtil.java

/**
 * Returns a list of the most popular tags.
 *
 * @param numberOfTags
 *            the number of popular tags to return.
 * @return the most popular tags or null if an error occurred.
 */
@SuppressWarnings("unchecked")
public static JSONArray getMostPopularTags(int numberOfTags) {
    // check the parameters
    if (numberOfTags <= 0) {
        return null;
    }

    // the JSON array to return
    JSONArray toReturn = new JSONArray();

    // prepare the REST API call
    String RESTcall = "api/tag_counts";

    try {
        String tagListString = connectorInstance.restCall(RESTcall);

        if (tagListString == null) {
            log.log(Level.SEVERE, "Failed to execute API call \"" + url + RESTcall + "\"");
            return null;
        }

        // parse the JSON string and obtain an array of JSON objects
        Object obj = JSONValue.parse(tagListString);
        JSONArray array = (JSONArray) obj;

        HashMap<String, Long> map = new HashMap<String, Long>();

        // fill unsorted HashMap with all keys and values
        for (Object tag : array) {
            JSONArray tagArray = (JSONArray) tag;
            map.put((String) tagArray.get(0), (Long) tagArray.get(1));
        }

        // call sortHashMapByValues
        HashMap<String, Long> sortedHashMap = sortHashMapByValues(map);

        // cap numberOfTags at the number of available tags
        if (sortedHashMap.size() < numberOfTags) {
            numberOfTags = sortedHashMap.size();
        }

        // iterate over the first numberOfTags keys and fill toReturn
        if (sortedHashMap.size() >= numberOfTags) {
            List<String> mapKeys = new ArrayList<String>(sortedHashMap.keySet());
            Iterator<String> keyIt = mapKeys.iterator();
            int i = 0;
            while (keyIt.hasNext() && i < numberOfTags) {
                String key = keyIt.next();
                JSONObject tag = new JSONObject();
                tag.put("count", sortedHashMap.get(key));
                tag.put("tag_name", key);
                toReturn.add(tag);
                i++;
            }
        }
    }
    // catch potential exceptions
    catch (MalformedURLException e) {
        log.log(Level.SEVERE, "Malformed URL \"" + url + RESTcall + "\" !!!");
        return null;
    } catch (IOException e) {
        return null;
    }

    return toReturn;
}
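
The helper sortHashMapByValues is referenced above but not shown. A minimal sketch of what it plausibly looks like, assuming it sorts entries by descending count and returns a LinkedHashMap so that keySet() iterates in rank order (an assumption inferred from how the result is consumed, not taken from the CKAN gateway source):

import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;

private static HashMap<String, Long> sortHashMapByValues(HashMap<String, Long> map) {
    // Sort entries by value, highest first, and preserve that order in a
    // LinkedHashMap so that callers iterating keySet() see tags in rank order.
    LinkedHashMap<String, Long> sorted = new LinkedHashMap<String, Long>();
    map.entrySet().stream()
            .sorted(Map.Entry.<String, Long>comparingByValue().reversed())
            .forEachOrdered(e -> sorted.put(e.getKey(), e.getValue()));
    return sorted;
}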

From source file:org.apache.manifoldcf.crawler.connectors.generic.GenericConnector.java

protected static String[] getAcls(Specification spec) {
    HashMap map = new HashMap();
    int i = 0;
    while (i < spec.getChildCount()) {
        SpecificationNode sn = spec.getChild(i++);
        if (sn.getType().equals("access")) {
            String token = sn.getAttributeValue("token");
            map.put(token, token);
        }
    }

    String[] rval = new String[map.size()];
    Iterator iter = map.keySet().iterator();
    i = 0;
    while (iter.hasNext()) {
        rval[i++] = (String) iter.next();
    }
    return rval;
}
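
Because each key in the map above is simply mapped to itself, the HashMap acts as a set: map.size() sizes the output array after de-duplication. A HashSet states that intent more directly; a hedged equivalent sketch, not taken from the ManifoldCF source:

import java.util.HashSet;
import java.util.Set;

protected static String[] getAcls(Specification spec) {
    // Collect distinct access tokens; duplicates collapse automatically.
    Set<String> tokens = new HashSet<String>();
    for (int i = 0; i < spec.getChildCount(); i++) {
        SpecificationNode sn = spec.getChild(i);
        if ("access".equals(sn.getType())) {
            tokens.add(sn.getAttributeValue("token"));
        }
    }
    // size() plays the same role here: it sizes the result array exactly.
    return tokens.toArray(new String[tokens.size()]);
}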

From source file:gemlite.shell.admin.dao.AdminDao.java

private void checkPr(TreeSet<String> ipSet, HashMap<String, Set<String>> nodeMap,
        HashMap<String, HashMap<Integer, String>> data, StringBuilder sb) {
    StringBuilder tmp = new StringBuilder();
    // iterate over all hosts, comparing primary and redundant buckets per host
    Iterator<String> ipIt = ipSet.iterator();
    while (ipIt.hasNext()) {
        String host = ipIt.next();
        // iterate over the primary nodes on this host
        Iterator<String> pNodeIt = nodeMap.get(host).iterator();
        while (pNodeIt.hasNext()) {
            String pnode = pNodeIt.next();
            String phostAndNode = host + pnode;
            String pKey = primary + phostAndNode;
            HashMap<Integer, String> pMap = data.get(pKey);
            if (pMap == null || pMap.size() == 0)
                continue;
            Iterator<String> rNodeIt = nodeMap.get(host).iterator();
            // check this host's redundant nodes for bucket IDs that duplicate the primary's
            while (rNodeIt.hasNext()) {
                String rnode = rNodeIt.next();
                String rhostAndNode = host + rnode;
                String rKey = redundant + rhostAndNode;
                HashMap<Integer, String> rMap = data.get(rKey);
                if (rMap == null || rMap.size() == 0)
                    continue;
                // check each primary bucket ID against the redundant buckets
                Iterator<Integer> pBucketIt = pMap.keySet().iterator();
                while (pBucketIt.hasNext()) {
                    Integer bucketId = pBucketIt.next();
                    if (rMap.keySet().contains(bucketId)) {
                        tmp.append("primary bucket:" + phostAndNode + "-" + bucketId + " exist in redundant:"
                                + rhostAndNode).append("\n");
                    }
                }
            }
        }
    }

    if (tmp.length() > 0) {
        sb.append(tmp.toString());
    } else {
        sb.append("No primary and redundant bucket exist in the same host!");
    }
}

From source file:com.ibm.bi.dml.runtime.controlprogram.parfor.opt.PerfTestTool.java

/**
 * Runs all registered performance tests and writes the resulting profile.
 *
 * @return true if profiling completed successfully, false otherwise
 */
@SuppressWarnings("all")
public static boolean runTest() {
    boolean ret = false;

    try {
        Timing time = new Timing();
        time.start();

        //init caching
        LazyWriteBuffer.init();

        //register all testdefs and instructions
        registerTestConfigurations();
        registerInstructions();

        //execute tests for all confs and all instructions
        executeTest();

        //compute regression models
        int rows = NUM_SAMPLES_PER_TEST;
        int cols = MODEL_MAX_ORDER + (MODEL_INTERCEPT ? 1 : 0);
        HashMap<Integer, Long> tmp = writeResults(PERF_TOOL_DIR);
        computeRegressionModels(DML_SCRIPT_FNAME, DML_TMP_FNAME, PERF_TOOL_DIR, tmp.size(), rows, cols);
        readRegressionModels(PERF_TOOL_DIR, tmp);

        //execConstantRuntimeTest();
        //execConstantMemoryTest();

        //write final profile to XML file
        writeProfile(PERF_TOOL_DIR, PERF_PROFILE_FNAME);
        System.out
                .format("SystemML PERFORMANCE TEST TOOL: finished profiling (in %.2f min), profile written to "
                        + PERF_PROFILE_FNAME + "%n", time.stop() / 60000);

        ret = true;
    } catch (Exception ex) {
        LOG.error("Failed to run performance test.", ex);
    }

    return ret;
}

From source file:edu.cornell.mannlib.vitro.webapp.controller.grefine.GrefinePropertyListServlet.java

@Override
protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
    super.doGet(req, resp);
    resp.setContentType("application/json");
    VitroRequest vreq = new VitroRequest(req);

    try {

        String callbackStr = (vreq.getParameter("callback") == null) ? "" : vreq.getParameter("callback");
        ServletOutputStream out = resp.getOutputStream();

        VClassDao vcDao = vreq.getUnfilteredWebappDaoFactory().getVClassDao();
        DataPropertyDao dao = vreq.getUnfilteredWebappDaoFactory().getDataPropertyDao();
        String topUri = vreq.getParameter("type");
        VClass topClass = vcDao.getVClassByURI(topUri);
        HashSet<String> propURIs = new HashSet<String>();
        HashMap<VClass, List<DataProperty>> classPropertiesMap = populateClassPropertiesMap(vcDao, dao, topUri,
                propURIs);

        // Construct json String
        JSONObject completeJson = new JSONObject();
        JSONArray propertiesJsonArr = new JSONArray();
        if (classPropertiesMap.size() > 0) {
            for (Iterator<VClass> iter = classPropertiesMap.keySet().iterator(); iter.hasNext();) { // add results to schema
                VClass vc = (VClass) iter.next();
                //System.out.println("vc uri: " + vc.getURI());
                //System.out.println("vc name: " + vc.getName());   

                ArrayList<DataProperty> vcProps = (ArrayList<DataProperty>) classPropertiesMap.get(vc);
                for (DataProperty prop : vcProps) {
                    String nameStr = prop.getPublicName() == null
                            ? prop.getName() == null ? null : prop.getName()
                            : prop.getPublicName();
                    //System.out.println("--- uri: " + prop.getURI());
                    //System.out.println("--- name: " + nameStr);
                    // top level
                    JSONObject propertiesItemJson = new JSONObject();
                    JSONObject rootSchemaJson = new JSONObject();
                    rootSchemaJson.put("id", vc.getURI());
                    rootSchemaJson.put("name", vc.getName());
                    rootSchemaJson.put("alias", new JSONArray());
                    propertiesItemJson.put("schema", rootSchemaJson);
                    // second level
                    propertiesItemJson.put("id", prop.getURI());
                    propertiesItemJson.put("name", nameStr);
                    propertiesItemJson.put("alias", new JSONArray());

                    JSONObject expectsJson = new JSONObject();
                    expectsJson.put("id", prop.getURI());
                    expectsJson.put("name", nameStr);
                    expectsJson.put("alias", new JSONArray());
                    propertiesItemJson.put("expects", expectsJson);

                    propertiesJsonArr.put(propertiesItemJson);
                }
            }
        }

        // get data properties from subclasses
        List<VClass> lvl2Classes = new ArrayList<VClass>();
        List roots = null;
        String requestType = vreq.getParameter("type");
        if (requestType != null) {
            roots = new LinkedList<VClass>();
            roots.add(vcDao.getVClassByURI(requestType));
        }

        if (roots != null) {
            String ontologyUri = null;
            Collections.sort(roots);
            Iterator rootIt = roots.iterator();
            if (rootIt.hasNext()) {
                while (rootIt.hasNext()) {
                    VClass root = (VClass) rootIt.next();
                    if (root != null) {
                        List<VClass> lvl2ChildClasses = new ArrayList<VClass>();
                        addChildren(vcDao, vreq.getUnfilteredWebappDaoFactory(), root, lvl2ChildClasses, 0,
                                ontologyUri);
                        lvl2Classes.addAll(lvl2ChildClasses);
                    }
                }
            }
        }

        for (VClass lvl2Class : lvl2Classes) {
            HashMap<VClass, List<DataProperty>> lvl2ClassPropertiesMap = populateClassPropertiesMap(vcDao, dao,
                    lvl2Class.getURI(), propURIs);
            if (lvl2ClassPropertiesMap.size() > 0) {
                for (Iterator<VClass> iter = lvl2ClassPropertiesMap.keySet().iterator(); iter.hasNext();) { // add results to schema
                    VClass vc = (VClass) iter.next();
                    ArrayList<DataProperty> vcProps = (ArrayList<DataProperty>) lvl2ClassPropertiesMap.get(vc);
                    for (DataProperty prop : vcProps) {
                        String nameStr = prop.getPublicName() == null
                                ? prop.getName() == null ? null : prop.getName()
                                : prop.getPublicName();
                        // top level
                        JSONObject propertiesItemJson = new JSONObject();

                        JSONObject rootSchemaJson = new JSONObject();
                        rootSchemaJson.put("id", topClass.getURI());
                        rootSchemaJson.put("name", topClass.getName());
                        rootSchemaJson.put("alias", new JSONArray());
                        propertiesItemJson.put("schema", rootSchemaJson);

                        // second level
                        propertiesItemJson.put("id", vc.getURI());
                        propertiesItemJson.put("name", vc.getName());
                        propertiesItemJson.put("alias", new JSONArray());

                        propertiesItemJson.put("id2", prop.getURI());
                        propertiesItemJson.put("name2", nameStr);
                        propertiesItemJson.put("alias2", new JSONArray());

                        JSONObject expectsJson = new JSONObject();
                        expectsJson.put("id", prop.getURI());
                        expectsJson.put("name", nameStr);
                        expectsJson.put("alias", new JSONArray());
                        propertiesItemJson.put("expects", expectsJson);

                        propertiesJsonArr.put(propertiesItemJson);
                    }
                }

            }
        }

        completeJson.put("properties", propertiesJsonArr);
        out.print(callbackStr + "(" + completeJson.toString() + ")");

    } catch (Exception ex) {
        log.warn(ex, ex);
    }
}

From source file:com.compomics.util.experiment.identification.psm_scoring.psm_scores.HyperScore.java

/**
 * Returns the interpolation values for the given score histogram in the
 * form {a, b}.
 *
 * @param scoreHistogram the score histogram
 * @param useCache if true the interpolation values will be stored in the
 * histograms in cache
 *
 * @return the interpolation values for the given score histogram
 */
public double[] getInterpolationValues(HashMap<Integer, Integer> scoreHistogram, boolean useCache) {

    ArrayList<Integer> bins = new ArrayList<Integer>(scoreHistogram.keySet());
    Collections.sort(bins, Collections.reverseOrder());
    ArrayList<Double> evalueFunctionX = new ArrayList<Double>(scoreHistogram.size());
    ArrayList<Double> evalueFunctionY = new ArrayList<Double>(scoreHistogram.size());
    Integer currentSum = 0;
    for (Integer bin : bins) {
        Integer nInBin = scoreHistogram.get(bin);
        if (nInBin != null) {
            currentSum += nInBin;
        }
        if (currentSum > 0) {
            Double xValue = new Double(bin);
            xValue = FastMath.log10(xValue);
            evalueFunctionX.add(xValue);
            Double yValue = new Double(currentSum);
            yValue = FastMath.log10(yValue);
            evalueFunctionY.add(yValue);
        }
    }
    if (evalueFunctionX.size() <= 1) {
        return null;
    }
    RegressionStatistics regressionStatistics = LinearRegression.getSimpleLinearRegression(evalueFunctionX,
            evalueFunctionY);
    if (useCache) {
        Double roundedA = Util.roundDouble(regressionStatistics.a, 2);
        Double roundedB = Util.roundDouble(regressionStatistics.b, 2);
        Integer nA = as.get(roundedA);
        if (nA == null) {
            as.put(roundedA, 1);
        } else {
            as.put(roundedA, nA + 1);
        }
        Integer nB = bs.get(roundedB);
        if (nB == null) {
            bs.put(roundedB, 1);
        } else {
            bs.put(roundedB, nB + 1);
        }
    }
    return new double[] { regressionStatistics.a, regressionStatistics.b };
}
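
Note the use of scoreHistogram.size() as a capacity hint when constructing evalueFunctionX and evalueFunctionY above: pre-sizing each list to the histogram's entry count avoids intermediate array growth while the loop fills them.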

From source file:com.ichi2.anki.tests.ContentProviderTest.java

/**
 * Initially create one note for each model.
 */
@Override
protected void setUp() throws Exception {
    super.setUp();
    Log.i(AnkiDroidApp.TAG, "setUp()");
    mCreatedNotes = new ArrayList<>();
    final Collection col = CollectionHelper.getInstance().getCol(getContext());
    // Add a new basic model that we use for testing purposes (existing models could potentially be corrupted)
    JSONObject model = Models.addBasicModel(col, BASIC_MODEL_NAME);
    mModelId = model.getLong("id");
    ArrayList<String> flds = col.getModels().fieldNames(model);
    // Use the names of the fields as test values for the notes which will be added
    mDummyFields = flds.toArray(new String[flds.size()]);
    // create test decks and add one note for every deck
    final AddContentApi api = new AddContentApi(getContext());
    HashMap<Long, String> deckList = api.getDeckList();
    mNumDecksBeforeTest = deckList.size();
    // TODO: add the notes directly with libanki
    for (int i = 0; i < TEST_DECKS.length; i++) {
        mTestDeckIds[i] = api.addNewDeck(TEST_DECKS[i]);
        Uri newNoteUri = api.addNewNote(mModelId, mTestDeckIds[i], mDummyFields, TEST_TAG);
        assertNotNull(newNoteUri);
        mCreatedNotes.add(newNoteUri);
        // Check that the flds data was set correctly
        long nid = Long.parseLong(newNoteUri.getLastPathSegment());
        Note addedNote = col.getNote(nid);
        assertTrue("Check that the flds data was set correctly",
                Arrays.equals(addedNote.getFields(), mDummyFields));
        assertTrue("Check that there was at least one card generated", addedNote.cards().size() > 0);
    }
    // Add a note to the default deck as well so that testQueryNextCard() works
    Uri newNoteUri = api.addNewNote(mModelId, 1, mDummyFields, TEST_TAG);
    assertNotNull(newNoteUri);
    mCreatedNotes.add(newNoteUri);
}
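
Note that deckList.size() is captured in mNumDecksBeforeTest before any test decks are created, presumably so a later assertion or teardown can verify the deck count was restored; the map returned by getDeckList() provides that baseline in a single call.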

From source file:com.livgrhm.kansas.resources.AuthResource.java

private void addToAuthList(String email, String hash, java.sql.Date now, String ip) {
    // clear any hashmap entries for this userId
    HashMap authMapInst = this.authMap.getAuthMap();
    Iterator i = authMapInst.entrySet().iterator();
    while (i.hasNext()) {
        Map.Entry item = (Map.Entry) i.next();
        AuthItem ai = (AuthItem) item.getValue();
        if (ai.email.equals(email)) {
            i.remove();
        }
    }
    // now add the new key entry
    AuthItem ai = new AuthItem();
    ai.email = email;
    ai.loginDate = now;
    ai.ipAddress = ip;
    authMapInst.put(hash, ai);

    System.out.println("PUT IN AUTHMAP HASH: " + hash);
    System.out.println("AUTHMAP SIZE: " + authMapInst.size());

    this.authMap.setAuthMap(authMapInst);
}

From source file:com.clustercontrol.notify.util.NotifyRelationCache.java

public static void refresh() {
    JpaTransactionManager jtm = new JpaTransactionManager();
    if (!jtm.isNestedEm()) {
        m_log.warn("refresh() : transactioin has not been begined.");
        jtm.close();
        return;
    }

    try {
        _lock.writeLock();

        long start = HinemosTime.currentTimeMillis();
        new JpaTransactionManager().getEntityManager().clear();
        HashMap<String, List<NotifyRelationInfo>> notifyMap = new HashMap<String, List<NotifyRelationInfo>>();
        List<NotifyRelationInfo> nriList = null;
        try {
            nriList = QueryUtil.getAllNotifyRelationInfoWithoutJob();
        } catch (Exception e) {
            m_log.warn("refresh() : " + e.getClass().getSimpleName() + ", " + e.getMessage(), e);
            return;
        }
        for (NotifyRelationInfo nri : nriList) {
            String notifyGroupId = nri.getId().getNotifyGroupId();
            // only keep relations whose notify group is subject to caching
            if (onCache(notifyGroupId)) {
                List<NotifyRelationInfo> notifyList = notifyMap.get(notifyGroupId);
                if (notifyList == null) {
                    notifyList = new ArrayList<NotifyRelationInfo>();
                    notifyList.add(nri);
                    notifyMap.put(notifyGroupId, notifyList);
                } else {
                    notifyList.add(nri);
                }
            }
        }
        for (List<NotifyRelationInfo> notifyList : notifyMap.values()) {
            if (notifyList == null) {
                continue;
            }
            Collections.sort(notifyList);
        }
        storeCache(notifyMap);
        m_log.info("refresh NotifyRelationCache. " + (HinemosTime.currentTimeMillis() - start) + "ms. size="
                + notifyMap.size());
    } finally {
        _lock.writeUnlock();
    }
}

From source file:freenet.client.async.ContainerInserter.java

private Metadata makeManifest(HashMap<String, Object> manifestElements, String archivePrefix) {
    SimpleManifestComposer smc = new Metadata.SimpleManifestComposer();
    for (Map.Entry<String, Object> me : manifestElements.entrySet()) {
        String name = me.getKey();
        Object o = me.getValue();
        if (o instanceof HashMap) {
            @SuppressWarnings("unchecked")
            HashMap<String, Object> hm = (HashMap<String, Object>) o;
            //System.out.println("Decompose: "+name+" (SubDir)");
            smc.addItem(name, makeManifest(hm, archivePrefix + name + '/'));
            if (logDEBUG)
                Logger.debug(this, "Sub map for " + name + " : " + hm.size() + " elements");
        } else if (o instanceof Metadata) {
            //already Metadata, take it as is
            //System.out.println("Decompose: "+name+" (Metadata)");
            smc.addItem(name, (Metadata) o);
        } else {
            ManifestElement element = (ManifestElement) o;
            String mimeType = element.getMimeType();
            ClientMetadata cm;
            if (mimeType == null || mimeType.equals(DefaultMIMETypes.DEFAULT_MIME_TYPE))
                cm = null;
            else
                cm = new ClientMetadata(mimeType);
            Metadata m;
            if (element.targetURI != null) {
                //System.out.println("Decompose: "+name+" (ManifestElement, Redirect)");
                m = new Metadata(DocumentType.SIMPLE_REDIRECT, null, null, element.targetURI, cm);
            } else {
                //System.out.println("Decompose: "+name+" (ManifestElement, Data)");
                containerItems.add(new ContainerElement(element.getData(), archivePrefix + name));
                m = new Metadata(DocumentType.ARCHIVE_INTERNAL_REDIRECT, null, null,
                        archivePrefix + element.fullName, cm);
            }
            smc.addItem(name, m);
        }
    }
    return smc.getMetadata();
}