Example usage for java.util HashMap size

List of usage examples for java.util HashMap size

Introduction

On this page you can find usage examples for java.util HashMap size().

Prototype

public int size()

Document

Returns the number of key-value mappings in this map.
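
Example

Before the project examples below, here is a minimal, self-contained sketch of how size() behaves; the class name and map contents are invented purely for illustration.

import java.util.HashMap;

public class HashMapSizeDemo {
    public static void main(String[] args) {
        HashMap<String, Integer> counts = new HashMap<>();
        System.out.println(counts.size()); // 0: a new map holds no mappings

        counts.put("apples", 3);
        counts.put("pears", 5);
        System.out.println(counts.size()); // 2: one mapping per distinct key

        counts.put("apples", 7);
        System.out.println(counts.size()); // still 2: overwriting a key adds no mapping

        counts.remove("pears");
        System.out.println(counts.size()); // 1: removing a key shrinks the map
    }
}

Note that size() == 0 is equivalent to isEmpty(); several of the examples below use the size()-based form.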

Usage

From source file:de.hbz.lobid.helper.CompareJsonMaps.java

public boolean writeFileAndTestJson(final JsonNode actual, final JsonNode expected) {
    // generated data to map
    final HashMap<String, String> actualMap = new HashMap<>();
    extractFlatMapFromJsonNode(actual, actualMap);
    // expected data to map
    final HashMap<String, String> expectedMap = new HashMap<>();
    extractFlatMapFromJsonNode(expected, expectedMap);
    CompareJsonMaps.logger.debug("\n##### remove good entries ###");
    Iterator<String> it = actualMap.keySet().iterator();
    removeContext(it);
    it = expectedMap.keySet().iterator();
    removeContext(it);
    for (final Entry<String, String> e : expectedMap.entrySet()) {
        CompareJsonMaps.logger.debug("Trying to remove " + e.getKey() + "...");
        if (!actualMap.containsKey(e.getKey())) {
            CompareJsonMaps.logger.warn("At least this element is missing in actual: " + e.getKey());
            return false;
        }
        if (e.getKey().endsWith("Order]")) {
            handleOrderedValues(actualMap, e);
        } else {
            handleUnorderedValues(actualMap, e);
        }
    }
    if (!actualMap.isEmpty()) {
        CompareJsonMaps.logger.warn("Fail - no Equality! These keys/values were NOT expected:");
        actualMap.forEach((key, val) -> CompareJsonMaps.logger.warn("KEY=" + key + " VALUE=" + val));
    } else
        CompareJsonMaps.logger.info("Succeeded - resources are equal");
    return actualMap.size() == 0;
}

From source file:com.wantscart.jade.provider.jdbc.JdbcDataAccess.java

private int[] batchUpdate2(String sql, Modifier modifier, List<Map<String, Object>> parametersList) {
    if (parametersList.size() == 0) {
        return new int[0];
    }
    // sql --> args[]
    HashMap<String, List<Object[]>> batches = new HashMap<String, List<Object[]>>();
    // sql --> named args
    HashMap<String, List<Map<String, Object>>> batches2 = new HashMap<String, List<Map<String, Object>>>();
    // sql --> [2,3,6,9] positions of parametersList
    Map<String, List<Integer>> positions = new HashMap<String, List<Integer>>();

    //TODO fix shardby null bug
    SQLThreadLocal.set(SQLType.WRITE, sql, modifier, parametersList);
    for (int i = 0; i < parametersList.size(); i++) {
        SQLInterpreterResult ir = interpret(sql, modifier, parametersList.get(i));
        List<Object[]> args = batches.get(ir.getSQL());
        List<Integer> position = positions.get(ir.getSQL());
        List<Map<String, Object>> maplist = batches2.get(ir.getSQL());
        if (args == null) {
            args = new LinkedList<Object[]>();
            batches.put(ir.getSQL(), args);
            position = new LinkedList<Integer>();
            positions.put(ir.getSQL(), position);
            maplist = new LinkedList<Map<String, Object>>();
            batches2.put(ir.getSQL(), maplist);
        }
        position.add(i);
        args.add(ir.getParameters());
        maplist.add(parametersList.get(i));
    }
    if (batches.size() == 1) {
        SQLThreadLocal.set(SQLType.WRITE, sql, modifier, parametersList);
        int[] updated = jdbc.batchUpdate(modifier, batches.keySet().iterator().next(),
                batches.values().iterator().next());
        SQLThreadLocal.remove();
        return updated;
    }
    int[] batchUpdated = new int[parametersList.size()];
    for (Map.Entry<String, List<Object[]>> batch : batches.entrySet()) {
        String batchSQL = batch.getKey();
        List<Object[]> values = batch.getValue();
        List<Map<String, Object>> map = batches2.get(batchSQL);
        SQLThreadLocal.set(SQLType.WRITE, sql, modifier, map);
        int[] updated = jdbc.batchUpdate(modifier, batchSQL, values);
        SQLThreadLocal.remove();
        List<Integer> position = positions.get(batchSQL);
        int i = 0;
        for (Integer p : position) {
            batchUpdated[p] = updated[i++];
        }
    }
    return batchUpdated;

}

From source file:au.org.theark.core.dao.DataExtractionDao.java

public File createBiospecimenDataCustomCSV(Search search, DataExtractionVO devo, List<CustomFieldDisplay> cfds,
        FieldCategory fieldCategory) {
    HashMap<String, ExtractionVO> hashOfBiospecimenCustomData = devo.getBiospecimenCustomData();
    log.info(" writing out biospecimenCustomData " + hashOfBiospecimenCustomData.size()
            + " entries for category '" + fieldCategory + "'");

    final String tempDir = System.getProperty("java.io.tmpdir");
    String filename = "BIOSPECIMENCUSTOMDATA.csv";
    if (filename.isEmpty()) {
        filename = "exportBiospecimenCustomcsv.csv";
    }
    final java.io.File file = new File(tempDir, filename);
    OutputStream outputStream;
    try {
        outputStream = new FileOutputStream(file);
        CsvWriter csv = new CsvWriter(outputStream);

        csv.write("SUBJECTUID");

        // Header

        for (String key : hashOfBiospecimenCustomData.keySet()) {
            HashMap<String, String> keyValues = hashOfBiospecimenCustomData.get(key).getKeyValues();
            for (String key2 : keyValues.keySet()) {
                csv.write(key2);
            }
            break;
        }
        csv.endLine();

        for (String subjectUID : hashOfBiospecimenCustomData.keySet()) {
            HashMap<String, String> keyValues = hashOfBiospecimenCustomData.get(subjectUID).getKeyValues();
            for (String key : keyValues.keySet()) {
                csv.write(keyValues.get(key));
            }
            csv.endLine();
        }
        csv.close();
    } catch (FileNotFoundException e) {
        log.error(e.getMessage());
    }

    return file;
}

From source file:edu.umich.its.lti.google.GoogleLtiServlet.java

private void insertRosterPermissions(HttpServletRequest request, HttpServletResponse response,
        TcSessionData tcSessionData) throws ServletException, IOException {
    M_log.info("In the insertRosterPermissions call for siteId:  " + tcSessionData.getContextId() + " UserId: "
            + tcSessionData.getUserId());
    HashMap<String, HashMap<String, String>> roster = getRoster(request, tcSessionData);
    M_log.debug("Roster Size: " + roster.size());
    //not running permission call for single person since that person already has permission to the folder
    if (roster.size() != 1) {
        insertPermissions(request, response, tcSessionData, roster);
    } else {
        response.setStatus(HttpServletResponse.SC_OK);
        response.getWriter().print(resource.getString("gd.insert.permission.warn.prompt"));
    }
    // Title set in request by insertPermissions: get and clear it
    request.removeAttribute(FOLDER_TITLE);
}

From source file:com.ca.dvs.utilities.lisamar.JDBCUtil.java

/**
 * Insert the seed data into the linked table
 *
 * @param eType          - entity type
 * @param refEntityMap      - map of the referenced entity type
 * @param refEntitySetMap   - map of the referenced entity set  
 * @param refKeyMap         - map of the referenced key
 * @param cmdList         - command list
 */
private void insertDataIntoLinkTable(final EntityType eType, HashMap<String, EntityType> refEntityMap,
        HashMap<String, String> refEntitySetMap, HashMap<String, String> refKeyMap, List<String> cmdList) {

    if (sampleData.size() == 0)
        return;

    if (eType.isCustomType())
        return;

    if (refEntitySetMap.size() < 2)
        return;

    String refEntitySetName1 = "";
    String refEntitySetName2 = "";
    for (Property p : eType.getProperties()) {
        String refEntitySetName = refEntitySetMap.get(p.getName());
        if (refEntitySetName1.isEmpty())
            refEntitySetName1 = refEntitySetName;
        else if (false == refEntitySetName.equalsIgnoreCase(refEntitySetName1))
            refEntitySetName2 = refEntitySetName;
    }
    if (refEntitySetName1.isEmpty() || refEntitySetName2.isEmpty())
        return;

    // find sample data set of refEntitySet
    List<Map<String, Object>> samples = null;
    samples = getSampleObjects(refEntitySetName1);
    if (samples == null || samples.size() == 0) {
        samples = getSampleObjects(refEntitySetName2);
        String refEntitySetName = refEntitySetName1;
        refEntitySetName1 = refEntitySetName2;
        refEntitySetName2 = refEntitySetName;
    }
    if (samples == null || samples.size() == 0)
        return;

    String sqlInsert = "INSERT INTO " + eType.getDBTableName();

    for (Map<String, Object> map1 : samples) {

        List<Map<String, Object>> subSamples = getSampleObjects(refEntitySetName2, map1);
        if (subSamples == null)
            continue;

        String strColumns = "";
        String strValues = "";

        for (Property p1 : eType.getProperties()) {

            if (p1.getName().isEmpty())
                continue;

            Object dataObj = null;
            if (refEntitySetMap.get(p1.getName()).equalsIgnoreCase(refEntitySetName1)) {

                dataObj = map1.get(refKeyMap.get(p1.getName()));
                if (dataObj == null)
                    continue;

                dataObj = isMatchedDataType(p1.getType(), dataObj);
                if (dataObj == null)
                    continue;

                if (strColumns.isEmpty())
                    strColumns += "( ";
                else
                    strColumns += ",";

                if (strValues.isEmpty())
                    strValues += " VALUES( ";
                else
                    strValues += ",";

                strColumns += p1.getDBColumnName();
                if (dataObj instanceof String)
                    strValues += (String) dataObj;
                else
                    strValues += dataObj.toString();
            }

        }
        if (strColumns.isEmpty() || strValues.isEmpty())
            break;

        for (Map<String, Object> map2 : subSamples) {

            String strColumns2 = "";
            String strValues2 = "";
            for (Property p2 : eType.getProperties()) {

                if (p2.getName().isEmpty())
                    continue;

                if (refEntitySetMap.get(p2.getName()).equalsIgnoreCase(refEntitySetName1))
                    continue;

                Object dataObj2 = map2.get(refKeyMap.get(p2.getName()));
                if (dataObj2 == null)
                    continue;

                dataObj2 = isMatchedDataType(p2.getType(), dataObj2);
                if (dataObj2 == null)
                    continue;

                if (false == strColumns2.isEmpty())
                    strColumns2 += ",";

                if (false == strValues2.isEmpty())
                    strValues2 += ",";

                strColumns2 += p2.getDBColumnName();
                if (dataObj2 instanceof String)
                    strValues2 += (String) dataObj2;
                else
                    strValues2 += dataObj2.toString();
            }

            if (strColumns2.isEmpty() || strValues2.isEmpty())
                break;

            String totalColumns = strColumns + "," + strColumns2 + ")";
            String totalValues = strValues + "," + strValues2 + ");";
            String sql = sqlInsert + totalColumns + " " + totalValues;
            cmdList.add(sql);
            System.out.println(sql);
        }
    }
}

From source file:gda.data.metadata.NXMetaDataProvider.java

public boolean isToBeTraversed(INexusTree tree) {
    int nNodes = tree.getNumberOfChildNodes();
    boolean out = (nNodes > 0);
    HashMap<String, Serializable> attributes = tree.getAttributes();
    if (attributes != null && attributes.size() == nNodes) {

        Serializable units = attributes.get("units");
        Serializable format = attributes.get("format");
        Serializable field_t = attributes.get("field_type");
        Serializable metadata_t = attributes.get("metadata_type");

        int nodesToBeTraversed = nNodes;
        if (units != null) {
            nodesToBeTraversed -= 1;
        }
        if (format != null) {
            nodesToBeTraversed -= 1;
        }
        if (field_t != null) {
            nodesToBeTraversed -= 1;
        }
        if (metadata_t != null) {
            nodesToBeTraversed -= 1;
        }
        out = (nodesToBeTraversed > 0);
    }
    return out;
}

From source file:gda.data.metadata.NXMetaDataProvider.java

public boolean isToBeHarvested(INexusTree tree) {
    int nNodes = tree.getNumberOfChildNodes();
    boolean out = (nNodes > 0);
    HashMap<String, Serializable> attributes = tree.getAttributes();
    if (attributes != null && attributes.size() == nNodes) {

        Serializable units = attributes.get("units");
        Serializable format = attributes.get("format");
        Serializable field_t = attributes.get("field_type");
        Serializable metadata_t = attributes.get("metadata_type");

        int nodesRemaining = nNodes;
        if (units != null) {
            nodesRemaining -= 1;
        }
        if (format != null) {
            nodesRemaining -= 1;
        }
        if (field_t != null) {
            nodesRemaining -= 1;
        }
        if (metadata_t != null) {
            nodesRemaining -= 1;
        }
        out = (nodesRemaining == 0);
    }
    return out;
}

From source file:com.netflix.simianarmy.aws.janitor.crawler.edda.EddaInstanceJanitorCrawler.java

private void refreshOwnerByImage(String region, List<Resource> resources) {
    HashSet<String> imageIds = new HashSet<String>();
    for (Resource resource : resources) {
        if (resource.getOwnerEmail() == null) {
            imageIds.add(resource.getAdditionalField("imageId"));
        }
    }
    if (imageIds.size() > 0) {
        HashMap<String, String> imageToOwner = new HashMap<String, String>();
        String url = eddaClient.getBaseUrl(region) + "/aws/images/";
        url += StringUtils.join(imageIds, ',');
        url += ";tags.key=owner;public=false;_expand:(imageId,tags:(owner))";
        JsonNode imageJsonNode = null;
        try {
            imageJsonNode = eddaClient.getJsonNodeFromUrl(url);
        } catch (Exception e) {
            LOGGER.error(String.format("Failed to get Json node from edda for AMIs in region %s.", region), e);
        }
        if (imageJsonNode == null) {
            return;
        }
        for (Iterator<JsonNode> it = imageJsonNode.getElements(); it.hasNext();) {
            JsonNode image = it.next();
            String imageId = image.get("imageId").getTextValue();
            JsonNode tags = image.get("tags");
            for (Iterator<JsonNode> tagIt = tags.getElements(); tagIt.hasNext();) {
                JsonNode tag = tagIt.next();
                if (tag.get("owner") != null) {
                    imageToOwner.put(imageId, tag.get("owner").getTextValue());
                    break;
                }
            }
        }
        if (imageToOwner.size() > 0) {
            for (Resource resource : resources) {
                if (resource.getOwnerEmail() == null
                        && imageToOwner.get(resource.getAdditionalField("imageId")) != null) {
                    resource.setOwnerEmail(imageToOwner.get(resource.getAdditionalField("imageId")));
                    LOGGER.info(
                            String.format("Found owner %s for instance %s in AMI %s", resource.getOwnerEmail(),
                                    resource.getId(), resource.getAdditionalField("imageId")));
                }
            }
        }
    }
}

From source file:edu.umich.its.lti.google.GoogleLtiServlet.java

/**
 * Removes permissions to the given folder for people in the roster.
 * Permissions for owners of the folder are not touched.
 */
private void removePermissions(HttpServletRequest request, HttpServletResponse response,
        TcSessionData tcSessionData) throws Exception {
    M_log.info("In the Removal of permission call... For Site Id: " + tcSessionData.getContextId() + " UserId: "
            + tcSessionData.getUserId());
    try {
        if (!validatePermissionsRequiredParams(request, response, tcSessionData)) {
            response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
            response.getWriter().print(resource.getString("gd.permission.error.six"));
            return;
        }
        FolderPermissionsHandler handler = getHandler(request, response, tcSessionData);
        if (handler == null) {
            response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
            response.getWriter().print(resource.getString("gd.permission.error.six"));
            String content = "Error: unable to remove Google Folder permissions, as the folder was not retrieved from Google Drive for Instructor email address: \"";
            helperLogMessages(tcSessionData, content, null);
            return;
        }
        File file = handler.getFile();
        if (file == null) {
            response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
            response.getWriter().print(resource.getString("gd.permission.error.six"));
            String content = "Error: unable to remove Google Folder permissions, as the folder was not retrieved from Google Drive for Instructor email address: \"";
            helperLogMessages(tcSessionData, content, null);
            return; // Quick return to simplify code
        }

        HashMap<String, HashMap<String, String>> roster = getRoster(request, tcSessionData);
        M_log.debug("Roster Size: " + roster.size());
        //not running permission call for single person since the owner permission can't be removed
        if (roster.size() != 1) {
            new RemovePermissionHandler(handler, roster, tcSessionData).start();
            M_log.info(
                    "In the Removal of permission call after starting a new thread and giving usefull message to user. SiteId: "
                            + tcSessionData.getContextId() + " UserId: " + tcSessionData.getUserId());
        }
        response.setStatus(HttpServletResponse.SC_OK);
        response.getWriter().print(resource.getString("gd.removal.permission.warn.prompt"));
    } catch (Exception err) {
        response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
        response.getWriter().print(resource.getString("gd.permission.error.six"));
        String content = "Removal of google permissions Failed for the class roster to shared folder of Instructor with email address: \"";
        helperLogMessages(tcSessionData, content, err);
    }
}

From source file:gedi.riboseq.inference.orf.OrfFinder.java

/**
 * orf->[total-activity,fraction-by-coverage]
 * @param equi
 * @param equiLengths
 * @param acti
 * @return
 */
private HashMap<OrfWithCodons, double[]> estimateByCoverage(
        HashMap<HashSet<OrfWithCodons>, HashSet<Codon>> equi,
        HashMap<HashSet<OrfWithCodons>, Integer> equiLengths, ToDoubleFunction<Codon> acti) {

    HashMap<HashSet<OrfWithCodons>, Double> vv = new HashMap<HashSet<OrfWithCodons>, Double>();
    double[] v = new double[equi.size()];
    OrfWithCodons[][] E = new OrfWithCodons[equi.size()][];
    HashSet<Codon>[] codons = new HashSet[E.length];
    int ind = 0;
    for (HashSet<OrfWithCodons> e : equi.keySet()) {
        codons[ind] = equi.get(e);
        E[ind] = e.toArray(new OrfWithCodons[0]);
        for (Codon c : equi.get(e))
            v[ind] += acti.applyAsDouble(c);
        vv.put(e, v[ind]);
        v[ind] /= equiLengths.get(e);
        ind++;
    }
    HashMap<OrfWithCodons, double[]> re = new HashMap<OrfWithCodons, double[]>();
    new EquivalenceClassMinimizeFactors<OrfWithCodons>(E, v)
            .compute((orf, pi) -> re.put(orf, new double[] { 0, pi }));

    for (HashSet<OrfWithCodons> e : equi.keySet()) {
        double sum = EI.wrap(e).mapToDouble(i -> re.get(i)[1]).sum();
        for (OrfWithCodons i : e) {
            double[] r = re.get(i);
            r[0] += vv.get(e) * r[1] / sum;
        }
    }

    return re;
}