Example usage for java.util HashMap size

List of usage examples for java.util HashMap size

Introduction

In this page you can find the example usage for java.util HashMap size.

Prototype

int size()

To view the source code for java.util HashMap size, click the Source Link below.

Click Source Link

Document

Returns the number of key-value mappings contained in this map.

Usage

From source file:it.baywaylabs.jumpersumo.utility.Finder.java

public Integer[] checkConsecutive(String[] key, HashMap<String, int[]> map) {
    Set<Integer> check = new HashSet<Integer>();

    boolean consecutivi = false;

    if (key.length > 0 && map.size() > 0) {
        /*/*from   w  ww . j  a  v  a  2 s .  c  o m*/
        for (int i=0; i<key.length; i++)
        {
        for (int j=0; j<map.get(key[i]).length; j++ )
        {
            if (i+1 < key.length)
            {
                if ( map.get(key[i])[j] + key[i].length() +1 == map.get(key[i+1])[j] )
                {
                    // check.add(map.get(key[0])[map.get(key[0]).length-1]);
                    check.add(map.get(key[0])[j]);
                }
            }
        }
        }
        */
        for (int i = 0; i < key.length; i++) {
            int index = -1;
            if (i + 1 < key.length && map.get(key[i + 1]) != null) {
                index = map.get(key[i + 1]).length - 1;
            }
            for (int j = map.get(key[i]).length - 1; j >= 0; j--) {
                if (i + 1 < key.length && index >= 0) {
                    if (map.get(key[i])[j] + key[i].length() + 1 == map.get(key[i + 1])[index]) {
                        // check.add(map.get(key[0])[map.get(key[0]).length-1]);
                        check.add(map.get(key[0])[j]);
                        index--;
                    }
                }
            }
        }
    }

    if (!check.isEmpty()) {
        return check.toArray(new Integer[check.size()]);
        // return ArrayUtils.toPrimitive((Integer[]) check.toArray());
    }

    return null;
}

From source file:com.norbl.cbp.ppe.Ec2Wrangler.java

/** Note that spot price instances are <i>not</i> available for
 *  cluster instances.<p>/* w w  w.  j a v a 2s .  co m*/
 *
 *  This method blocks until the spot order has been filled.
 *  This is necessary so that we can tag the instances.  Note
 *  when this method returns the instances may not be fully booted.
 *        
  * @param instanceType
  * @param imageID
  * @param availabilityZone
  * @param nInstances
  * @param keyName
  * @param securityGroupName
  * @param networkName
  * @param userData
  * @param spotPrice
  * @return
  * @throws MissingParamsException
  * @throws ImproperParamException 
  */
public String launchSpotInstances(InstanceType instanceType, String imageID, String availabilityZone,
        int nInstances, String keyName, String securityGroupName, String networkName, String userData,
        double spotPrice) throws MissingParamsException, ImproperParamException {

    LaunchSpecification spec = new LaunchSpecification();
    spec.setInstanceType(instanceType);
    spec.setImageId(imageID);
    if (Ec2Location.isValidAvailablityZone(ec2Client, availabilityZone))
        setAvailabilityZone(spec, availabilityZone);
    // else any zone will do, so don't set it.
    spec.setKeyName(keyName);
    List<String> sgs = new ArrayList<String>();
    sgs.add(securityGroupName);
    spec.setSecurityGroups(sgs);
    spec.setUserData(userData);
    if (isHVM(imageID))
        setupClusterPlacementGroup(spec);
    RequestSpotInstancesRequest reqSpot = new RequestSpotInstancesRequest();
    reqSpot.setInstanceCount(new Integer(nInstances));
    reqSpot.setSpotPrice(StringUtil.f2(spotPrice));

    reqSpot.setLaunchSpecification(spec);

    String networkID = NiM.createNetworkID();
    NetworkInfo ni = new NetworkInfo(networkID, networkName);
    NiM.register(ni);
    ni.setState(NetworkInfo.State.spotRequestPending);

    RequestSpotInstancesResult rr = ec2Client.requestSpotInstances(reqSpot);

    // In order to tag the instances, we must wait until the spot
    // orders have been placed.
    HashMap<String, String> sirHt = getSpotInstanceRequestIDs(rr);

    /* D */ if (sirHt.size() != nInstances) {
        ExceptionHandler
                .gui(new RuntimeException("Spot ht.size=" + sirHt.size() + " nInstances=" + nInstances));
    }

    List<String> instanceIDs;
    while ((instanceIDs = getIIDsForSpotRequest(sirHt)).size() < sirHt.size()) {
        //            /* D */ System.out.println("Ec2W waiting for spots: n=" +
        //                    instanceIDs.size() + "/" + sirHt.size() + "/" +
        //                    nInstances);
        try {
            Thread.sleep(ConstantsEc2.SPOT_STATE_NAP_TIME);
        } catch (InterruptedException ix) {
        }
    }

    ni.setState(NetworkInfo.State.pending);
    //         /* D */ System.out.println("Ec2W DONE waiting for spots: n=" +
    //                    instanceIDs.size() + "/" + sirHt.size() + "/" +
    //                    nInstances);

    tagInstances(instanceIDs, networkID, networkName);

    NiM.update(getInstancesAllListed()); // Update the network info

    return (networkID);
}

From source file:com.uber.hoodie.common.model.HoodieTableMetadata.java

/**
 * Get only the latest file in the partition with precondition commitTime(file) lt maxCommitTime
 *
 * @param fs filesystem used to check and list the partition
 * @param partitionPathStr partition path relative to the table base path
 * @param maxCommitTime only files committed on or before this time qualify
 * @return one FileStatus per file id (its latest qualifying version); empty
 *         array when the partition does not exist
 */
public FileStatus[] getLatestVersionInPartition(FileSystem fs, String partitionPathStr, String maxCommitTime) {
    try {
        Path partitionPath = new Path(basePath, partitionPathStr);
        if (!fs.exists(partitionPath)) {
            return new FileStatus[0];
        }
        FileStatus[] files = fs.listStatus(partitionPath);
        Map<String, List<FileStatus>> fileIdToVersions = groupFilesByFileId(files, commits.lastCommit());
        HashMap<String, FileStatus> validFiles = new HashMap<>();
        // Iterate entries directly instead of keySet()+get() to avoid a
        // second lookup per file id.
        for (Map.Entry<String, List<FileStatus>> entry : fileIdToVersions.entrySet()) {
            for (FileStatus file : entry.getValue()) {
                String filename = file.getPath().getName();
                String commitTime = FSUtils.getCommitTime(filename);
                if (HoodieCommits.isCommit1BeforeOrOn(commitTime, maxCommitTime)) {
                    // Versions are presumably ordered newest-first by
                    // groupFilesByFileId, so the first qualifying version is
                    // the latest valid one — confirm with that helper.
                    validFiles.put(entry.getKey(), file);
                    break;
                }
            }
        }
        return validFiles.values().toArray(new FileStatus[validFiles.size()]);
    } catch (IOException e) {
        throw new HoodieIOException("Could not get latest versions in Partition " + partitionPathStr, e);
    }
}

From source file:com.streamsets.pipeline.lib.jdbc.multithread.TestMultithreadedTableProvider.java

@Test
public void restoreFromV2Offsets() throws InterruptedException, StageException {
    // Build a random set of partitions with stored offsets in V2 format.
    Map<TableRuntimeContext, Map<String, String>> partitionOffsets = createRandomPartitionsAndStoredOffsets(
            true);

    final Map<String, String> storedOffsets = buildOffsetMap(partitionOffsets);

    MultithreadedTableProvider tableProvider = createProvider(partitionOffsets.keySet());

    // Initializing from stored offsets must commit one entry per offset.
    final HashMap<String, String> commitOffsets = new HashMap<>();
    tableProvider.initializeFromV2Offsets(storedOffsets, commitOffsets);
    assertThat(commitOffsets.size(), equalTo(storedOffsets.size()));

    assertLoadedPartitions(partitionOffsets, tableProvider);

    // now test when the offset format is from before non-incremental mode was added
    tableProvider = createProvider(partitionOffsets.keySet());
    final HashMap<String, String> legacyCommitOffsets = new HashMap<>();
    final Map<String, String> legacyOffsets = buildOffsetMap(partitionOffsets, true);
    tableProvider.initializeFromV2Offsets(legacyOffsets, legacyCommitOffsets);
    assertThat(legacyCommitOffsets.size(), equalTo(legacyOffsets.size()));
    // Legacy-format offsets must upgrade to exactly the same commit offsets.
    assertThat(legacyCommitOffsets, equalTo(commitOffsets));

    assertLoadedPartitions(partitionOffsets, tableProvider);
}

From source file:hms.hwestra.interactionrebuttal.InteractionRebuttal.java

/**
 * Merges one phenotype column from file2 onto the dataset in file1 and
 * writes the combined dataset to outfile. Samples are matched by row name;
 * only samples present in both datasets are kept.
 *
 * @param file1 dataset providing the data matrix and row/column names
 * @param file2 dataset providing the phenotype column (transposed on load)
 * @param pheno name of the phenotype column in ds2 to append
 * @param outfile path the merged dataset is saved to
 * @throws IOException if either input cannot be read or the output written
 */
private void combine(String file1, String file2, String pheno, String outfile) throws IOException {
    DoubleMatrixDataset<String, String> ds1 = new DoubleMatrixDataset<String, String>(file1);
    DoubleMatrixDataset<String, String> ds2 = new DoubleMatrixDataset<String, String>(file2);

    // Transpose so ds2 rows line up with ds1 rows (samples) for matching.
    ds2.transposeDataset();

    // Map ds1 row index -> ds2 row index for samples present in both.
    HashMap<Integer, Integer> sharedSamples = new HashMap<Integer, Integer>();
    for (int i = 0; i < ds1.rowObjects.size(); i++) {
        Integer indexI = ds2.hashRows.get(ds1.rowObjects.get(i));
        if (indexI != null) {
            sharedSamples.put(i, indexI);
        }
    }
    System.out.println(sharedSamples.size() + " shared samples");

    // One extra column at the end of each row for the phenotype value.
    double[][] data2 = new double[sharedSamples.size()][ds1.colObjects.size() + 1];
    ArrayList<String> newRows = new ArrayList<String>();
    int ctr = 0;
    Integer phenoIndex = ds2.hashCols.get(pheno);
    System.out.println("Merging col: " + phenoIndex);
    for (int i = 0; i < ds1.rowObjects.size(); i++) {
        Integer otherSample = sharedSamples.get(i);
        if (otherSample != null) {
            System.arraycopy(ds1.rawData[i], 0, data2[ctr], 0, ds1.rawData[i].length);

            // Append the phenotype value as the last column of the row.
            data2[ctr][data2[ctr].length - 1] = ds2.rawData[otherSample][phenoIndex];
            newRows.add(ds1.rowObjects.get(i));
            ctr++;
        }
    }

    DoubleMatrixDataset<String, String> dsout = new DoubleMatrixDataset<>();
    dsout.rawData = data2;
    dsout.rowObjects = newRows;
    // NOTE(review): this mutates ds1.colObjects in place, so ds1 should not
    // be reused after this call — confirm no caller depends on it.
    ds1.colObjects.add(pheno);
    dsout.colObjects = ds1.colObjects;
    dsout.recalculateHashMaps();
    dsout.save(outfile);

}

From source file:au.org.paperminer.main.LocationFilter.java

/**
 * Returns JSON struct for a specific location id, or blank.
 *
 * Reads the "lst" request parameter as a comma-separated list of location
 * ids, looks each one up, and writes either a {"locns": ...} JSON object
 * or an empty string to the response.
 *
 * @param req request whose "lst" parameter holds the location ids
 * @param resp response the JSON (or empty string) is written to
 */
private void getGSDetails(HttpServletRequest req, HttpServletResponse resp) {
    HashMap<String, HashMap<String, String>> map = new HashMap<String, HashMap<String, String>>();
    try {
        String arg = req.getParameter("lst");
        if ((arg != null) && (arg.length() > 0)) {
            String[] ids = arg.split(",");
            m_logger.debug("locationFilter getGSDetails: " + arg + " length:" + ids.length);
            // Enhanced for-loop: the index was only used to read ids[i].
            for (String id : ids) {
                HashMap<String, String> tmp = m_helper.getLocationInfo(id);
                map.put(id, tmp);
                m_logger.debug("  getGSDetails fetched: " + tmp.get("name") + " (" + id + ")");
            }
        }
        resp.setContentType("text/json");
        PrintWriter pm = resp.getWriter();
        String jsonStr = "";
        if (!map.isEmpty()) {
            jsonStr = "{\"locns\":" + JSONValue.toJSONString(map) + "}";
        }
        pm.write(jsonStr);
        pm.close();
    } catch (PaperMinerException ex) {
        req.setAttribute(PaperMinerConstants.ERROR_PAGE, "e302");
    } catch (IOException ex) {
        req.setAttribute(PaperMinerConstants.ERROR_PAGE, "e114");
    }
}

From source file:eu.edisonproject.training.wsd.BabelNet.java

/**
 * Disambiguates {@code lemma} against BabelNet using the supplied n-grams
 * as context. Each n-gram votes (via the pairwise overload) for a candidate
 * sense; scores accumulate per sense UID, with several early-exit limits to
 * cap API calls. Returns the winning sense plus up to two runners-up whose
 * relative score exceeds 0.2.
 *
 * @param language language code passed through to BabelNet
 * @param lemma the term to disambiguate
 * @param ngarms context n-grams (underscores are treated as spaces)
 * @return best-scoring terms, or {@code null} when there is no usable
 *         context or no candidate scored
 */
private Set<Term> babelNetDisambiguation(String language, String lemma, Set<String> ngarms) {
    if (ngarms.isEmpty()) {
        return null;
    }
    // A single context n-gram of length <= 1 carries no usable signal.
    if (ngarms.size() == 1 && ngarms.iterator().next().length() <= 1) {
        return null;
    }

    HashMap<CharSequence, Double> idsMap = new HashMap<>();
    Map<CharSequence, Term> termMap = new HashMap<>();
    Set<Term> terms = new HashSet<>();
    int count = 0;
    int breaklimit = 1000; // hard cap on n-grams examined
    int oneElementlimit = 65; // stop early when only one candidate persists
    int difflimit = 60; // after this many, start checking score dominance
    Double persent;
    for (String n : ngarms) {
        if (n.length() <= 1) {
            continue;
        }
        count++;
        if (idsMap.size() == 1 && count > oneElementlimit) {
            // Only one candidate after many n-grams: accept it.
            break;
        }

        if ((count % 2) == 0 && idsMap.size() >= 2 && count > difflimit) {
            // Every other iteration, stop once the leading candidate clearly
            // dominates the runner-up.
            ValueComparator bvc = new ValueComparator(idsMap);
            TreeMap<CharSequence, Double> sorted_map = new TreeMap<>(bvc);
            sorted_map.putAll(idsMap);
            Iterator<CharSequence> iter = sorted_map.keySet().iterator();
            Double first = idsMap.get(iter.next());
            Double second = idsMap.get(iter.next());

            persent = first / (first + second);
            if (persent > 0.65) {
                break;
            }
        }
        if (count > breaklimit) {
            break;
        }

        // Normalize the n-gram: underscores to spaces, strip one leading and
        // one trailing space. (replaceAll never returns null, so the
        // original's null check was dead code and has been removed.)
        String clearNg = n.replaceAll("_", " ");
        if (clearNg.startsWith(" ")) {
            clearNg = clearNg.replaceFirst(" ", "");
        }
        if (clearNg.endsWith(" ")) {
            clearNg = clearNg.substring(0, clearNg.length() - 1);
        }

        Pair<Term, Double> termPair = null;
        try {
            termPair = babelNetDisambiguation(language, lemma, clearNg);
        } catch (Exception ex) {
            if (ex.getMessage() != null && ex.getMessage().contains("Your key is not valid")) {
                // Retry once on key errors; best-effort, skip on second failure.
                try {
                    termPair = babelNetDisambiguation(language, lemma, clearNg);
                } catch (Exception ex1) {
                    // Deliberately ignored: this n-gram contributes no vote.
                }
            } else {
                LOGGER.log(Level.WARNING, null, ex);
            }
        }
        if (termPair != null) {
            termMap.put(termPair.first.getUid(), termPair.first);
            Double score;
            if (idsMap.containsKey(termPair.first.getUid())) {
                score = idsMap.get(termPair.first.getUid());
                score += termPair.second;
            } else {
                score = termPair.second;
            }
            idsMap.put(termPair.first.getUid(), score);
        }
    }
    if (!idsMap.isEmpty()) {
        // Rank candidates by score; always keep the winner, then keep up to
        // two more whose score relative to the winner exceeds 0.2.
        ValueComparator bvc = new ValueComparator(idsMap);
        TreeMap<CharSequence, Double> sorted_map = new TreeMap<>(bvc);
        sorted_map.putAll(idsMap);
        count = 0;
        Double firstScore = idsMap.get(sorted_map.firstKey());
        terms.add(termMap.get(sorted_map.firstKey()));
        idsMap.remove(sorted_map.firstKey());
        for (CharSequence tvID : sorted_map.keySet()) {
            if (count >= 1) {
                Double secondScore = idsMap.get(tvID);
                persent = secondScore / (firstScore + secondScore);
                if (persent > 0.2) {
                    terms.add(termMap.get(tvID));
                }
                if (count >= 2) {
                    break;
                }
            }
            count++;
        }
        return terms;
    }
    return null;
}

From source file:de.randi2.services.ChartsServiceImpl.java

@Override
@Transactional(propagation = Propagation.REQUIRED)
public ChartData generateRecruitmentChartFactors(Trial trial) {
    // Re-attach the trial to the current persistence context.
    trial = trialDao.refresh(trial);
    ChartData chData = new ChartData();
    ArrayList<String> xL = new ArrayList<String>();
    ArrayList<double[]> data = new ArrayList<double[]>();
    HashMap<String, Double> strataCountMap = new HashMap<String, Double>();
    HashMap<String, String> strataNameMap = new HashMap<String, String>();

    // Seed every known stratum with a zero count and its display name.
    Pair<List<String>, List<String>> pair = trial.getAllStrataIdsAndNames();
    for (int i = 0; i < pair.first().size(); i++) {
        // Double.valueOf: the boxed Double constructor is deprecated.
        strataCountMap.put(pair.first().get(i), Double.valueOf(0));
        strataNameMap.put(pair.first().get(i), pair.last().get(i));
    }

    // Count subjects per stratum (optionally prefixed by trial-site id).
    // NOTE(review): throws NPE if a subject's stratum is not among
    // getAllStrataIdsAndNames() — presumably guaranteed upstream; confirm.
    for (TrialSubject subject : trial.getSubjects()) {
        String stratum = "";
        if (trial.isStratifyTrialSite()) {
            stratum = subject.getTrialSite().getId() + "__";
        }
        stratum += subject.getStratum();
        Double count = strataCountMap.get(stratum);
        count++;
        strataCountMap.put(stratum, count);
    }

    // One data row per stratum: all zeros except the stratum's own column.
    // Java arrays are zero-initialized, so only the diagonal element needs
    // setting — the original's explicit zero-fill loop was redundant.
    int i = 0;
    for (String s : strataCountMap.keySet()) {
        double[] dataTable = new double[strataCountMap.size()];
        dataTable[i] = strataCountMap.get(s);
        xL.add(strataNameMap.get(s));
        i++;
        data.add(dataTable);
    }
    chData.setData(data);
    chData.setXLabels(xL);
    return chData;
}

From source file:edu.ku.brc.specify.toycode.RegPivot.java

/**
 * Writes every accumulated row in {@code hash} to the database through the
 * supplied prepared statement, one executeUpdate per row.
 *
 * @param hash row id -> (column name -> value) accumulated data
 * @param recordType optional record-type value bound to the extra parameter
 *                   slot after the field columns; skipped when null
 * @param pStmt prepared INSERT/UPDATE whose parameters match dbFieldNames
 * @param dbFieldTypes declared SQL types per column (currently unused here;
 *                     kept for interface compatibility)
 * @param dbFieldNames column names, in parameter order
 * @param inxToName parameter index (0-based) -> value-map key
 * @throws SQLException on any JDBC failure
 */
private void writeHash(final HashMap<String, HashMap<String, Object>> hash, final Integer recordType,
        final PreparedStatement pStmt, final Vector<Integer> dbFieldTypes, final Vector<String> dbFieldNames,
        final HashMap<Integer, String> inxToName) throws SQLException {
    int totalCnt = hash.size();
    int cnt = 0;

    // Iterate values directly: the key was only used to fetch the value map.
    for (HashMap<String, Object> nameToVals : hash.values()) {
        cnt++;
        if (cnt % 500 == 0)
            System.out.println(cnt + " / " + totalCnt);

        if (recordType != null) {
            pStmt.setInt(dbFieldNames.size() + 1, (Integer) recordType);
        }

        for (int i = 0; i < dbFieldNames.size(); i++) {
            int fInx = i + 1;
            Object value = nameToVals.get(inxToName.get(i));

            // The original unconditionally called setObject(fInx, null) and
            // then overwrote it in every branch; bind once per branch instead.
            if (value instanceof Integer) {
                pStmt.setInt(fInx, (Integer) value);
            } else if (value instanceof String) {
                pStmt.setString(fInx, (String) value);
            } else if (value instanceof Timestamp) {
                pStmt.setTimestamp(fInx, (Timestamp) value);
            } else if (value != null) {
                // Unhandled type: report it and bind NULL, as before.
                System.err.println("Unhandled class: " + value.getClass().getName());
                pStmt.setObject(fInx, null);
            } else {
                pStmt.setObject(fInx, null);
            }
        }
        pStmt.executeUpdate();
    }

}

From source file:com.google.gwt.emultest.java.util.HashMapTest.java

/**
 * Verifies isEmpty() behavior across putAll of an empty map, a put, and a
 * remove that returns the map to empty.
 */
public void testIsEmpty() {
    HashMap<String, String> srcMap = new HashMap<String, String>();
    checkEmptyHashMapAssumptions(srcMap);

    HashMap<String, String> dstMap = new HashMap<String, String>();
    checkEmptyHashMapAssumptions(dstMap);

    // Copying an empty map must leave the destination empty.
    dstMap.putAll(srcMap);
    assertTrue(dstMap.isEmpty());

    dstMap.put(KEY_KEY, VALUE_VAL);
    assertFalse(dstMap.isEmpty());

    dstMap.remove(KEY_KEY);
    assertTrue(dstMap.isEmpty());
    // assertEquals takes (expected, actual); the original had them reversed,
    // which garbles the failure message.
    assertEquals(0, dstMap.size());
}