Example usage for java.util HashMap entrySet

List of usage examples for java.util HashMap entrySet

Introduction

On this page you can find example usage for java.util HashMap entrySet.

Prototype

Set<Map.Entry<K, V>> entrySet()

Documentation

Holds cached entrySet(). (This is the doc comment on HashMap's cached entrySet field in the JDK source.)
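
Before the project-specific examples below, here is a minimal, self-contained sketch of the pattern they all share: iterating a HashMap through entrySet() so that each key and its value are read together in a single pass. The class name and map contents are purely illustrative.

import java.util.HashMap;
import java.util.Map;

public class EntrySetExample {
    public static void main(String[] args) {
        // Illustrative data; any key/value types work the same way.
        HashMap<String, Integer> wordCounts = new HashMap<>();
        wordCounts.put("alpha", 1);
        wordCounts.put("beta", 2);

        // entrySet() exposes each mapping as a Map.Entry, so the key and the
        // value are available together without a second lookup per key.
        for (Map.Entry<String, Integer> entry : wordCounts.entrySet()) {
            System.out.println(entry.getKey() + " -> " + entry.getValue());
        }
    }
}

Iterating entrySet() is generally preferable to iterating keySet() and calling get(key) for each key, since it avoids one extra lookup per entry.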

Usage

From source file:com.opengamma.integration.copier.snapshot.reader.FileSnapshotReader.java

private void iterateSheetRows() {

    _curves = new HashMap<>();
    _surface = new HashMap<>();
    _yieldCurve = new HashMap<>();

    //Temporary maps for data structures
    HashMap<String, ManageableCurveSnapshot> curvesBuilder = new HashMap<>();
    HashMap<String, Pair<YieldCurveKey, ManageableYieldCurveSnapshot>> yieldCurveBuilder = new HashMap<>();
    HashMap<String, Pair<VolatilitySurfaceKey, ManageableVolatilitySurfaceSnapshot>> surfaceBuilder = new HashMap<>();
    ManageableUnstructuredMarketDataSnapshot globalBuilder = new ManageableUnstructuredMarketDataSnapshot();

    while (true) {
        Map<String, String> currentRow = _sheetReader.loadNextRow();

        // When rows are complete create snapshot elements from temporary structures
        if (currentRow == null) {
            for (Map.Entry<String, ManageableCurveSnapshot> entry : curvesBuilder.entrySet()) {
                _curves.put(new CurveKey(entry.getKey()), entry.getValue());
            }
            for (Map.Entry<String, Pair<YieldCurveKey, ManageableYieldCurveSnapshot>> entry : yieldCurveBuilder
                    .entrySet()) {
                _yieldCurve.put(entry.getValue().getFirst(), entry.getValue().getSecond());
            }
            for (Map.Entry<String, Pair<VolatilitySurfaceKey, ManageableVolatilitySurfaceSnapshot>> entry : surfaceBuilder
                    .entrySet()) {
                _surface.put(entry.getValue().getFirst(), entry.getValue().getSecond());
            }
            _global = globalBuilder;
            return;
        }

        String type = currentRow.get(SnapshotColumns.TYPE.get());

        switch (SnapshotType.from(type)) {
        case NAME:
            _name = currentRow.get(SnapshotColumns.NAME.get());
            break;
        case BASIS_NAME:
            _basisName = currentRow.get(SnapshotColumns.NAME.get());
            break;
        case CURVE:
            buildCurves(curvesBuilder, currentRow);
            break;
        case YIELD_CURVE:
            buildYieldCurves(yieldCurveBuilder, currentRow);
            break;
        case GOBAL_VALUES:
            buildGlobalValues(globalBuilder, currentRow);
            break;
        case VOL_SURFACE:
            buildSurface(surfaceBuilder, currentRow);
            break;
        default:
            s_logger.error("Unknown snapshot element of type {}", type);
            break;
        }
    }
}

From source file:com.yahoo.ycsb.db.couchbase2.Couchbase2Client.java

/**
 * Encode the source into a String for storage.
 *
 * @param source the source value.
 * @return the encoded string.
 */
private String encode(final HashMap<String, ByteIterator> source) {
    HashMap<String, String> stringMap = StringByteIterator.getStringMap(source);
    ObjectNode node = JacksonTransformers.MAPPER.createObjectNode();
    for (Map.Entry<String, String> pair : stringMap.entrySet()) {
        node.put(pair.getKey(), pair.getValue());
    }
    JsonFactory jsonFactory = new JsonFactory();
    Writer writer = new StringWriter();
    try {
        JsonGenerator jsonGenerator = jsonFactory.createGenerator(writer);
        JacksonTransformers.MAPPER.writeTree(jsonGenerator, node);
    } catch (Exception e) {
        throw new RuntimeException("Could not encode JSON value", e);
    }
    return writer.toString();
}

From source file:io.pravega.segmentstore.server.reading.StorageReaderTests.java

/**
 * Tests the execute method with valid Requests:
 * * All StreamSegments exist and have enough data.
 * * All read offsets are valid (but we may choose to read more than the length of the Segment).
 *  * ReadRequests may overlap.
 */
@Test
public void testValidRequests() throws Exception {
    final int defaultReadLength = MIN_SEGMENT_LENGTH - 1;
    final int offsetIncrement = defaultReadLength / 3;

    @Cleanup
    InMemoryStorage storage = new InMemoryStorage(executorService());
    storage.initialize(1);
    byte[] segmentData = populateSegment(storage);
    @Cleanup
    StorageReader reader = new StorageReader(SEGMENT_METADATA, storage, executorService());
    HashMap<StorageReader.Request, CompletableFuture<StorageReader.Result>> requestCompletions = new HashMap<>();
    int readOffset = 0;
    while (readOffset < segmentData.length) {
        int readLength = Math.min(defaultReadLength, segmentData.length - readOffset);
        CompletableFuture<StorageReader.Result> requestCompletion = new CompletableFuture<>();
        StorageReader.Request r = new StorageReader.Request(readOffset, readLength, requestCompletion::complete,
                requestCompletion::completeExceptionally, TIMEOUT);
        reader.execute(r);
        requestCompletions.put(r, requestCompletion);
        readOffset += offsetIncrement;
    }

    // Check that the read requests returned with the right data.
    for (val entry : requestCompletions.entrySet()) {
        StorageReader.Result readData = entry.getValue().join();
        StorageReader.Request request = entry.getKey();
        int expectedReadLength = Math.min(request.getLength(),
                (int) (segmentData.length - request.getOffset()));

        Assert.assertNotNull("No data returned for request " + request, readData);
        Assert.assertEquals("Unexpected read length for request " + request, expectedReadLength,
                readData.getData().getLength());
        AssertExtensions.assertStreamEquals("Unexpected read contents for request " + request,
                new ByteArrayInputStream(segmentData, (int) request.getOffset(), expectedReadLength),
                readData.getData().getReader(), expectedReadLength);
    }
}

From source file:dao.EventCommentDao.java

public List<Object[]> getUserIdSuccessfulCount(Campaign campaign, Date dateFrom, Date dateTo, Long pkId) {
    HashMap<String, Object> paramMap = new HashMap<>();
    String sql = "select user_id,count(distinct event_id) from event_comment where type=:successful and campaign_id=:campaignId and personal_cabinet_id=:pkId";
    if (dateFrom != null) {
        sql += " and insert_date>=:dateFrom";
        paramMap.put("dateFrom", DateAdapter.getDateFromString(DateAdapter.getDateInMysql(dateFrom)));
    }
    if (dateTo != null) {
        sql += " and insert_date<=:dateTo";
        paramMap.put("dateTo", DateAdapter.getDateFromString(DateAdapter.getDateInMysql(dateTo)));
    }
    sql += " group by user_id";
    Query query = getCurrentSession().createSQLQuery(sql);
    query.setParameter("campaignId", campaign.getCampaignId());
    query.setParameter("pkId", pkId);
    query.setParameter("successful", EventComment.SUCCESSFUL);
    for (Map.Entry<String, Object> entry : paramMap.entrySet()) {
        query.setParameter(entry.getKey(), entry.getValue());
    }
    return query.list();
}

From source file:com.datatorrent.contrib.hdht.MockFileAccess.java

@Override
public FileReader getReader(final long bucketKey, final String fileName) throws IOException {
    final HashMap<Slice, Pair<byte[], Integer>> data = Maps.newHashMap();
    final ArrayList<Slice> keys = Lists.newArrayList();
    final MutableInt index = new MutableInt();

    DataInputStream is = getInputStream(bucketKey, fileName);
    Input input = new Input(is);
    while (!input.eof()) {
        byte[] keyBytes = kryo.readObject(input, byte[].class);
        byte[] value = kryo.readObject(input, byte[].class);
        Slice key = new Slice(keyBytes, 0, keyBytes.length);
        data.put(key, new Pair<byte[], Integer>(value, keys.size()));
        keys.add(key);
    }
    input.close();
    is.close();

    return new FileReader() {

        @Override
        public void readFully(TreeMap<Slice, Slice> result) throws IOException {
            for (Map.Entry<Slice, Pair<byte[], Integer>> e : data.entrySet()) {
                result.put(e.getKey(), new Slice(e.getValue().first));
            }
        }

        @Override
        public void reset() throws IOException {
            index.setValue(0);
        }

        @Override
        public boolean seek(Slice key) throws IOException {
            Pair<byte[], Integer> v = data.get(key);
            if (v == null) {
                index.setValue(0);
                return false;
            }
            index.setValue(v.second);
            return true;
        }

        @Override
        public boolean next(Slice key, Slice value) throws IOException {

            if (deletedFiles.contains("" + bucketKey + fileName)) {
                throw new IOException("Simulated error for deleted file: " + fileName);
            }

            int pos = index.intValue();
            if (pos < keys.size()) {
                Slice k = keys.get(pos);
                key.buffer = k.buffer;
                key.offset = k.offset;
                key.length = k.length;
                Pair<byte[], Integer> v = data.get(k);
                value.buffer = v.first;
                value.offset = 0;
                value.length = v.first.length;
                index.increment();
                return true;
            }
            return false;
        }

        @Override
        public void close() throws IOException {
        }
    };
}

From source file:br.com.blackhubos.eventozero.util.Framework.java

/**
 * This method is similar to the file-based broadcast; the difference is that it works directly on a list you provide rather than reading from a file.
 * See {@link #broadcast(File, HashMap)}
 *
 * @param messages the list of messages
 * @param replacements may be null. A HashMap of keys and values used for substitutions.
 * @return a formatted list of the messages that were sent.
 */
public static java.util.Vector<String> broadcast(final java.util.Vector<String> messages,
        @Nullable HashMap<String, Object> replacements) {
    if (replacements == null) {
        replacements = new HashMap<String, Object>();
    }

    if ((messages == null) || messages.isEmpty()) {
        return new java.util.Vector<String>();
    }

    final java.util.Vector<String> array = new java.util.Vector<String>();

    for (String s : messages) {
        for (final Entry<String, Object> r : replacements.entrySet()) {
            s = s.replaceAll(r.getKey(), String.valueOf(r.getValue()));
        }

        array.add(ChatColor.translateAlternateColorCodes('&', s));
        Bukkit.broadcastMessage(ChatColor.translateAlternateColorCodes('&', s));
    }

    return array;
}

From source file:it.polito.tellmefirst.web.rest.clients.ClientEpub.java

private HashMap<ClassifyOutput, Integer> createMapWithScore(HashMap<String, Integer> sortedMapByOcc,
        HashMap<String, List<ClassifyOutput>> classifiedChapters) {

    LOG.debug("[createMapWithScore] - BEGIN");

    HashMap<ClassifyOutput, Integer> sortedMapWithScore = new LinkedHashMap<>();
    ArrayList<ClassifyOutput> classifyOutputList = new ArrayList<>();

    for (Entry<String, List<ClassifyOutput>> chapterEntry : classifiedChapters.entrySet()) {
        for (int i = 0; i < chapterEntry.getValue().size(); i++) {
            classifyOutputList.add(chapterEntry.getValue().get(i));
        }
    }

    for (Entry<String, Integer> sortedMapEntry : sortedMapByOcc.entrySet()) {
        boolean flag = true;
        for (int k = 0; k < classifyOutputList.size(); k++) {
            if (flag && sortedMapEntry.getKey().equals(classifyOutputList.get(k).getUri())) {
                sortedMapWithScore.put(classifyOutputList.get(k), sortedMapEntry.getValue());
                flag = false;
            }
        }
    }

    LOG.debug("[createMapWithScore] - END");

    return sortedMapWithScore;
}

From source file:eu.europeana.uim.sugarcrmclient.plugin.SugarCRMServiceImpl.java

@Override
public void updateRecordData(String recordID, HashMap<UpdatableField, String> values)
        throws QueryResultException {

    SetEntry request = new SetEntry();
    ArrayList<NameValue> nvList = new ArrayList<NameValue>();
    Iterator<?> it = values.entrySet().iterator();

    // First add the id name\value pair
    NameValue nvid = new NameValue();
    nvid.setName("id");
    nvid.setValue(recordID);
    nvList.add(nvid);

    while (it.hasNext()) {
        @SuppressWarnings("unchecked")
        Map.Entry<EuropeanaUpdatableField, String> pairs = (Map.Entry<EuropeanaUpdatableField, String>) it
                .next();
        NameValue nv = new NameValue();
        nv.setName(pairs.getKey().getFieldId());
        nv.setValue(pairs.getValue());

        nvList.add(nv);
    }

    NameValueList valueList = ClientUtils.generatePopulatedNameValueList(nvList);

    request.setNameValueList(valueList);
    request.setModuleName("Opportunities");
    request.setSession(sugarwsClient.getSessionID());

    sugarwsClient.setentry(request);
}

From source file:hd3gtv.mydmam.useraction.fileoperation.UAFileOperationTrash.java

public void process(JobProgression progression, UserProfile userprofile, UAConfigurator user_configuration,
        HashMap<String, SourcePathIndexerElement> source_elements) throws Exception {
    String user_base_directory_name = userprofile.getBaseFileName_BasedOnEMail();

    if (trash_directory_name == null) {
        trash_directory_name = "Trash";
    }

    Log2Dump dump = new Log2Dump();
    dump.add("user", userprofile.key);
    dump.add("trash_directory_name", trash_directory_name);
    dump.add("user_base_directory_name", user_base_directory_name);
    dump.add("source_elements", source_elements.values());
    Log2.log.debug("Prepare trash", dump);

    progression.update("Prepare trashs directories");

    File current_user_trash_dir;
    HashMap<String, File> trashs_dirs = new HashMap<String, File>();
    for (Map.Entry<String, SourcePathIndexerElement> entry : source_elements.entrySet()) {
        String storagename = entry.getValue().storagename;
        if (trashs_dirs.containsKey(storagename)) {
            continue;
        }
        File storage_dir = Explorer
                .getLocalBridgedElement(SourcePathIndexerElement.prepareStorageElement(storagename));
        current_user_trash_dir = new File(storage_dir.getPath() + File.separator + trash_directory_name
                + File.separator + user_base_directory_name);

        if (current_user_trash_dir.exists() == false) {
            FileUtils.forceMkdir(current_user_trash_dir);
        } else {
            CopyMove.checkExistsCanRead(current_user_trash_dir);
            CopyMove.checkIsWritable(current_user_trash_dir);
            CopyMove.checkIsDirectory(current_user_trash_dir);
        }
        trashs_dirs.put(storagename, current_user_trash_dir);

        if (stop) {
            return;
        }
    }

    progression.update("Move item(s) to trash(s) directorie(s)");
    progression.updateStep(1, source_elements.size());

    Date now = new Date();

    for (Map.Entry<String, SourcePathIndexerElement> entry : source_elements.entrySet()) {
        progression.incrStep();
        File current_element = Explorer.getLocalBridgedElement(entry.getValue());
        CopyMove.checkExistsCanRead(current_element);
        CopyMove.checkIsWritable(current_element);

        current_user_trash_dir = trashs_dirs.get(entry.getValue().storagename);

        File f_destination = new File(current_user_trash_dir.getPath() + File.separator
                + simpledateformat.format(now) + "_" + current_element.getName());

        if (current_element.isDirectory()) {
            FileUtils.moveDirectory(current_element, f_destination);
        } else {
            FileUtils.moveFile(current_element, f_destination);
        }

        if (stop) {
            return;
        }

        ContainerOperations.copyMoveMetadatas(entry.getValue(), entry.getValue().storagename,
                "/" + trash_directory_name + "/" + user_base_directory_name, false, this);

        ElasticsearchBulkOperation bulk = Elasticsearch.prepareBulk();
        explorer.deleteStoragePath(bulk, Arrays.asList(entry.getValue()));
        bulk.terminateBulk();

        if (stop) {
            return;
        }
    }

    ElasticsearchBulkOperation bulk = Elasticsearch.prepareBulk();
    ArrayList<SourcePathIndexerElement> spie_trashs_dirs = new ArrayList<SourcePathIndexerElement>();
    for (String storage_name : trashs_dirs.keySet()) {
        SourcePathIndexerElement root_trash_directory = SourcePathIndexerElement
                .prepareStorageElement(storage_name);
        root_trash_directory.parentpath = root_trash_directory.prepare_key();
        root_trash_directory.directory = true;
        root_trash_directory.currentpath = "/" + trash_directory_name;
        spie_trashs_dirs.add(root_trash_directory);
    }

    explorer.refreshStoragePath(bulk, spie_trashs_dirs, false);
    bulk.terminateBulk();
}

From source file:eu.planets_project.tb.impl.model.ExperimentExecutableImpl.java

public Map.Entry<URI, URI> getMigrationHttpDataEntry(String localFileInputRef) {
    if (this.hmInputOutputData.containsKey(localFileInputRef)) {
        String outputFileRef = this.hmInputOutputData.get(localFileInputRef);
        HashMap<URI, URI> hmRet = new HashMap<URI, URI>();

        //get the URI return values for the local file ref
        URI inputFile = this.getOutputHttpFileRef(outputFileRef);
        URI outputFile = this.getInputHttpFileRef(localFileInputRef);
        if ((inputFile != null) && (outputFile != null)) {
            hmRet.put(inputFile, outputFile);

            Iterator<Entry<URI, URI>> itRet = hmRet.entrySet().iterator();
            while (itRet.hasNext()) {
                //return the Entry
                return itRet.next();
            }
        }
    }
    return null;
}