Example usage for java.util Map.Entry get

List of usage examples for java.util Map.Entry get

Introduction

On this page you can find example usage for java.util Map.Entry get: real-world code that iterates a map's entrySet() and calls get(...) on the keys or values obtained from each Map.Entry.

Prototype

V get(Object key);

Document

Returns the value to which the specified key is mapped, or null if this map contains no mapping for the key.
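
A minimal, self-contained sketch of this behavior and of the recurring pattern in the usages below (class and variable names are illustrative): iterate a map's entrySet() and call get(Object) on a map obtained from Map.Entry.getValue().

import java.util.HashMap;
import java.util.Map;

public class MapEntryGetExample {
    public static void main(String[] args) {
        // username -> (property name -> property value)
        Map<String, Map<String, String>> users = new HashMap<>();
        Map<String, String> props = new HashMap<>();
        props.put("role", "admin");
        users.put("alice", props);

        for (Map.Entry<String, Map<String, String>> entry : users.entrySet()) {
            // get(Object) returns the mapped value, or null if no mapping exists
            String role = entry.getValue().get("role");   // "admin"
            String email = entry.getValue().get("email"); // null: no mapping for this key
            System.out.println(entry.getKey() + ": role=" + role + ", email=" + email);
        }
    }
}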

Usage

From source file:com.epam.catgenome.manager.protein.ProteinSequenceManager.java
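For each reconstructed gene, Map.Entry.getValue() yields the protein sequence entries, and getKey().getAttributes().get(TRANSCRIPT_ID_FILED) looks up the transcript ID in the gene's attribute map.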

/**
 * Load protein sequence for specified track (start and end indexes, gene item id, reference genome).
 *
 * @param geneTrack gene track to load protein sequences for
 * @param referenceId ID of the reference genome
 * @return track of protein sequences
 * @throws GeneReadingException if errors occurred during working with gene file
 */
@Transactional(propagation = Propagation.REQUIRED)
@Cacheable(cacheNames = "proteinTrack", key = "#geneTrack.proteinCacheKey(#referenceId)", unless = "#result == null") //TODO: remove?
public Track<ProteinSequenceInfo> loadProteinSequence(final Track<Gene> geneTrack, final Long referenceId)
        throws GeneReadingException {
    Assert.notNull(referenceId, MessageHelper.getMessage(MessagesConstants.ERROR_REFERENCE_ID_NULL));
    Chromosome chromosome = trackHelper.validateTrack(geneTrack);

    Map<Gene, List<ProteinSequenceEntry>> proteinSequences = psReconstructionManager
            .reconstructProteinSequence(gffManager.loadGenes(geneTrack, false), chromosome, referenceId, false);

    Track<ProteinSequenceInfo> track = new Track<>(geneTrack);
    List<ProteinSequenceInfo> blocks = new ArrayList<>(proteinSequences.size());

    for (Map.Entry<Gene, List<ProteinSequenceEntry>> mrnaEntry : proteinSequences.entrySet()) {
        List<ProteinSequenceEntry> psEntryList = mrnaEntry.getValue();
        List<ProteinSequence> psList = psEntryList.stream().map(ProteinSequence::new)
                .collect(Collectors.toList());
        String transcriptId = mrnaEntry.getKey().getAttributes().get(TRANSCRIPT_ID_FILED);
        if (StringUtils.isNotEmpty(transcriptId)) {
            blocks.add(new ProteinSequenceInfo(geneTrack.getStartIndex(), geneTrack.getEndIndex(), transcriptId,
                    psList));
        }
    }

    track.setBlocks(blocks);
    return track;
}

From source file:com.alibaba.jstorm.ui.controller.NettyController.java
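Each metric entry maps a name to per-window snapshots; metric.getValue().get(window) fetches the MetricSnapshot for the requested time window.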

private List<UINettyMetric> getNettyData(MetricInfo nettyMetrics, String host, int window) {
    HashMap<String, UINettyMetric> nettyData = new HashMap<>();
    if (nettyMetrics == null || nettyMetrics.get_metrics_size() == 0) {
        return new ArrayList<>(nettyData.values());
    }
    for (Map.Entry<String, Map<Integer, MetricSnapshot>> metric : nettyMetrics.get_metrics().entrySet()) {
        String name = metric.getKey();
        String[] split_name = name.split("@");

        String metricName = UIMetricUtils.extractMetricName(split_name);
        String connection = null;
        if (metricName != null) {
            connection = metricName.substring(metricName.indexOf(".") + 1);
            metricName = metricName.substring(0, metricName.indexOf("."));
        }
        MetricSnapshot snapshot = metric.getValue().get(window);

        UINettyMetric netty;
        if (nettyData.containsKey(connection)) {
            netty = nettyData.get(connection);
        } else {
            netty = new UINettyMetric(host, connection);
            nettyData.put(connection, netty);
        }
        netty.setMetricValue(snapshot, metricName);
    }
    return new ArrayList<>(nettyData.values());
}

From source file:org.wso2.carbon.apimgt.core.impl.APIDefinitionFromSwagger20.java
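While walking the Swagger paths and their operation maps, getVendorExtensions().get(APIMgtConstants.SWAGGER_X_SCOPE) reads the scope vendor extension declared for each HTTP verb.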

private void populateConfigMapForScopes(Swagger swagger, String namespace) {
    Map<String, String> configMap = ServiceReferenceHolder.getInstance().getRestAPIConfigurationMap(namespace);
    //update local cache with configs defined in configuration file(dep.yaml)
    if (!localConfigMap.containsKey(namespace)) {
        localConfigMap.put(namespace, new ConcurrentHashMap<>());
    }
    if (configMap != null) {
        localConfigMap.get(namespace).putAll(configMap);
    }
    //update local cache with the resource to scope mapping read from swagger
    if (swagger != null) {
        for (Map.Entry<String, Path> entry : swagger.getPaths().entrySet()) {
            Path resource = entry.getValue();
            Map<HttpMethod, Operation> operationsMap = resource.getOperationMap();
            for (Map.Entry<HttpMethod, Operation> httpverbEntry : operationsMap.entrySet()) {
                if (httpverbEntry.getValue().getVendorExtensions().size() > 0 && httpverbEntry.getValue()
                        .getVendorExtensions().get(APIMgtConstants.SWAGGER_X_SCOPE) != null) {
                    String path = httpverbEntry.getKey() + "_" + entry.getKey();
                    if (!localConfigMap.get(namespace).containsKey(path)) {
                        localConfigMap.get(namespace).put(path, httpverbEntry.getValue().getVendorExtensions()
                                .get(APIMgtConstants.SWAGGER_X_SCOPE).toString());
                    }
                }

            }
        }
    }
}

From source file:org.apache.camel.component.cxf.jaxrs.CxfRsProducer.java
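For each response-metadata entry, entry.getValue().get(0) takes the first header value and stores it under the header name.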

protected Map<String, String> parseResponseHeaders(Object response, Exchange camelExchange) {

    Map<String, String> answer = new HashMap<String, String>();
    if (response instanceof Response) {

        for (Map.Entry<String, List<Object>> entry : ((Response) response).getMetadata().entrySet()) {
            if (LOG.isTraceEnabled()) {
                LOG.trace("Parse external header " + entry.getKey() + "=" + entry.getValue());
            }
            answer.put(entry.getKey(), entry.getValue().get(0).toString());
        }
    }

    return answer;
}

From source file:gov.nih.nci.cabig.caaers.web.admin.CTEPDataInitializationAjaxFacade.java
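Integration logs are grouped by correlation ID; entry.getValue().get(0) reads the first log of each group to populate the entity, correlation ID, and service fields.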

public List<IntegrationLogAjaxableDomainObect> searchIntegrationLogs(Date startDate, Date endDate,
        String status, String service) {

    List<IntegrationLogAjaxableDomainObect> filteredResults = new ArrayList<IntegrationLogAjaxableDomainObect>();
    List<IntegrationLog> results = new ArrayList<IntegrationLog>();

    IntegrationLogQuery query = new IntegrationLogQuery();

    query.filterByLoggedOnStartDateAndEndDate(startDate, endDate);

    if (!StringUtilities.isBlank(service)) {
        String entity = extractEntity(service);
        query.filterByEntity(entity);
        if (service.equalsIgnoreCase("GetStudyDetails")) {
            query.filterByOperation("updateStudy", "createStudy");
        } else {
            String operation = extractOperation(service);
            query.filterByOperation(operation);
        }
    }
    // get all results that match service, start date and end date. Then do post filtering based on status
    results = integrationLogDao.searchIntegrationLogs(query);

    // group results by correlation id 
    Map<String, List<IntegrationLog>> map = groupIntegrationLogsBasedOnCorrelationId(results);

    Iterator<Entry<String, List<IntegrationLog>>> mapIterator = map.entrySet().iterator();
    while (mapIterator.hasNext()) {
        Map.Entry<String, List<IntegrationLog>> entry = mapIterator.next();
        IntegrationLogAjaxableDomainObect ajaxIntLog = new IntegrationLogAjaxableDomainObect();
        ajaxIntLog.setEntity(entry.getValue().get(0).getEntity());
        ajaxIntLog.setCorrelationId(entry.getValue().get(0).getCorrelationId());
        ajaxIntLog.setHasLogDetails(integrationLogDao.hasLogDetails(entry.getValue().get(0)));
        ajaxIntLog.setLoggedOn(getEarliestLogTime(entry.getValue()));

        // need to sort the grouped integration logs by id to display the synch status in work flow order
        Collections.sort(entry.getValue(), new Comparator<IntegrationLog>() {
            public int compare(IntegrationLog o1, IntegrationLog o2) {
                return o1.getId().compareTo(o2.getId());
            }
        });

        ajaxIntLog.setOverallStatus(
                getIfIncompleteOrFailed(entry.getValue()) ? htmlFailureString : htmlSuccessString);
        ajaxIntLog.setService(getServiceNameFromEntityAndOperation(entry.getValue().get(0).getEntity(),
                entry.getValue().get(0).getOperation()));

        for (IntegrationLog intLog : entry.getValue()) {
            ajaxIntLog.getSteps().put(intLog.getSynchStatus().getName(), intLog.getIfSuccess());
        }

        if (StringUtilities.isBlank(status)) {
            filteredResults.add(ajaxIntLog);
        } else if (status.equalsIgnoreCase("Failed") && getIfIncompleteOrFailed(entry.getValue())) {
            filteredResults.add(ajaxIntLog);
        } else if (status.equalsIgnoreCase("Success") && !getIfIncompleteOrFailed(entry.getValue())) {
            filteredResults.add(ajaxIntLog);
        }
    }

    return filteredResults;
}

From source file:org.apache.sqoop.mapreduce.hcat.SqoopHCatUtilities.java
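For every database column, e.getValue().get(0) retrieves the column's SQL type code from the column-info list.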

public static void configureExportInputFormat(SqoopOptions opts, Job job, ConnManager connMgr, String dbTable,
        Configuration config) throws IOException {

    LOG.info("Configuring HCatalog for export job");
    SqoopHCatUtilities hCatUtils = SqoopHCatUtilities.instance();
    hCatUtils.configureHCat(opts, job, connMgr, dbTable, job.getConfiguration());
    job.setInputFormatClass(getInputFormatClass());
    Map<String, List<Integer>> dbColInfo = hCatUtils.getDbColumnInfo();
    MapWritable columnTypesJava = new MapWritable();
    Properties mapColumnJava = opts.getMapColumnJava();
    for (Map.Entry<String, List<Integer>> e : dbColInfo.entrySet()) {
        Text columnName = new Text(e.getKey());
        Text columnText = null;
        if (mapColumnJava.containsKey(e.getKey())) {
            columnText = new Text(mapColumnJava.getProperty(e.getKey()));
        } else {
            columnText = new Text(connMgr.toJavaType(dbTable, e.getKey(), e.getValue().get(0)));
        }
        columnTypesJava.put(columnName, columnText);
    }
    MapWritable columnTypesSql = new MapWritable();
    for (Map.Entry<String, List<Integer>> e : dbColInfo.entrySet()) {
        Text columnName = new Text(e.getKey());
        IntWritable sqlType = new IntWritable(e.getValue().get(0));
        columnTypesSql.put(columnName, sqlType);
    }
    DefaultStringifier.store(config, columnTypesJava, SqoopHCatUtilities.HCAT_DB_OUTPUT_COLTYPES_JAVA);
    DefaultStringifier.store(config, columnTypesSql, SqoopHCatUtilities.HCAT_DB_OUTPUT_COLTYPES_SQL);
}

From source file:de.micromata.genome.chronos.spi.ram.RamJobStore.java
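The job store scans its nested maps; m.getValue().get(pk) fetches the job by primary key before checking and updating its state.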

@Override
public synchronized int setJobState(long pk, String newState, String oldState) {
    for (Map.Entry<Long, Map<Long, TriggerJobDO>> m : allJobs.entrySet()) {
        if (m.getValue().containsKey(pk) == true) {
            TriggerJobDO tj = m.getValue().get(pk);
            if (StringUtils.equals(oldState, tj.getState().name()) == true) {
                tj.setState(State.valueOf(newState));
                if (StringUtils.equals(newState, "WAIT")) {
                    tj.setRetryCount(0);
                }
                return 1;
            }
            return 0;
        }
    }
    return 0;
}

From source file:org.apache.storm.daemon.supervisor.SyncSupervisorEvent.java
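assignEntry.getValue().get(IStateStorage.DATA) pulls the Assignment object out of each topology's snapshot map.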

protected Map<Integer, LocalAssignment> readAssignments(Map<String, Map<String, Object>> assignmentsSnapshot,
        Map<Integer, LocalAssignment> existingAssignment, String assignmentId, AtomicInteger retries) {
    try {
        Map<Integer, LocalAssignment> portLA = new HashMap<Integer, LocalAssignment>();
        for (Map.Entry<String, Map<String, Object>> assignEntry : assignmentsSnapshot.entrySet()) {
            String stormId = assignEntry.getKey();
            Assignment assignment = (Assignment) assignEntry.getValue().get(IStateStorage.DATA);

            Map<Integer, LocalAssignment> portTasks = readMyExecutors(stormId, assignmentId, assignment);

            for (Map.Entry<Integer, LocalAssignment> entry : portTasks.entrySet()) {

                Integer port = entry.getKey();

                LocalAssignment la = entry.getValue();

                if (!portLA.containsKey(port)) {
                    portLA.put(port, la);
                } else {
                    throw new RuntimeException("Should not have multiple topologys assigned to one port");
                }
            }
        }
        retries.set(0);
        return portLA;
    } catch (RuntimeException e) {
        if (retries.get() > 2) {
            throw e;
        } else {
            retries.addAndGet(1);
        }
        LOG.warn("{} : retrying {} of 3", e.getMessage(), retries.get());
        return existingAssignment;
    }
}

From source file:org.apache.solr.cloud.ExclusiveSliceProperty.java
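ent.getValue().get(0) takes the first replica hosted on a node as a fallback candidate while balancing unassigned replicas.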

private void balanceUnassignedReplicas() {
    tmpMaxPropPerNode = origMaxPropPerNode; // A bit clumsy, but don't want to duplicate code.
    tmpModulo = origModulo;

    // Get the nodeName and shardName for the node that has the least room for this

    while (shardsNeedingHosts.size() > 0) {
        String nodeName = "";
        int minSize = Integer.MAX_VALUE;
        SliceReplica srToChange = null;
        for (String slice : shardsNeedingHosts) {
            for (Map.Entry<String, List<SliceReplica>> ent : nodesHostingReplicas.entrySet()) {
                // A little tricky. If we don't set this to something below, then it means all possible places to
                // put this property are full up, so just put it somewhere.
                if (srToChange == null && ent.getValue().size() > 0) {
                    srToChange = ent.getValue().get(0);
                }
                ListIterator<SliceReplica> iter = ent.getValue().listIterator();
                while (iter.hasNext()) {
                    SliceReplica sr = iter.next();
                    if (StringUtils.equals(slice, sr.slice.getName()) == false) {
                        continue;
                    }
                    if (nodesHostingProp.containsKey(ent.getKey()) == false) {
                        nodesHostingProp.put(ent.getKey(), new ArrayList<SliceReplica>());
                    }
                    if (minSize > nodesHostingReplicas.get(ent.getKey()).size()
                            && nodesHostingProp.get(ent.getKey()).size() < tmpMaxPropPerNode) {
                        minSize = nodesHostingReplicas.get(ent.getKey()).size();
                        srToChange = sr;
                        nodeName = ent.getKey();
                    }
                }
            }
        }
        // Now, you have a slice and node to put it on
        shardsNeedingHosts.remove(srToChange.slice.getName());
        if (nodesHostingProp.containsKey(nodeName) == false) {
            nodesHostingProp.put(nodeName, new ArrayList<SliceReplica>());
        }
        nodesHostingProp.get(nodeName).add(srToChange);
        adjustLimits(nodesHostingProp.get(nodeName));
        removeSliceAlreadyHostedFromPossibles(srToChange.slice.getName());
        addProp(srToChange.slice, srToChange.replica.getName());
    }
}

From source file:com.itdhq.contentLoader.ContentLoaderComponent.java
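prop.getValue().getConstraints().get(0) reads the first constraint of each property definition while generating values for the node's missing properties.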

/**
 * Here you can see a bit of magic: every missing typed property of the node is filled in with a generated value.
 */
private void fillNodeProperties(NodeRef node) {
    logger.debug("ContentLoaderComponent.fillNodeProperties");
    Map<QName, Serializable> props = nodeService.getProperties(node);
    Map<QName, PropertyDefinition> allProps = getAllTypeProperties(node);
    Map<QName, PropertyDefinition> fillingProps = new HashMap<>();
    for (Map.Entry<QName, PropertyDefinition> prop : allProps.entrySet()) {
        if (!props.containsKey(prop.getKey())) {
            fillingProps.put(prop.getKey(), prop.getValue());
            if (!prop.getValue().getConstraints().isEmpty()) {
                logger.debug("Constraint types "
                        + prop.getValue().getConstraints().get(0).getConstraint().getType() + "  "
                        + prop.getValue().getConstraints().get(0).getConstraint().getParameters());

            }
        }
    }
    for (Map.Entry<QName, PropertyDefinition> fillingProp : fillingProps.entrySet()) {
        switch (fillingProp.getValue().getDataType().getName().toString()) {
        case "{http://www.alfresco.org/model/dictionary/1.0}text":
            props.put(fillingProp.getKey(), genTextProperty(fillingProp.getValue()));
            logger.debug("New text property " + fillingProp.getKey() + "  "
                    + genTextProperty(fillingProp.getValue()));
            break;
        case "{http://www.alfresco.org/model/dictionary/1.0}datetime":
            props.put(fillingProp.getKey(), genDateTimeProperty(fillingProp.getValue(), 1000));
            logger.debug("New datetime property " + fillingProp.getKey() + "  "
                    + genDateTimeProperty(fillingProp.getValue(), 1000));
            break;
        case "{http://www.alfresco.org/model/dictionary/1.0}date":
            props.put(fillingProp.getKey(), genDateTimeProperty(fillingProp.getValue(), 1000));
            logger.debug("New date property " + fillingProp.getKey() + "  "
                    + genDateTimeProperty(fillingProp.getValue(), 1000));
            break;
        case "{http://www.alfresco.org/model/dictionary/1.0}long":
            props.put(fillingProp.getKey(), genLongProperty(fillingProp.getValue()));
            logger.debug("New long property " + fillingProp.getKey() + "  "
                    + genLongProperty(fillingProp.getValue()));
            break;
        case "{http://www.alfresco.org/model/dictionary/1.0}int":
            props.put(fillingProp.getKey(), genLongProperty(fillingProp.getValue()));
            logger.debug("New int property " + fillingProp.getKey() + "  "
                    + genIntProperty(fillingProp.getValue(), 1000));
            break;
        }
    }
    nodeService.setProperties(node, props);
}