Example usage for java.util Map.Entry get

List of usage examples for java.util Map.Entry get

Introduction

On this page you can find example usage of java.util Map.Entry get.

Prototype

V get(Object key);

Source Link

Document

Returns the value to which the specified key is mapped, or null if this map contains no mapping for the key.

Usage

From source file:org.opennms.netmgt.rrd.model.RRDv3IT.java

/**
 * Verifies the samples generated from a single RRA of a counter RRD:
 * timestamps advance in 300s steps and the two values follow the expected series.
 *
 * @throws Exception the exception
 */
@Test
public void testSamplesSingleRRA() throws Exception {
    final File sourceFile = new File("src/test/resources/sample-counter.xml");
    final RRDv3 rrd = JaxbUtils.unmarshal(RRDv3.class, sourceFile);
    Assert.assertNotNull(rrd);

    final NavigableMap<Long, List<Double>> samples = rrd.generateSamples(rrd.getRras().get(0));
    Assert.assertFalse(samples.isEmpty());

    long expectedTimestamp = 1441748400L;
    Double expectedValue1 = 600.0;
    Double expectedValue2 = 2.0;
    Assert.assertEquals(rrd.getRras().get(0).getRows().size(), samples.size());
    for (Map.Entry<Long, List<Double>> sample : samples.entrySet()) {
        System.out.println(sample);
        Assert.assertEquals(2, sample.getValue().size());
        Assert.assertEquals(expectedTimestamp, (long) sample.getKey());
        Assert.assertEquals(expectedValue1, sample.getValue().get(0));
        Assert.assertEquals(expectedValue2, sample.getValue().get(1));
        expectedTimestamp += 300L;
        expectedValue1 += 300.0 * expectedValue2;
        expectedValue2 += 1.0;
    }
}

From source file:org.jboss.pnc.environment.docker.DockerEnvironmentDriver.java

/**
 * Get container port mapping from Docker daemon REST interface.
 *
 * @param containerId ID of running container
 * @return Map with pairs containerPort:publicPort (only ports actually published on the host)
 * @throws Exception Thrown if data could not be obtained from Docker daemon or are corrupted
 */
private Map<String, HostPortMapping> getContainerPortMappings(String containerId) throws Exception {
    Map<String, HostPortMapping> resultMap = new HashMap<>();
    String response = HttpUtils.processGetRequest(String.class,
            dockerEndpoint + "/containers/" + containerId + "/json");

    ObjectMapper objectMapper = new ObjectMapper();
    JsonNode rootNode = objectMapper.readTree(response);
    JsonNode networkSettingsNode = rootNode.path("NetworkSettings");
    JsonNode portsNode = networkSettingsNode.path("Ports");

    Map<String, List<HostPortMapping>> portsMap = objectMapper.readValue(portsNode.traverse(),
            new TypeReference<Map<String, List<HostPortMapping>>>() {
            });

    for (Map.Entry<String, List<HostPortMapping>> entry : portsMap.entrySet()) {
        List<HostPortMapping> hostMappings = entry.getValue();
        // Docker reports null (not an empty list) for ports that are exposed but not
        // published on the host; the original get(0) would NPE on those entries.
        if (hostMappings == null || hostMappings.isEmpty()) {
            continue;
        }
        // Keys have the form "port/protocol" (e.g. "8080/tcp"); strip the protocol
        // suffix, tolerating a key without one instead of throwing on indexOf == -1.
        String key = entry.getKey();
        int slash = key.indexOf('/');
        String containerPort = (slash >= 0) ? key.substring(0, slash) : key;
        resultMap.put(containerPort, hostMappings.get(0));
    }

    return resultMap;
}

From source file:com.aliyun.openservices.odps.console.mapreduce.runtime.MapReduceJob.java

/**
 * Deletes the temporary resources registered for this MapReduce command.
 * Cleanup is best-effort: individual deletion failures are deliberately ignored.
 */
private void dropTempResources() {
    final Map<String, List<String>> tempResources = mrCmd.getTempResources();
    if (tempResources.isEmpty()) {
        return;
    }
    for (Map.Entry<String, List<String>> resource : tempResources.entrySet()) {
        try {
            // NOTE(review): index 1 (not 0) is read here — presumably the resource
            // name within the value list; confirm against getTempResources() layout.
            odps.resources().delete(resource.getValue().get(1));
        } catch (Exception ignored) {
            // Best-effort cleanup: deletion failure must not abort the job teardown
        }
    }
}

From source file:jp.ac.tohoku.ecei.sb.metabolomeqc.basiccorrector.RegressionIntensityCorrector.java

/**
 * Corrects intensities for injection-order drift. For each plate, a per-compound
 * {@code SimpleRegression} is fitted on global-QC intensity versus run index; every
 * normal injection is then shifted by the fitted value re-centered on the global-QC
 * median. Plates with fewer than 3 usable QC points yield NaN for their injections.
 *
 * @param original the uncorrected intensity matrix
 * @return a new matrix holding corrected intensities for normal injections only
 * @throws UnsupportedOperationException if the matrix contains no global QC samples
 */
@Override
public IntensityMatrix doCorrection(IntensityMatrix original) {
    updateBadInjections(original);
    // Make corrected intensity matrix: same compounds (rows), normal injections as columns
    List<Injection> correctedSamples = original.getNormalInjections();

    IntensityMatrix corrected = new IntensityMatrixImpl(original.getSize()[0], correctedSamples.size());
    corrected.setRowKeys(original.getRowKeys());
    corrected.setColumnKeys(correctedSamples);

    List<Sample> globalQCIndexes = original.getGlobalQCSamples();
    if (globalQCIndexes.size() == 0)
        throw new UnsupportedOperationException("No global QC");
    log.info("Global QC {}", globalQCIndexes);
    log.info("Bad injections {}", badInjections);

    // median of SQCs for compounds — the baseline each corrected value is anchored to
    Map<Compound, Double> medianForCompounds = GlobalQCMedianCalculator.calcGlobalQCMedian(original,
            badInjections);

    // do correction, one plate (run) at a time
    Map<Plate, Map<Sample.SampleType, List<Injection>>> map = original.getInjectionsByPlateAndType();
    for (Map.Entry<Plate, Map<Sample.SampleType, List<Injection>>> oneRun : map.entrySet()) {
        // Compounds are independent, so the per-compound fits run in parallel; results
        // are returned as arrays and written back sequentially below (the matrix is
        // only mutated outside the parallel pipeline).
        Stream<CorrectionResult[]> oneresult = corrected.getRowKeys().parallelStream().map(oneCompound -> {
            SimpleRegression simpleRegression = new SimpleRegression();
            for (Injection oneSqc : oneRun.getValue().get(Sample.SampleType.QC)) {
                if (!oneSqc.getSample().equals(globalQCIndexes.get(0)))
                    continue; // skip non global QC
                if (badInjections.indexOf(oneSqc) != -1)
                    continue; // skip bad sample
                if (oneSqc.isIgnored())
                    continue; // skip ignored QCs
                simpleRegression.addData(oneSqc.getRunIndex(), original.get(oneCompound, oneSqc));
            }

            CorrectionResult[] resultArray = new CorrectionResult[oneRun.getValue()
                    .get(Sample.SampleType.NORMAL).size()];

            log.info("Simple Regression N : {}", simpleRegression.getN());

            if (simpleRegression.getN() < 3) {
                // Failed to correct: too few QC points for a meaningful fit — emit NaN
                int i = 0;
                for (Injection oneNormal : oneRun.getValue().get(Sample.SampleType.NORMAL)) {
                    resultArray[i++] = new CorrectionResult(oneNormal, oneCompound, Double.NaN);
                }
            } else {
                RegressionResults result = simpleRegression.regress();
                // coefficients[0] + coefficients[1] * x below implies [intercept, slope]
                double[] coefficients = result.getParameterEstimates();

                int i = 0;
                // correct: subtract the fitted drift, re-centered on the global-QC median
                for (Injection oneNormal : oneRun.getValue().get(Sample.SampleType.NORMAL)) {
                    double offset = coefficients[0] + coefficients[1] * oneNormal.getRunIndex()
                            - medianForCompounds.get(oneCompound);
                    resultArray[i++] = new CorrectionResult(oneNormal, oneCompound,
                            original.get(oneCompound, oneNormal) - offset);
                }
            }

            return resultArray;
        });

        // Sequential, ordered write-back keeps matrix mutation single-threaded
        oneresult.forEachOrdered(correctionResultArray -> {
            for (CorrectionResult oneResult : correctionResultArray) {
                corrected.put(oneResult.getCompound(), oneResult.getSample(), oneResult.getValue());
            }
        });
    }

    return corrected;
}

From source file:com.thoughtworks.go.server.service.dd.DependencyFanInNode.java

/**
 * Walks the revisions of a timeline entry, collecting SCM materials and queueing
 * not-yet-visited dependency-material revisions for further traversal.
 */
private void addToRevisionQueue(PipelineTimelineEntry entry,
        Queue<PipelineTimelineEntry.Revision> revisionQueue, List<FaninScmMaterial> scmMaterials,
        FanInGraphContext context, Set<CaseInsensitiveString> visitedNodes) {
    for (Map.Entry<String, List<PipelineTimelineEntry.Revision>> materialRevisions : entry.revisions().entrySet()) {
        final String fingerprint = materialRevisions.getKey();
        final PipelineTimelineEntry.Revision latestRevision = materialRevisions.getValue().get(0);

        if (isScmMaterial(fingerprint, context)) {
            scmMaterials.add(new FaninScmMaterial(fingerprint, latestRevision));
        } else if (isDependencyMaterial(fingerprint, context)) {
            final CaseInsensitiveString revisionKey = new CaseInsensitiveString(latestRevision.revision);
            if (!visitedNodes.contains(revisionKey)) {
                revisionQueue.add(latestRevision);
                visitedNodes.add(revisionKey);
            }
        }
    }
}

From source file:cn.cnic.bigdatalab.flume.sink.mongodb.MappingDefinition.java

/**
 * Fills in the delimiter and nested field mapping for a DOCUMENT-typed field
 * definition, recursing into the sub-document via populateFieldDefinition.
 * Throws if a document mapping is declared without a delimiter character.
 */
private void populateDocumentType(Map.Entry<String, JsonNode> entry, DocumentFieldDefinition fieldDefinition) {
    final JsonNode valueNode = entry.getValue();
    if (!fieldDefinition.getType().equals(MongoDataType.DOCUMENT) || !valueNode.has(DOCUMENT_MAPPING)) {
        return; // nothing to populate for non-document fields
    }
    if (!valueNode.has(DELIMITER_CHAR)) {
        throw new MongoSinkException("Delimiter char must be set into schema");
    }

    fieldDefinition.setDelimiter(valueNode.get(DELIMITER_CHAR).asText());

    // Build the nested mapping in declaration order
    final Map<String, FieldDefinition> nestedDefinitions = new LinkedHashMap<String, FieldDefinition>();
    final Iterator<Map.Entry<String, JsonNode>> fields = valueNode.get(DOCUMENT_MAPPING).fields();
    while (fields.hasNext()) {
        final Map.Entry<String, JsonNode> field = fields.next();
        nestedDefinitions.put(field.getKey(), populateFieldDefinition(field));
    }
    fieldDefinition.setDocumentMapping(nestedDefinitions);
}

From source file:com.vmware.bdd.plugin.ambari.model.AmClusterDef.java

/**
 * Rewrites the service-user section of the blueprint configuration: for every service
 * that declares a service user, writes it into that service's "&lt;service&gt;-env"
 * config as "&lt;service&gt;_user", then removes the spec-file service-user section.
 */
private void updateServiceUserConfigInBlueprint(ClusterBlueprint blueprint) {
    final Map<String, Object> conf = blueprint.getConfiguration();
    if (conf == null) {
        return;
    }
    final Map<String, Map<String, String>> serviceUserConfigs = (Map<String, Map<String, String>>) conf
            .get(UserMgmtConstants.SERVICE_USER_CONFIG_IN_SPEC_FILE);
    if (MapUtils.isEmpty(serviceUserConfigs)) {
        return;
    }
    // Todo(qjin:) For hdfs and other services, if need to modify other configs related
    // with service user, also need to handle separately; those config changes should be
    // reflected in the blueprint.
    for (Map.Entry<String, Map<String, String>> serviceUserConfig : serviceUserConfigs.entrySet()) {
        final String serviceUser = serviceUserConfig.getValue().get(UserMgmtConstants.SERVICE_USER_NAME);
        if (StringUtils.isBlank(serviceUser)) {
            continue; // no user declared for this service
        }
        final String serviceName = serviceUserConfig.getKey().toLowerCase();
        final String envConfigName = serviceName + "-env";
        final String userConfigName = serviceName + "_user";
        Map<String, String> serviceConfig = (Map<String, String>) conf.get(envConfigName);
        if (serviceConfig == null) {
            serviceConfig = new HashMap<>();
        }
        serviceConfig.put(userConfigName, serviceUser);
        conf.put(envConfigName, serviceConfig);
    }
    conf.remove(UserMgmtConstants.SERVICE_USER_CONFIG_IN_SPEC_FILE);
}

From source file:org.yes.cart.service.order.impl.DeliveryAssemblerImpl.java

/**
 * Delivery sets determination: splits the order detail lines into delivery buckets
 * and fails fast when any bucket cannot actually be fulfilled.
 *
 * @param order               given order
 * @param onePhysicalDelivery true if need to create one physical delivery
 * @return mapping of delivery bucket to the order detail lines it contains
 * @throws SkuUnavailableException if a bucket falls into the no-stock or offline delivery group
 */
Map<DeliveryBucket, List<CustomerOrderDet>> getDeliveryGroups(final CustomerOrder order,
        final boolean onePhysicalDelivery) throws SkuUnavailableException {

    final Map<DeliveryBucket, List<CustomerOrderDet>> buckets = (Map) orderSplittingStrategy
            .determineDeliveryBuckets(order.getShop().getShopId(),
                    new ArrayList<CartItem>(order.getOrderDetail()), onePhysicalDelivery);

    for (final Map.Entry<DeliveryBucket, List<CustomerOrderDet>> bucket : buckets.entrySet()) {

        if (CustomerOrderDelivery.NOSTOCK_DELIVERY_GROUP.equals(bucket.getKey().getGroup())) {

            // Report the first offending line of the bucket; the boolean flag presumably
            // distinguishes out-of-stock (true) from offline (false) — TODO confirm
            final CustomerOrderDet first = bucket.getValue().get(0);

            throw new SkuUnavailableException(first.getProductSkuCode(), first.getProductName(), true);

        } else if (CustomerOrderDelivery.OFFLINE_DELIVERY_GROUP.equals(bucket.getKey().getGroup())) {

            final CustomerOrderDet first = bucket.getValue().get(0);

            throw new SkuUnavailableException(first.getProductSkuCode(), first.getProductName(), false);

        }

    }

    return buckets;

}

From source file:org.obiba.onyx.quartz.editor.locale.LocaleProperties.java

/**
 * Builds, for each configured locale, a Properties object holding the localized
 * labels of every questionnaire element. Newlines in values are rendered as
 * "&lt;br/&gt;"; null values become empty strings.
 */
private Map<Locale, Properties> toLocalePropertiesMap(Questionnaire questionnaire) {
    final Map<Locale, Properties> result = new HashMap<Locale, Properties>();
    for (Locale locale : getLocales()) {
        final Properties localized = new Properties();
        for (Map.Entry<IQuestionnaireElement, ListMultimap<Locale, KeyValue>> labelEntry : getElementLabels()
                .entrySet()) {
            final IQuestionnaireElement element = labelEntry.getKey();
            for (KeyValue keyValue : labelEntry.getValue().get(locale)) {
                final String propertyKey = questionnaire.getPropertyKeyProvider().getPropertyKey(element,
                        keyValue.getKey());
                final String rawValue = keyValue.getValue();
                localized.setProperty(propertyKey, rawValue == null ? "" : rawValue.replaceAll("\n", "<br/>"));
            }
        }
        result.put(locale, localized);
    }
    return result;
}

From source file:com.netflix.simianarmy.resources.sniper.SniperMonkeyResource.java

/**
 * Gets the sniper events. Creates GET /api/v1/sniper api which outputs the sniper events in json. Users can specify
 * cgi query params to filter the results and use "since" query param to set the start of a timerange. "since" will
 * number of milliseconds since the epoch.
 *
 * @param uriInfo/*from ww  w . j a v  a 2s.  c  om*/
 *            the uri info
 * @return the sniper events json response
 * @throws IOException
 *             Signals that an I/O exception has occurred.
 */
@GET
public Response getSniperEvents(@Context UriInfo uriInfo) throws IOException {
    Map<String, String> query = new HashMap<String, String>();
    Date date = null;
    for (Map.Entry<String, List<String>> pair : uriInfo.getQueryParameters().entrySet()) {
        if (pair.getValue().isEmpty()) {
            continue;
        }
        if (pair.getKey().equals("since")) {
            date = new Date(Long.parseLong(pair.getValue().get(0)));
        } else {
            query.put(pair.getKey(), pair.getValue().get(0));
        }
    }
    // if "since" not set, default to 24 hours ago
    if (date == null) {
        Calendar now = monkey.context().calendar().now();
        now.add(Calendar.DAY_OF_YEAR, -1);
        date = now.getTime();
    }

    List<Event> evts = monkey.context().recorder().findEvents(SniperMonkey.Type.SNIPER,
            SniperMonkey.EventTypes.SNIPER_TERMINATION, query, date);

    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    JsonGenerator gen = JSON_FACTORY.createJsonGenerator(baos, JsonEncoding.UTF8);
    gen.writeStartArray();
    for (Event evt : evts) {
        gen.writeStartObject();
        gen.writeStringField("monkeyType", evt.monkeyType().name());
        gen.writeStringField("eventType", evt.eventType().name());
        gen.writeNumberField("eventTime", evt.eventTime().getTime());
        gen.writeStringField("region", evt.region());
        for (Map.Entry<String, String> pair : evt.fields().entrySet()) {
            gen.writeStringField(pair.getKey(), pair.getValue());
        }
        gen.writeEndObject();
    }
    gen.writeEndArray();
    gen.close();
    return Response.status(Response.Status.OK).entity(baos.toString("UTF-8")).build();
}