Example usage for java.util Map.Entry put

List of usage examples for java.util Map.Entry put

Introduction

On this page you can find example usage for java.util Map.Entry put.

Prototype

V put(K key, V value);

Document

Associates the specified value with the specified key in this map (optional operation).
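
As a quick reference before the real-world examples below, here is a minimal, self-contained sketch of the put contract and of iterating the resulting Map.Entry view. It uses only java.util types and invented keys, so it is illustrative rather than taken from any of the projects quoted later.

import java.util.LinkedHashMap;
import java.util.Map;

public class PutExample {
    public static void main(String[] args) {
        Map<String, Integer> counts = new LinkedHashMap<>();

        // put returns the previous value for the key, or null if the key was absent
        Integer previous = counts.put("strain", 3);   // previous == null
        previous = counts.put("strain", 5);           // previous == 3, value replaced

        counts.put("mutant", 1);

        // entrySet() exposes the mappings created by put as Map.Entry objects
        for (Map.Entry<String, Integer> entry : counts.entrySet()) {
            System.out.println(entry.getKey() + " = " + entry.getValue());
        }
    }
}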

Usage

From source file:edu.vt.vbi.patric.portlets.TranscriptomicsGeneExp.java

private JSONObject processSummary(ResourceRequest request) {
    JSONObject jsonResult = new JSONObject();

    String paramFeatureId = request.getParameter("featureId");
    String paramSampleId = request.getParameter("sampleId");
    String paramKeyword = request.getParameter("keyword");
    String paramLogRatio = request.getParameter("log_ratio");
    String paramZScore = request.getParameter("zscore");

    try {
        DataApiHandler dataApi = new DataApiHandler(request);
        //select?q=feature_id:PATRIC.83332.12.NC_000962.CDS.34.1524.fwd&rows=0&facet=true&facet.range.other=before&facet.range.other=after
        // &facet.range.start=-2&facet.range.end=2&facet.range.gap=0.5&facet.range=z_score&facet.range=log_ratio

        //select?q=feature_id:PATRIC.83332.12.NC_000962.CDS.34.1524.fwd&rows=0&facet=true&facet.mincount=1&facet.field=strain&facet.field=mutant&facet.field=condition
        SolrQuery query = new SolrQuery();

        if (paramKeyword != null && !paramKeyword.equals("")) {
            query.setQuery(paramKeyword + " AND feature_id:" + paramFeatureId);
        } else {
            query.setQuery("feature_id:" + paramFeatureId);
        }

        if (paramSampleId != null && !paramSampleId.equals("")) {
            String[] pids = paramSampleId.split(",");

            query.addFilterQuery("pid:(" + StringUtils.join(pids, " OR ") + ")");
        }
        if (paramLogRatio != null && !paramLogRatio.equals("") && !paramLogRatio.equals("0")) {
            query.addFilterQuery(
                    "log_ratio:[* TO -" + paramLogRatio + "] OR log_ratio:[" + paramLogRatio + " TO *]");
        }
        if (paramZScore != null && !paramZScore.equals("") && !paramZScore.equals("0")) {
            query.addFilterQuery("z_score:[* TO -" + paramZScore + "] OR z_score:[" + paramZScore + " TO *]");
        }

        query.setRows(dataApi.MAX_ROWS);
        query.setFacet(true).setFacetMinCount(1).set("json.nl", "map");
        query.set("facet.range.other", "before").add("facet.range.other", "after");
        query.addNumericRangeFacet("log_ratio", -2, 2, 0.5).addNumericRangeFacet("z_score", -2, 2, 0.5);
        query.addFacetField("strain").addFacetField("mutant").addFacetField("condition");

        LOGGER.debug("[{}] {}", SolrCore.TRANSCRIPTOMICS_GENE.getSolrCoreName(), query.toString());

        String apiResponse = dataApi.solrQuery(SolrCore.TRANSCRIPTOMICS_GENE, query);

        Map resp = jsonReader.readValue(apiResponse);
        Map respBody = (Map) resp.get("response");

        List<Map> sdl = (List<Map>) respBody.get("docs");

        // features
        JSONArray features = new JSONArray();
        for (Map doc : sdl) {
            JSONObject feature = new JSONObject();
            feature.put("exp_accession", doc.get("accession"));
            // feature.put("exp_channels", doc.get(""));
            feature.put("exp_condition", doc.get("condition"));
            feature.put("exp_id", doc.get("eid"));
            feature.put("exp_locustag", doc.get("refseq_locus_tag"));
            feature.put("exp_mutant", doc.get("mutant"));
            feature.put("exp_name", doc.get("expname"));
            feature.put("exp_organism", doc.get("organism"));
            feature.put("exp_pavg", doc.get("avg_intensity"));
            feature.put("exp_platform", doc.get("")); // ??
            feature.put("exp_pratio", doc.get("log_ratio"));
            feature.put("exp_samples", doc.get("")); // ??
            feature.put("exp_strain", doc.get("")); // ??
            feature.put("exp_timepoint", doc.get("timepoint"));
            feature.put("exp_zscore", doc.get("z_score"));
            // feature.put("figfam_id", doc.get("")); // ??
            feature.put("locus_tag", doc.get("alt_locus_tag"));
            feature.put("feature_id", doc.get("feature_id"));
            feature.put("pid", doc.get("pid"));
            feature.put("pmid", doc.get("pmid"));

            features.add(feature);
        }
        jsonResult.put("features", features);

        Map facets = (Map) resp.get("facet_counts");

        Map facetRanges = (Map) facets.get("facet_ranges");

        if (facetRanges.containsKey("log_ratio")) {
            Map facetLogRatio = (Map) facetRanges.get("log_ratio");
            final int before = (Integer) facetLogRatio.get("before");
            final int after = (Integer) facetLogRatio.get("after");
            Map facetRangeLogRatio = (Map) facetLogRatio.get("counts");

            List<JSONObject> list = new ArrayList<>();

            for (Map.Entry<String, Integer> entry : (Iterable<Map.Entry>) facetRangeLogRatio.entrySet()) {
                JSONObject json = new JSONObject();
                json.put("category", entry.getKey());
                json.put("count", entry.getValue());

                list.add(json);
            }

            boolean hasMinBucket = false;
            boolean hasMaxBucket = false;
            for (JSONObject entry : list) {
                if (entry.get("category").equals("-2.0")) {
                    entry.put("count", ((Integer) entry.get("count") + before));
                    hasMinBucket = true;
                } else if (entry.get("category").equals("2.0")) {
                    entry.put("count", ((Integer) entry.get("count") + after));
                    hasMaxBucket = true;
                }
            }
            if (!hasMinBucket) {
                JSONObject json = new JSONObject();
                json.put("category", "-2.0");
                json.put("count", before);
                list.add(json);
            }
            if (!hasMaxBucket) {
                JSONObject json = new JSONObject();
                json.put("category", "2.0");
                json.put("count", after);
                list.add(json);
            }

            jsonResult.put("log_ratio", list);
        }

        if (facetRanges.containsKey("z_score")) {
            Map facetZscore = (Map) facetRanges.get("z_score");
            final int before = (Integer) facetZscore.get("before");
            final int after = (Integer) facetZscore.get("after");
            Map facetRangeZscore = (Map) facetZscore.get("counts");

            List<JSONObject> list = new ArrayList<>();
            for (Map.Entry<String, Integer> entry : (Iterable<Map.Entry>) facetRangeZscore.entrySet()) {
                JSONObject json = new JSONObject();
                json.put("category", entry.getKey());
                json.put("count", entry.getValue());

                list.add(json);
            }

            boolean hasMinBucket = false;
            boolean hasMaxBucket = false;
            for (JSONObject entry : list) {
                if (entry.get("category").equals("-2.0")) {
                    entry.put("count", ((Integer) entry.get("count") + before));
                    hasMinBucket = true;
                } else if (entry.get("category").equals("2.0")) {
                    entry.put("count", ((Integer) entry.get("count") + after));
                    hasMaxBucket = true;
                }
            }
            if (!hasMinBucket) {
                JSONObject json = new JSONObject();
                json.put("category", "-2.0");
                json.put("count", before);
                list.add(json);
            }
            if (!hasMaxBucket) {
                JSONObject json = new JSONObject();
                json.put("category", "2.0");
                json.put("count", after);
                list.add(json);
            }

            jsonResult.put("z_score", list);
        }

        Map facetFields = (Map) facets.get("facet_fields");

        // strain
        if (facetFields.containsKey("strain")) {
            Map facetStrain = (Map) facetFields.get("strain");
            List<JSONObject> list = new ArrayList<>();
            for (Map.Entry<String, Integer> entry : (Iterable<Map.Entry>) facetStrain.entrySet()) {
                JSONObject json = new JSONObject();
                json.put("category", entry.getKey());
                json.put("count", entry.getValue());

                list.add(json);
            }
            jsonResult.put("strain", list);
        }

        // mutant
        if (facetFields.containsKey("mutant")) {
            Map facetMutant = (Map) facetFields.get("mutant");
            List<JSONObject> list = new ArrayList<>();
            for (Map.Entry<String, Integer> entry : (Iterable<Map.Entry>) facetMutant.entrySet()) {
                JSONObject json = new JSONObject();
                json.put("category", entry.getKey());
                json.put("count", entry.getValue());

                list.add(json);
            }
            jsonResult.put("mutant", list);
        }

        // condition
        if (facetFields.containsKey("condition")) {
            Map facetCondition = (Map) facetFields.get("condition");
            List<JSONObject> list = new ArrayList<>();
            for (Map.Entry<String, Integer> entry : (Iterable<Map.Entry>) facetCondition.entrySet()) {
                JSONObject json = new JSONObject();
                json.put("category", entry.getKey());
                json.put("count", entry.getValue());

                list.add(json);
            }
            jsonResult.put("condition", list);
        }
    } catch (IOException e) {
        LOGGER.error(e.getMessage(), e);
    }

    return jsonResult;
}
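
The facet-handling blocks above repeat one idiom: iterate a map of category-to-count pairs with entrySet() and put each pair into a small JSON-like object, folding the out-of-range before/after counts into the edge buckets. Below is a condensed sketch of that idiom using only java.util types; the class name, the helper method, and the plain Map standing in for the Solr facet payload are illustrative assumptions, and the portlet's handling of missing edge buckets is omitted for brevity.

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class FacetFlattener {

    // Turns {"-2.0": 3, "0.0": 12, ...} into [{category=-2.0, count=...}, ...],
    // folding the out-of-range "before"/"after" counts into the edge buckets.
    static List<Map<String, Object>> toCategoryCounts(Map<String, Integer> counts, int before, int after) {
        List<Map<String, Object>> list = new ArrayList<>();
        for (Map.Entry<String, Integer> entry : counts.entrySet()) {
            int value = entry.getValue();
            if (entry.getKey().equals("-2.0")) {
                value += before;
            } else if (entry.getKey().equals("2.0")) {
                value += after;
            }
            Map<String, Object> json = new LinkedHashMap<>();
            json.put("category", entry.getKey());
            json.put("count", value);
            list.add(json);
        }
        return list;
    }

    public static void main(String[] args) {
        Map<String, Integer> counts = new LinkedHashMap<>();
        counts.put("-2.0", 3);
        counts.put("0.0", 12);
        counts.put("2.0", 5);
        System.out.println(toCategoryCounts(counts, 4, 2));
    }
}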

From source file:com.ichi2.libanki.Decks.java

/**
 * Add a deck with NAME. Reuse deck if already exists. Return id as Long.
 */
public Long id(String name, boolean create, String type) {
    try {
        name = name.replace("\"", "");
        for (Map.Entry<Long, JSONObject> g : mDecks.entrySet()) {
            if (g.getValue().getString("name").equalsIgnoreCase(name)) {
                return g.getKey();
            }
        }
        if (!create) {
            return null;
        }
        if (name.contains("::")) {
            // not top level; ensure all parents exist
            name = _ensureParents(name);
        }
        JSONObject g;
        long id;
        g = new JSONObject(type);
        g.put("name", name);
        while (true) {
            id = Utils.intNow(1000);
            if (!mDecks.containsKey(id)) {
                break;
            }
        }
        g.put("id", id);
        mDecks.put(id, g);
        save(g);
        maybeAddToActive();
        //runHook("newDeck"); // TODO
        return id;
    } catch (JSONException e) {
        throw new RuntimeException(e);
    }
}

From source file:com.hichinaschool.flashcards.libanki.Decks.java

public long id(String name, boolean create, String type) {
    name = name.replace("\'", "").replace("\"", "");
    for (Map.Entry<Long, JSONObject> g : mDecks.entrySet()) {
        try {
            if (g.getValue().getString("name").equalsIgnoreCase(name)) {
                return g.getKey();
            }
        } catch (JSONException e) {
            throw new RuntimeException(e);
        }
    }
    if (!create) {
        return 0;
    }
    if (name.matches(".*::.*")) {
        // not top level; ensure all parents exist
        name = _ensureParents(name);
    }
    JSONObject g;
    long id;
    try {
        g = new JSONObject(type);
        g.put("name", name);
        id = Utils.intNow(1000);
        while (mDecks.containsKey(id)) {
            id = Utils.intNow();
        }
        g.put("id", id);
        mDecks.put(id, g);
    } catch (JSONException e) {
        throw new RuntimeException(e);
    }
    save(g);
    maybeAddToActive();
    return id;
}
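
Both Decks.id variants above implement the same find-or-create pattern: scan entrySet() for an existing value and, only if nothing matches, generate an unused key and put the new entry. A minimal sketch of that pattern follows; the deck is reduced to its name and ThreadLocalRandom stands in for Utils.intNow, so this is an illustration of the idiom rather than the AnkiDroid code.

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ThreadLocalRandom;

public class FindOrCreate {

    private final Map<Long, String> decks = new HashMap<>();

    // Return the id of the deck named NAME, creating it if it does not exist yet.
    public long id(String name) {
        for (Map.Entry<Long, String> entry : decks.entrySet()) {
            if (entry.getValue().equalsIgnoreCase(name)) {
                return entry.getKey();
            }
        }
        long id;
        do {
            id = ThreadLocalRandom.current().nextLong(1, Long.MAX_VALUE); // stand-in for Utils.intNow(1000)
        } while (decks.containsKey(id));
        decks.put(id, name);
        return id;
    }

    public static void main(String[] args) {
        FindOrCreate store = new FindOrCreate();
        long first = store.id("Default");
        long second = store.id("default"); // case-insensitive lookup reuses the existing entry
        System.out.println(first == second); // true
    }
}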

From source file:com.alibaba.jstorm.daemon.nimbus.TopologyMetricsRunnable.java

private void mergeAndUploadClusterMetrics() {
    TopologyMetricContext context = getClusterTopologyMetricContext();
    TopologyMetric tpMetric = context.mergeMetrics();
    if (tpMetric == null) {
        tpMetric = MetricUtils.mkTopologyMetric();
        tpMetric.set_topologyMetric(MetricUtils.mkMetricInfo());
    }

    //reset snapshots metric id
    MetricInfo clusterMetrics = tpMetric.get_topologyMetric();
    Map<String, Long> metricNames = context.getMemMeta();
    for (Map.Entry<String, Map<Integer, MetricSnapshot>> entry : clusterMetrics.get_metrics().entrySet()) {
        String metricName = entry.getKey();
        MetricType metricType = MetricUtils.metricType(metricName);
        Long metricId = metricNames.get(metricName);
        for (Map.Entry<Integer, MetricSnapshot> metric : entry.getValue().entrySet()) {
            MetricSnapshot snapshot = metric.getValue();
            snapshot.set_metricId(metricId);
            if (metricType == MetricType.HISTOGRAM || metricType == MetricType.TIMER) {
                snapshot.set_points(new ArrayList<Long>(0));
            }
            //                entry.getValue().put(metric.getKey(), snapshot);
        }
    }

    //fill the unacquired metrics with zero
    long ts = System.currentTimeMillis();
    for (Map.Entry<String, Long> entry : metricNames.entrySet()) {
        String name = entry.getKey();
        if (!clusterMetrics.get_metrics().containsKey(name)) {
            Map<Integer, MetricSnapshot> metric = new HashMap<>();
            MetricType type = MetricUtils.metricType(name);
            metric.put(AsmWindow.M1_WINDOW, new MetricSnapshot(entry.getValue(), ts, type.getT()));
            clusterMetrics.put_to_metrics(name, metric);
        }
    }

    //upload to cache
    Update event = new Update();
    event.timestamp = System.currentTimeMillis();
    event.topologyMetrics = tpMetric;
    event.topologyId = JStormMetrics.CLUSTER_METRIC_KEY;
    pushEvent(event);

    LOG.info("send update event for cluster metrics, size : {}", clusterMetrics.get_metrics_size());
}
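
The "fill the unacquired metrics with zero" step above is another common entrySet-plus-put idiom: for every registered key missing from a result map, put a default placeholder entry. The sketch below strips the idiom down to plain java.util types; the metric names, the window constant, and the nested-map placeholder are invented stand-ins for the JStorm MetricSnapshot structures.

import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;

public class FillMissingMetrics {
    public static void main(String[] args) {
        // registered metric names mapped to their ids (invented data)
        Map<String, Long> registered = new LinkedHashMap<>();
        registered.put("cluster.throughput", 1L);
        registered.put("cluster.latency", 2L);

        // metrics actually reported this cycle: name -> (window -> value)
        Map<String, Map<Integer, Long>> reported = new HashMap<>();
        Map<Integer, Long> throughput = new HashMap<>();
        throughput.put(60, 1234L);
        reported.put("cluster.throughput", throughput);

        // for every registered metric that was not reported, put a zero-valued placeholder
        for (Map.Entry<String, Long> entry : registered.entrySet()) {
            if (!reported.containsKey(entry.getKey())) {
                Map<Integer, Long> placeholder = new HashMap<>();
                placeholder.put(60, 0L); // 60-second window, analogous to AsmWindow.M1_WINDOW
                reported.put(entry.getKey(), placeholder);
            }
        }

        System.out.println(reported); // cluster.latency now carries a zero entry
    }
}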

From source file:org.apache.flink.streaming.connectors.kafka.KafkaConsumerTestBase.java

/**
 * Runs a job using the provided environment to read a sequence of records from a single Kafka topic.
 * The method allows the expected starting offset and total read value count to be specified individually for each partition.
 * The job will be considered successful only if all partition read results match the start offset and value count criteria.
 */
protected void readSequence(final StreamExecutionEnvironment env, final StartupMode startupMode,
        final Map<KafkaTopicPartition, Long> specificStartupOffsets, final Properties cc,
        final String topicName,
        final Map<Integer, Tuple2<Integer, Integer>> partitionsToValuesCountAndStartOffset) throws Exception {
    final int sourceParallelism = partitionsToValuesCountAndStartOffset.keySet().size();

    int finalCountTmp = 0;
    for (Map.Entry<Integer, Tuple2<Integer, Integer>> valuesCountAndStartOffset : partitionsToValuesCountAndStartOffset
            .entrySet()) {
        finalCountTmp += valuesCountAndStartOffset.getValue().f0;
    }
    final int finalCount = finalCountTmp;

    final TypeInformation<Tuple2<Integer, Integer>> intIntTupleType = TypeInfoParser
            .parse("Tuple2<Integer, Integer>");

    final TypeInformationSerializationSchema<Tuple2<Integer, Integer>> deser = new TypeInformationSerializationSchema<>(
            intIntTupleType, env.getConfig());

    // create the consumer
    cc.putAll(secureProps);
    FlinkKafkaConsumerBase<Tuple2<Integer, Integer>> consumer = kafkaServer.getConsumer(topicName, deser, cc);
    switch (startupMode) {
    case EARLIEST:
        consumer.setStartFromEarliest();
        break;
    case LATEST:
        consumer.setStartFromLatest();
        break;
    case SPECIFIC_OFFSETS:
        consumer.setStartFromSpecificOffsets(specificStartupOffsets);
        break;
    case GROUP_OFFSETS:
        consumer.setStartFromGroupOffsets();
        break;
    }

    DataStream<Tuple2<Integer, Integer>> source = env.addSource(consumer).setParallelism(sourceParallelism)
            .map(new ThrottledMapper<Tuple2<Integer, Integer>>(20)).setParallelism(sourceParallelism);

    // verify data
    source.flatMap(new RichFlatMapFunction<Tuple2<Integer, Integer>, Integer>() {

        private HashMap<Integer, BitSet> partitionsToValueCheck;
        private int count = 0;

        @Override
        public void open(Configuration parameters) throws Exception {
            partitionsToValueCheck = new HashMap<>();
            for (Integer partition : partitionsToValuesCountAndStartOffset.keySet()) {
                partitionsToValueCheck.put(partition, new BitSet());
            }
        }

        @Override
        public void flatMap(Tuple2<Integer, Integer> value, Collector<Integer> out) throws Exception {
            int partition = value.f0;
            int val = value.f1;

            BitSet bitSet = partitionsToValueCheck.get(partition);
            if (bitSet == null) {
                throw new RuntimeException("Got a record from an unknown partition");
            } else {
                bitSet.set(val - partitionsToValuesCountAndStartOffset.get(partition).f1);
            }

            count++;

            LOG.info("Received message {}, total {} messages", value, count);

            // verify if we've seen everything
            if (count == finalCount) {
                for (Map.Entry<Integer, BitSet> partitionsToValueCheck : this.partitionsToValueCheck
                        .entrySet()) {
                    BitSet check = partitionsToValueCheck.getValue();
                    int expectedValueCount = partitionsToValuesCountAndStartOffset
                            .get(partitionsToValueCheck.getKey()).f0;

                    if (check.cardinality() != expectedValueCount) {
                        throw new RuntimeException("Expected cardinality to be " + expectedValueCount
                                + ", but was " + check.cardinality());
                    } else if (check.nextClearBit(0) != expectedValueCount) {
                        throw new RuntimeException("Expected next clear bit to be " + expectedValueCount
                                + ", but was " + check.cardinality());
                    }
                }

                // test has passed
                throw new SuccessException();
            }
        }

    }).setParallelism(1);

    tryExecute(env, "Read data from Kafka");

    LOG.info("Successfully read sequence for verification");
}
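
The verification mapper above keys a BitSet per partition in a HashMap, marks each received value relative to that partition's start offset, and declares success once every BitSet is one contiguous run of the expected length. Here is a small stand-alone sketch of that bookkeeping; the record array and start offsets are made-up data, and Kafka and Flink are left out entirely.

import java.util.BitSet;
import java.util.HashMap;
import java.util.Map;

public class PartitionValueCheck {
    public static void main(String[] args) {
        // start offset per partition (made-up data)
        Map<Integer, Integer> startOffsets = new HashMap<>();
        startOffsets.put(0, 10);
        startOffsets.put(1, 5);

        // received records as {partition, value} pairs (made-up data)
        int[][] records = { { 0, 10 }, { 0, 11 }, { 1, 5 }, { 0, 12 }, { 1, 6 } };

        // one BitSet per partition marks which values relative to the start offset have been seen
        Map<Integer, BitSet> seen = new HashMap<>();
        for (int[] record : records) {
            seen.computeIfAbsent(record[0], p -> new BitSet()).set(record[1] - startOffsets.get(record[0]));
        }

        // a partition is complete when the set bits form one contiguous run of the expected length
        for (Map.Entry<Integer, BitSet> entry : seen.entrySet()) {
            BitSet check = entry.getValue();
            boolean contiguous = check.nextClearBit(0) == check.cardinality();
            System.out.println("partition " + entry.getKey() + ": " + check.cardinality()
                    + " values, contiguous=" + contiguous);
        }
    }
}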