Example usage for java.util LinkedHashMap get

List of usage examples for java.util LinkedHashMap get

Introduction

On this page you can find example usage for java.util.LinkedHashMap.get(Object).

Prototype

public V get(Object key) 

Document

Returns the value to which the specified key is mapped, or null if this map contains no mapping for the key.
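
Before the project listings below, a minimal self-contained sketch of the call; the class and key names are illustrative only:

import java.util.LinkedHashMap;

public class LinkedHashMapGetExample {
    public static void main(String[] args) {
        LinkedHashMap<String, Integer> map = new LinkedHashMap<>();
        map.put("alpha", 1);
        map.put("beta", 2);

        Integer present = map.get("alpha"); // 1
        Integer absent = map.get("gamma");  // null: no mapping for "gamma"

        System.out.println(present);
        System.out.println(absent);
    }
}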

Usage

From source file:edumsg.edumsg_android_app.MainActivity.java

/**
 * Creates a {@link JsonObjectRequest} with parameters supplied in a {@link Map<String, String>}.
 * Two other {@link JsonObjectRequest}s are nested within the {@link com.android.volley.Response.Listener#onResponse(Object)}
 * callback: the method first retrieves the user's retweets, then their favorites, and finally their
 * timeline.
 */
private void getFeed() {
    final LoadToast loadToast = new LoadToast(this);
    final float scale = getApplicationContext().getResources().getDisplayMetrics().density;
    int pixels = (int) (56 * scale + 0.5f);
    loadToast.setTranslationY(pixels);
    if (!swipeRefreshLayout.isRefreshing()) {
        loadToast.setText("Loading...");
        loadToast.show();
    }
    Map<String, String> jsonParams2 = new HashMap<>();
    jsonParams2.put("queue", "USER");
    jsonParams2.put("method", "get_retweets");
    jsonParams2.put("session_id", sessionId + "");
    JSONObject jsonRequest2 = new JSONObject(jsonParams2);
    JsonObjectRequest jsonObjectRequest2 = new JsonObjectRequest(Request.Method.POST, requestUrl, jsonRequest2,
            new Response.Listener<JSONObject>() {
                @Override
                public void onResponse(JSONObject response) {
                    try {
                        final ObjectMapper mapper = new ObjectMapper();
                        Map<String, Object> responseMap = mapper.readValue(response.toString(),
                                new TypeReference<HashMap<String, Object>>() {
                                });
                        retweets = (ArrayList) responseMap.get("tweet_ids");
                        Map<String, String> jsonParams = new HashMap<>();
                        jsonParams.put("queue", "USER");
                        jsonParams.put("method", "get_favorites");
                        jsonParams.put("session_id", sessionId);
                        JSONObject jsonRequest = new JSONObject(jsonParams);
                        JsonObjectRequest jsonObjectRequest3 = new JsonObjectRequest(Request.Method.POST,
                                requestUrl, jsonRequest, new Response.Listener<JSONObject>() {
                                    @Override
                                    public void onResponse(JSONObject response) {
                                        try {
                                            Map<String, Object> responseMap = mapper.readValue(
                                                    response.toString(),
                                                    new TypeReference<HashMap<String, Object>>() {
                                                    });
                                            favorites = (ArrayList) responseMap.get("favorites");
                                            Map<String, String> jsonParams = new HashMap<>();
                                            jsonParams.put("queue", "USER");
                                            jsonParams.put("method", "timeline");
                                            jsonParams.put("session_id", sessionId);
                                            JSONObject jsonRequest = new JSONObject(jsonParams);
                                            JsonObjectRequest jsonObjectRequest4 = new JsonObjectRequest(
                                                    Request.Method.POST, requestUrl, jsonRequest,
                                                    new Response.Listener<JSONObject>() {
                                                        @Override
                                                        public void onResponse(JSONObject response) {
                                                            try {
                                                                Map<String, Object> responseMap = mapper
                                                                        .readValue(response.toString(),
                                                                                new TypeReference<HashMap<String, Object>>() {
                                                                                });
                                                                if (responseMap.get("code").equals("200")) {
                                                                    if (!swipeRefreshLayout.isRefreshing())
                                                                        loadToast.success();
                                                                    ArrayList tweetsArray = (ArrayList) responseMap
                                                                            .get("feeds");
                                                                    final Iterator iterator = tweetsArray
                                                                            .iterator();
                                                                    while (iterator.hasNext()) {
                                                                        final Map<String, Object> tweetJsonObj = mapper
                                                                                .readValue(mapper
                                                                                        .writeValueAsString(
                                                                                                iterator.next()),
                                                                                        new TypeReference<HashMap<String, Object>>() {
                                                                                        });
                                                                        final int tweetId = (int) tweetJsonObj
                                                                                .get("id");
                                                                        final LinkedHashMap creatorMap = (LinkedHashMap) tweetJsonObj
                                                                                .get("creator");
                                                                        final int creatorId = (int) creatorMap
                                                                                .get("id");
                                                                        final String creatorUsername = (String) creatorMap
                                                                                .get("username");
                                                                        // compare strings with equals(); == only compares references
                                                                        if (creatorUsername != null && creatorUsername.equals(username))
                                                                            continue;
                                                                        String tweetText = (String) tweetJsonObj
                                                                                .get("tweet_text");
                                                                        String avatarUrl = (String) creatorMap
                                                                                .get("avatar_url");
                                                                        User creator = new User();
                                                                        creator.setId(creatorId);
                                                                        creator.setName((String) creatorMap
                                                                                .get("name"));
                                                                        creator.setUsername((String) creatorMap
                                                                                .get("username"));
                                                                        creator.setAvatar_url(avatarUrl);
                                                                        final LinkedHashMap retweeterMap = (LinkedHashMap) tweetJsonObj
                                                                                .get("retweeter");
                                                                        final Tweet tweetObject = new Tweet(
                                                                                tweetId, creator, tweetText);
                                                                        if (retweeterMap != null) {
                                                                            User retweeter = new User();
                                                                            retweeter.setId((int) retweeterMap
                                                                                    .get("id"));
                                                                            retweeter.setName(
                                                                                    (String) retweeterMap
                                                                                            .get("name"));
                                                                            retweeter.setUsername(
                                                                                    (String) retweeterMap
                                                                                            .get("username"));
                                                                            tweetObject.setRetweeter(retweeter);
                                                                        }
                                                                        if (avatarUrl != null
                                                                                && !avatarUrl.equals("")) {
                                                                            tweetObject.setImgUrl(avatarUrl);
                                                                        }
                                                                        if (retweets.contains(
                                                                                Integer.valueOf(tweetId))) {
                                                                            tweetObject.setIsRetweeted(true);
                                                                        }
                                                                        Iterator favIter = favorites.iterator();
                                                                        while (favIter.hasNext()) {
                                                                            Map<String, Object> tweetJsonObj2 = mapper
                                                                                    .readValue(mapper
                                                                                            .writeValueAsString(
                                                                                                    favIter.next()),
                                                                                            new TypeReference<HashMap<String, Object>>() {
                                                                                            });
                                                                            if (tweetId == (int) tweetJsonObj2
                                                                                    .get("id")) {
                                                                                tweetObject
                                                                                        .setIsFavorited(true);
                                                                                break;
                                                                            }
                                                                        }
                                                                        tweetObjects.add(tweetObject);
                                                                    }
                                                                    if (swipeRefreshLayout.isRefreshing()) {
                                                                        rvAdapter.notifyDataSetChanged();
                                                                        swipeRefreshLayout.setRefreshing(false);
                                                                    } else {
                                                                        rvAdapter.notifyItemRangeInserted(0,
                                                                                tweetObjects.size());
                                                                    }
                                                                }
                                                            } catch (Exception e) {
                                                                loadToast.error();
                                                                e.printStackTrace();
                                                            }
                                                        }
                                                    }, new Response.ErrorListener() {
                                                        @Override
                                                        public void onErrorResponse(VolleyError error) {
                                                            loadToast.error();
                                                            error.printStackTrace();
                                                        }
                                                    }) {
                                                @Override
                                                public Map<String, String> getHeaders()
                                                        throws AuthFailureError {
                                                    HashMap<String, String> headers = new HashMap<String, String>();
                                                    headers.put("Content-Type",
                                                            "application/json; charset=utf-8");
                                                    //headers.put("User-agent", System.getProperty("http.agent"));
                                                    return headers;
                                                };
                                            };
                                            jsonObjectRequest4.setTag(TAG);
                                            jsonObjectRequest4.setRetryPolicy(new DefaultRetryPolicy(10000,
                                                    DefaultRetryPolicy.DEFAULT_MAX_RETRIES,
                                                    DefaultRetryPolicy.DEFAULT_BACKOFF_MULT));
                                            getVolleyRequestQueue().add(jsonObjectRequest4);
                                        } catch (Exception e) {
                                            loadToast.error();
                                            e.printStackTrace();
                                        }
                                    }
                                }, new Response.ErrorListener() {
                                    @Override
                                    public void onErrorResponse(VolleyError error) {
                                        loadToast.error();
                                        error.printStackTrace();
                                    }
                                }) {
                            @Override
                            public Map<String, String> getHeaders() throws AuthFailureError {
                                HashMap<String, String> headers = new HashMap<String, String>();
                                headers.put("Content-Type", "application/json; charset=utf-8");
                                //headers.put("User-agent", System.getProperty("http.agent"));
                                return headers;
                            };
                        };
                        jsonObjectRequest3.setTag(TAG);
                        jsonObjectRequest3.setRetryPolicy(
                                new DefaultRetryPolicy(10000, DefaultRetryPolicy.DEFAULT_MAX_RETRIES,
                                        DefaultRetryPolicy.DEFAULT_BACKOFF_MULT));
                        getVolleyRequestQueue().add(jsonObjectRequest3);
                    } catch (Exception e) {
                        loadToast.error();
                        e.printStackTrace();
                    }
                }
            }, new Response.ErrorListener() {
                @Override
                public void onErrorResponse(VolleyError error) {
                    loadToast.error();
                    error.printStackTrace();
                }
            }) {
        @Override
        public Map<String, String> getHeaders() throws AuthFailureError {
            HashMap<String, String> headers = new HashMap<String, String>();
            headers.put("Content-Type", "application/json; charset=utf-8");
            //headers.put("User-agent", System.getProperty("http.agent"));
            return headers;
        };
    };
    jsonObjectRequest2.setTag(TAG);
    jsonObjectRequest2.setRetryPolicy(new DefaultRetryPolicy(10000, DefaultRetryPolicy.DEFAULT_MAX_RETRIES,
            DefaultRetryPolicy.DEFAULT_BACKOFF_MULT));
    getVolleyRequestQueue().add(jsonObjectRequest2);
}
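
The LinkedHashMap casts in the listing above work because Jackson's ObjectMapper deserializes nested JSON objects as LinkedHashMap when the declared value type is Object. A standalone sketch of that behavior; the JSON payload and class name here are made up for illustration:

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.LinkedHashMap;
import java.util.Map;

public class JacksonLinkedHashMapExample {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        String json = "{\"id\":7,\"creator\":{\"id\":3,\"username\":\"alice\"}}";

        Map<String, Object> tweet = mapper.readValue(json,
                new TypeReference<Map<String, Object>>() {});

        // Nested JSON objects come back as LinkedHashMap by default,
        // which is why the listing above can cast and call get() on "creator".
        LinkedHashMap<?, ?> creator = (LinkedHashMap<?, ?>) tweet.get("creator");
        System.out.println(creator.get("username")); // alice
    }
}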

From source file:com.cwctravel.hudson.plugins.extended_choice_parameter.ExtendedChoiceParameterDefinition.java

public Map<String, String> getChoicesByDropdownId() throws Exception {
    LinkedHashMap<String, LinkedHashSet<String>> choicesByDropdownId = calculateChoicesByDropdownId();

    Map<String, String> collapsedMap = new LinkedHashMap<String, String>();

    for (String dropdownId : choicesByDropdownId.keySet()) {
        String choices = new String();
        for (String choice : choicesByDropdownId.get(dropdownId)) {
            if (choices.length() > 0) {
                choices += ",";
            }
            choices += choice;
        }

        collapsedMap.put(dropdownId, choices);
    }

    /* collapsedMap is of a form like this:
    collapsedMap.put(name + " dropdown MultiLevelMultiSelect 0", "Select a genome...,HG18,ZZ23");
    collapsedMap.put(name + " dropdown MultiLevelMultiSelect 0 HG18", "Select a source...,Diffuse large B-cell lymphoma,Multiple Myeloma");
    collapsedMap.put(name + " dropdown MultiLevelMultiSelect 0 ZZ23", "Select a source...,Neuroblastoma");
    collapsedMap.put(name + " dropdown MultiLevelMultiSelect 0 HG18 Diffuse large B-cell lymphoma","Select a cell type...,LY1");
    collapsedMap.put(name + " dropdown MultiLevelMultiSelect 0 HG18 Multiple Myeloma","Select a cell type...,MM1S");
    collapsedMap.put(name + " dropdown MultiLevelMultiSelect 0 ZZ23 Neuroblastoma","Select a cell type...,BE2C,SKNAS");
    collapsedMap.put(name + " dropdown MultiLevelMultiSelect 0 HG18 Diffuse large B-cell lymphoma LY1","Select a name...,LY1_BCL6_DMSO,LY1_BCL6_JQ1");
    collapsedMap.put(name + " dropdown MultiLevelMultiSelect 0 HG18 Multiple Myeloma MM1S", "Select a name...,MM1S_BRD4_150nM_JQ1,MM1S_BRD4_500nM_JQ1");
    collapsedMap.put(name + " dropdown MultiLevelMultiSelect 0 ZZ23 Neuroblastoma BE2C", "Select a name...,BE2C_BRD4");
    collapsedMap.put(name + " dropdown MultiLevelMultiSelect 0 ZZ23 Neuroblastoma SKNAS", "Select a name...,SKNAS_H3K4ME3");
    */

    return collapsedMap;
}
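
For comparison, a possible shorter form of the collapsing loop above; this is a sketch, not part of the plugin, and it assumes Java 8 for String.join while reusing the map type returned by calculateChoicesByDropdownId():

import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.Map;

public class CollapseChoicesSketch {
    static Map<String, String> collapse(LinkedHashMap<String, LinkedHashSet<String>> choicesByDropdownId) {
        Map<String, String> collapsedMap = new LinkedHashMap<String, String>();
        for (Map.Entry<String, LinkedHashSet<String>> entry : choicesByDropdownId.entrySet()) {
            // String.join builds the comma-separated choice list in one call
            collapsedMap.put(entry.getKey(), String.join(",", entry.getValue()));
        }
        return collapsedMap;
    }
}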

From source file:citation_prediction.CitationCore.java

/**
 * This function calculates the essential formulas for the Newton-Raphson function. The formulas used here
 * were based on the math of Dr. Allen Parks.
 *
 * @param l A list structure that will hold the calculated values.
 * @return The list updated with the calculated values of the essential formulas.
 */
private LinkedHashMap<String, Double> getPartialsData(LinkedHashMap<String, Double> l) {

    double fn = (((1 + l.get("mhat")) * pnorm(l.get("xt")) - l.get("s_pnorm_xi")) * l.get("s_xi"))
            - l.get("s_dnorm_xi") + (1 + l.get("mhat")) * dnorm(l.get("xt"));
    l.put("fn", fn);

    double gn = (((1 + l.get("mhat")) * pnorm(l.get("xt")) - l.get("s_pnorm_xi")) * (l.get("s_xi_sqrd") - 1))
            - l.get("s_xi_dnorm_xi") + ((1 + l.get("mhat")) * l.get("xt") * dnorm(l.get("xt")));
    l.put("gn", gn);

    double df_dmu = (((1 + l.get("mhat"))
            * ((l.get("xt") - l.get("s_xi")) * dnorm(l.get("xt")) - pnorm(l.get("xt"))))
            + l.get("s_xi") * l.get("s_dnorm_xi") - l.get("s_xi_dnorm_xi") + l.get("s_pnorm_xi"))
            / l.get("sigma");
    l.put("df_dmu", df_dmu);

    double df_dsigma = (((1 + l.get("mhat")) * ((l.get("xt") - l.get("s_xi")) * l.get("xt") * dnorm(l.get("xt"))
            - l.get("s_xi") * pnorm(l.get("xt"))))
            + l.get("s_xi") * (l.get("s_xi_dnorm_xi") + l.get("s_pnorm_xi")) - l.get("s_xi_sqrd_dnorm_xi"))
            / l.get("sigma");
    l.put("df_dsigma", df_dsigma);

    double dg_dmu = (((1 + l.get("mhat")) * (2 * l.get("s_xi") * pnorm(l.get("xt"))
            + (l.get("s_xi_sqrd") - Math.pow(l.get("xt"), 2)) * dnorm(l.get("xt"))))
            - (2 * l.get("s_xi") * l.get("s_pnorm_xi") + l.get("s_xi_sqrd") * l.get("s_dnorm_xi")
                    - l.get("s_xi_sqrd_dnorm_xi")))
            / (-l.get("sigma"));
    l.put("dg_dmu", dg_dmu);

    double dg_dsigma = (((1 + l.get("mhat")) * ((Math.pow(l.get("xt"), 3)) * dnorm(l.get("xt"))
            - l.get("s_xi_sqrd") * l.get("xt") * dnorm(l.get("xt"))
            - 2 * l.get("s_xi_sqrd") * pnorm(l.get("xt")))) + 2 * l.get("s_xi_sqrd") * l.get("s_pnorm_xi")
            + l.get("s_xi_sqrd") * l.get("s_xi_dnorm_xi") - l.get("s_xi_cubed_dnorm_xi")) / l.get("sigma");
    l.put("dg_dsigma", dg_dsigma);

    return l;
}

From source file:com.allinfinance.dwr.system.SelectOptionsDWR.java

/**
 * Returns the combo-box option data (value/display pairs) for the given transaction id
 * and parameter, serialized as a JSON string.
 * @param txnId the transaction id used to look up the select options
 * @param parameter an additional lookup parameter passed to SelectOption.getSelectView
 * @param request the current HTTP request, used to read the operator from the session
 * @param response the current HTTP response
 * @return the option data as a JSON string
 */
public String getComboDataWithParameter(String txnId, String parameter, HttpServletRequest request,
        HttpServletResponse response) {

    String jsonData = "{data:[{'valueField':'','displayField':'?'}]}";
    try {
        // read the current operator from the session
        Operator operator = (Operator) request.getSession().getAttribute(Constants.OPERATOR_INFO);
        LinkedHashMap<String, String> dataMap = SelectOption.getSelectView(txnId,
                new Object[] { operator, parameter });
        Iterator<String> iter = dataMap.keySet().iterator();
        if (iter.hasNext()) {
            Map<String, Object> jsonDataMap = new HashMap<String, Object>();
            LinkedList<Object> jsonDataList = new LinkedList<Object>();
            Map<String, String> tmpMap = null;
            String key = null;
            while (iter.hasNext()) {
                tmpMap = new LinkedHashMap<String, String>();
                key = iter.next();
                tmpMap.put("valueField", key);
                tmpMap.put("displayField", dataMap.get(key));
                jsonDataList.add(tmpMap);
            }
            jsonDataMap.put("data", jsonDataList);
            jsonData = JSONBean.genMapToJSON(jsonDataMap);
        }
    } catch (Exception e) {
        e.printStackTrace();
        log.error(e.getMessage());
    }
    //      System.out.println(jsonData);
    return jsonData;
}
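
The loop above iterates keySet() and then calls dataMap.get(key) for every key; iterating entrySet() yields key and value in one step. An illustrative standalone version, with made-up sample values:

import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

public class EntrySetIterationExample {
    public static void main(String[] args) {
        LinkedHashMap<String, String> dataMap = new LinkedHashMap<String, String>();
        dataMap.put("01", "Option one");
        dataMap.put("02", "Option two");

        List<Map<String, String>> jsonDataList = new LinkedList<Map<String, String>>();
        for (Map.Entry<String, String> entry : dataMap.entrySet()) {
            Map<String, String> tmpMap = new LinkedHashMap<String, String>();
            tmpMap.put("valueField", entry.getKey());     // the key, as in the listing above
            tmpMap.put("displayField", entry.getValue()); // the value, without a second get() lookup
            jsonDataList.add(tmpMap);
        }
        System.out.println(jsonDataList);
    }
}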

From source file:com.att.aro.main.PacketPlots.java

/**
 * Creates the plot for the uplink and downlink packets using the specified
 * trace analysis data.
 * 
 * @param analysis
 *            - The trace analysis data.
 */
public void populatePacketPlots(TraceData.Analysis analysis) {

    LinkedHashMap<Color, PacketSeries> ulDatasets = new LinkedHashMap<Color, PacketSeries>();
    LinkedHashMap<Color, PacketSeries> dlDatasets = new LinkedHashMap<Color, PacketSeries>();

    AnalysisFilter filter = null;
    if (analysis != null) {
        filter = analysis.getFilter();

        LinkedHashMap<Color, PacketSeries> datasets;
        for (PacketInfo packet : analysis.getPackets()) {
            if (packet.getDir() == null) {
                continue;
            }
            switch (packet.getDir()) {
            case UPLINK:
                datasets = ulDatasets;
                break;
            case DOWNLINK:
                datasets = dlDatasets;
                break;
            default:
                continue;
            }

            // Add the packet to the proper series based on color
            Color color = filter.getPacketColor(packet);
            PacketSeries series = datasets.get(color);
            if (series == null) {
                series = new PacketSeries(color);
                datasets.put(color, series);
            }
            series.add(new PacketDataItem(packet));

        }
    }

    populatePacketPlot(dlPlot, dlDatasets);
    populatePacketPlot(ulPlot, ulDatasets);
}
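
The series lookup above is the common get-or-create pattern: datasets.get(color), and on null, create a new series and put it back. On Java 8+, Map.computeIfAbsent expresses the same idea in one call; a standalone sketch with plain lists standing in for PacketSeries:

import java.awt.Color;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class GetOrCreateExample {
    public static void main(String[] args) {
        Map<Color, List<String>> datasets = new LinkedHashMap<Color, List<String>>();
        // Creates the list on the first lookup for a color and reuses it afterwards
        datasets.computeIfAbsent(Color.BLUE, c -> new ArrayList<String>()).add("packet 1");
        datasets.computeIfAbsent(Color.BLUE, c -> new ArrayList<String>()).add("packet 2");
        System.out.println(datasets.get(Color.BLUE)); // [packet 1, packet 2]
    }
}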

From source file:com.intellij.plugins.haxe.haxelib.HaxeLibrary.java

private void collectDependentsInternal(
        /*modifies*/ final @NotNull LinkedHashMap<String, HaxeLibraryDependency> collection) {
    List<HaxeLibraryDependency> dependencies = getDirectDependents();

    for (HaxeLibraryDependency dependency : dependencies) {
        if (!collection.containsKey(dependency.getKey())) { // Don't go down the same path again...
            // TODO: Deal with version mismatches here.  Add multiple versions, but don't add a specific version if the latest version is equal to it.
            collection.put(dependency.getKey(), dependency);
            HaxeLibrary depLib = dependency.getLibrary();
            if (null != depLib) {
                depLib.collectDependentsInternal(collection);
            } // TODO: Else mark dependency unfulfilled somehow??
        } else {
            HaxeLibraryDependency contained = collection.get(dependency.getKey());
            LOG.assertLog(contained != null, "Couldn't get a contained object.");
            if (contained != null) {
                contained.addReliant(dependency);
            }
        }
    }
}

From source file:com.inmobi.conduit.AbstractService.java

private String getPartVal(LinkedHashMap<String, String> partSpecs, String partCol) {
    if (partSpecs.containsKey(partCol)) {
        return partSpecs.get(partCol);
    }
    return null;
}
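
Note that get() already returns null for an absent key, so the containsKey() check above does not change what getPartVal returns; it mainly documents intent. The difference between "absent" and "mapped to null" only matters when the two cases must be told apart, as this short standalone demonstration shows (the part names and values are made up, not from the Conduit sources):

import java.util.LinkedHashMap;

public class GetVersusContainsKey {
    public static void main(String[] args) {
        LinkedHashMap<String, String> partSpecs = new LinkedHashMap<String, String>();
        partSpecs.put("year", "2015");
        partSpecs.put("day", null); // key explicitly mapped to null

        System.out.println(partSpecs.get("month"));       // null: no mapping
        System.out.println(partSpecs.get("day"));         // null: mapped to null
        System.out.println(partSpecs.containsKey("day")); // true: only containsKey tells the cases apart
    }
}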

From source file:de.ingrid.importer.udk.strategy.v1.IDCStrategy1_0_4.java

protected void updateSysList() throws Exception {
    if (log.isInfoEnabled()) {
        log.info("Updating sys_list...");
    }

    // ---------------------------
    int lstId = 6100;
    if (log.isInfoEnabled()) {
        log.info("Updating syslist " + lstId + " (INSPIRE Themen fr Verschlagwortung)...");
    }

    // clean up, to guarantee no old values !
    sqlStr = "DELETE FROM sys_list where lst_id = " + lstId;
    jdbc.executeUpdate(sqlStr);

    // german syslist
    LinkedHashMap<Integer, String> newSyslist6100_de = UtilsInspireThemes.inspireThemes_de;
    // english syslist
    LinkedHashMap<Integer, String> newSyslist6100_en = UtilsInspireThemes.inspireThemes_en;

    Iterator<Integer> itr = newSyslist6100_de.keySet().iterator();
    while (itr.hasNext()) {
        int key = itr.next();
        // german version
        jdbc.executeUpdate(
                "INSERT INTO sys_list (id, lst_id, entry_id, lang_id, name, maintainable, is_default) VALUES ("
                        + getNextId() + ", " + lstId + ", " + key + ", 'de', '" + newSyslist6100_de.get(key)
                        + "', 0, 'N')");
        // english version
        jdbc.executeUpdate(
                "INSERT INTO sys_list (id, lst_id, entry_id, lang_id, name, maintainable, is_default) VALUES ("
                        + getNextId() + ", " + lstId + ", " + key + ", 'en', '" + newSyslist6100_en.get(key)
                        + "', 0, 'N')");
    }

    // ---------------------------
    lstId = 527;
    if (log.isInfoEnabled()) {
        log.info("Updating syslist " + lstId
                + " (ISO)Themenkategorie-Codeliste (ISO B.5.27 MD_TopicCategoryCode)...");
    }

    // clean up, to guarantee no old values !
    sqlStr = "DELETE FROM sys_list where lst_id = " + lstId;
    jdbc.executeUpdate(sqlStr);

    // german syslist
    LinkedHashMap<Integer, String> newSyslist527_de = new LinkedHashMap<Integer, String>();
    newSyslist527_de.put(1, "Landwirtschaft");
    newSyslist527_de.put(2, "Biologie");
    newSyslist527_de.put(3, "Grenzen");
    newSyslist527_de.put(4, "Atmosphäre");
    newSyslist527_de.put(5, "Wirtschaft");
    newSyslist527_de.put(6, "Höhenangaben");
    newSyslist527_de.put(7, "Umwelt");
    newSyslist527_de.put(8, "Geowissenschaften");
    newSyslist527_de.put(9, "Gesundheitswesen");
    newSyslist527_de.put(10, "Oberflächenbeschreibung");
    newSyslist527_de.put(11, "Militär und Aufklärung");
    newSyslist527_de.put(12, "Binnengewässer");
    newSyslist527_de.put(13, "Ortsangaben");
    newSyslist527_de.put(14, "Meere");
    newSyslist527_de.put(15, "Planungsunterlagen, Kataster");
    newSyslist527_de.put(16, "Gesellschaft");
    newSyslist527_de.put(17, "Bauwerke");
    newSyslist527_de.put(18, "Verkehrswesen");
    newSyslist527_de.put(19, "Ver- und Entsorgung, Kommunikation");
    // english syslist
    LinkedHashMap<Integer, String> newSyslist527_en = new LinkedHashMap<Integer, String>();
    newSyslist527_en.put(1, "farming");
    newSyslist527_en.put(2, "biota");
    newSyslist527_en.put(3, "boundaries");
    newSyslist527_en.put(4, "climatologyMeteorologyAtmosphere");
    newSyslist527_en.put(5, "economy");
    newSyslist527_en.put(6, "elevation");
    newSyslist527_en.put(7, "environment");
    newSyslist527_en.put(8, "geoscientificInformation");
    newSyslist527_en.put(9, "health");
    newSyslist527_en.put(10, "imageryBaseMapsEarthCover");
    newSyslist527_en.put(11, "intelligenceMilitary");
    newSyslist527_en.put(12, "inlandWaters");
    newSyslist527_en.put(13, "location");
    newSyslist527_en.put(14, "oceans");
    newSyslist527_en.put(15, "planningCadastre");
    newSyslist527_en.put(16, "society");
    newSyslist527_en.put(17, "structure");
    newSyslist527_en.put(18, "transportation");
    newSyslist527_en.put(19, "utilitiesCommunication");

    itr = newSyslist527_de.keySet().iterator();
    while (itr.hasNext()) {
        int key = itr.next();
        // german version
        jdbc.executeUpdate(
                "INSERT INTO sys_list (id, lst_id, entry_id, lang_id, name, maintainable, is_default) VALUES ("
                        + getNextId() + ", " + lstId + ", " + key + ", 'de', '" + newSyslist527_de.get(key)
                        + "', 0, 'N')");
        // english version
        jdbc.executeUpdate(
                "INSERT INTO sys_list (id, lst_id, entry_id, lang_id, name, maintainable, is_default) VALUES ("
                        + getNextId() + ", " + lstId + ", " + key + ", 'en', '" + newSyslist527_en.get(key)
                        + "', 0, 'N')");
    }

    if (log.isInfoEnabled()) {
        log.info("Updating sys_list... done");
    }
}
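
The inserts above build each SQL statement by concatenating the id, key and name into the string. As an illustration only, with plain JDBC rather than the strategy's own jdbc helper and getNextId() (neither of which is part of this sketch), the same rows could be written through a parameterized batch, which also handles quoting of names containing umlauts or apostrophes:

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.util.LinkedHashMap;
import java.util.Map;

public class SyslistInsertSketch {
    // 'connection' and 'nextId' stand in for the strategy's JDBC wrapper and id generator
    static void insertSyslist(Connection connection, int lstId, String langId,
            LinkedHashMap<Integer, String> entries, int nextId) throws Exception {
        String sql = "INSERT INTO sys_list (id, lst_id, entry_id, lang_id, name, maintainable, is_default) "
                + "VALUES (?, ?, ?, ?, ?, 0, 'N')";
        try (PreparedStatement ps = connection.prepareStatement(sql)) {
            for (Map.Entry<Integer, String> entry : entries.entrySet()) {
                ps.setInt(1, nextId++);
                ps.setInt(2, lstId);
                ps.setInt(3, entry.getKey());
                ps.setString(4, langId);
                ps.setString(5, entry.getValue()); // quoted by the driver, no string concatenation
                ps.addBatch();
            }
            ps.executeBatch();
        }
    }
}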

From source file:com.joyfulmongo.db.JFMongoCmdQuery.java

private void processIncludes(Map<String, LinkedHashMap<String, List<JFMongoObject>>> includeKeyToPointerListMap,
        Map<String, String> includeKeyToPointerColnameMap) {
    for (String includeKey : includeFields) {
        String pointerColName = includeKeyToPointerColnameMap.get(includeKey);
        if (pointerColName != null) {
            LinkedHashMap<String, List<JFMongoObject>> pointerObjectIdToParentObjectsMap = includeKeyToPointerListMap
                    .get(includeKey);
            Set<String> referreeObjIds = pointerObjectIdToParentObjectsMap.keySet();

            JFMongoCmdQuery.Builder queryBuilder = new JFMongoCmdQuery.Builder(pointerColName);
            queryBuilder.whereContainedIn(Constants.Props.objectId.toString(), referreeObjIds);
            List<JFMongoObject> refereeObjects = queryBuilder.build().find();

            for (JFMongoObject refereeObj : refereeObjects) {
                String refereeObjId = refereeObj.getObjectId();
                List<JFMongoObject> parentObjs = pointerObjectIdToParentObjectsMap.get(refereeObjId);
                for (JFMongoObject parentObj : parentObjs) {
                    ContainerObjectPointer[] pointers = parentObj.getPointer(includeKey);
                    for (ContainerObjectPointer pointer : pointers) {
                        pointer.replaceObject(refereeObj);
                    }
                }
            }
        }
    }
}

From source file:fr.cirad.mgdb.exporting.markeroriented.VcfExportHandler.java

@Override
public void exportData(OutputStream outputStream, String sModule, List<SampleId> sampleIDs,
        ProgressIndicator progress, DBCursor markerCursor, Map<Comparable, Comparable> markerSynonyms,
        int nMinimumGenotypeQuality, int nMinimumReadDepth, Map<String, InputStream> readyToExportFiles)
        throws Exception {
    Integer projectId = null;
    for (SampleId spId : sampleIDs) {
        if (projectId == null)
            projectId = spId.getProject();
        else if (projectId != spId.getProject()) {
            projectId = 0;
            break; // more than one project is involved: no header will be written
        }
    }

    File warningFile = File.createTempFile("export_warnings_", "");
    FileWriter warningFileWriter = new FileWriter(warningFile);

    MongoTemplate mongoTemplate = MongoTemplateManager.get(sModule);
    int markerCount = markerCursor.count();
    ZipOutputStream zos = new ZipOutputStream(outputStream);

    if (readyToExportFiles != null)
        for (String readyToExportFile : readyToExportFiles.keySet()) {
            zos.putNextEntry(new ZipEntry(readyToExportFile));
            InputStream inputStream = readyToExportFiles.get(readyToExportFile);
            byte[] dataBlock = new byte[1024];
            int count = inputStream.read(dataBlock, 0, 1024);
            while (count != -1) {
                zos.write(dataBlock, 0, count);
                count = inputStream.read(dataBlock, 0, 1024);
            }
        }

    LinkedHashMap<SampleId, String> sampleIDToIndividualIdMap = new LinkedHashMap<SampleId, String>();
    ArrayList<String> individualList = new ArrayList<String>();
    List<Individual> individuals = getIndividualsFromSamples(sModule, sampleIDs);
    for (int i = 0; i < sampleIDs.size(); i++) {
        String individualId = individuals.get(i).getId();
        sampleIDToIndividualIdMap.put(sampleIDs.get(i), individualId);
        if (!individualList.contains(individualId)) {
            individualList.add(individualId);
        }
    }

    String exportName = sModule + "_" + markerCount + "variants_" + individualList.size() + "individuals";
    zos.putNextEntry(new ZipEntry(exportName + ".vcf"));

    int avgObjSize = (Integer) mongoTemplate
            .getCollection(mongoTemplate.getCollectionName(VariantRunData.class)).getStats().get("avgObjSize");
    int nQueryChunkSize = nMaxChunkSizeInMb * 1024 * 1024 / avgObjSize;

    VariantContextWriter writer = null;
    try {
        List<String> distinctSequenceNames = new ArrayList<String>();

        String sequenceSeqCollName = MongoTemplateManager.getMongoCollectionName(Sequence.class);
        if (mongoTemplate.collectionExists(sequenceSeqCollName)) {
            DBCursor markerCursorCopy = markerCursor.copy();
            markerCursorCopy.batchSize(nQueryChunkSize);
            while (markerCursorCopy.hasNext()) {
                int nLoadedMarkerCountInLoop = 0;
                boolean fStartingNewChunk = true;
                while (markerCursorCopy.hasNext()
                        && (fStartingNewChunk || nLoadedMarkerCountInLoop % nQueryChunkSize != 0)) {
                    DBObject exportVariant = markerCursorCopy.next();
                    String chr = (String) ((DBObject) exportVariant
                            .get(VariantData.FIELDNAME_REFERENCE_POSITION))
                                    .get(ReferencePosition.FIELDNAME_SEQUENCE);
                    if (!distinctSequenceNames.contains(chr))
                        distinctSequenceNames.add(chr);
                }
            }
            markerCursorCopy.close();
        }

        Collections.sort(distinctSequenceNames, new AlphaNumericStringComparator());
        SAMSequenceDictionary dict = createSAMSequenceDictionary(sModule, distinctSequenceNames);
        writer = new CustomVCFWriter(null, zos, dict, false, false, true);
        //         VariantContextWriterBuilder vcwb = new VariantContextWriterBuilder();
        //         vcwb.unsetOption(Options.INDEX_ON_THE_FLY);
        //         vcwb.unsetOption(Options.DO_NOT_WRITE_GENOTYPES);
        //         vcwb.setOption(Options.USE_ASYNC_IOINDEX_ON_THE_FLY);
        //         vcwb.setOption(Options.ALLOW_MISSING_FIELDS_IN_HEADER);
        //         vcwb.setReferenceDictionary(dict);
        //         writer = vcwb.build();
        //         writer = new AsyncVariantContextWriter(writer, 3000);

        progress.moveToNextStep(); // done with dictionary
        DBCursor headerCursor = mongoTemplate
                .getCollection(MongoTemplateManager.getMongoCollectionName(DBVCFHeader.class))
                .find(new BasicDBObject("_id." + VcfHeaderId.FIELDNAME_PROJECT, projectId));
        Set<VCFHeaderLine> headerLines = new HashSet<VCFHeaderLine>();
        boolean fWriteCommandLine = true, fWriteEngineHeaders = true; // default values

        while (headerCursor.hasNext()) {
            DBVCFHeader dbVcfHeader = DBVCFHeader.fromDBObject(headerCursor.next());
            headerLines.addAll(dbVcfHeader.getHeaderLines());

            // Add sequence header lines (not stored in our vcf header collection)
            BasicDBObject projection = new BasicDBObject(SequenceStats.FIELDNAME_SEQUENCE_LENGTH, true);
            int nSequenceIndex = 0;
            for (String sequenceName : distinctSequenceNames) {
                String sequenceInfoCollName = MongoTemplateManager.getMongoCollectionName(SequenceStats.class);
                boolean fCollectionExists = mongoTemplate.collectionExists(sequenceInfoCollName);
                if (fCollectionExists) {
                    DBObject record = mongoTemplate.getCollection(sequenceInfoCollName).findOne(
                            new Query(Criteria.where("_id").is(sequenceName)).getQueryObject(), projection);
                    if (record == null) {
                        LOG.warn("Sequence '" + sequenceName + "' not found in collection "
                                + sequenceInfoCollName);
                        continue;
                    }

                    Map<String, String> sequenceLineData = new LinkedHashMap<String, String>();
                    sequenceLineData.put("ID", (String) record.get("_id"));
                    sequenceLineData.put("length",
                            ((Number) record.get(SequenceStats.FIELDNAME_SEQUENCE_LENGTH)).toString());
                    headerLines.add(new VCFContigHeaderLine(sequenceLineData, nSequenceIndex++));
                }
            }
            fWriteCommandLine = headerCursor.size() == 1 && dbVcfHeader.getWriteCommandLine(); // wouldn't make sense to include command lines for several runs
            if (!dbVcfHeader.getWriteEngineHeaders())
                fWriteEngineHeaders = false;
        }
        headerCursor.close();

        VCFHeader header = new VCFHeader(headerLines, individualList);
        header.setWriteCommandLine(fWriteCommandLine);
        header.setWriteEngineHeaders(fWriteEngineHeaders);
        writer.writeHeader(header);

        short nProgress = 0, nPreviousProgress = 0;
        long nLoadedMarkerCount = 0;
        HashMap<SampleId, Comparable /*phID*/> phasingIDsBySample = new HashMap<SampleId, Comparable>();

        while (markerCursor.hasNext()) {
            if (progress.hasAborted())
                return;

            int nLoadedMarkerCountInLoop = 0;
            boolean fStartingNewChunk = true;
            markerCursor.batchSize(nQueryChunkSize);
            List<Comparable> currentMarkers = new ArrayList<Comparable>();
            while (markerCursor.hasNext()
                    && (fStartingNewChunk || nLoadedMarkerCountInLoop % nQueryChunkSize != 0)) {
                DBObject exportVariant = markerCursor.next();
                currentMarkers.add((Comparable) exportVariant.get("_id"));
                nLoadedMarkerCountInLoop++;
                fStartingNewChunk = false;
            }

            LinkedHashMap<VariantData, Collection<VariantRunData>> variantsAndRuns = MgdbDao.getSampleGenotypes(
                    mongoTemplate, sampleIDs, currentMarkers, true,
                    null /*new Sort(VariantData.FIELDNAME_REFERENCE_POSITION + "." + ChromosomalPosition.FIELDNAME_SEQUENCE).and(new Sort(VariantData.FIELDNAME_REFERENCE_POSITION + "." + ChromosomalPosition.FIELDNAME_START_SITE))*/); // query mongo db for matching genotypes
            for (VariantData variant : variantsAndRuns.keySet()) {
                VariantContext vc = variant.toVariantContext(variantsAndRuns.get(variant),
                        !ObjectId.isValid(variant.getId().toString()), sampleIDToIndividualIdMap,
                        phasingIDsBySample, nMinimumGenotypeQuality, nMinimumReadDepth, warningFileWriter,
                        markerSynonyms == null ? variant.getId() : markerSynonyms.get(variant.getId()));
                try {
                    writer.add(vc);
                } catch (Throwable t) {
                    Exception e = new Exception("Unable to convert to VariantContext: " + variant.getId(), t);
                    LOG.debug("error", e);
                    throw e;
                }

                if (nLoadedMarkerCountInLoop > currentMarkers.size())
                    LOG.error("Bug: writing variant number " + nLoadedMarkerCountInLoop + " (only "
                            + currentMarkers.size() + " variants expected)");
            }

            nLoadedMarkerCount += nLoadedMarkerCountInLoop;
            nProgress = (short) (nLoadedMarkerCount * 100 / markerCount);
            if (nProgress > nPreviousProgress) {
                progress.setCurrentStepProgress(nProgress);
                nPreviousProgress = nProgress;
            }
        }
        progress.setCurrentStepProgress((short) 100);

    } catch (Exception e) {
        LOG.error("Error exporting", e);
        progress.setError(e.getMessage());
        return;
    } finally {
        warningFileWriter.close();
        if (warningFile.length() > 0) {
            zos.putNextEntry(new ZipEntry(exportName + "-REMARKS.txt"));
            int nWarningCount = 0;
            BufferedReader in = new BufferedReader(new FileReader(warningFile));
            String sLine;
            while ((sLine = in.readLine()) != null) {
                zos.write((sLine + "\n").getBytes());
                nWarningCount++;
            }
            LOG.info("Number of Warnings for export (" + exportName + "): " + nWarningCount);
            in.close();
        }
        warningFile.delete();
        if (writer != null)
            try {
                writer.close();
            } catch (Throwable ignored) {
            }
    }
}