Example usage for java.util TreeMap putAll

List of usage examples for java.util TreeMap putAll

Introduction

On this page you can find examples showing how to use java.util.TreeMap.putAll(Map).

Prototype

public void putAll(Map<? extends K, ? extends V> map) 

Source Link

Document

Copies all of the mappings from the specified map to this map.

Usage

From source file:hoot.services.command.CommandRunner.java

/**
 * Logs the command line about to be executed together with its (sorted)
 * environment. When debug logging is enabled, the environment is also
 * written to a temp file.
 *
 * @param pCmdString  the command line to log.
 * @param unsortedEnv environment variables to log (sorted here for stable output).
 */
private void logExec(String pCmdString, Map<String, String> unsortedEnv) {

    if (_log.isInfoEnabled()) {

        // Sort the environment for deterministic, readable output.
        TreeMap<String, String> env = new TreeMap<String, String>();
        env.putAll(unsortedEnv);

        _log.info("Executing '" + pCmdString + "'");
        _log.info("Environment:");
        FileWriter writer = null;
        try {
            if (_log.isDebugEnabled()) {
                File envvarFile = File.createTempFile("envvars", ".txt");
                writer = new FileWriter(envvarFile);
                _log.debug("ENVVARS will be written to " + envvarFile.getAbsolutePath());
            }
            // Iterate entries directly instead of keySet() + get() lookups.
            for (Map.Entry<String, String> entry : env.entrySet()) {
                String pair = entry.getKey() + "=" + entry.getValue();
                _log.info(String.format("  %s", pair));
                // Guard on the writer itself: it is only non-null when debug
                // logging was enabled and the temp file was created.
                if (writer != null) {
                    writer.write(String.format("  %s%n", pair));
                }
            }
        } catch (Exception e) {
            _log.error("Unable to log exec call: " + ExceptionUtils.getStackTrace(e));
        } finally {
            // Fix: close in finally so the file handle is released even when
            // an exception interrupts the loop above.
            if (writer != null) {
                try {
                    writer.close();
                } catch (Exception closeEx) {
                    _log.error("Unable to close envvar file: " + closeEx);
                }
            }
        }
    }
}

From source file:api.wiki.WikiNameApi2.java

/**
 * Fetches one page (up to 500 entries) of category members starting at the
 * given continuation token, recursing while the API reports more pages.
 *
 * @param title          category title; whitespace is collapsed to underscores.
 * @param continueString MediaWiki "cmcontinue" pagination token.
 * @return map of name (title with any "(...)" suffix stripped) to full title.
 */
private TreeMap<String, String> getGenderNameContinue(String title, String continueString) {
    String query = BASE_URL + "&list=categorymembers&cmlimit=500&cmtitle=" + title.replaceAll("\\s+", "_")
            + "&cmprop=title&cmcontinue=" + continueString;
    TreeMap<String, String> values = new TreeMap<>();
    try {
        URL url = new URL(query);
        // Download the response to a temp file, then read it back.
        // NOTE(review): title is used as the temp-file suffix; titles with
        // filesystem-illegal characters would throw — confirm inputs.
        File file = File.createTempFile("WIKI_", title);
        try {
            FileUtils.copyURLToFile(url, file);
            // MediaWiki responses are UTF-8; the charset-less overload used
            // the platform default charset and is deprecated in commons-io.
            String s = FileUtils.readFileToString(file, "UTF-8");
            JSONObject j = new JSONObject(s);
            // Recurse first when the result set is paginated.
            if (j.has("query-continue")) {
                values.putAll(getGenderNameContinue(title, j.getJSONObject("query-continue")
                        .getJSONObject("categorymembers").getString("cmcontinue")));
            }
            JSONArray json = j.getJSONObject("query").getJSONArray("categorymembers");
            for (int i = 0; i < json.length(); i++) {
                String value = json.getJSONObject(i).getString("title");
                // Key is the title with any "(disambiguation)" part stripped.
                String key = value;
                if (key.contains("(")) {
                    key = key.substring(0, key.indexOf("("));
                }
                values.put(key, value);
            }
        } finally {
            // Fix: delete the temp file instead of leaking one per request.
            if (!file.delete()) {
                file.deleteOnExit();
            }
        }
    } catch (IOException ex) {
        Logger.getLogger(WikiNameApi2.class.getName()).log(Level.SEVERE, null, ex);
    }

    return values;
}

From source file:api.wiki.WikiNameApi2.java

/**
 * Fetches the first page (up to 500 entries) of category members for the
 * given title, following pagination via {@code getGenderNameContinue}.
 *
 * @param title category title; whitespace is collapsed to underscores.
 * @return map of name (title with any "(...)" suffix stripped) to full title.
 */
private TreeMap<String, String> getGenderNames(String title) {
    String query = BASE_URL + "&list=categorymembers&cmlimit=500&cmtitle=" + title.replaceAll("\\s+", "_")
            + "&cmprop=title";
    TreeMap<String, String> values = new TreeMap<>();
    try {
        URL url = new URL(query);
        // Download the response to a temp file, then read it back.
        // NOTE(review): title is used as the temp-file suffix; titles with
        // filesystem-illegal characters would throw — confirm inputs.
        File file = File.createTempFile("WIKI_", title);
        try {
            FileUtils.copyURLToFile(url, file);
            // MediaWiki responses are UTF-8; the charset-less overload used
            // the platform default charset and is deprecated in commons-io.
            String s = FileUtils.readFileToString(file, "UTF-8");
            JSONObject j = new JSONObject(s);
            // Pull in any further pages before adding this page's entries.
            if (j.has("query-continue")) {
                values.putAll(getGenderNameContinue(title, j.getJSONObject("query-continue")
                        .getJSONObject("categorymembers").getString("cmcontinue")));
            }
            JSONArray json = j.getJSONObject("query").getJSONArray("categorymembers");
            for (int i = 0; i < json.length(); i++) {
                String value = json.getJSONObject(i).getString("title");
                // Key is the title with any "(disambiguation)" part stripped.
                String key = value;
                if (key.contains("(")) {
                    key = key.substring(0, key.indexOf("("));
                }
                values.put(key, value);
            }
        } finally {
            // Fix: delete the temp file instead of leaking one per request.
            if (!file.delete()) {
                file.deleteOnExit();
            }
        }
    } catch (IOException ex) {
        Logger.getLogger(WikiNameApi2.class.getName()).log(Level.SEVERE, null, ex);
    }

    return values;
}

From source file:org.yccheok.jstock.gui.portfolio.AutoDividendJDialog.java

/**
 * Creates a new AutoDividendJDialog, listing one {@code AutoDividendJPanel}
 * per code, ordered by the string form of the code.
 *
 * @param parent    owning frame.
 * @param modal     whether the dialog blocks input to other windows.
 * @param dividends dividends to display, keyed by stock code.
 */
public AutoDividendJDialog(java.awt.Frame parent, boolean modal, Map<Code, List<Dividend>> dividends) {
    super(parent, modal);
    initComponents();

    JPanel content = new JPanel();
    content.setBorder(new EmptyBorder(10, 10, 10, 10));
    content.setLayout(new BoxLayout(content, BoxLayout.Y_AXIS));

    // Present the entries sorted by the textual form of their code.
    Comparator<Code> byCodeString = new Comparator<Code>() {
        @Override
        public int compare(Code lhs, Code rhs) {
            return lhs.toString().compareTo(rhs.toString());
        }
    };
    TreeMap<Code, List<Dividend>> sortedDividends = new TreeMap<Code, List<Dividend>>(byCodeString);
    sortedDividends.putAll(dividends);

    // One panel per code, separated by a small vertical gap.
    for (Map.Entry<Code, List<Dividend>> entry : sortedDividends.entrySet()) {
        AutoDividendJPanel child = new AutoDividendJPanel(this, entry.getValue());
        autoDividendJPanels.add(child);
        content.add(child);
        content.add(Box.createRigidArea(new Dimension(0, 5)));
    }

    this.jScrollPane1.setViewportView(content);

    updateTotalLabel();
}

From source file:com.telesign.util.TeleSignRequest.java

/**
 * Returns the set of both the standard <u>and</u> the
 * <em>TeleSign-specific</em> Request header fields.
 *
 * @return A sorted key/value mapping that contains all of the Request header fields.
 */
public Map<String, String> getAllHeaders() {
    // Seed with the standard headers, then overlay the TeleSign-specific
    // ones so a TeleSign header with the same name takes precedence.
    TreeMap<String, String> merged = new TreeMap<String, String>(headers);
    merged.putAll(ts_headers);
    return merged;
}

From source file:samza.samza_test.SamzaCountWindow.java

@SuppressWarnings("unchecked")
@Override
/*
 * Consumes one benchmark message and accumulates it into the current count
 * window. Each message is a space-separated line:
 *   parts[0] = benchmark name ("empty" | "filter" | "count" | "aggregate" | "topn" | "scan"),
 *   parts[1] = timestamp (ms), parts[2] = number of flows in this batch;
 *   further fields are branch-specific (see each section below).
 * When the accumulated flow count reaches windowLimit, a (Czech-language)
 * summary message is sent to the Kafka "out" stream and state is reset via
 * cleanVars(); an overshoot past windowLimit is reported as an error.
 */
public void process(IncomingMessageEnvelope envelope, MessageCollector collector, TaskCoordinator coordinator) {
    try {
        String input = mapper.readValue((byte[]) envelope.getMessage(), String.class);
        String[] parts = input.split(" ");

        long timestamp = Long.parseLong(parts[1]);
        //long timestamp = System.currentTimeMillis();
        // Widen the observed [timeStart, timeEnd] interval for this window.
        if (timestamp < timeStart) {
            timeStart = timestamp;
        }
        if (timestamp > timeEnd) {
            timeEnd = timestamp;
        }

        // Presumably a zero-flow batch marks the start of a new run — TODO confirm.
        if (Integer.parseInt(parts[2]) == 0) {
            startLast = timestamp;
            startLastFlows = totalFlows;
        }

        ////////////////////////////////////////          EMPTY FRAMEWORK          ////////////////////////////////////////
        if (parts[0].equals("empty")) {
            totalFlows += Integer.parseInt(parts[2]);
            if (totalFlows == windowLimit) {
                long postProcessingTime = System.currentTimeMillis();
                if (timeEnd < postProcessingTime) {
                    timeEnd = postProcessingTime;
                }
                // Speed in flows per millisecond = average in thousands of flows per second.
                // NOTE(review): divides by zero when timeEnd == timeStart — confirm a window always spans > 0 ms.
                long speed = windowLimit / (timeEnd - timeStart);
                String msg = "CountWindow se dopocital na hodnotu " + String.valueOf(windowLimit)
                        + " toku :), prumerna rychlost zpracovani byla " + String.valueOf(speed)
                        + "k toku za vterinu";
                collector.send(new OutgoingMessageEnvelope(new SystemStream("kafka", "out"),
                        mapper.writeValueAsBytes(msg)));
                cleanVars();
            }
            if (totalFlows > windowLimit) {
                // Overshoot: the flow total no longer fits the count window.
                String msg = "Chyba zpracovani, soucet toku nesedi do count okna!";
                collector.send(new OutgoingMessageEnvelope(new SystemStream("kafka", "out"),
                        mapper.writeValueAsBytes(msg)));
                cleanVars();
            }
        }

        ////////////////////////////////////////          TEST FILTER          ////////////////////////////////////////
        // parts[3] = number of flows matching the filter, parts[4] = filtered IP.
        if (parts[0].equals("filter")) {
            totalFlows += Integer.parseInt(parts[2]);
            filtered += Integer.parseInt(parts[3]);
            String IP = parts[4];
            if (totalFlows == windowLimit) {
                long postProcessingTime = System.currentTimeMillis();
                if (timeEnd < postProcessingTime) {
                    timeEnd = postProcessingTime;
                }
                // Speed in flows per millisecond = average in thousands of flows per second.
                long speed = windowLimit / (timeEnd - timeStart);
                String msg = "CountWindow se dopocital na hodnotu " + String.valueOf(windowLimit)
                        + " toku :), IP adresa " + IP + " mela " + String.valueOf(filtered)
                        + " toku. Prumerna rychlost zpracovani byla " + String.valueOf(speed)
                        + "k toku za vterinu";
                collector.send(new OutgoingMessageEnvelope(new SystemStream("kafka", "out"),
                        mapper.writeValueAsBytes(msg)));
                cleanVars();
            }
            if (totalFlows > windowLimit) {
                String msg = "Chyba zpracovani, soucet toku nesedi do count okna!";
                collector.send(new OutgoingMessageEnvelope(new SystemStream("kafka", "out"),
                        mapper.writeValueAsBytes(msg)));
                cleanVars();
            }
        }

        ////////////////////////////////////////          TEST COUNT          ////////////////////////////////////////
        // parts[3] = packet count for the IP in parts[4].
        if (parts[0].equals("count")) {
            totalFlows += Integer.parseInt(parts[2]);
            packets += Integer.parseInt(parts[3]);
            String IP = parts[4];
            if (totalFlows == windowLimit) {
                long postProcessingTime = System.currentTimeMillis();
                if (timeEnd < postProcessingTime) {
                    timeEnd = postProcessingTime;
                }
                // Speed in flows per millisecond = average in thousands of flows per second.
                long speed = windowLimit / (timeEnd - timeStart);
                String msg = "CountWindow se dopocital na hodnotu " + String.valueOf(windowLimit)
                        + " toku :), IP adresa " + IP + " mela " + String.valueOf(packets)
                        + " paketu. Prumerna rychlost zpracovani byla " + String.valueOf(speed)
                        + "k toku za vterinu";
                collector.send(new OutgoingMessageEnvelope(new SystemStream("kafka", "out"),
                        mapper.writeValueAsBytes(msg)));
                //speed = (windowLimit-startLastFlows)/(timeEnd-startLast);
                //msg = "Mereni od startu posledniho: , IP adresa " + IP + " mela " + String.valueOf(packets) +" paketu. Prumerna rychlost zpracovani byla "+String.valueOf(speed)+"k toku za vterinu";
                //collector.send(new OutgoingMessageEnvelope(new SystemStream("kafka", "samza-stats"), mapper.writeValueAsBytes(msg)));
                cleanVars();
            }
            if (totalFlows > windowLimit) {
                String msg = "Chyba zpracovani, soucet toku nesedi do count okna!";
                collector.send(new OutgoingMessageEnvelope(new SystemStream("kafka", "out"),
                        mapper.writeValueAsBytes(msg)));
                cleanVars();
            }
        }

        ////////////////////////////////////////          TEST AGGREGATE          ////////////////////////////////////////
        // Remaining fields look like "{IP=packets}" pairs; accumulate per-IP
        // packet totals in the `top` map.
        if (parts[0].equals("aggregate")) {
            totalFlows += Integer.parseInt(parts[2]);
            for (String field : parts) {
                String[] divided = field.split("=");
                if (divided.length > 1) {
                    String IP = divided[0];
                    // Strip a leading '{' left over from the map's toString form.
                    if (IP.charAt(0) == '{') {
                        IP = IP.substring(1);
                    }
                    // Drop the trailing '}' or ',' from the value token.
                    int packetsCount = Integer.parseInt(divided[1].substring(0, divided[1].length() - 1));
                    if (top.containsKey(IP)) {
                        int packetsFromMap = top.get(IP);
                        top.put(IP, packetsFromMap + packetsCount);
                    } else {
                        top.put(IP, packetsCount);
                    }
                }
            }
            if (totalFlows == windowLimit) {
                // Build a full "IP count, ..." dump of the aggregation (unused
                // in the message below, which reports a single fixed IP).
                Iterator<String> it = top.keySet().iterator();
                StringBuilder sb = new StringBuilder();
                while (it.hasNext()) {
                    String key = it.next();
                    sb.append(key).append(" ").append(String.valueOf(top.get(key))).append(", ");
                }

                long postProcessingTime = System.currentTimeMillis();
                if (timeEnd < postProcessingTime) {
                    timeEnd = postProcessingTime;
                }
                // Speed in flows per millisecond = average in thousands of flows per second.
                long speed = windowLimit / (timeEnd - timeStart);
                //String msg = "CountWindow se dopocital na hodnotu "+String.valueOf(windowLimit)+" toku :). Prumerna rychlost zpracovani byla "+String.valueOf(speed)+"k toku za vterinu. Vypis agregace: "+sb.toString();
                String msg = "CountWindow se dopocital na hodnotu " + String.valueOf(windowLimit)
                        + " toku :). Prumerna rychlost zpracovani byla " + String.valueOf(speed)
                        + "k toku za vterinu. Vypis agregace: ne v testovacim rezimu, pro IP 141.57.244.116 je paketu:"
                        + String.valueOf(top.get("141.57.244.116"));
                collector.send(new OutgoingMessageEnvelope(new SystemStream("kafka", "out"),
                        mapper.writeValueAsBytes(msg)));
                cleanVars();
            }
            if (totalFlows > windowLimit) {
                String msg = "Chyba zpracovani, soucet toku nesedi do count okna!";
                collector.send(new OutgoingMessageEnvelope(new SystemStream("kafka", "out"),
                        mapper.writeValueAsBytes(msg)));
                cleanVars();
            }
        }

        ////////////////////////////////////////          TEST TOP N          ////////////////////////////////////////
        // Same per-IP accumulation as "aggregate", then reports the top 10
        // IPs ordered by ValueComparator.
        if (parts[0].equals("topn")) {
            totalFlows += Integer.parseInt(parts[2]);
            for (String field : parts) {
                String[] divided = field.split("=");
                if (divided.length > 1) {
                    String IP = divided[0];
                    if (IP.charAt(0) == '{') {
                        IP = IP.substring(1);
                    }
                    int packetsCount = Integer.parseInt(divided[1].substring(0, divided[1].length() - 1));
                    if (top.containsKey(IP)) {
                        int packetsFromMap = top.get(IP);
                        top.put(IP, packetsFromMap + packetsCount);
                    } else {
                        top.put(IP, packetsCount);
                    }
                }
            }
            if (totalFlows == windowLimit) {
                // Re-sort the accumulated map by value via ValueComparator.
                ValueComparator bvc = new ValueComparator(top);
                TreeMap<String, Integer> sorted = new TreeMap<>(bvc);
                sorted.putAll(top);
                Iterator<String> it = sorted.keySet().iterator();
                int i = 1;
                StringBuilder sb = new StringBuilder();
                // Emit at most the first 10 entries as "rank IP count, ".
                while (it.hasNext()) {
                    String key = it.next();
                    sb.append(String.valueOf(i)).append(" ").append(key).append(" ")
                            .append(String.valueOf(top.get(key))).append(", ");
                    i++;
                    if (i > 10) {
                        break;
                    }

                }

                long postProcessingTime = System.currentTimeMillis();
                if (timeEnd < postProcessingTime) {
                    timeEnd = postProcessingTime;
                }
                // Speed in flows per millisecond = average in thousands of flows per second.
                long speed = windowLimit / (timeEnd - timeStart);
                String msg = "CountWindow se dopocital na hodnotu " + String.valueOf(windowLimit)
                        + " toku :). Prumerna rychlost zpracovani byla " + String.valueOf(speed)
                        + "k toku za vterinu. Vypis TOP 10: " + sb.toString();
                collector.send(new OutgoingMessageEnvelope(new SystemStream("kafka", "out"),
                        mapper.writeValueAsBytes(msg)));
                cleanVars();
            }
            if (totalFlows > windowLimit) {
                String msg = "Chyba zpracovani, soucet toku nesedi do count okna!";
                collector.send(new OutgoingMessageEnvelope(new SystemStream("kafka", "out"),
                        mapper.writeValueAsBytes(msg)));
                cleanVars();
            }
        }

        ////////////////////////////////////////          TEST SYN SCAN          ////////////////////////////////////////
        // Identical to "topn" except it keeps the first 100 entries.
        if (parts[0].equals("scan")) {
            totalFlows += Integer.parseInt(parts[2]);
            for (String field : parts) {
                String[] divided = field.split("=");
                if (divided.length > 1) {
                    String IP = divided[0];
                    if (IP.charAt(0) == '{') {
                        IP = IP.substring(1);
                    }
                    int packetsCount = Integer.parseInt(divided[1].substring(0, divided[1].length() - 1));
                    if (top.containsKey(IP)) {
                        int packetsFromMap = top.get(IP);
                        top.put(IP, packetsFromMap + packetsCount);
                    } else {
                        top.put(IP, packetsCount);
                    }
                }
            }
            if (totalFlows == windowLimit) {
                ValueComparator bvc = new ValueComparator(top);
                TreeMap<String, Integer> sorted = new TreeMap<>(bvc);
                sorted.putAll(top);
                Iterator<String> it = sorted.keySet().iterator();
                int i = 1;
                StringBuilder sb = new StringBuilder();
                while (it.hasNext()) {
                    String key = it.next();
                    sb.append(String.valueOf(i)).append(" ").append(key).append(" ")
                            .append(String.valueOf(top.get(key))).append(", ");
                    i++;
                    if (i > 100) {
                        break;
                    }

                }

                long postProcessingTime = System.currentTimeMillis();
                if (timeEnd < postProcessingTime) {
                    timeEnd = postProcessingTime;
                }
                // Speed in flows per millisecond = average in thousands of flows per second.
                long speed = windowLimit / (timeEnd - timeStart);
                // NOTE(review): iterates the top 100 entries but the message
                // says "TOP 10" — likely copy/paste from the topn branch; confirm.
                String msg = "CountWindow se dopocital na hodnotu " + String.valueOf(windowLimit)
                        + " toku :). Prumerna rychlost zpracovani byla " + String.valueOf(speed)
                        + "k toku za vterinu. Vypis TOP 10: " + sb.toString();
                collector.send(new OutgoingMessageEnvelope(new SystemStream("kafka", "out"),
                        mapper.writeValueAsBytes(msg)));
                cleanVars();
            }
            if (totalFlows > windowLimit) {
                String msg = "Chyba zpracovani, soucet toku nesedi do count okna!";
                collector.send(new OutgoingMessageEnvelope(new SystemStream("kafka", "out"),
                        mapper.writeValueAsBytes(msg)));
                cleanVars();
            }
        }

    } catch (IOException | NumberFormatException e) {
        Logger.getLogger(SamzaCountWindow.class.getName()).log(Level.SEVERE, null, e);
    }
}

From source file:org.ala.spatial.analysis.layers.SitesBySpeciesTabulated.java

/**
 * write bioregion tabulation./*  w w w.  j av  a 2  s  .c o m*/
 * <p/>
 * Output filename is name + ".csv" and name + ".json".
 *
 * @param name            output filename
 * @param outputDirectory directory for output.
 * @param columns         list of the bioregion names.
 * @param bioMap          data to write.
 * @return
 */
private Map writeBioregions(String name, String outputDirectory, String[] columns,
        HashMap<Integer, Integer>[] bioMap) {
    Map map = new HashMap();
    ArrayList array = new ArrayList();
    try {
        FileWriter fw = new FileWriter(outputDirectory + File.separator + name + ".csv");

        //identify column numbers
        TreeMap<Integer, Integer> tm = new TreeMap();
        for (int i = 0; i < columns.length; i++) {
            tm.putAll(bioMap[i]);
        }
        Integer[] cols = new Integer[tm.size()];
        tm.keySet().toArray(cols);

        ArrayList<Integer> c = new ArrayList<Integer>();
        for (int j = 0; j < cols.length; j++) {
            c.add(cols[j]);
            fw.write(",\"" + cols[j] + "\"");
        }

        //bioregion rows
        for (int i = 0; i < columns.length + 1; i++) {
            if (bioMap[i].size() > 0) {
                ArrayList array2 = new ArrayList();
                String rowname = "Undefined";
                if (i > 0) {
                    rowname = columns[i - 1];
                }
                fw.write("\n\"" + rowname + "\"");
                //count columns
                for (int j = 0; j < cols.length; j++) {
                    Integer v = bioMap[i].get(cols[j]);
                    fw.write(",");
                    if (v != null) {
                        fw.write(v.toString());
                        array2.add(v.toString());
                    } else {
                        array2.add("");
                    }
                }
                Map m3 = new HashMap();
                m3.put("name", rowname);
                m3.put("row", array2);
                array.add(m3);
            }
        }

        Map m4 = new HashMap();
        m4.put("rows", array);
        m4.put("columns", c);
        map.put(name, m4);

        fw.close();

        fw = new FileWriter(outputDirectory + File.separator + name + ".json");
        JSONObject.writeJSONString(map, fw);
        fw.close();
    } catch (Exception e) {
        e.printStackTrace();
    }
    return map;
}

From source file:org.ala.spatial.analysis.layers.SitesBySpeciesTabulated.java

/**
 * write decade counts tabulation.//  ww w  . j av  a 2 s.c o m
 * <p/>
 * Output filename is "decadecounts.csv" and "decadecounts.json".
 *
 * @param outputDirectory path to output directory.
 * @param decadeIdx       array of decades.
 * @param decMap          array of map of values to write.
 * @return
 */
private Map writeDecadeCounts(String outputDirectory, HashMap<Integer, Integer>[] decCountMap) {
    Map map = new HashMap();
    ArrayList array = new ArrayList();

    try {
        FileWriter fw = new FileWriter(outputDirectory + File.separator + "decadecounts.csv");

        //identify column numbers
        TreeMap<Integer, Integer> tm = new TreeMap();
        for (int i = 1; i < decCountMap.length; i++) {
            tm.putAll(decCountMap[i]);
        }
        Integer[] cols = new Integer[tm.size()];
        tm.keySet().toArray(cols);

        ArrayList<Integer> c = new ArrayList<Integer>();
        for (int j = 0; j < cols.length; j++) {
            c.add(cols[j]);
            fw.write(",\"" + cols[j] + "\"");
        }

        //bioregion rows
        for (int i = 1; i < decCountMap.length; i++) {
            if (decCountMap[i].size() > 0) {
                ArrayList array2 = new ArrayList();
                String rowname = i + " Decades";
                fw.write("\n\"" + rowname + "\"");
                //count columns
                for (int j = 0; j < cols.length; j++) {
                    Integer v = decCountMap[i].get(cols[j]);
                    fw.write(",");
                    if (v != null) {
                        fw.write(v.toString());
                        array2.add(v.toString());
                    } else {
                        array2.add("");
                    }
                }
                Map m3 = new HashMap();
                m3.put("name", rowname);
                m3.put("row", array2);
                array.add(m3);
            }
        }

        Map m4 = new HashMap();
        m4.put("rows", array);
        m4.put("columns", c);
        map.put("decadecounts", m4);

        fw.close();

        fw = new FileWriter(outputDirectory + File.separator + "decadecounts.json");
        JSONObject.writeJSONString(map, fw);
        fw.close();

    } catch (Exception e) {
        e.printStackTrace();
    }

    return map;
}

From source file:org.ala.spatial.analysis.layers.SitesBySpeciesTabulated.java

/**
 * write decades tabulation./*from w w  w  .jav a  2 s  .  c om*/
 * <p/>
 * Output filename is "decades.csv" and "decades.json".
 *
 * @param outputDirectory path to output directory.
 * @param decadeIdx       array of decades.
 * @param decMap          array of map of values to write.
 * @return
 */
private Map writeDecades(String outputDirectory, short[] decadeIdx, HashMap<Integer, Integer>[] decMap) {
    Map map = new HashMap();
    ArrayList array = new ArrayList();

    try {
        FileWriter fw = new FileWriter(outputDirectory + File.separator + "decades.csv");

        //identify column numbers
        TreeMap<Integer, Integer> tm = new TreeMap();
        for (int i = 0; i < decMap.length; i++) {
            tm.putAll(decMap[i]);
        }
        Integer[] cols = new Integer[tm.size()];
        tm.keySet().toArray(cols);

        ArrayList<Integer> c = new ArrayList<Integer>();
        for (int j = 0; j < cols.length; j++) {
            c.add(cols[j]);
            fw.write(",\"" + cols[j] + "\"");
        }

        //bioregion rows
        for (int i = 0; i < decMap.length; i++) {
            if (decMap[i].size() > 0) {
                ArrayList array2 = new ArrayList();
                int pos = java.util.Arrays.binarySearch(decadeIdx, (short) i);
                //seek to first
                while (pos > 0 && decadeIdx[pos - 1] == i) {
                    pos--;
                }
                String rowname = "no year recorded";
                if (i > 0) {
                    rowname = pos + " to " + (pos + 9);
                }
                fw.write("\n\"" + rowname + "\"");
                //count columns
                for (int j = 0; j < cols.length; j++) {
                    Integer v = decMap[i].get(cols[j]);
                    fw.write(",");
                    if (v != null) {
                        fw.write(v.toString());
                        array2.add(v.toString());
                    } else {
                        array2.add("");
                    }
                }
                Map m3 = new HashMap();
                m3.put("name", rowname);
                m3.put("row", array2);
                array.add(m3);
            }
        }

        Map m4 = new HashMap();
        m4.put("rows", array);
        m4.put("columns", c);
        map.put("decades", m4);

        fw.close();

        fw = new FileWriter(outputDirectory + File.separator + "decades.json");
        JSONObject.writeJSONString(map, fw);
        fw.close();

    } catch (Exception e) {
        e.printStackTrace();
    }

    return map;
}

From source file:com.evolveum.midpoint.wf.impl.jobs.JobController.java

/**
 * Returns the event's process variables as a key-sorted map; empty when the
 * event carries no variables.
 *
 * @param event source of the variables (its variables may be null).
 * @return sorted map of variable name to value, never null.
 */
private Map<String, Object> getVariablesSorted(ProcessEvent event) {
    Map<String, Object> sorted = new TreeMap<String, Object>();
    if (event.getVariables() != null) {
        sorted.putAll(event.getVariables());
    }
    return sorted;
}