Example usage for java.util TreeMap entrySet

List of usage examples for java.util TreeMap entrySet

Introduction

This page collects example usages of java.util.TreeMap.entrySet().

Prototype

public Set<Map.Entry<K, V>> entrySet()

Document

Returns a Set view of the mappings contained in this map; the set's iterator returns the entries in ascending key order. The view is created lazily: a field is initialized to contain an instance of it the first time it is requested.
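
Before the project-specific snippets below, here is a minimal, self-contained sketch of the pattern they all share: iterating a TreeMap via entrySet() to visit keys and values in ascending key order. The map contents are illustrative only.

import java.util.Map;
import java.util.TreeMap;

public class TreeMapEntrySetExample {
    public static void main(String[] args) {
        TreeMap<String, Integer> counts = new TreeMap<String, Integer>();
        counts.put("banana", 2);
        counts.put("apple", 5);
        counts.put("cherry", 1);

        // entrySet() returns a Set view backed by the map; iteration follows key order
        for (Map.Entry<String, Integer> entry : counts.entrySet()) {
            System.out.println(entry.getKey() + " = " + entry.getValue());
        }
    }
}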

Usage

From source file:org.kse.crypto.signing.MidletSigner.java

/**
 * Sign a JAD file outputting the modified JAD to a different file.
 *
 * @param jadFile
 *            JAD file
 * @param outputJadFile
 *            Output JAD file
 * @param jarFile
 *            JAR file
 * @param privateKey
 *            Private RSA key to sign with
 * @param certificateChain
 *            Certificate chain for private key
 * @param certificateNumber
 *            Certificate number
 * @throws IOException
 *             If an I/O problem occurs while signing the MIDlet
 * @throws CryptoException
 *             If a crypto problem occurs while signing the MIDlet
 */
public static void sign(File jadFile, File outputJadFile, File jarFile, RSAPrivateKey privateKey,
        X509Certificate[] certificateChain, int certificateNumber) throws IOException, CryptoException {
    Properties jadProperties = readJadFile(jadFile);

    Properties newJadProperties = new Properties();

    // Copy over existing attrs (excepting digest and any certificates at
    // provided number)
    for (Enumeration<?> enumPropNames = jadProperties.propertyNames(); enumPropNames.hasMoreElements();) {
        String propName = (String) enumPropNames.nextElement();

        // Ignore digest attr
        if (propName.equals(MIDLET_JAR_RSA_SHA1_ATTR)) {
            continue;
        }

        // Ignore certificates at provided number
        if (propName.startsWith(MessageFormat.format(SUB_MIDLET_CERTIFICATE_ATTR, certificateNumber))) {
            continue;
        }

        newJadProperties.put(propName, jadProperties.getProperty(propName));
    }

    // Get certificate attrs
    for (int i = 0; i < certificateChain.length; i++) {
        X509Certificate certificate = certificateChain[i];
        String base64Cert = null;
        try {
            base64Cert = new String(Base64.encode(certificate.getEncoded()));
        } catch (CertificateEncodingException ex) {
            throw new CryptoException(res.getString("Base64CertificateFailed.exception.message"), ex);
        }

        String midletCertificateAttr = MessageFormat.format(MIDLET_CERTIFICATE_ATTR, certificateNumber,
                (i + 1));
        newJadProperties.put(midletCertificateAttr, base64Cert);
    }

    // Get signed Base 64 SHA-1 digest of JAR file as attr
    byte[] signedJarDigest = signJarDigest(jarFile, privateKey);
    String base64SignedJarDigest = new String(Base64.encode(signedJarDigest));
    newJadProperties.put(MIDLET_JAR_RSA_SHA1_ATTR, base64SignedJarDigest);

    // Sort properties alphabetically
    TreeMap<String, String> sortedJadProperties = new TreeMap<String, String>();

    for (Enumeration<?> names = newJadProperties.propertyNames(); names.hasMoreElements();) {
        String name = (String) names.nextElement();
        String value = newJadProperties.getProperty(name);

        sortedJadProperties.put(name, value);
    }

    // Write out new JAD properties to JAD file
    FileWriter fw = null;

    try {
        fw = new FileWriter(outputJadFile);

        for (Iterator<Entry<String, String>> itrSorted = sortedJadProperties.entrySet().iterator(); itrSorted
                .hasNext();) {
            Entry<String, String> property = itrSorted.next();

            fw.write(MessageFormat.format(JAD_ATTR_TEMPLATE, property.getKey(), property.getValue()));
            fw.write(CRLF);
        }
    } finally {
        IOUtils.closeQuietly(fw);
    }
}
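
A hypothetical call site for the method above; the file names and certificate number are placeholders, and privateKey and certificateChain are assumed to have been loaded from a keystore beforehand.

// Hypothetical usage; paths are placeholders, and privateKey / certificateChain
// are assumed to come from a keystore loaded elsewhere.
MidletSigner.sign(new File("midlet.jad"), new File("midlet-signed.jad"),
        new File("midlet.jar"), privateKey, certificateChain, 1);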

From source file:org.apache.hadoop.hbase.index.mapreduce.IndexLoadIncrementalHFile.java

public static byte[][] inferBoundaries(TreeMap<byte[], Integer> bdryMap) {
    ArrayList<byte[]> keysArray = new ArrayList<byte[]>();
    int runningValue = 0;
    byte[] currStartKey = null;
    boolean firstBoundary = true;

    for (Map.Entry<byte[], Integer> item : bdryMap.entrySet()) {
        if (runningValue == 0)
            currStartKey = item.getKey();
        runningValue += item.getValue();
        if (runningValue == 0) {
            if (!firstBoundary)
                keysArray.add(currStartKey);
            firstBoundary = false;
        }
    }

    return keysArray.toArray(new byte[0][0]);
}
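
A brief sketch of how the boundary map might be populated before calling the method above. Since byte[] has no natural ordering, Bytes.BYTES_COMPARATOR from org.apache.hadoop.hbase.util.Bytes is assumed as the TreeMap comparator, and the +1/-1 marking of range start and end keys is an assumed convention, not taken from the original source.

// Sketch only: byte[] keys need an explicit comparator.
TreeMap<byte[], Integer> bdryMap = new TreeMap<byte[], Integer>(Bytes.BYTES_COMPARATOR);
bdryMap.put(Bytes.toBytes("rowA"), 1);   // assumed: +1 marks a range start key
bdryMap.put(Bytes.toBytes("rowM"), -1);  // assumed: -1 marks the matching end key
byte[][] splitKeys = inferBoundaries(bdryMap);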

From source file:module.entities.NameFinder.DB.java

public static void InsertJsonLemmas(TreeMap<EntityEntry, Integer> docEntities, int text_id, int jsonKey)
        throws SQLException {
    String insertSQL = "INSERT INTO json_annotated_lemmas "
            + "(lemma_text,lemma_category,lemma_text_id,lemma_jsonKey,lemma_count) VALUES" + "(?,?,?,?,?)";
    PreparedStatement prepStatement = connection.prepareStatement(insertSQL);
    for (Map.Entry<EntityEntry, Integer> ent : docEntities.entrySet()) {

        prepStatement.setString(1, ent.getKey().text);
        prepStatement.setString(2, ent.getKey().category);
        prepStatement.setInt(3, text_id);
        prepStatement.setInt(4, jsonKey);
        prepStatement.setInt(5, ent.getValue().intValue());
        prepStatement.addBatch();
    }
    prepStatement.executeBatch();
    prepStatement.close();

}

From source file:com.sangupta.jerry.oauth.OAuthUtils.java

/**
 * Get a list of all non-OAuth params from the given map.
 *
 * @param params the request parameters, sorted by name
 * @param includeOAuthParamsInBody whether oauth_* parameters should also be included
 * @return the name/value pairs to send in the request body
 */
private static List<NameValuePair> getBodyParams(TreeMap<String, String> params,
        boolean includeOAuthParamsInBody) {
    final List<NameValuePair> nameValuePairs = new ArrayList<NameValuePair>();

    for (Entry<String, String> entry : params.entrySet()) {
        String key = entry.getKey();
        if (!includeOAuthParamsInBody && key.startsWith("oauth_")) {
            continue;
        }

        nameValuePairs.add(new BasicNameValuePair(key, entry.getValue()));
    }

    return nameValuePairs;
}

From source file:Main.java

/**
 * Gather all the namespaces defined on a node, including those inherited
 * from its ancestor elements.
 *
 * @param element the element to collect namespace declarations for
 * @return the prefix-to-namespace-URI mappings in effect on the element
 */
public static Iterable<Entry<String, String>> getNamespaces(Element element) {
    TreeMap<String, String> map = new TreeMap<String, String>();
    do {
        NamedNodeMap attributes = element.getAttributes();
        for (int i = 0; i < attributes.getLength(); i++) {
            Attr attr = (Attr) attributes.item(i);
            final String name = attr.getLocalName();

            if (attr.getPrefix() != null) {
                if ("xmlns".equals(attr.getPrefix()))
                    if (!map.containsKey(name))
                        map.put(name, attr.getValue());
            } else if ("xmlns".equals(name)) {
                if (!map.containsKey(""))
                    map.put("", attr.getValue());
            }
        }
        if (element.getParentNode() == null || element.getParentNode().getNodeType() != Node.ELEMENT_NODE)
            break;
        element = (Element) element.getParentNode();
    } while (true);
    return map.entrySet();
}
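
A short usage sketch for the helper above, run against a small namespace-aware DOM document; the XML content is illustrative and checked-exception handling is omitted for brevity.

// Illustrative usage; exception handling omitted.
DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
dbf.setNamespaceAware(true);
Document doc = dbf.newDocumentBuilder().parse(new InputSource(
        new StringReader("<root xmlns='urn:a' xmlns:b='urn:b'><b:child/></root>")));
Element child = (Element) doc.getDocumentElement().getFirstChild();
for (Entry<String, String> ns : getNamespaces(child)) {
    System.out.println("prefix '" + ns.getKey() + "' -> " + ns.getValue());
}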

From source file:anslab2.AnsLab2.java

public static DefaultTableModel generateTable(boolean fdt, boolean rft, int dataType, ArrayList labels,
        String label) {
    //DefaultTableModel model = new DefaultTableModel();

    HashMap<String, Integer> map = new HashMap<String, Integer>();

    for (Object temp : labels) {
        Integer count = map.get(String.valueOf(temp));
        map.put(String.valueOf(temp), (count == null) ? 1 : count + 1);
    }

    Vector _label = new Vector();
    Vector _freq = new Vector();
    Vector _rel_freq = new Vector();

    if (dataType == 1 || dataType == 3) {
        for (Map.Entry<String, Integer> entry : map.entrySet()) {
            _label.add(entry.getKey());
            _freq.add(entry.getValue());
            _rel_freq.add(((double) entry.getValue() / labels.size()) * (100));
        }
        string_maps = map;
        model.addColumn(label, _label);
    }

    else if (dataType == 2) {
        TreeMap<Double, Integer> num_map = new TreeMap<Double, Integer>();

        for (Map.Entry<String, Integer> entry : map.entrySet()) {
            num_map.put(Double.valueOf(entry.getKey()), entry.getValue());
        }

        for (Map.Entry<Double, Integer> entry1 : num_map.entrySet()) {
            _label.add(entry1.getKey());
            _freq.add(entry1.getValue());
            _rel_freq.add(round(((double) entry1.getValue() / labels.size()) * (100), 2));
        }
        double_maps = num_map;
        model.addColumn(label, _label);
    }

    if (fdt == true) {
        model.addColumn("Frequency", _freq);
    }
    if (rft == true) {
        model.addColumn("Relative Frequency", _rel_freq);
    }

    return model;
}

From source file:voldemort.store.readonly.mr.utils.HadoopUtils.java

/**
 * Read the metadata from a Hadoop SequenceFile.
 *
 * @param fs the filesystem to read from
 * @param path the file to read from
 * @return the metadata from this file
 */
public static Map<String, String> getMetadataFromSequenceFile(FileSystem fs, Path path) {
    try {
        Configuration conf = new Configuration();
        conf.setInt("io.file.buffer.size", 4096);
        SequenceFile.Reader reader = new SequenceFile.Reader(fs, path, conf);
        SequenceFile.Metadata meta = reader.getMetadata();
        reader.close();
        TreeMap<Text, Text> map = meta.getMetadata();
        Map<String, String> values = new HashMap<String, String>();
        for (Map.Entry<Text, Text> entry : map.entrySet())
            values.put(entry.getKey().toString(), entry.getValue().toString());

        return values;
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
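
A hypothetical call to the helper above that prints the stored metadata; the path is a placeholder and exception handling is omitted.

// Hypothetical usage; the path below is a placeholder.
FileSystem fs = FileSystem.get(new Configuration());
Map<String, String> metadata = getMetadataFromSequenceFile(fs, new Path("/data/store/part-00000"));
for (Map.Entry<String, String> entry : metadata.entrySet()) {
    System.out.println(entry.getKey() + " = " + entry.getValue());
}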

From source file:snapshot.java

public static Object respond(final RequestHeader header, serverObjects post, final serverSwitch env) {
    final Switchboard sb = (Switchboard) env;

    final serverObjects defaultResponse = new serverObjects();

    final boolean authenticated = sb.adminAuthenticated(header) >= 2;
    final String ext = header.get(HeaderFramework.CONNECTION_PROP_EXT, "");

    if (ext.isEmpty()) {
        throw new TemplateProcessingException(
                "Missing extension. Try with rss, xml, json, pdf, png or jpg." + ext,
                HttpStatus.SC_BAD_REQUEST);
    }

    if (ext.equals("rss")) {
        // create a report about the content of the snapshot directory
        if (!authenticated) {
            defaultResponse.authenticationRequired();
            return defaultResponse;
        }
        int maxcount = post == null ? 10 : post.getInt("maxcount", 10);
        int depthx = post == null ? -1 : post.getInt("depth", -1);
        Integer depth = depthx == -1 ? null : depthx;
        String orderx = post == null ? "ANY" : post.get("order", "ANY");
        Snapshots.Order order = Snapshots.Order.valueOf(orderx);
        String statex = post == null ? Transactions.State.INVENTORY.name()
                : post.get("state", Transactions.State.INVENTORY.name());
        Transactions.State state = Transactions.State.valueOf(statex);
        String host = post == null ? null : post.get("host");
        Map<String, Revisions> iddate = Transactions.select(host, depth, order, maxcount, state);
        // now select the URL from the index for these ids in iddate and make an RSS feed
        RSSFeed rssfeed = new RSSFeed(Integer.MAX_VALUE);
        rssfeed.setChannel(new RSSMessage("Snapshot list for host = " + host + ", depth = " + depth
                + ", order = " + order + ", maxcount = " + maxcount, "", ""));
        for (Map.Entry<String, Revisions> e : iddate.entrySet()) {
            try {
                DigestURL u = e.getValue().url == null ? sb.index.fulltext().getURL(e.getKey())
                        : new DigestURL(e.getValue().url);
                if (u == null)
                    continue;
                RSSMessage message = new RSSMessage(u.toNormalform(true), "", u, e.getKey());
                message.setPubDate(e.getValue().dates[0]);
                rssfeed.addMessage(message);
            } catch (IOException ee) {
                ConcurrentLog.logException(ee);
            }
        }
        byte[] rssBinary = UTF8.getBytes(rssfeed.toString());
        return new ByteArrayInputStream(rssBinary);
    }

    // for the following methods we (mostly) need an url or a url hash
    if (post == null)
        post = new serverObjects();
    final boolean xml = ext.equals("xml");
    final boolean pdf = ext.equals("pdf");
    if (pdf && !authenticated) {
        defaultResponse.authenticationRequired();
        return defaultResponse;
    }
    final boolean pngjpg = ext.equals("png") || ext.equals(DEFAULT_EXT);
    String urlhash = post.get("urlhash", "");
    String url = post.get("url", "");
    DigestURL durl = null;
    if (urlhash.length() == 0 && url.length() > 0) {
        try {
            durl = new DigestURL(url);
            urlhash = ASCII.String(durl.hash());
        } catch (MalformedURLException e) {
        }
    }
    if (durl == null && urlhash.length() > 0) {
        try {
            durl = sb.index.fulltext().getURL(urlhash);
        } catch (IOException e) {
            ConcurrentLog.logException(e);
        }
    }

    if (ext.equals("json")) {
        // command interface: view and change a transaction state, get metadata about transactions in the past
        String command = post.get("command", "metadata");
        String statename = post.get("state");
        JSONObject result = new JSONObject();
        try {
            if (command.equals("status")) {
                // return a status of the transaction archive
                JSONObject sizes = new JSONObject();
                for (Map.Entry<String, Integer> state : Transactions.sizes().entrySet())
                    sizes.put(state.getKey(), state.getValue());
                result.put("size", sizes);
            } else if (command.equals("list")) {
                if (!authenticated) {
                    defaultResponse.authenticationRequired();
                    return defaultResponse;
                }
                // return a status of the transaction archive
                String host = post.get("host");
                String depth = post.get("depth");
                int depthi = depth == null ? -1 : Integer.parseInt(depth);
                for (Transactions.State state : statename == null
                        ? new Transactions.State[] { Transactions.State.INVENTORY, Transactions.State.ARCHIVE }
                        : new Transactions.State[] { Transactions.State.valueOf(statename) }) {
                    if (host == null) {
                        JSONObject hostCountInventory = new JSONObject();
                        for (String h : Transactions.listHosts(state)) {
                            int size = Transactions.listIDsSize(h, depthi, state);
                            if (size > 0)
                                hostCountInventory.put(h, size);
                        }
                        result.put("count." + state.name(), hostCountInventory);
                    } else {
                        TreeMap<Integer, Collection<Revisions>> ids = Transactions.listIDs(host, depthi, state);
                        if (ids == null) {
                            result.put("result", "fail");
                            result.put("comment", "no entries for host " + host + " found");
                        } else {
                            for (Map.Entry<Integer, Collection<Revisions>> entry : ids.entrySet()) {
                                for (Revisions r : entry.getValue()) {
                                    try {
                                        JSONObject metadata = new JSONObject();
                                        DigestURL u = r.url != null ? new DigestURL(r.url)
                                                : sb.index.fulltext().getURL(r.urlhash);
                                        metadata.put("url", u == null ? "unknown" : u.toNormalform(true));
                                        metadata.put("dates", r.dates);
                                        assert r.depth == entry.getKey().intValue();
                                        metadata.put("depth", entry.getKey().intValue());
                                        result.put(r.urlhash, metadata);
                                    } catch (IOException e) {
                                    }
                                }
                            }
                        }
                    }
                }
            } else if (command.equals("commit")) {
                if (!authenticated) {
                    defaultResponse.authenticationRequired();
                    return defaultResponse;
                }
                Revisions r = Transactions.commit(urlhash);
                if (r != null) {
                    result.put("result", "success");
                    result.put("depth", r.depth);
                    result.put("url", r.url);
                    result.put("dates", r.dates);
                } else {
                    result.put("result", "fail");
                }
                result.put("urlhash", urlhash);
            } else if (command.equals("rollback")) {
                if (!authenticated) {
                    defaultResponse.authenticationRequired();
                    return defaultResponse;
                }
                Revisions r = Transactions.rollback(urlhash);
                if (r != null) {
                    result.put("result", "success");
                    result.put("depth", r.depth);
                    result.put("url", r.url);
                    result.put("dates", r.dates);
                } else {
                    result.put("result", "fail");
                }
                result.put("urlhash", urlhash);
            } else if (command.equals("metadata")) {
                try {
                    Revisions r;
                    Transactions.State state = statename == null || statename.length() == 0 ? null
                            : Transactions.State.valueOf(statename);
                    if (state == null) {
                        r = Transactions.getRevisions(Transactions.State.INVENTORY, urlhash);
                        if (r != null)
                            state = Transactions.State.INVENTORY;
                        r = Transactions.getRevisions(Transactions.State.ARCHIVE, urlhash);
                        if (r != null)
                            state = Transactions.State.ARCHIVE;
                    } else {
                        r = Transactions.getRevisions(state, urlhash);
                    }
                    if (r != null) {
                        JSONObject metadata = new JSONObject();
                        DigestURL u;
                        u = r.url != null ? new DigestURL(r.url) : sb.index.fulltext().getURL(r.urlhash);
                        metadata.put("url", u == null ? "unknown" : u.toNormalform(true));
                        metadata.put("dates", r.dates);
                        metadata.put("depth", r.depth);
                        metadata.put("state", state.name());
                        result.put(r.urlhash, metadata);
                    }
                } catch (IOException | IllegalArgumentException e) {
                }
            }
        } catch (JSONException e) {
            ConcurrentLog.logException(e);
        }
        String json = result.toString();
        if (post.containsKey("callback"))
            json = post.get("callback") + "([" + json + "]);";
        return new ByteArrayInputStream(UTF8.getBytes(json));
    }

    // for the following methods we always need the durl to fetch data
    if (durl == null) {
        throw new TemplateMissingParameterException("Missing valid url or urlhash parameter");
    }

    if (xml) {
        Collection<File> xmlSnapshots = Transactions.findPaths(durl, "xml", Transactions.State.ANY);
        File xmlFile = null;
        if (xmlSnapshots.isEmpty()) {
            throw new TemplateProcessingException("Could not find the xml snapshot file.",
                    HttpStatus.SC_NOT_FOUND);
        }
        xmlFile = xmlSnapshots.iterator().next();
        try {
            byte[] xmlBinary = FileUtils.read(xmlFile);
            return new ByteArrayInputStream(xmlBinary);
        } catch (final IOException e) {
            ConcurrentLog.logException(e);
            throw new TemplateProcessingException("Could not read the xml snapshot file.");
        }
    }

    if (pdf || pngjpg) {
        Collection<File> pdfSnapshots = Transactions.findPaths(durl, "pdf", Transactions.State.INVENTORY);
        File pdfFile = null;
        if (pdfSnapshots.isEmpty()) {
            // if the client is authenticated, we create the pdf on the fly!
            if (!authenticated) {
                throw new TemplateProcessingException(
                        "Could not find the pdf snapshot file. You must be authenticated to generate one on the fly.",
                        HttpStatus.SC_NOT_FOUND);
            }
            SolrDocument sd = sb.index.fulltext().getMetadata(durl.hash());
            boolean success = false;
            if (sd == null) {
                success = Transactions.store(durl, new Date(), 99, false, true,
                        sb.getConfigBool(SwitchboardConstants.PROXY_TRANSPARENT_PROXY, false)
                                ? "http://127.0.0.1:" + sb.getConfigInt(SwitchboardConstants.SERVER_PORT, 8090)
                                : null,
                        sb.getConfig("crawler.http.acceptLanguage", null));
            } else {
                SolrInputDocument sid = sb.index.fulltext().getDefaultConfiguration().toSolrInputDocument(sd);
                success = Transactions.store(sid, false, true, true,
                        sb.getConfigBool(SwitchboardConstants.PROXY_TRANSPARENT_PROXY, false)
                                ? "http://127.0.0.1:" + sb.getConfigInt(SwitchboardConstants.SERVER_PORT, 8090)
                                : null,
                        sb.getConfig("crawler.http.acceptLanguage", null));
            }
            if (success) {
                pdfSnapshots = Transactions.findPaths(durl, "pdf", Transactions.State.ANY);
                if (!pdfSnapshots.isEmpty()) {
                    pdfFile = pdfSnapshots.iterator().next();
                }
            }
        } else {
            pdfFile = pdfSnapshots.iterator().next();
        }
        if (pdfFile == null) {
            throw new TemplateProcessingException(
                    "Could not find the pdf snapshot file and could not generate one on the fly.",
                    HttpStatus.SC_NOT_FOUND);
        }
        if (pdf) {
            try {
                byte[] pdfBinary = FileUtils.read(pdfFile);
                return new ByteArrayInputStream(pdfBinary);
            } catch (final IOException e) {
                ConcurrentLog.logException(e);
                throw new TemplateProcessingException("Could not read the pdf snapshot file.");
            }
        }

        if (pngjpg) {
            int width = Math.min(post.getInt("width", DEFAULT_WIDTH), DEFAULT_WIDTH);
            int height = Math.min(post.getInt("height", DEFAULT_HEIGHT), DEFAULT_HEIGHT);
            String imageFileStub = pdfFile.getAbsolutePath();
            imageFileStub = imageFileStub.substring(0, imageFileStub.length() - 3); // cut off extension
            File imageFile = new File(imageFileStub + DEFAULT_WIDTH + "." + DEFAULT_HEIGHT + "." + ext);
            if (!imageFile.exists() && authenticated) {
                if (!Html2Image.pdf2image(pdfFile, imageFile, DEFAULT_WIDTH, DEFAULT_HEIGHT, DEFAULT_DENSITY,
                        DEFAULT_QUALITY)) {
                    throw new TemplateProcessingException(
                            "Could not generate the " + ext + " image snapshot file.");
                }
            }
            if (!imageFile.exists()) {
                throw new TemplateProcessingException(
                        "Could not find the " + ext
                                + " image snapshot file. You must be authenticated to generate one on the fly.",
                        HttpStatus.SC_NOT_FOUND);
            }
            if (width == DEFAULT_WIDTH && height == DEFAULT_HEIGHT) {
                try {
                    byte[] imageBinary = FileUtils.read(imageFile);
                    return new ByteArrayInputStream(imageBinary);
                } catch (final IOException e) {
                    ConcurrentLog.logException(e);
                    throw new TemplateProcessingException(
                            "Could not read the " + ext + " image snapshot file.");
                }
            }
            // lets read the file and scale
            Image image;
            try {
                image = ImageParser.parse(imageFile.getAbsolutePath(), FileUtils.read(imageFile));
                if (image == null) {
                    throw new TemplateProcessingException(
                            "Could not parse the " + ext + " image snapshot file.");
                }
                final Image scaled = image.getScaledInstance(width, height, Image.SCALE_AREA_AVERAGING);
                final MediaTracker mediaTracker = new MediaTracker(new Container());
                mediaTracker.addImage(scaled, 0);
                try {
                    mediaTracker.waitForID(0);
                } catch (final InterruptedException e) {
                }

                /*
                 * Ensure there is no alpha component on the output image, as it is pointless
                 * here and it is not well supported by the JPEGImageWriter from OpenJDK
                 */
                BufferedImage scaledBufferedImg = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB);
                scaledBufferedImg.createGraphics().drawImage(scaled, 0, 0, width, height, null);
                return new EncodedImage(scaledBufferedImg, ext, true);
            } catch (final IOException e) {
                ConcurrentLog.logException(e);
                throw new TemplateProcessingException("Could not scale the " + ext + " image snapshot file.");
            }

        }
    }

    throw new TemplateProcessingException(
            "Unsupported extension : " + ext + ". Try with rss, xml, json, pdf, png or jpg.",
            HttpStatus.SC_BAD_REQUEST);
}

From source file:org.ow2.proactive.scheduler.authentication.ManageUsers.java

/**
 * Stores the logins into login.cfg
 */
private static void storeLoginFile(String loginFilePath, Properties props) throws IOException {
    try (BufferedWriter writer = new BufferedWriter(
            new OutputStreamWriter(new FileOutputStream(loginFilePath)))) {
        props.store(writer, null);
    }
    List<String> lines = null;

    try (FileInputStream stream = new FileInputStream(loginFilePath)) {
        lines = IOUtils.readLines(stream);
    }

    TreeMap<String, String> sortedUsers = new TreeMap<>();
    for (String line : lines) {
        if (!(line.isEmpty() || line.startsWith("#"))) {
            String[] loginAndPwd = line.split("=", 2);
            sortedUsers.put(loginAndPwd[0], loginAndPwd[1]);
        }
    }
    List<String> modifiedLines = new ArrayList<>(sortedUsers.size());
    for (Map.Entry<String, String> entry : sortedUsers.entrySet()) {
        modifiedLines.add(entry.getKey() + ":" + entry.getValue());
    }
    try (BufferedWriter writer = new BufferedWriter(
            new OutputStreamWriter(new FileOutputStream(loginFilePath)))) {
        IOUtils.writeLines(modifiedLines, System.getProperty("line.separator"), writer);
    }
    System.out.println("Stored login file in " + loginFilePath);
}

From source file:org.sonar.server.platform.monitoring.SettingsSection.java

@Override
public ProtobufSystemInfo.Section toProtobuf() {
    ProtobufSystemInfo.Section.Builder protobuf = ProtobufSystemInfo.Section.newBuilder();
    protobuf.setName("Settings");

    PropertyDefinitions definitions = settings.getDefinitions();
    TreeMap<String, String> orderedProps = new TreeMap<>(settings.getProperties());
    for (Map.Entry<String, String> prop : orderedProps.entrySet()) {
        String key = prop.getKey();
        String value = obfuscateValue(definitions, key, prop.getValue());
        setAttribute(protobuf, key, value);
    }
    return protobuf.build();
}