Example usage for java.io BufferedWriter flush

List of usage examples for java.io BufferedWriter flush

Introduction

On this page you can find example usages of java.io.BufferedWriter.flush.

Prototype

public void flush() throws IOException 

Source Link

Document

Flushes the stream.

Usage

From source file:data_gen.Data_gen.java

/**
 * Builds the JSON dataset file ({"docs": [...]}) by generating documents from
 * the field configuration, writing each as one JSON object in the array, and
 * printing progress statistics and the total elapsed time.
 *
 * @param config_dir directory holding the document field configuration
 * @param startTime  System.nanoTime() timestamp taken when generation began
 * @throws IOException if the dataset file cannot be written
 */
private static void Build_json_file(String config_dir, long startTime) throws IOException {
    File f = new File(output_dir + "/" + Default_DataSet_name);

    ObjectMapper objectMapper = new ObjectMapper();
    // Hoisted out of the loop: serializer configuration is loop-invariant.
    objectMapper.configure(SerializationFeature.ORDER_MAP_ENTRIES_BY_KEYS, false);

    // try-with-resources: the writer is closed even if generation fails mid-way.
    try (BufferedWriter wr = new BufferedWriter(new FileWriter(f))) {

        wr.write("{\"docs\":[");
        wr.write("\n");

        // NOTE(review): "<=" yields documents_count + 1 documents — confirm the
        // bound is intentional before tightening it.
        for (int i = 0; i <= documents_count; i++) {

            fields = Parse_Document_fields(config_dir);
            Iterator iterator = fields.keySet().iterator();
            while (iterator.hasNext()) {

                String key = (String) iterator.next();
                String v = (String) fields.get(key);
                String value = generate_facet_value(v);

                // Dispatch on the configured value pattern to the matching generator.
                if (value.startsWith("integer.key")) {
                    integer_key(fields, key, value);
                }
                if (value.startsWith("seq.integer")) {
                    seq_integer(fields, key, value);
                }
                if (value.startsWith("range")) {
                    range_integer(fields, key, value);
                }
                if (value.charAt(0) == '[') {
                    single_enum(fields, key, value);
                }
                if (value.startsWith("multi")) {
                    multi_enum_json(fields, key, value);
                }
                if (value.startsWith("date")) {
                    generate_date(fields, key, value);
                }

                // "text.key" and "text" were two identical branches; merged.
                if (value.equals("text.key") || value.equals("text")) {
                    generate_Text_json(fields, key);
                }

                // "(...)" values are unwrapped and stored verbatim.
                if (value.startsWith("(")) {
                    String VALUE = value.substring(1, value.length() - 1);
                    fields.put(key, VALUE);
                }
            }

            // Separator goes BEFORE every document except the first; previously
            // ",\n" was appended after every document, leaving a trailing comma
            // before "]}" — invalid JSON for strict parsers.
            if (i > 0) {
                wr.write(",\n");
            }
            String s = objectMapper.writeValueAsString(fields);
            wr.write(s);

            fields.clear();

            // Progress report every 1000 documents.
            if (i == count_check) {
                System.out.println("Number of Documents created: " + count_check);
                System.out.println("Reading from file: (" + listOfFiles[file_index] + ")");
                System.out.println("Size of all documents so far: (" + total + ") Bytes");
                System.out.println("\n");

                count_check += 1000;
            }
            cnt = i;
        }

        System.out.println("Total Number of Documents created: " + cnt);
        System.out.println("Total size of Dataset created: " + total);

        wr.write("\n]}");
        wr.flush();
    }
    long endTime = System.nanoTime();
    long duration = endTime - startTime;
    // Typo fixed in the report string: "execuion" -> "execution".
    System.out.println("Total execution time: " + (double) duration / 1000000000.0 + " Seconds" + "\n");
}

From source file:ffx.ui.KeywordPanel.java

/**
 * <p>/*from  w w  w . j a  va2s .  com*/
 * saveKeywords</p>
 *
 * @param keyFile a {@link java.io.File} object.
 * @param keywordHashMap a {@link java.util.LinkedHashMap} object.
 * @param comments a {@link java.lang.StringBuilder} object.
 * @return a boolean.
 */
public boolean saveKeywords(File keyFile, LinkedHashMap<String, KeywordComponent> keywordHashMap,
        StringBuilder comments) {
    synchronized (this) {
        FileWriter fw = null;
        BufferedWriter bw = null;
        try {
            fw = new FileWriter(keyFile);
            bw = new BufferedWriter(fw);
            boolean writegroup = false;
            String pgroup = null;
            // Write out keywords in groups
            for (KeywordComponent keyword : keywordHashMap.values()) {
                String group = keyword.getKeywordGroup();
                if (pgroup == null || !group.equalsIgnoreCase(pgroup)) {
                    writegroup = true;
                    pgroup = group;
                }
                String line = keyword.toString();
                if (line != null) {
                    if (writegroup == true) {
                        bw.newLine();
                        bw.write("# " + group);
                        bw.newLine();
                        writegroup = false;
                    }
                    bw.write(line);
                    bw.newLine();
                }
            }
            bw.newLine();
            String s = comments.toString();
            if (s != null && !s.trim().equals("")) {
                bw.write(s.trim());
            }
            bw.newLine();
            bw.flush();
            KeywordComponent.setKeywordModified(false);
        } catch (FileNotFoundException e) {
            logger.warning(e.toString());
            return false;
        } catch (IOException e) {
            logger.warning(e.toString());
            return false;
        } finally {
            try {
                if (bw != null) {
                    bw.close();
                }
                if (fw != null) {
                    fw.close();
                }
            } catch (Exception e) {
                logger.warning(e.toString());
            }
        }
        return true;
    }
}

From source file:de.dfki.km.perspecting.obie.corpus.TextCorpus.java

/**
 * Runs the pipeline over every document in this corpus and writes a labeled
 * token file: one "word POS phrase label" line per token, with a blank line at
 * each sentence boundary.
 *
 * @param corpus   output file for the labeled tokens
 * @param pipeline pipeline used to create and process each document
 * @param template template passed through to document creation
 * @return a LabeledTextCorpus wrapping the written file
 * @throws Exception if pipeline processing or writing fails
 */
public LabeledTextCorpus labelRDFTypes(final File corpus, final Pipeline pipeline, final String template)
        throws Exception {

    // try-with-resources: the writer is closed even if processing throws
    // (previously it leaked on any exception inside forEach).
    try (final BufferedWriter writer = new BufferedWriter(new FileWriter(corpus))) {

        this.forEach(new DocumentProcedure<String>() {
            @Override
            public String process(Reader doc, URI uri) throws Exception {

                Document document = pipeline.createDocument(doc, uri, corpusMediaType, template, language);

                // Run all pipeline steps to completion.
                for (int step = 0; pipeline.hasNext(step); step = pipeline.execute(step, document))
                    ;

                // Collect sentence-end offsets so a blank line can be emitted
                // between sentences.
                TIntHashSet sentenceBoundaries = new TIntHashSet();
                for (TokenSequence<Integer> sentence : document.getSentences()) {
                    sentenceBoundaries.add(sentence.getEnd());
                }

                for (Token token : document) {
                    String word = token.toString();
                    String pos = token.getPartOfSpeechTag();
                    String phrase = token.getNounPhraseTag();
                    int label = -1;

                    // Map the token's types (if any) to a cluster label.
                    int[] types = token.getTypes(0.0).toArray();
                    if (types.length > 0) {
                        label = pipeline.getKnowledgeBase().getCluster(types);
                    }

                    writer.append(word);
                    writer.append(SPACE);
                    writer.append(pos);
                    writer.append(SPACE);
                    writer.append(phrase);
                    writer.append(SPACE);

                    // Non-positive labels mean "outside any label".
                    if (label > 0) {
                        writer.append(Integer.toString(label));
                    } else {
                        writer.append(LabeledTextCorpus.OUTSIDE_ANY_LABEL);
                    }

                    writer.newLine();

                    // Blank line marks a sentence boundary.
                    if (sentenceBoundaries.contains(token.getEnd())) {
                        writer.newLine();
                    }
                }

                writer.flush();
                return uri.toString();
            }

        });
    }
    return new LabeledTextCorpus(corpus, MediaType.TEXT, this);

}

From source file:com.matthewmitchell.peercoin_android_wallet.ui.WalletActivity.java

/**
 * Exports the wallet's transactions to a CSV file in the external backup
 * directory and, on success, shows a dialog offering to share the file by
 * email. Failures are reported with a toast and logged.
 */
public void handleExportTransactions() {

    // Create CSV file from transactions

    final File file = new File(Constants.Files.EXTERNAL_WALLET_BACKUP_DIR,
            Constants.Files.TX_EXPORT_NAME + "-" + getFileDate() + ".csv");

    // try-with-resources fixes two leaks in the original: the early return on
    // an empty adapter and the IOException path both left the writer open.
    try (BufferedWriter writer = new BufferedWriter(new FileWriter(file))) {

        writer.append("Date,Label,Amount (" + MonetaryFormat.CODE_PPC + "),Fee (" + MonetaryFormat.CODE_PPC
                + "),Address,Transaction Hash,Confirmations\n");

        if (txListAdapter == null || txListAdapter.transactions.isEmpty()) {
            longToast(R.string.export_transactions_mail_intent_failed);
            log.error("exporting transactions failed");
            return;
        }

        final SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm z");
        dateFormat.setTimeZone(TimeZone.getDefault());

        // One CSV row per transaction.
        for (Transaction tx : txListAdapter.transactions) {

            TransactionsListAdapter.TransactionCacheEntry txCache = txListAdapter.getTxCache(tx);
            // CSV-escape the memo; the remaining fields are machine-formatted
            // and contain no commas.
            String memo = tx.getMemo() == null ? "" : StringEscapeUtils.escapeCsv(tx.getMemo());
            String fee = tx.getFee() == null ? "" : tx.getFee().toPlainString();
            String address = txCache.address == null ? getString(R.string.export_transactions_unknown)
                    : txCache.address.toString();

            writer.append(dateFormat.format(tx.getUpdateTime()) + ",");
            writer.append(memo + ",");
            writer.append(txCache.value.toPlainString() + ",");
            writer.append(fee + ",");
            writer.append(address + ",");
            writer.append(tx.getHash().toString() + ",");
            writer.append(tx.getConfidence().getDepthInBlocks() + "\n");

        }

        writer.flush();

    } catch (IOException x) {
        longToast(R.string.export_transactions_mail_intent_failed);
        log.error("exporting transactions failed", x);
        return;
    }

    // Success: offer to archive/share the exported file by email.
    final DialogBuilder dialog = new DialogBuilder(this);
    dialog.setMessage(Html.fromHtml(getString(R.string.export_transactions_dialog_success, file)));

    dialog.setPositiveButton(WholeStringBuilder.bold(getString(R.string.export_keys_dialog_button_archive)),
            new OnClickListener() {

                @Override
                public void onClick(final DialogInterface dialog, final int which) {

                    // Launch a share chooser with the CSV attached.
                    final Intent intent = new Intent(Intent.ACTION_SEND);
                    intent.putExtra(Intent.EXTRA_SUBJECT, getString(R.string.export_transactions_mail_subject));
                    intent.putExtra(Intent.EXTRA_TEXT,
                            makeEmailText(getString(R.string.export_transactions_mail_text)));
                    intent.setType(Constants.MIMETYPE_TX_EXPORT);
                    intent.putExtra(Intent.EXTRA_STREAM, Uri.fromFile(file));

                    try {
                        startActivity(Intent.createChooser(intent,
                                getString(R.string.export_transactions_mail_intent_chooser)));
                        log.info("invoked chooser for exporting transactions");
                    } catch (final Exception x) {
                        longToast(R.string.export_transactions_mail_intent_failed);
                        log.error("exporting transactions failed", x);
                    }

                }

            });

    dialog.setNegativeButton(R.string.button_dismiss, null);
    dialog.show();

}

From source file:net.sourceforge.eclipsetrader.core.internal.XMLRepository.java

/**
 * Serializes the given DOM document as indented XML to
 * &lt;workspace&gt;/&lt;path&gt;/&lt;name&gt;. Errors are logged rather than
 * propagated.
 *
 * @param document the DOM document to serialize
 * @param path     directory path relative to the platform workspace location
 * @param name     output file name
 */
void saveDocument(Document document, String path, String name) {
    try {
        TransformerFactory factory = TransformerFactory.newInstance();
        try {
            // Best effort: "indent-number" is not supported by every
            // TransformerFactory implementation.
            factory.setAttribute("indent-number", new Integer(4)); //$NON-NLS-1$
        } catch (Exception ignored) {
            // Fall back to the implementation's default indentation.
        }
        Transformer transformer = factory.newTransformer();
        transformer.setOutputProperty(OutputKeys.METHOD, "xml"); //$NON-NLS-1$
        transformer.setOutputProperty(OutputKeys.ENCODING, "UTF-8"); //$NON-NLS-1$
        transformer.setOutputProperty(OutputKeys.INDENT, "yes"); //$NON-NLS-1$
        transformer.setOutputProperty("{http\u003a//xml.apache.org/xslt}indent-amount", "4"); //$NON-NLS-1$ //$NON-NLS-2$
        DOMSource source = new DOMSource(document);

        File file = new File(Platform.getLocation().toFile(), path);
        file.mkdirs();
        file = new File(file, name);

        // NOTE(review): FileWriter encodes with the platform default charset
        // while the XML declaration says UTF-8 — on non-UTF-8 platforms the
        // bytes and the declaration disagree; consider an OutputStreamWriter
        // with an explicit UTF-8 charset.
        // try-with-resources closes the writer even if transform() throws.
        try (BufferedWriter out = new BufferedWriter(new FileWriter(file))) {
            transformer.transform(source, new StreamResult(out));
            out.flush();
        }
    } catch (Exception e) {
        log.error(e.toString(), e);
    }
}

From source file:com.photon.phresco.framework.impl.ProjectManagerImpl.java

/**
 * Rewrites the module's CI job-template file (.phresco/&lt;template name&gt;)
 * so each template is bound to the given module: sets the module code and
 * suffixes each template name with "-&lt;module code&gt;".
 *
 * @param moduleDir root directory of the module
 * @param module    module whose code is stamped onto the templates
 * @throws PhrescoException if reading, updating, or writing the templates fails
 */
private void updateJobTemplates(File moduleDir, ModuleInfo module) throws PhrescoException {
    try {
        String path = moduleDir.getPath() + File.separator + DOT_PHRESCO_FOLDER + File.separator
                + CI_JOB_TEMPLATE_NAME;
        List<CIJobTemplate> jobTemplates = getJobTemplates(path);
        if (CollectionUtils.isNotEmpty(jobTemplates)) {
            for (CIJobTemplate ciJobTemplate : jobTemplates) {
                ciJobTemplate.setModule(module.getCode());
                ciJobTemplate.setName(ciJobTemplate.getName() + HYPHEN + module.getCode());
            }
        }

        // try-with-resources replaces the original's close-in-try plus
        // close-in-finally double close; closing the BufferedWriter also
        // closes the underlying FileWriter.
        Gson gson = new Gson();
        String templatesJson = gson.toJson(jobTemplates);
        try (BufferedWriter bw = new BufferedWriter(new FileWriter(path))) {
            bw.write(templatesJson);
            bw.flush();
        }
    } catch (Exception e) {
        throw new PhrescoException(e);
    }
}

From source file:ie.pars.nlp.sketchengine.interactions.WordlistSKEInteraction.java

/**
 * Fetches the word-list frequency results from the server page by page and
 * writes them to {@code fileOutput} as UTF-8. In raw mode ({@code !writeParsed})
 * the page responses are wrapped in a single {"results": [...]} JSON document;
 * in parsed mode each frequency item is written as one line and the running
 * totals ({@code totalFreq}, {@code countItemFetched}) are updated.
 *
 * @throws UnsupportedEncodingException if the request query cannot be encoded
 * @throws IOException if the output file cannot be written
 * @throws Exception on an unrecognized error message from the server
 */
public void getItemFrequencyList() throws UnsupportedEncodingException, IOException, Exception {
    HttpClient sessionID = super.getSessionID();

    // try-with-resources: the writer is closed even if a fetch or parse throws
    // (previously it leaked on any exception). Output is always UTF-8.
    try (BufferedWriter writer = new BufferedWriter(
            new OutputStreamWriter(new FileOutputStream(this.fileOutput), StandardCharsets.UTF_8))) {

        if (!writeParsed) {
            // Raw mode: open the wrapping JSON document.
            writer.append("{\"results\": [\n");
        }
        int pageNumer = 1;
        while (true) {
            JSONObject jsonObjP = super.getHTTP(sessionID, encodeFreqQuery(pageNumer));
            if (!writeParsed) {
                writer.append(jsonObjP.toString(1));
            } else {
                // Parsed mode: one line per frequency item, keep running totals.
                WordlistMethodJsonParser fjpm = new WordlistMethodJsonParser(jsonObjP.toString());
                FrequencyLine fl;
                while ((fl = fjpm.getNext()) != null) {
                    totalFreq += fl.getFreq();
                    countItemFetched++;
                    writer.append(fl.toStringLine()).append("\n");
                }
            }
            boolean hasError = jsonObjP.has("error");
            if (hasError) {
                String message = (String) jsonObjP.get("error");
                if ("Empty list".equals(message)) {
                    // An empty result set ends pagination normally.
                    System.out.println("No result for current query: " + this.query);
                    break;
                } else {
                    System.out.println("* NOT SEEN * " + jsonObjP.toString(1));
                    throw new Exception("not seen " + jsonObjP.toString(1));
                }
            } else {
                // NOTE(review): if a response carries neither "error" nor
                // "lastpage", this loop re-requests the same page forever —
                // confirm the API always sends one of the two.
                if (jsonObjP.has("lastpage")) {
                    int isLastPage = (int) jsonObjP.get("lastpage");
                    if (isLastPage == 0) {
                        if (!writeParsed) {
                            // Comma-separate the page objects inside the array.
                            writer.append(",");
                        }
                        pageNumer++;
                    } else {
                        // lastpage != 0 means this was the final page.
                        break;
                    }
                }
            }
        }
        if (!writeParsed) {
            writer.append("]" + "}"); // close the wrapping JSON document
        }
        writer.flush();
    }
}

From source file:gov.nih.nci.ncicb.tcga.dcc.dam.processors.FilePackager.java

/**
 * Writes a tab-separated manifest of the selected files to a uniquely named
 * temp file (recorded in {@code manifestTempfileName}). Columns: Platform
 * Type, Center, Platform, Level, Sample, Barcode, File Name.
 *
 * @throws IOException if the temp directory is unset or the file cannot be written
 */
private void writeManifest() throws IOException {
    final String tempDir = getTempfileDirectory();
    if (tempDir == null || tempDir.length() == 0) {
        throw new IOException("FilePackagerFactory.tempfileDirectory is null");
    }
    manifestTempfileName = tempDir + ConstantValues.SEPARATOR + UUID.randomUUID();
    // try-with-resources replaces the manual finally/close bookkeeping.
    try (BufferedWriter writer = new BufferedWriter(new FileWriter(manifestTempfileName))) {
        final StringBuilder line = new StringBuilder();
        // Header row.
        line.append("Platform Type").append('\t').append("Center").append('\t').append("Platform").append('\t')
                .append("Level").append('\t').append("Sample").append('\t') //todo: add batch?
                .append("Barcode").append('\t').append("File Name").append('\n');
        writer.write(line.toString());
        for (final DataFile df : filePackagerBean.getSelectedFiles()) {
            line.setLength(0); // reuse the builder for each row
            line.append(DataAccessMatrixJSPUtil.lookupPlatformTypeName(df.getPlatformTypeId())).append('\t');
            line.append(DataAccessMatrixJSPUtil.lookupCenterName(df.getCenterId())).append('\t');
            if (df instanceof DataFileClinical) {
                line.append("n/a\tn/a\t"); //no platform or level
            } else {
                line.append(DataAccessMatrixJSPUtil.lookupPlatformName(df.getPlatformId())).append('\t');
                if (DataAccessMatrixQueries.LEVEL_METADATA.equals(df.getLevel())) {
                    line.append("n/a\t");
                } else {
                    line.append(df.getLevel()).append('\t');
                }
            }
            line.append(df.getDisplaySample()).append('\t');
            line.append(df.getDisplayBarcodes()).append('\t');
            line.append(df.getFileName()).append('\n');
            writer.write(line.toString());
        }
        writer.flush();
    }
}