Example usage for java.nio.file Files newBufferedWriter

List of usage examples for java.nio.file Files newBufferedWriter

Introduction

This page lists example usages of java.nio.file Files.newBufferedWriter.

Prototype

public static BufferedWriter newBufferedWriter(Path path, OpenOption... options) throws IOException 

Document

Opens or creates a file for writing, returning a BufferedWriter to write text to the file in an efficient manner.
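
For reference, here is a minimal, self-contained sketch of both overloads; the file name is illustrative and not taken from the examples below. The overload without a Charset (Java 8+) encodes text as UTF-8, and when no OpenOption is given the file is created or truncated by default.

import java.io.BufferedWriter;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;

public class NewBufferedWriterExample {
    public static void main(String[] args) throws IOException {
        Path path = Paths.get("example.txt"); // illustrative path

        // No options given: defaults are CREATE, TRUNCATE_EXISTING, WRITE; text is encoded as UTF-8.
        try (BufferedWriter writer = Files.newBufferedWriter(path)) {
            writer.write("first line");
            writer.newLine();
        }

        // Explicit Charset plus OpenOptions: append to the existing file.
        try (BufferedWriter writer = Files.newBufferedWriter(path, StandardCharsets.UTF_8,
                StandardOpenOption.CREATE, StandardOpenOption.APPEND)) {
            writer.write("appended line");
            writer.newLine();
        }
    }
}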

Usage

From source file:io.github.alechenninger.monarch.Main.java

public void run(String[] args) throws ParseException, IOException {
    try {
        CliInputs cliInputs = CliInputs.parse(args);

        if (cliInputs.helpRequested()) {
            System.out.print(cliInputs.helpMessage());
            return;
        }

        MonarchOptions options = getOptionsFromInputsAndConfigFiles(cliInputs, fileSystem, parsers);

        Path outputDir = options.outputDir().orElseThrow(missingOptionException("output directory"));
        Path dataDir = options.dataDir().orElseThrow(missingOptionException("data directory"));
        Hierarchy hierarchy = options.hierarchy().orElseThrow(missingOptionException("hierarchy"));
        String target = options.target().orElseThrow(missingOptionException("target"));
        Iterable<Change> changes = options.changes();
        Set<String> mergeKeys = options.mergeKeys();

        if (!changes.iterator().hasNext()) {
            System.out.println("No changes provided; formatting target.");
        }

        List<String> affectedSources = hierarchy.hierarchyOf(target).orElseThrow(
                () -> new IllegalArgumentException("Target source not found in hierarchy: " + options.target()))
                .descendants();

        Map<String, Map<String, Object>> currentData = readDataForHierarchy(dataDir, hierarchy);

        Map<String, Map<String, Object>> result = monarch.generateSources(hierarchy, changes, target,
                currentData, mergeKeys);

        for (Map.Entry<String, Map<String, Object>> sourceToData : result.entrySet()) {
            String source = sourceToData.getKey();

            if (!affectedSources.contains(source)) {
                continue;
            }

            Path sourcePath = outputDir.resolve(source);
            ensureParentDirectories(sourcePath);

            SortedMap<String, Object> sorted = new TreeMap<>(sourceToData.getValue());

            if (sorted.isEmpty()) {
                Files.write(sourcePath, new byte[] {});
            } else {
                yaml.dump(sorted, Files.newBufferedWriter(sourcePath, UTF_8));
            }
        }
    } catch (MonarchException | ParseException e) {
        e.printStackTrace();
        System.out.print(CliInputs.parse(new String[0]).helpMessage());
    }
}

From source file:org.mda.bcb.tcgagsdata.create.Metadata.java

public void writeBarcodeDataFile(String theIdColumn, String theDataColumn, String theOutputFile,
        File[] theDiseaseSamplesFiles) throws IOException {
    // TODO: theDiseaseSampleFile - disease in first column, rest of row is SAMPLE barcode
    TcgaGSData.printWithFlag("Metadata::writeBarcodeDataFile - start " + theOutputFile);
    TreeSet<String> processedBarcode = new TreeSet<>();
    try (BufferedReader br = Files.newBufferedReader(Paths.get(mMetadataFile),
            Charset.availableCharsets().get("ISO-8859-1"))) {
        try (BufferedWriter bw = Files.newBufferedWriter(Paths.get(theOutputFile),
                Charset.availableCharsets().get("ISO-8859-1"))) {
            // read header/write header
            int indexId = -1;
            int indexData = -1;
            {
                String line = br.readLine();
                ArrayList<String> headerArray = new ArrayList<>();
                headerArray.addAll(Arrays.asList(line.split("\t", -1)));
                indexId = headerArray.indexOf(theIdColumn);
                indexData = headerArray.indexOf(theDataColumn);
            }
            bw.write("ID\tDATA");
            bw.newLine();
            //
            for (String line = br.readLine(); null != line; line = br.readLine()) {
                String[] splitted = line.split("\t", -1);
                bw.write(splitted[indexId] + "\t" + splitted[indexData]);
                processedBarcode.add(splitted[indexId]);
                bw.newLine();
            }
            TcgaGSData.printWithFlag("Metadata::writeBarcodeDataFile - processed file " + theOutputFile);
            for (File file : theDiseaseSamplesFiles) {
                TreeSet<String> barcodes = getDiseaseSampleData(file, true);
                for (String barcode : barcodes) {
                    if (false == processedBarcode.contains(barcode)) {
                        bw.write(barcode + "\t" + MetadataTcgaNames.M_UNKNOWN);
                        processedBarcode.add(barcode);
                        bw.newLine();
                    }
                }
            }
        }
    }
    TcgaGSData.printWithFlag("Metadata::writeBarcodeDataFile - finished");
}

From source file:keywhiz.cli.ClientUtils.java

/**
 * Serialize the cookies from the given CookieManager to JSON and write them to a file at the
 * specified path. The output file will have 600 permissions (owner-read, owner-write).
 *
 * @param cookieManager CookieManager that contains cookies to be serialized.
 * @param path Location to serialize cookies to file.
 */
public static void saveCookies(CookieManager cookieManager, Path path) {
    List<HttpCookie> cookies = cookieManager.getCookieStore().getCookies();
    try (BufferedWriter writer = Files.newBufferedWriter(path, CREATE)) {
        Files.setPosixFilePermissions(path, ImmutableSet.of(OWNER_READ, OWNER_WRITE));
        writer.write(Jackson.newObjectMapper().writeValueAsString(
                cookies.stream().map(c -> JsonCookie.fromHttpCookie(c)).collect(Collectors.toList())));
    } catch (IOException e) {
        throw Throwables.propagate(e);
    }
}

From source file:io.anserini.search.SearchWebCollection.java

/**
 * Writes a TREC submission file for the given topics.
 *
 * @param topics     queries
 * @param similarity similarity
 * @throws IOException
 * @throws ParseException
 */

public void search(SortedMap<Integer, String> topics, String submissionFile, Similarity similarity, int numHits,
        RerankerCascade cascade, boolean useQueryParser, boolean keepstopwords)
        throws IOException, ParseException {

    IndexSearcher searcher = new IndexSearcher(reader);
    searcher.setSimilarity(similarity);

    final String runTag = "BM25_EnglishAnalyzer_" + (keepstopwords ? "KeepStopwords_" : "") + FIELD_BODY + "_"
            + similarity.toString();

    PrintWriter out = new PrintWriter(
            Files.newBufferedWriter(Paths.get(submissionFile), StandardCharsets.US_ASCII));

    EnglishAnalyzer ea = keepstopwords ? new EnglishAnalyzer(CharArraySet.EMPTY_SET) : new EnglishAnalyzer();
    QueryParser queryParser = new QueryParser(FIELD_BODY, ea);
    queryParser.setDefaultOperator(QueryParser.Operator.OR);

    for (Map.Entry<Integer, String> entry : topics.entrySet()) {

        int qID = entry.getKey();
        String queryString = entry.getValue();
        Query query = useQueryParser ? queryParser.parse(queryString)
                : AnalyzerUtils.buildBagOfWordsQuery(FIELD_BODY, ea, queryString);

        /**
         * For Web Tracks 2010, 2011, and 2012, an experimental run consists of the top 10,000 documents for each topic query.
         */
        TopDocs rs = searcher.search(query, numHits);
        ScoreDoc[] hits = rs.scoreDocs;
        List<String> queryTokens = AnalyzerUtils.tokenize(ea, queryString);
        RerankerContext context = new RerankerContext(searcher, query, String.valueOf(qID), queryString,
                queryTokens, FIELD_BODY, null);
        ScoredDocuments docs = cascade.run(ScoredDocuments.fromTopDocs(rs, searcher), context);

        /**
         * the first column is the topic number.
         * the second column is currently unused and should always be "Q0".
         * the third column is the official document identifier of the retrieved document.
         * the fourth column is the rank at which the document is retrieved.
         * the fifth column shows the score (integer or floating point) that generated the ranking.
         * the sixth column is called the "run tag" and should be a unique identifier for your group and for the method used.
         */
        for (int i = 0; i < docs.documents.length; i++) {
            out.println(String.format("%d Q0 %s %d %f %s", qID,
                    docs.documents[i].getField(FIELD_ID).stringValue(), (i + 1), docs.scores[i], runTag));
        }
    }
    out.flush();
    out.close();
}

From source file:eu.itesla_project.modules.simulation.ImpactAnalysisTool.java

private static void writeCsv(Map<String, Map<SecurityIndexId, SecurityIndex>> securityIndexesPerCase,
        Path outputCsvFile) throws IOException {
    Objects.requireNonNull(outputCsvFile);

    Set<SecurityIndexId> securityIndexIds = new LinkedHashSet<>();
    for (Map<SecurityIndexId, SecurityIndex> securityIndexesPerId : securityIndexesPerCase.values()) {
        if (securityIndexesPerId != null) {
            securityIndexIds.addAll(securityIndexesPerId.keySet());
        }
    }

    try (BufferedWriter writer = Files.newBufferedWriter(outputCsvFile, StandardCharsets.UTF_8)) {
        writer.write("Base case");
        for (SecurityIndexId securityIndexId : securityIndexIds) {
            writer.write(CSV_SEPARATOR);
            writer.write(securityIndexId.toString());
        }
        writer.newLine();

        for (Map.Entry<String, Map<SecurityIndexId, SecurityIndex>> entry : securityIndexesPerCase.entrySet()) {
            String baseCaseName = entry.getKey();
            writer.write(baseCaseName);

            Map<SecurityIndexId, SecurityIndex> securityIndexes = entry.getValue();
            for (SecurityIndexId securityIndexId : securityIndexIds) {
                Boolean b = null;
                if (securityIndexes != null) {
                    SecurityIndex securityIndex = securityIndexes.get(securityIndexId);
                    if (securityIndex != null) {
                        b = securityIndex.isOk();
                    }
                }
                writer.write(CSV_SEPARATOR);
                writer.write(okToStr(b));
            }

            writer.newLine();
        }
    }
}

From source file:com.relicum.ipsum.io.PropertyIO.java

/**
 * Write the properties object to the specified file path.
 *
 * @param properties an instance of a {@link java.util.Properties} object.
 * @param path       the {@link java.nio.file.Path} that the properties will be written to.
 * @param message    the message that is included in the header of the properties file.
 * @throws IOException an {@link java.io.IOException} if there was a problem writing to the file.
 */
default void writeToFile(Properties properties, Path path, String message) throws IOException {
    Validate.notNull(properties);
    Validate.notNull(path);
    Validate.notNull(message);
    System.out.println(path);

    Files.deleteIfExists(path);

    try {

        properties.store(Files.newBufferedWriter(path, Charset.defaultCharset()), message);
    } catch (IOException e) {
        Logger.getGlobal().log(Level.SEVERE, e.getMessage(), e.getCause());
        throw e;
    }

}

From source file:org.omegat.util.FileUtil.java

/**
 * Copy a file, creating the output directory if needed. Line endings are converted to match the
 * target file, or to the platform default if the target doesn't exist.
 */
public static void copyFileWithEolConversion(File inFile, File outFile, Charset charset) throws IOException {
    File dir = outFile.getParentFile();
    if (!dir.exists()) {
        dir.mkdirs();
    }
    String eol;
    if (outFile.exists()) {
        // file exists - read EOL from the file
        eol = getEOL(outFile, charset);
    } else {
        // file does not exist - use the system default
        eol = System.lineSeparator();
    }
    try (BufferedReader in = Files.newBufferedReader(inFile.toPath(), charset)) {
        try (BufferedWriter out = Files.newBufferedWriter(outFile.toPath(), charset)) {
            String s;
            while ((s = in.readLine()) != null) {
                // copy using known EOL
                out.write(s);
                out.write(eol);
            }
        }
    }
}

From source file:org.mda.bcb.tcgagsdata.create.Compilation.java

public void process() throws IOException, Exception {
    TcgaGSData.printWithFlag("Compilation::process - mClinicalDir=" + mClinicalDir);
    TcgaGSData.printWithFlag("Compilation::process - mInputFiles=" + mInputFiles);
    TcgaGSData.printWithFlag("Compilation::process - mOutputFile=" + mOutputFile);
    Collection<File> results = FileUtils.listFiles(new File(mClinicalDir),
            FileFilterUtils.nameFileFilter(mInputFiles), TrueFileFilter.INSTANCE);
    ArrayList<String> headers = getHeaders(results);
    TreeSet<String> patients = new TreeSet<>();
    TreeSet<String> lines = new TreeSet<>();
    String headerLine = null;
    for (String header : headers) {
        if (null == headerLine) {
            headerLine = header;
        } else {
            headerLine = headerLine + "\t" + header;
        }
    }
    boolean headersNeeded = true;
    for (File clinFile : results) {
        TcgaGSData.printWithFlag("Compilation::process - clinFile=" + clinFile.getAbsolutePath());
        try (BufferedReader br = Files.newBufferedReader(Paths.get(clinFile.getAbsolutePath()),
                Charset.availableCharsets().get("ISO-8859-1"))) {
            String line = br.readLine();
            ArrayList<String> currentHeaders = new ArrayList<>();
            currentHeaders.addAll(Arrays.asList(line.split("\t", -1)));
            for (line = br.readLine(); null != line; line = br.readLine()) {
                String newLine = null;
                String[] splitted = line.split("\t", -1);
                for (String header : headers) {
                    String token = "NA";
                    int index = currentHeaders.indexOf(header);
                    if (index > -1) {
                        token = splitted[index];
                    }
                    if (null == newLine) {
                        newLine = token;
                    } else {
                        newLine = newLine + "\t" + token;
                    }
                }
                lines.add(newLine);
                String patient = GSStringUtils.beforeTab(newLine);
                if (false == patients.add(patient)) {
                    throw new Exception("ERROR - patient duplicated " + patient);
                }
            }
        }
    }
    try (BufferedWriter bw = Files.newBufferedWriter(Paths.get(mOutputFile),
            Charset.availableCharsets().get("ISO-8859-1"))) {
        bw.write(headerLine);
        bw.newLine();
        for (String line : lines) {
            bw.write(line);
            bw.newLine();
        }
    }
}

From source file:eu.itesla_project.security.SecurityAnalysisTool.java

@Override
public void run(CommandLine line) throws Exception {
    Path caseFile = Paths.get(line.getOptionValue("case-file"));
    Set<LimitViolationType> limitViolationTypes = line.hasOption("limit-types")
            ? Arrays.stream(line.getOptionValue("limit-types").split(",")).map(LimitViolationType::valueOf)
                    .collect(Collectors.toSet())
            : EnumSet.allOf(LimitViolationType.class);
    Path csvFile = null;
    if (line.hasOption("output-csv")) {
        csvFile = Paths.get(line.getOptionValue("output-csv"));
    }

    System.out.println("Loading network '" + caseFile + "'");

    // load network
    Network network = Importers.loadNetwork(caseFile);
    if (network == null) {
        throw new RuntimeException("Case '" + caseFile + "' not found");
    }
    network.getStateManager().allowStateMultiThreadAccess(true);

    ComponentDefaultConfig defaultConfig = new ComponentDefaultConfig();
    SecurityAnalysisFactory securityAnalysisFactory = defaultConfig
            .findFactoryImplClass(SecurityAnalysisFactory.class).newInstance();
    SecurityAnalysis securityAnalysis = securityAnalysisFactory.create(network,
            LocalComputationManager.getDefault(), 0);

    ContingenciesProviderFactory contingenciesProviderFactory = defaultConfig
            .findFactoryImplClass(ContingenciesProviderFactory.class).newInstance();
    ContingenciesProvider contingenciesProvider = contingenciesProviderFactory.create();

    // run security analysis on all N-1 lines
    SecurityAnalysisResult result = securityAnalysis.runAsync(contingenciesProvider).join();

    if (!result.getPreContingencyResult().isComputationOk()) {
        System.out.println("Pre-contingency state divergence");
    }
    LimitViolationFilter limitViolationFilter = new LimitViolationFilter(limitViolationTypes);
    if (csvFile != null) {
        System.out.println("Writing results to '" + csvFile + "'");
        CsvTableFormatterFactory csvTableFormatterFactory = new CsvTableFormatterFactory();
        Security.printPreContingencyViolations(result, Files.newBufferedWriter(csvFile, StandardCharsets.UTF_8),
                csvTableFormatterFactory, limitViolationFilter);
        Security.printPostContingencyViolations(result,
                Files.newBufferedWriter(csvFile, StandardCharsets.UTF_8, StandardOpenOption.APPEND),
                csvTableFormatterFactory, limitViolationFilter);
    } else {
        SystemOutStreamWriter soutWriter = new SystemOutStreamWriter();
        AsciiTableFormatterFactory asciiTableFormatterFactory = new AsciiTableFormatterFactory();
        Security.printPreContingencyViolations(result, soutWriter, asciiTableFormatterFactory,
                limitViolationFilter);
        Security.printPostContingencyViolations(result, soutWriter, asciiTableFormatterFactory,
                limitViolationFilter);
    }
}

From source file:com.ignorelist.kassandra.steam.scraper.Configuration.java

public void writeProperties(Path file) throws IOException {
    Writer propertiesWriter = Files.newBufferedWriter(file, Charsets.UTF_8);
    try {
        toProperties().store(propertiesWriter, null);
    } finally {
        IOUtils.closeQuietly(propertiesWriter);
    }
}